Compare commits: trunk...revert-379 (1 commit)

Author | SHA1 | Date
---|---|---
Wei-Chiu Chuang | 6465afe5e8 |
@@ -14,8 +14,6 @@
 # limitations under the License.

 github:
-  ghp_path: /
-  ghp_branch: gh-pages
   enabled_merge_buttons:
     squash: true
     merge: false
@@ -24,4 +22,4 @@ notifications:
   commits: common-commits@hadoop.apache.org
   issues: common-issues@hadoop.apache.org
   pullrequests: common-issues@hadoop.apache.org
-  jira_options: comment link label
+  jira_options: link label worklog
@@ -1,59 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-name: website
-
-# Controls when the action will run.
-on:
-  push:
-    branches: [ trunk ]
-
-jobs:
-  build:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout Hadoop trunk
-        uses: actions/checkout@v3
-        with:
-          repository: apache/hadoop
-      - name: Set up JDK 8
-        uses: actions/setup-java@v3
-        with:
-          java-version: '8'
-          distribution: 'temurin'
-      - name: Cache local Maven repository
-        uses: actions/cache@v3
-        with:
-          path: ~/.m2/repository
-          key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
-          restore-keys: |
-            ${{ runner.os }}-maven-
-      - name: Build Hadoop maven plugins
-        run: cd hadoop-maven-plugins && mvn --batch-mode install
-      - name: Build Hadoop
-        run: mvn clean install -DskipTests -DskipShade
-      - name: Build document
-        run: mvn clean site
-      - name: Stage document
-        run: mvn site:stage -DstagingDirectory=${GITHUB_WORKSPACE}/staging/
-      - name: Deploy to GitHub Pages
-        uses: peaceiris/actions-gh-pages@v3
-        with:
-          github_token: ${{ secrets.GITHUB_TOKEN }}
-          publish_dir: ./staging/hadoop-project
-          user_name: 'github-actions[bot]'
-          user_email: 'github-actions[bot]@users.noreply.github.com'
@@ -1,17 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-dev-support/docker/Dockerfile_windows_10
BUILDING.txt
@@ -57,7 +57,7 @@ Refer to dev-support/docker/Dockerfile):

 * Open JDK 1.8
   $ sudo apt-get update
-  $ sudo apt-get -y install openjdk-8-jdk
+  $ sudo apt-get -y install java-8-openjdk
 * Maven
   $ sudo apt-get -y install maven
 * Native libraries
@@ -492,66 +492,39 @@ Building on CentOS 8

 ----------------------------------------------------------------------------------

-Building on Windows 10
+Building on Windows

 ----------------------------------------------------------------------------------
 Requirements:

-* Windows 10
+* Windows System
 * JDK 1.8
-* Maven 3.0 or later (maven.apache.org)
+* Maven 3.0 or later
-* Boost 1.72 (boost.org)
+* Boost 1.72
-* Protocol Buffers 3.7.1 (https://github.com/protocolbuffers/protobuf/releases)
+* Protocol Buffers 3.7.1
-* CMake 3.19 or newer (cmake.org)
+* CMake 3.19 or newer
-* Visual Studio 2019 (visualstudio.com)
+* Visual Studio 2010 Professional or Higher
-* Windows SDK 8.1 (optional, if building CPU rate control for the container executor. Get this from
+* Windows SDK 8.1 (if building CPU rate control for the container executor)
-  http://msdn.microsoft.com/en-us/windows/bg162891.aspx)
+* zlib headers (if building native code bindings for zlib)
-* Zlib (zlib.net, if building native code bindings for zlib)
-* Git (preferably, get this from https://git-scm.com/download/win since the package also contains
-  Unix command-line tools that are needed during packaging).
-* Python (python.org, for generation of docs using 'mvn site')
 * Internet connection for first build (to fetch all Maven and Hadoop dependencies)
+* Unix command-line tools from GnuWin32: sh, mkdir, rm, cp, tar, gzip. These
+  tools must be present on your PATH.
+* Python ( for generation of docs using 'mvn site')
+
+Unix command-line tools are also included with the Windows Git package which
+can be downloaded from http://git-scm.com/downloads
+
+If using Visual Studio, it must be Professional level or higher.
+Do not use Visual Studio Express. It does not support compiling for 64-bit,
+which is problematic if running a 64-bit system.
+
+The Windows SDK 8.1 is available to download at:
+
+http://msdn.microsoft.com/en-us/windows/bg162891.aspx
+
+Cygwin is not required.

 ----------------------------------------------------------------------------------

-Building guidelines:
-
-Hadoop repository provides the Dockerfile for building Hadoop on Windows 10, located at
-dev-support/docker/Dockerfile_windows_10. It is highly recommended to use this and create the
-Docker image for building Hadoop on Windows 10, since you don't have to install anything else
-other than Docker and no additional steps are required in terms of aligning the environment with
-the necessary paths etc.
-
-However, if you still prefer taking the route of not using Docker, this Dockerfile_windows_10 will
-still be immensely useful as a raw guide for all the steps involved in creating the environment
-needed to build Hadoop on Windows 10.
-
-Building using the Docker:
-We first need to build the Docker image for building Hadoop on Windows 10. Run this command from
-the root of the Hadoop repository.
-> docker build -t hadoop-windows-10-builder -f .\dev-support\docker\Dockerfile_windows_10 .\dev-support\docker\
-
-Start the container with the image that we just built.
-> docker run --rm -it hadoop-windows-10-builder
-
-You can now clone the Hadoop repo inside this container and proceed with the build.
-
-NOTE:
-While one may perceive the idea of mounting the locally cloned (on the host filesystem) Hadoop
-repository into the container (using the -v option), we have seen the build to fail owing to some
-files not being able to be located by Maven. Thus, we suggest cloning the Hadoop repository to a
-non-mounted folder inside the container and proceed with the build. When the build is completed,
-you may use the "docker cp" command to copy the built Hadoop tar.gz file from the docker container
-to the host filesystem. If you still would like to mount the Hadoop codebase, a workaround would
-be to copy the mounted Hadoop codebase into another folder (which doesn't point to a mount) in the
-container's filesystem and use this for building.
-
-However, we noticed no build issues when the Maven repository from the host filesystem was mounted
-into the container. One may use this to greatly reduce the build time. Assuming that the Maven
-repository is located at D:\Maven\Repository in the host filesystem, one can use the following
-command to mount the same onto the default Maven repository location while launching the container.
-> docker run --rm -v D:\Maven\Repository:C:\Users\ContainerAdministrator\.m2\repository -it hadoop-windows-10-builder
-
 Building:

 Keep the source code tree in a short path to avoid running into problems related
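The removed NOTE above mentions "docker cp" without spelling the command out. A minimal sketch, assuming the builder container is still running; the container ID abc123, the in-container clone path, and the artifact version are placeholders rather than values from this diff:

> docker ps
> docker cp abc123:C:\hadoop\hadoop-dist\target\hadoop-3.4.0-SNAPSHOT.tar.gz D:\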
@@ -567,24 +540,6 @@ configure the bit-ness of the build, and set several optional components.
 Several tests require that the user must have the Create Symbolic Links
 privilege.

-To simplify the installation of Boost, Protocol buffers, OpenSSL and Zlib dependencies we can use
-vcpkg (https://github.com/Microsoft/vcpkg.git). Upon cloning the vcpkg repo, checkout the commit
-7ffa425e1db8b0c3edf9c50f2f3a0f25a324541d to get the required versions of the dependencies
-mentioned above.
-> git clone https://github.com/Microsoft/vcpkg.git
-> cd vcpkg
-> git checkout 7ffa425e1db8b0c3edf9c50f2f3a0f25a324541d
-> .\bootstrap-vcpkg.bat
-> .\vcpkg.exe install boost:x64-windows
-> .\vcpkg.exe install protobuf:x64-windows
-> .\vcpkg.exe install openssl:x64-windows
-> .\vcpkg.exe install zlib:x64-windows
-
-Set the following environment variables -
-(Assuming that vcpkg was checked out at C:\vcpkg)
-> set PROTOBUF_HOME=C:\vcpkg\installed\x64-windows
-> set MAVEN_OPTS=-Xmx2048M -Xss128M
-
 All Maven goals are the same as described above with the exception that
 native code is built by enabling the 'native-win' Maven profile. -Pnative-win
 is enabled by default when building on Windows since the native components
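The Create Symbolic Links privilege mentioned above can be checked from a Windows prompt; a quick sketch, not part of BUILDING.txt itself (whoami /priv lists the privileges held by the current user, so an empty result means the privilege is missing):

> whoami /priv | findstr /i SeCreateSymbolicLinkPrivilege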
@@ -602,24 +557,6 @@ the zlib 1.2.7 source tree.

 http://www.zlib.net/

-
-Build command:
-The following command builds all the modules in the Hadoop project and generates the tar.gz file in
-hadoop-dist/target upon successful build. Run these commands from an
-"x64 Native Tools Command Prompt for VS 2019" which can be found under "Visual Studio 2019" in the
-Windows start menu. If you're using the Docker image from Dockerfile_windows_10, you'll be
-logged into "x64 Native Tools Command Prompt for VS 2019" automatically when you start the
-container.
-
-> set classpath=
-> set PROTOBUF_HOME=C:\vcpkg\installed\x64-windows
-> mvn clean package -Dhttps.protocols=TLSv1.2 -DskipTests -DskipDocs -Pnative-win,dist^
-  -Drequire.openssl -Drequire.test.libhadoop -Pyarn-ui -Dshell-executable=C:\Git\bin\bash.exe^
-  -Dtar -Dopenssl.prefix=C:\vcpkg\installed\x64-windows^
-  -Dcmake.prefix.path=C:\vcpkg\installed\x64-windows^
-  -Dwindows.cmake.toolchain.file=C:\vcpkg\scripts\buildsystems\vcpkg.cmake -Dwindows.cmake.build.type=RelWithDebInfo^
-  -Dwindows.build.hdfspp.dll=off -Dwindows.no.sasl=on -Duse.platformToolsetVersion=v142
-
 ----------------------------------------------------------------------------------
 Building distributions:

LICENSE-binary
@@ -210,28 +210,28 @@ hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/nvd3-1.8.5.* (css and js
 hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/checker/AbstractFuture.java
 hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/checker/TimeoutFuture.java

-com.aliyun:aliyun-java-sdk-core:4.5.10
+com.aliyun:aliyun-java-sdk-core:3.4.0
-com.aliyun:aliyun-java-sdk-kms:2.11.0
+com.aliyun:aliyun-java-sdk-ecs:4.2.0
-com.aliyun:aliyun-java-sdk-ram:3.1.0
+com.aliyun:aliyun-java-sdk-ram:3.0.0
 com.aliyun:aliyun-java-sdk-sts:3.0.0
-com.aliyun.oss:aliyun-sdk-oss:3.13.2
+com.aliyun.oss:aliyun-sdk-oss:3.13.0
-com.amazonaws:aws-java-sdk-bundle:1.12.316
+com.amazonaws:aws-java-sdk-bundle:1.11.901
 com.cedarsoftware:java-util:1.9.0
 com.cedarsoftware:json-io:2.5.1
-com.fasterxml.jackson.core:jackson-annotations:2.12.7
+com.fasterxml.jackson.core:jackson-annotations:2.13.0
-com.fasterxml.jackson.core:jackson-core:2.12.7
+com.fasterxml.jackson.core:jackson-core:2.13.0
-com.fasterxml.jackson.core:jackson-databind:2.12.7.1
+com.fasterxml.jackson.core:jackson-databind:2.13.0
-com.fasterxml.jackson.jaxrs:jackson-jaxrs-base:2.12.7
+com.fasterxml.jackson.jaxrs:jackson-jaxrs-base:2.13.0
-com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider:2.12.7
+com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider:2.13.0
-com.fasterxml.jackson.module:jackson-module-jaxb-annotations:2.12.7
+com.fasterxml.jackson.module:jackson-module-jaxb-annotations:2.13.0
 com.fasterxml.uuid:java-uuid-generator:3.1.4
-com.fasterxml.woodstox:woodstox-core:5.4.0
+com.fasterxml.woodstox:woodstox-core:5.3.0
 com.github.davidmoten:rxjava-extras:0.8.0.17
 com.github.stephenc.jcip:jcip-annotations:1.0-1
 com.google:guice:4.0
 com.google:guice-servlet:4.0
 com.google.api.grpc:proto-google-common-protos:1.0.0
-com.google.code.gson:2.9.0
+com.google.code.gson:2.2.4
 com.google.errorprone:error_prone_annotations:2.2.0
 com.google.j2objc:j2objc-annotations:1.1
 com.google.json-simple:json-simple:1.1.1
@@ -240,17 +240,18 @@ com.google.guava:guava:20.0
 com.google.guava:guava:27.0-jre
 com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava
 com.microsoft.azure:azure-storage:7.0.0
-com.nimbusds:nimbus-jose-jwt:9.31
+com.nimbusds:nimbus-jose-jwt:9.8.1
-com.squareup.okhttp3:okhttp:4.10.0
+com.squareup.okhttp:okhttp:2.7.5
-com.squareup.okio:okio:3.2.0
+com.squareup.okio:okio:1.6.0
 com.zaxxer:HikariCP:4.0.3
-commons-beanutils:commons-beanutils:1.9.4
+commons-beanutils:commons-beanutils:1.9.3
-commons-cli:commons-cli:1.5.0
+commons-cli:commons-cli:1.2
 commons-codec:commons-codec:1.11
 commons-collections:commons-collections:3.2.2
 commons-daemon:commons-daemon:1.0.13
 commons-io:commons-io:2.8.0
-commons-net:commons-net:3.9.0
+commons-logging:commons-logging:1.1.3
+commons-net:commons-net:3.6
 de.ruedigermoeller:fst:2.50
 io.grpc:grpc-api:1.26.0
 io.grpc:grpc-context:1.26.0
@@ -259,36 +260,18 @@ io.grpc:grpc-netty:1.26.0
 io.grpc:grpc-protobuf:1.26.0
 io.grpc:grpc-protobuf-lite:1.26.0
 io.grpc:grpc-stub:1.26.0
-io.netty:netty-all:4.1.77.Final
+io.netty:netty:3.10.6.Final
-io.netty:netty-buffer:4.1.77.Final
+io.netty:netty-all:4.1.42.Final
-io.netty:netty-codec:4.1.77.Final
+io.netty:netty-buffer:4.1.27.Final
-io.netty:netty-codec-dns:4.1.77.Final
+io.netty:netty-codec:4.1.27.Final
-io.netty:netty-codec-haproxy:4.1.77.Final
+io.netty:netty-codec-http:4.1.27.Final
-io.netty:netty-codec-http:4.1.77.Final
+io.netty:netty-codec-http2:4.1.27.Final
-io.netty:netty-codec-http2:4.1.77.Final
+io.netty:netty-codec-socks:4.1.27.Final
-io.netty:netty-codec-memcache:4.1.77.Final
+io.netty:netty-common:4.1.27.Final
-io.netty:netty-codec-mqtt:4.1.77.Final
+io.netty:netty-handler:4.1.27.Final
-io.netty:netty-codec-redis:4.1.77.Final
+io.netty:netty-handler-proxy:4.1.27.Final
-io.netty:netty-codec-smtp:4.1.77.Final
+io.netty:netty-resolver:4.1.27.Final
-io.netty:netty-codec-socks:4.1.77.Final
+io.netty:netty-transport:4.1.27.Final
-io.netty:netty-codec-stomp:4.1.77.Final
-io.netty:netty-codec-xml:4.1.77.Final
-io.netty:netty-common:4.1.77.Final
-io.netty:netty-handler:4.1.77.Final
-io.netty:netty-handler-proxy:4.1.77.Final
-io.netty:netty-resolver:4.1.77.Final
-io.netty:netty-resolver-dns:4.1.77.Final
-io.netty:netty-transport:4.1.77.Final
-io.netty:netty-transport-rxtx:4.1.77.Final
-io.netty:netty-transport-sctp:4.1.77.Final
-io.netty:netty-transport-udt:4.1.77.Final
-io.netty:netty-transport-classes-epoll:4.1.77.Final
-io.netty:netty-transport-native-unix-common:4.1.77.Final
-io.netty:netty-transport-classes-kqueue:4.1.77.Final
-io.netty:netty-resolver-dns-classes-macos:4.1.77.Final
-io.netty:netty-transport-native-epoll:4.1.77.Final
-io.netty:netty-transport-native-kqueue:4.1.77.Final
-io.netty:netty-resolver-dns-native-macos:4.1.77.Final
 io.opencensus:opencensus-api:0.12.3
 io.opencensus:opencensus-contrib-grpc-metrics:0.12.3
 io.reactivex:rxjava:1.3.8
@@ -299,15 +282,16 @@ javax.inject:javax.inject:1
 log4j:log4j:1.2.17
 net.java.dev.jna:jna:5.2.0
 net.minidev:accessors-smart:1.2
-org.apache.avro:avro:1.9.2
+net.minidev:json-smart:2.4.7
+org.apache.avro:avro:1.7.7
 org.apache.commons:commons-collections4:4.2
 org.apache.commons:commons-compress:1.21
-org.apache.commons:commons-configuration2:2.8.0
+org.apache.commons:commons-configuration2:2.1.1
-org.apache.commons:commons-csv:1.9.0
+org.apache.commons:commons-csv:1.0
 org.apache.commons:commons-digester:1.8.1
 org.apache.commons:commons-lang3:3.12.0
-org.apache.commons:commons-math3:3.6.1
+org.apache.commons:commons-math3:3.1.1
-org.apache.commons:commons-text:1.10.0
+org.apache.commons:commons-text:1.4
 org.apache.commons:commons-validator:1.6
 org.apache.curator:curator-client:5.2.0
 org.apache.curator:curator-framework:5.2.0
@@ -321,49 +305,49 @@ org.apache.htrace:htrace-core:3.1.0-incubating
 org.apache.htrace:htrace-core4:4.1.0-incubating
 org.apache.httpcomponents:httpclient:4.5.6
 org.apache.httpcomponents:httpcore:4.4.10
-org.apache.kafka:kafka-clients:2.8.2
+org.apache.kafka:kafka-clients:2.8.1
-org.apache.kerby:kerb-admin:2.0.3
+org.apache.kerby:kerb-admin:1.0.1
-org.apache.kerby:kerb-client:2.0.3
+org.apache.kerby:kerb-client:1.0.1
-org.apache.kerby:kerb-common:2.0.3
+org.apache.kerby:kerb-common:1.0.1
-org.apache.kerby:kerb-core:2.0.3
+org.apache.kerby:kerb-core:1.0.1
-org.apache.kerby:kerb-crypto:2.0.3
+org.apache.kerby:kerb-crypto:1.0.1
-org.apache.kerby:kerb-identity:2.0.3
+org.apache.kerby:kerb-identity:1.0.1
-org.apache.kerby:kerb-server:2.0.3
+org.apache.kerby:kerb-server:1.0.1
-org.apache.kerby:kerb-simplekdc:2.0.3
+org.apache.kerby:kerb-simplekdc:1.0.1
-org.apache.kerby:kerb-util:2.0.3
+org.apache.kerby:kerb-util:1.0.1
-org.apache.kerby:kerby-asn1:2.0.3
+org.apache.kerby:kerby-asn1:1.0.1
-org.apache.kerby:kerby-config:2.0.3
+org.apache.kerby:kerby-config:1.0.1
-org.apache.kerby:kerby-pkix:2.0.3
+org.apache.kerby:kerby-pkix:1.0.1
-org.apache.kerby:kerby-util:2.0.3
+org.apache.kerby:kerby-util:1.0.1
-org.apache.kerby:kerby-xdr:2.0.3
+org.apache.kerby:kerby-xdr:1.0.1
-org.apache.kerby:token-provider:2.0.3
+org.apache.kerby:token-provider:1.0.1
-org.apache.solr:solr-solrj:8.8.2
 org.apache.yetus:audience-annotations:0.5.0
 org.apache.zookeeper:zookeeper:3.6.3
-org.codehaus.jettison:jettison:1.5.4
+org.codehaus.jackson:jackson-core-asl:1.9.13
-org.eclipse.jetty:jetty-annotations:9.4.51.v20230217
+org.codehaus.jackson:jackson-jaxrs:1.9.13
-org.eclipse.jetty:jetty-http:9.4.51.v20230217
+org.codehaus.jackson:jackson-mapper-asl:1.9.13
-org.eclipse.jetty:jetty-io:9.4.51.v20230217
+org.codehaus.jackson:jackson-xc:1.9.13
-org.eclipse.jetty:jetty-jndi:9.4.51.v20230217
+org.codehaus.jettison:jettison:1.1
-org.eclipse.jetty:jetty-plus:9.4.51.v20230217
+org.eclipse.jetty:jetty-annotations:9.4.44.v20210927
-org.eclipse.jetty:jetty-security:9.4.51.v20230217
+org.eclipse.jetty:jetty-http:9.4.44.v20210927
-org.eclipse.jetty:jetty-server:9.4.51.v20230217
+org.eclipse.jetty:jetty-io:9.4.44.v20210927
-org.eclipse.jetty:jetty-servlet:9.4.51.v20230217
+org.eclipse.jetty:jetty-jndi:9.4.44.v20210927
-org.eclipse.jetty:jetty-util:9.4.51.v20230217
+org.eclipse.jetty:jetty-plus:9.4.44.v20210927
-org.eclipse.jetty:jetty-util-ajax:9.4.51.v20230217
+org.eclipse.jetty:jetty-security:9.4.44.v20210927
-org.eclipse.jetty:jetty-webapp:9.4.51.v20230217
+org.eclipse.jetty:jetty-server:9.4.44.v20210927
-org.eclipse.jetty:jetty-xml:9.4.51.v20230217
+org.eclipse.jetty:jetty-servlet:9.4.44.v20210927
-org.eclipse.jetty.websocket:javax-websocket-client-impl:9.4.51.v20230217
+org.eclipse.jetty:jetty-util:9.4.44.v20210927
-org.eclipse.jetty.websocket:javax-websocket-server-impl:9.4.51.v20230217
+org.eclipse.jetty:jetty-util-ajax:9.4.44.v20210927
+org.eclipse.jetty:jetty-webapp:9.4.44.v20210927
+org.eclipse.jetty:jetty-xml:9.4.44.v20210927
+org.eclipse.jetty.websocket:javax-websocket-client-impl:9.4.44.v20210927
+org.eclipse.jetty.websocket:javax-websocket-server-impl:9.4.44.v20210927
 org.ehcache:ehcache:3.3.1
-org.ini4j:ini4j:0.5.4
-org.jetbrains.kotlin:kotlin-stdlib:1.4.10
-org.jetbrains.kotlin:kotlin-stdlib-common:1.4.10
 org.lz4:lz4-java:1.7.1
 org.objenesis:objenesis:2.6
 org.xerial.snappy:snappy-java:1.0.5
-org.yaml:snakeyaml:2.0
+org.yaml:snakeyaml:1.16:
-org.wildfly.openssl:wildfly-openssl:1.1.3.Final
+org.wildfly.openssl:wildfly-openssl:1.0.7.Final


 --------------------------------------------------------------------------------
@@ -420,14 +404,14 @@ hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/dataTables.bootstrap.css
 hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/dataTables.bootstrap.js
 hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/dust-full-2.0.0.min.js
 hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/dust-helpers-1.1.1.min.js
-hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/jquery-3.6.0.min.js
+hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/jquery-3.5.1.min.js
 hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/jquery.dataTables.min.js
 hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/moment.min.js
 hadoop-tools/hadoop-sls/src/main/html/js/thirdparty/bootstrap.min.js
 hadoop-tools/hadoop-sls/src/main/html/js/thirdparty/jquery.js
 hadoop-tools/hadoop-sls/src/main/html/css/bootstrap.min.css
 hadoop-tools/hadoop-sls/src/main/html/css/bootstrap-responsive.min.css
-hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/dt-1.11.5/*
+hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/dt-1.10.18/*
 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery
 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jt/jquery.jstree.js
 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/resources/TERMINAL
@@ -435,7 +419,7 @@ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanage
 bootstrap v3.3.6
 broccoli-asset-rev v2.4.2
 broccoli-funnel v1.0.1
-datatables v1.11.5
+datatables v1.10.8
 em-helpers v0.5.13
 em-table v0.1.6
 ember v2.2.0
@@ -483,8 +467,8 @@ com.microsoft.azure:azure-cosmosdb-gateway:2.4.5
 com.microsoft.azure:azure-data-lake-store-sdk:2.3.3
 com.microsoft.azure:azure-keyvault-core:1.0.0
 com.microsoft.sqlserver:mssql-jdbc:6.2.1.jre7
-org.bouncycastle:bcpkix-jdk15on:1.68
+org.bouncycastle:bcpkix-jdk15on:1.60
-org.bouncycastle:bcprov-jdk15on:1.68
+org.bouncycastle:bcprov-jdk15on:1.60
 org.checkerframework:checker-qual:2.5.2
 org.codehaus.mojo:animal-sniffer-annotations:1.17
 org.jruby.jcodings:jcodings:1.0.13
@@ -499,17 +483,18 @@ org.slf4j:slf4j-log4j12:1.7.25
 CDDL 1.1 + GPLv2 with classpath exception
 -----------------------------------------

-com.github.pjfanning:jersey-json:1.20
+com.sun.jersey:jersey-client:1.19
-com.sun.jersey:jersey-client:1.19.4
+com.sun.jersey:jersey-core:1.19
-com.sun.jersey:jersey-core:1.19.4
+com.sun.jersey:jersey-guice:1.19
-com.sun.jersey:jersey-guice:1.19.4
+com.sun.jersey:jersey-json:1.19
-com.sun.jersey:jersey-server:1.19.4
+com.sun.jersey:jersey-server:1.19
-com.sun.jersey:jersey-servlet:1.19.4
+com.sun.jersey:jersey-servlet:1.19
 com.sun.xml.bind:jaxb-impl:2.2.3-1
 javax.annotation:javax.annotation-api:1.3.2
 javax.servlet:javax.servlet-api:3.1.0
 javax.servlet.jsp:jsp-api:2.1
 javax.websocket:javax.websocket-api:1.0
+javax.ws.rs:javax.ws.rs-api:2.1.1
 javax.ws.rs:jsr311-api:1.1.1
 javax.xml.bind:jaxb-api:2.2.11

@@ -518,20 +503,18 @@ Eclipse Public License 1.0
 --------------------------

 junit:junit:4.13.2
-org.jacoco:org.jacoco.agent:0.8.5


 HSQL License
 ------------

-org.hsqldb:hsqldb:2.7.1
+org.hsqldb:hsqldb:2.3.4


 JDOM License
 ------------

-org.jdom:jdom2:2.0.6.1
+org.jdom:jdom:1.1


 Public Domain
@@ -245,14 +245,14 @@ hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/dataTables.bootstrap.css
 hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/dataTables.bootstrap.js
 hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/dust-full-2.0.0.min.js
 hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/dust-helpers-1.1.1.min.js
-hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/jquery-3.6.0.min.js
+hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/jquery-3.5.1.min.js
 hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/jquery.dataTables.min.js
 hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/moment.min.js
 hadoop-tools/hadoop-sls/src/main/html/js/thirdparty/bootstrap.min.js
 hadoop-tools/hadoop-sls/src/main/html/js/thirdparty/jquery.js
 hadoop-tools/hadoop-sls/src/main/html/css/bootstrap.min.css
 hadoop-tools/hadoop-sls/src/main/html/css/bootstrap-responsive.min.css
-hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/dt-1.11.5/*
+hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/dt-1.10.18/*
 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery
 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jt/jquery.jstree.js
 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/resources/TERMINAL
@@ -66,7 +66,7 @@ available from http://www.digip.org/jansson/.


 AWS SDK for Java
-Copyright 2010-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.

 This product includes software developed by
 Amazon Technologies, Inc (http://www.amazon.com/).
@@ -47,7 +47,7 @@ pipeline {

     options {
         buildDiscarder(logRotator(numToKeepStr: '5'))
-        timeout (time: 48, unit: 'HOURS')
+        timeout (time: 24, unit: 'HOURS')
         timestamps()
        checkoutToSubdirectory('src')
     }
@@ -55,7 +55,7 @@ pipeline {
     environment {
         YETUS='yetus'
         // Branch or tag name. Yetus release tags are 'rel/X.Y.Z'
-        YETUS_VERSION='rel/0.14.0'
+        YETUS_VERSION='f9ba0170a5787a5f4662d3769804fef0226a182f'
     }

     parameters {
@@ -293,7 +293,6 @@ function usage
   echo "--security Emergency security release"
   echo "--sign Use .gnupg dir to sign the artifacts and jars"
   echo "--version=[version] Use an alternative version string"
-  echo "--mvnargs=[args] Extra Maven args to be provided when running mvn commands"
 }

 function option_parse
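For reference, a hypothetical invocation of the --mvnargs flag being removed above (--sign appears in the usage text shown; the Maven argument value is illustrative only, not taken from this diff):

$ dev-support/bin/create-release --sign --mvnargs="-Dmaven.repo.local=/tmp/hadoop-m2"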
@@ -348,9 +347,6 @@ function option_parse
     --version=*)
       HADOOP_VERSION=${i#*=}
     ;;
-    --mvnargs=*)
-      MVNEXTRAARGS=${i#*=}
-    ;;
   esac
 done

@@ -417,9 +413,6 @@ function option_parse
     MVN_ARGS=("-Dmaven.repo.local=${MVNCACHE}")
   fi
 fi
-if [ -n "$MVNEXTRAARGS" ]; then
-  MVN_ARGS+=("$MVNEXTRAARGS")
-fi

 if [[ "${SECURITYRELEASE}" = true ]]; then
   if [[ ! -d "${BASEDIR}/hadoop-common-project/hadoop-common/src/site/markdown/release/${HADOOP_VERSION}" ]]; then
@@ -542,10 +535,6 @@ function makearelease

   big_console_header "Cleaning the Source Tree"

-  # Since CVE-2022-24765 in April 2022, git refuses to work in directories
-  # whose owner != the current user, unless explicitly told to trust it.
-  git config --global --add safe.directory /build/source
-
   # git clean to clear any remnants from previous build
   run "${GIT}" clean -xdf -e /patchprocess

@@ -20,20 +20,6 @@
 # Override these to match Apache Hadoop's requirements
 personality_plugins "all,-ant,-gradle,-scalac,-scaladoc"

-# These flags are needed to run Yetus against Hadoop on Windows.
-WINDOWS_FLAGS="-Pnative-win
-  -Dhttps.protocols=TLSv1.2
-  -Drequire.openssl
-  -Drequire.test.libhadoop
-  -Dshell-executable=${BASH_EXECUTABLE}
-  -Dopenssl.prefix=${VCPKG_INSTALLED_PACKAGES}
-  -Dcmake.prefix.path=${VCPKG_INSTALLED_PACKAGES}
-  -Dwindows.cmake.toolchain.file=${CMAKE_TOOLCHAIN_FILE}
-  -Dwindows.cmake.build.type=RelWithDebInfo
-  -Dwindows.build.hdfspp.dll=off
-  -Dwindows.no.sasl=on
-  -Duse.platformToolsetVersion=v142"
-
 ## @description Globals specific to this personality
 ## @audience private
 ## @stability evolving
@@ -101,30 +87,17 @@ function hadoop_order
   echo "${hadoopm}"
 }

-## @description Retrieves the Hadoop project version defined in the root pom.xml
-## @audience private
-## @stability evolving
-## @returns 0 on success, 1 on failure
-function load_hadoop_version
-{
-  if [[ -f "${BASEDIR}/pom.xml" ]]; then
-    HADOOP_VERSION=$(grep '<version>' "${BASEDIR}/pom.xml" \
-        | head -1 \
-        | "${SED}" -e 's|^ *<version>||' -e 's|</version>.*$||' \
-        | cut -f1 -d- )
-    return 0
-  else
-    return 1
-  fi
-}
-
 ## @description Determine if it is safe to run parallel tests
 ## @audience private
 ## @stability evolving
 ## @param ordering
 function hadoop_test_parallel
 {
-  if load_hadoop_version; then
+  if [[ -f "${BASEDIR}/pom.xml" ]]; then
+    HADOOP_VERSION=$(grep '<version>' "${BASEDIR}/pom.xml" \
+        | head -1 \
+        | "${SED}" -e 's|^ *<version>||' -e 's|</version>.*$||' \
+        | cut -f1 -d- )
     export HADOOP_VERSION
   else
     return 1
@@ -289,10 +262,7 @@ function hadoop_native_flags
     Windows_NT|CYGWIN*|MINGW*|MSYS*)
       echo \
         "${args[@]}" \
-        -Drequire.snappy \
-        -Pdist \
-        -Dtar \
-        "${WINDOWS_FLAGS}"
+        -Drequire.snappy -Drequire.openssl -Pnative-win
     ;;
     *)
       echo \
@@ -435,10 +405,7 @@ function personality_modules
     extra="${extra} ${flags}"
   fi

-  if [[ "$IS_WINDOWS" && "$IS_WINDOWS" == 1 ]]; then
-    extra="-Ptest-patch -Pdist -Dtar ${WINDOWS_FLAGS} ${extra}"
-  fi
+  extra="-Ptest-patch ${extra}"

   for module in $(hadoop_order ${ordering}); do
     # shellcheck disable=SC2086
     personality_enqueue_module ${module} ${extra}
@@ -581,28 +548,17 @@ function shadedclient_rebuild

   big_console_header "Checking client artifacts on ${repostatus} with shaded clients"

-  extra="-Dtest=NoUnitTests -Dmaven.javadoc.skip=true -Dcheckstyle.skip=true -Dspotbugs.skip=true"
-
-  if [[ "$IS_WINDOWS" && "$IS_WINDOWS" == 1 ]]; then
-    if load_hadoop_version; then
-      export HADOOP_HOME="${SOURCEDIR}/hadoop-dist/target/hadoop-${HADOOP_VERSION}-SNAPSHOT"
-    else
-      yetus_error "[WARNING] Unable to extract the Hadoop version and thus HADOOP_HOME is not set. Some tests may fail."
-    fi
-
-    extra="${WINDOWS_FLAGS} ${extra}"
-  fi
-
   echo_and_redirect "${logfile}" \
-    "${MAVEN}" "${MAVEN_ARGS[@]}" verify -fae --batch-mode -am "${modules[@]}" "${extra}"
+    "${MAVEN}" "${MAVEN_ARGS[@]}" verify -fae --batch-mode -am \
+      "${modules[@]}" \
+      -Dtest=NoUnitTests -Dmaven.javadoc.skip=true -Dcheckstyle.skip=true -Dspotbugs.skip=true

   big_console_header "Checking client artifacts on ${repostatus} with non-shaded clients"

   echo_and_redirect "${logfile}" \
     "${MAVEN}" "${MAVEN_ARGS[@]}" verify -fae --batch-mode -am \
       "${modules[@]}" \
-      -DskipShade -Dtest=NoUnitTests -Dmaven.javadoc.skip=true -Dcheckstyle.skip=true \
-      -Dspotbugs.skip=true "${extra}"
+      -DskipShade -Dtest=NoUnitTests -Dmaven.javadoc.skip=true -Dcheckstyle.skip=true -Dspotbugs.skip=true

   count=$("${GREP}" -c '\[ERROR\]' "${logfile}")
   if [[ ${count} -gt 0 ]]; then
@@ -77,7 +77,7 @@ WANTED="$1"
 shift
 ARGV=("$@")

-HADOOP_YETUS_VERSION=${HADOOP_YETUS_VERSION:-0.14.0}
+HADOOP_YETUS_VERSION=${HADOOP_YETUS_VERSION:-0.13.0}
 BIN=$(yetus_abs "${BASH_SOURCE-$0}")
 BINDIR=$(dirname "${BIN}")

@@ -171,17 +171,7 @@ if [[ -n "${GPGBIN}" && ! "${HADOOP_SKIP_YETUS_VERIFICATION}" = true ]]; then
   fi
 fi

-if [[ "$IS_WINDOWS" && "$IS_WINDOWS" == 1 ]]; then
-  gunzip -c "${TARBALL}.gz" | tar xpf -
-
-  # One of the entries in the Yetus tarball unzips a symlink qbt.sh.
-  # The symlink creation fails on Windows, unless this CI is run as Admin or Developer mode is
-  # enabled.
-  # Thus, we create the qbt.sh symlink ourselves and move it to the target.
-  YETUS_PRECOMMIT_DIR="${YETUS_PREFIX}-${HADOOP_YETUS_VERSION}/lib/precommit"
-  ln -s "${YETUS_PRECOMMIT_DIR}/test-patch.sh" qbt.sh
-  mv qbt.sh "${YETUS_PRECOMMIT_DIR}"
-elif ! (gunzip -c "${TARBALL}.gz" | tar xpf -); then
+if ! (gunzip -c "${TARBALL}.gz" | tar xpf -); then
   yetus_error "ERROR: ${TARBALL}.gz is corrupt. Investigate and then remove ${HADOOP_PATCHPROCESS} to try again."
   exit 1
 fi
@@ -74,7 +74,7 @@ ENV PATH "${PATH}:/opt/protobuf/bin"
 ###
 # Avoid out of memory errors in builds
 ###
-ENV MAVEN_OPTS -Xms256m -Xmx3072m
+ENV MAVEN_OPTS -Xms256m -Xmx1536m

 # Skip gpg verification when downloading Yetus via yetus-wrapper
 ENV HADOOP_SKIP_YETUS_VERIFICATION true
@@ -30,13 +30,6 @@ COPY pkg-resolver pkg-resolver
 RUN chmod a+x pkg-resolver/*.sh pkg-resolver/*.py \
     && chmod a+r pkg-resolver/*.json

-######
-# Centos 8 has reached its EOL and the packages
-# are no longer available on mirror.centos.org site.
-# Please see https://www.centos.org/centos-linux-eol/
-######
-RUN pkg-resolver/set-vault-as-baseurl-centos.sh centos:8
-
 ######
 # Install packages from yum
 ######
@@ -82,7 +82,6 @@ ENV HADOOP_SKIP_YETUS_VERIFICATION true
 ####
 # Install packages
 ####
-RUN pkg-resolver/install-cmake.sh debian:10
 RUN pkg-resolver/install-spotbugs.sh debian:10
 RUN pkg-resolver/install-boost.sh debian:10
 RUN pkg-resolver/install-protobuf.sh debian:10
@@ -1,124 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Dockerfile for installing the necessary dependencies for building Hadoop.
-# See BUILDING.txt.
-
-FROM mcr.microsoft.com/windows:ltsc2019
-
-# Need to disable the progress bar for speeding up the downloads.
-# hadolint ignore=SC2086
-RUN powershell $Global:ProgressPreference = 'SilentlyContinue'
-
-# Restore the default Windows shell for correct batch processing.
-SHELL ["cmd", "/S", "/C"]
-
-# Install Visual Studio 2019 Build Tools.
-RUN curl -SL --output vs_buildtools.exe https://aka.ms/vs/16/release/vs_buildtools.exe \
-    && (start /w vs_buildtools.exe --quiet --wait --norestart --nocache \
-    --installPath "%ProgramFiles(x86)%\Microsoft Visual Studio\2019\BuildTools" \
-    --add Microsoft.VisualStudio.Workload.VCTools \
-    --add Microsoft.VisualStudio.Component.VC.ASAN \
-    --add Microsoft.VisualStudio.Component.VC.Tools.x86.x64 \
-    --add Microsoft.VisualStudio.Component.Windows10SDK.19041 \
-    || IF "%ERRORLEVEL%"=="3010" EXIT 0) \
-    && del /q vs_buildtools.exe
-
-# Install Chocolatey.
-RUN powershell -NoProfile -ExecutionPolicy Bypass -Command "iex ((New-Object System.Net.WebClient).DownloadString('https://chocolatey.org/install.ps1'))"
-RUN setx PATH "%PATH%;%ALLUSERSPROFILE%\chocolatey\bin"
-
-# Install git.
-RUN choco install git.install -y
-RUN powershell Copy-Item -Recurse -Path 'C:\Program Files\Git' -Destination C:\Git
-
-# Install vcpkg.
-# hadolint ignore=DL3003
-RUN powershell git clone https://github.com/microsoft/vcpkg.git \
-    && cd vcpkg \
-    && git checkout 7ffa425e1db8b0c3edf9c50f2f3a0f25a324541d \
-    && .\bootstrap-vcpkg.bat
-RUN powershell .\vcpkg\vcpkg.exe install boost:x64-windows
-RUN powershell .\vcpkg\vcpkg.exe install protobuf:x64-windows
-RUN powershell .\vcpkg\vcpkg.exe install openssl:x64-windows
-RUN powershell .\vcpkg\vcpkg.exe install zlib:x64-windows
-ENV PROTOBUF_HOME "C:\vcpkg\installed\x64-windows"
-
-# Install Azul Java 8 JDK.
-RUN powershell Invoke-WebRequest -URI https://cdn.azul.com/zulu/bin/zulu8.62.0.19-ca-jdk8.0.332-win_x64.zip -OutFile $Env:TEMP\zulu8.62.0.19-ca-jdk8.0.332-win_x64.zip
-RUN powershell Expand-Archive -Path $Env:TEMP\zulu8.62.0.19-ca-jdk8.0.332-win_x64.zip -DestinationPath "C:\Java"
-ENV JAVA_HOME "C:\Java\zulu8.62.0.19-ca-jdk8.0.332-win_x64"
-RUN setx PATH "%PATH%;%JAVA_HOME%\bin"
-
-# Install Apache Maven.
-RUN powershell Invoke-WebRequest -URI https://archive.apache.org/dist/maven/maven-3/3.8.6/binaries/apache-maven-3.8.6-bin.zip -OutFile $Env:TEMP\apache-maven-3.8.6-bin.zip
-RUN powershell Expand-Archive -Path $Env:TEMP\apache-maven-3.8.6-bin.zip -DestinationPath "C:\Maven"
-RUN setx PATH "%PATH%;C:\Maven\apache-maven-3.8.6\bin"
-ENV MAVEN_OPTS '-Xmx2048M -Xss128M'
-
-# Install CMake 3.19.0.
-RUN powershell Invoke-WebRequest -URI https://cmake.org/files/v3.19/cmake-3.19.0-win64-x64.zip -OutFile $Env:TEMP\cmake-3.19.0-win64-x64.zip
-RUN powershell Expand-Archive -Path $Env:TEMP\cmake-3.19.0-win64-x64.zip -DestinationPath "C:\CMake"
-RUN setx PATH "%PATH%;C:\CMake\cmake-3.19.0-win64-x64\bin"
-
-# Install zstd 1.5.4.
-RUN powershell Invoke-WebRequest -Uri https://github.com/facebook/zstd/releases/download/v1.5.4/zstd-v1.5.4-win64.zip -OutFile $Env:TEMP\zstd-v1.5.4-win64.zip
-RUN powershell Expand-Archive -Path $Env:TEMP\zstd-v1.5.4-win64.zip -DestinationPath "C:\ZStd"
-RUN setx PATH "%PATH%;C:\ZStd"
-
-# Install libopenssl 3.1.0 needed for rsync 3.2.7.
-RUN powershell Invoke-WebRequest -Uri https://repo.msys2.org/msys/x86_64/libopenssl-3.1.0-1-x86_64.pkg.tar.zst -OutFile $Env:TEMP\libopenssl-3.1.0-1-x86_64.pkg.tar.zst
-RUN powershell zstd -d $Env:TEMP\libopenssl-3.1.0-1-x86_64.pkg.tar.zst -o $Env:TEMP\libopenssl-3.1.0-1-x86_64.pkg.tar
-RUN powershell mkdir "C:\LibOpenSSL"
-RUN powershell tar -xvf $Env:TEMP\libopenssl-3.1.0-1-x86_64.pkg.tar -C "C:\LibOpenSSL"
-
-# Install libxxhash 0.8.1 needed for rsync 3.2.7.
-RUN powershell Invoke-WebRequest -Uri https://repo.msys2.org/msys/x86_64/libxxhash-0.8.1-1-x86_64.pkg.tar.zst -OutFile $Env:TEMP\libxxhash-0.8.1-1-x86_64.pkg.tar.zst
-RUN powershell zstd -d $Env:TEMP\libxxhash-0.8.1-1-x86_64.pkg.tar.zst -o $Env:TEMP\libxxhash-0.8.1-1-x86_64.pkg.tar
-RUN powershell mkdir "C:\LibXXHash"
-RUN powershell tar -xvf $Env:TEMP\libxxhash-0.8.1-1-x86_64.pkg.tar -C "C:\LibXXHash"
-
-# Install libzstd 1.5.4 needed for rsync 3.2.7.
-RUN powershell Invoke-WebRequest -Uri https://repo.msys2.org/msys/x86_64/libzstd-1.5.4-1-x86_64.pkg.tar.zst -OutFile $Env:TEMP\libzstd-1.5.4-1-x86_64.pkg.tar.zst
-RUN powershell zstd -d $Env:TEMP\libzstd-1.5.4-1-x86_64.pkg.tar.zst -o $Env:TEMP\libzstd-1.5.4-1-x86_64.pkg.tar
-RUN powershell mkdir "C:\LibZStd"
-RUN powershell tar -xvf $Env:TEMP\libzstd-1.5.4-1-x86_64.pkg.tar -C "C:\LibZStd"
-
-# Install rsync 3.2.7.
-RUN powershell Invoke-WebRequest -Uri https://repo.msys2.org/msys/x86_64/rsync-3.2.7-2-x86_64.pkg.tar.zst -OutFile $Env:TEMP\rsync-3.2.7-2-x86_64.pkg.tar.zst
-RUN powershell zstd -d $Env:TEMP\rsync-3.2.7-2-x86_64.pkg.tar.zst -o $Env:TEMP\rsync-3.2.7-2-x86_64.pkg.tar
-RUN powershell mkdir "C:\RSync"
-RUN powershell tar -xvf $Env:TEMP\rsync-3.2.7-2-x86_64.pkg.tar -C "C:\RSync"
-# Copy the dependencies of rsync 3.2.7.
-RUN powershell Copy-Item -Path "C:\LibOpenSSL\usr\bin\*.dll" -Destination "C:\Program` Files\Git\usr\bin"
-RUN powershell Copy-Item -Path "C:\LibXXHash\usr\bin\*.dll" -Destination "C:\Program` Files\Git\usr\bin"
-RUN powershell Copy-Item -Path "C:\LibZStd\usr\bin\*.dll" -Destination "C:\Program` Files\Git\usr\bin"
-RUN powershell Copy-Item -Path "C:\RSync\usr\bin\*" -Destination "C:\Program` Files\Git\usr\bin"
-
-# Install Python 3.10.11.
-RUN powershell Invoke-WebRequest -Uri https://www.python.org/ftp/python/3.10.11/python-3.10.11-embed-amd64.zip -OutFile $Env:TEMP\python-3.10.11-embed-amd64.zip
-RUN powershell Expand-Archive -Path $Env:TEMP\python-3.10.11-embed-amd64.zip -DestinationPath "C:\Python3"
-RUN powershell New-Item -ItemType HardLink -Value "C:\Python3\python.exe" -Path "C:\Python3\python3.exe"
-RUN setx path "%PATH%;C:\Python3"
-
-# We get strange Javadoc errors without this.
-RUN setx classpath ""
-
-RUN git config --global core.longpaths true
-RUN setx PATH "%PATH%;C:\Program Files\Git\usr\bin"
-
-# Define the entry point for the docker container.
-ENTRYPOINT ["C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools\\VC\\Auxiliary\\Build\\vcvars64.bat", "&&", "cmd.exe"]
@@ -40,7 +40,7 @@ fi

 if [ "$version_to_install" == "3.6.3" ]; then
   mkdir -p /opt/maven /tmp/maven &&
-    curl -L -s -S https://dlcdn.apache.org/maven/maven-3/3.6.3/binaries/apache-maven-3.6.3-bin.tar.gz \
+    curl -L -s -S https://mirrors.estointernet.in/apache/maven/maven-3/3.6.3/binaries/apache-maven-3.6.3-bin.tar.gz \
       -o /tmp/maven/apache-maven-3.6.3-bin.tar.gz &&
     tar xzf /tmp/maven/apache-maven-3.6.3-bin.tar.gz --strip-components 1 -C /opt/maven
 else
@@ -62,6 +62,7 @@
     "centos:8": "clang"
   },
   "cmake": {
+    "debian:10": "cmake",
     "ubuntu:focal": "cmake",
     "ubuntu:focal::arch64": "cmake"
   },
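As context for the hunk above: each pkg-resolver JSON file maps a tool name to a per-platform package name. A minimal sketch of querying such a map, assuming `jq` is installed and using the hypothetical file name `packages.json` (the resolver's real entry point is not shown in this diff):

```
#!/usr/bin/env bash
# Hypothetical lookup: print the package that provides "cmake" on a platform.
# Only the map layout ("tool" -> "platform" -> "package") comes from the hunk above.
platform="debian:10"
jq -r --arg p "$platform" '.cmake[$p] // empty' packages.json   # prints: cmake
```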
@@ -1,33 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-if [ $# -lt 1 ]; then
-  echo "ERROR: Need at least 1 argument, $# were provided"
-  exit 1
-fi
-
-if [ "$1" == "centos:7" ] || [ "$1" == "centos:8" ]; then
-  cd /etc/yum.repos.d/ || exit &&
-  sed -i 's/mirrorlist/#mirrorlist/g' /etc/yum.repos.d/CentOS-* &&
-  sed -i 's|#baseurl=http://mirror.centos.org|baseurl=http://vault.centos.org|g' /etc/yum.repos.d/CentOS-* &&
-  yum update -y &&
-  cd /root || exit
-else
-  echo "ERROR: Setting the archived baseurl is only supported for centos 7 and 8 environments"
-  exit 1
-fi
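The deleted helper above takes the environment name as its single argument and rewrites the yum repo definitions to point at vault.centos.org. A usage sketch (the file name `set-vault-as-baseurl-centos.sh` is a guess based on the script's purpose; only the argument values come from the script itself):

```
# Inside a CentOS build container, before installing packages:
./set-vault-as-baseurl-centos.sh centos:7
# Anything other than centos:7 / centos:8 fails fast with the ERROR message:
./set-vault-as-baseurl-centos.sh ubuntu:focal || echo "unsupported, exit code $?"
```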
@@ -1,134 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements.  See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License.  You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-
-Apache Hadoop Git/Jira FixVersion validation
-============================================================
-
-Git commits in Apache Hadoop contains Jira number of the format
-HADOOP-XXXX or HDFS-XXXX or YARN-XXXX or MAPREDUCE-XXXX.
-While creating a release candidate, we also include changelist
-and this changelist can be identified based on Fixed/Closed Jiras
-with the correct fix versions. However, sometimes we face few
-inconsistencies between fixed Jira and Git commit message.
-
-git_jira_fix_version_check.py script takes care of
-identifying all git commits with commit
-messages with any of these issues:
-
-1. commit is reverted as per commit message
-2. commit does not contain Jira number format in message
-3. Jira does not have expected fixVersion
-4. Jira has expected fixVersion, but it is not yet resolved
-
-Moreover, this script also finds any resolved Jira with expected
-fixVersion but without any corresponding commit present.
-
-This should be useful as part of RC preparation.
-
-git_jira_fix_version_check supports python3 and it required
-installation of jira:
-
-```
-$ python3 --version
-Python 3.9.7
-
-$ python3 -m venv ./venv
-
-$ ./venv/bin/pip install -r dev-support/git-jira-validation/requirements.txt
-
-$ ./venv/bin/python dev-support/git-jira-validation/git_jira_fix_version_check.py
-
-```
-
-The script also requires below inputs:
-```
-1. First commit hash to start excluding commits from history:
-   Usually we can provide latest commit hash from last tagged release
-   so that the script will only loop through all commits in git commit
-   history before this commit hash. e.g for 3.3.2 release, we can provide
-   git hash: fa4915fdbbbec434ab41786cb17b82938a613f16
-   because this commit bumps up hadoop pom versions to 3.3.2:
-   https://github.com/apache/hadoop/commit/fa4915fdbbbec434ab41786cb17b82938a613f16
-
-2. Fix Version:
-   Exact fixVersion that we would like to compare all Jira's fixVersions
-   with. e.g for 3.3.2 release, it should be 3.3.2.
-
-3. JIRA Project Name:
-   The exact name of Project as case-sensitive e.g HADOOP / OZONE
-
-4. Path of project's working dir with release branch checked-in:
-   Path of project from where we want to compare git hashes from. Local fork
-   of the project should be up-to date with upstream and expected release
-   branch should be checked-in.
-
-5. Jira server url (default url: https://issues.apache.org/jira):
-   Default value of server points to ASF Jiras but this script can be
-   used outside of ASF Jira too.
-```
-
-
-Example of script execution:
-```
-JIRA Project Name (e.g HADOOP / OZONE etc): HADOOP
-First commit hash to start excluding commits from history: fa4915fdbbbec434ab41786cb17b82938a613f16
-Fix Version: 3.3.2
-Jira server url (default: https://issues.apache.org/jira):
-Path of project's working dir with release branch checked-in: /Users/vjasani/Documents/src/hadoop-3.3/hadoop
-
-Check git status output and verify expected branch
-
-On branch branch-3.3.2
-Your branch is up to date with 'origin/branch-3.3.2'.
-
-nothing to commit, working tree clean
-
-
-Jira/Git commit message diff starting: ##############################################
-Jira not present with version: 3.3.2. Commit: 8cd8e435fb43a251467ca74fadcb14f21a3e8163 HADOOP-17198. Support S3 Access Points (#3260) (branch-3.3.2) (#3955)
-WARN: Jira not found. Commit: 8af28b7cca5c6020de94e739e5373afc69f399e5 Updated the index as per 3.3.2 release
-WARN: Jira not found. Commit: e42e483d0085aa46543ebcb1196dd155ddb447d0 Make upstream aware of 3.3.1 release
-Commit seems reverted. Commit: 6db1165380cd308fb74c9d17a35c1e57174d1e09 Revert "HDFS-14099. Unknown frame descriptor when decompressing multiple frames (#3836)"
-Commit seems reverted. Commit: 1e3f94fa3c3d4a951d4f7438bc13e6f008f228f4 Revert "HDFS-16333. fix balancer bug when transfer an EC block (#3679)"
-Jira not present with version: 3.3.2. Commit: ce0bc7b473a62a580c1227a4de6b10b64b045d3a HDFS-16344. Improve DirectoryScanner.Stats#toString (#3695)
-Jira not present with version: 3.3.2. Commit: 30f0629d6e6f735c9f4808022f1a1827c5531f75 HDFS-16339. Show the threshold when mover threads quota is exceeded (#3689)
-Jira not present with version: 3.3.2. Commit: e449daccf486219e3050254d667b74f92e8fc476 YARN-11007. Correct words in YARN documents (#3680)
-Commit seems reverted. Commit: 5c189797828e60a3329fd920ecfb99bcbccfd82d Revert "HDFS-16336. Addendum: De-flake TestRollingUpgrade#testRollback (#3686)"
-Jira not present with version: 3.3.2. Commit: 544dffd179ed756bc163e4899e899a05b93d9234 HDFS-16171. De-flake testDecommissionStatus (#3280)
-Jira not present with version: 3.3.2. Commit: c6914b1cb6e4cab8263cd3ae5cc00bc7a8de25de HDFS-16350. Datanode start time should be set after RPC server starts successfully (#3711)
-Jira not present with version: 3.3.2. Commit: 328d3b84dfda9399021ccd1e3b7afd707e98912d HDFS-16336. Addendum: De-flake TestRollingUpgrade#testRollback (#3686)
-Jira not present with version: 3.3.2. Commit: 3ae8d4ccb911c9ababd871824a2fafbb0272c016 HDFS-16336. De-flake TestRollingUpgrade#testRollback (#3686)
-Jira not present with version: 3.3.2. Commit: 15d3448e25c797b7d0d401afdec54683055d4bb5 HADOOP-17975. Fallback to simple auth does not work for a secondary DistributedFileSystem instance. (#3579)
-Jira not present with version: 3.3.2. Commit: dd50261219de71eaa0a1ad28529953e12dfb92e0 YARN-10991. Fix to ignore the grouping "[]" for resourcesStr in parseResourcesString method (#3592)
-Jira not present with version: 3.3.2. Commit: ef462b21bf03b10361d2f9ea7b47d0f7360e517f HDFS-16332. Handle invalid token exception in sasl handshake (#3677)
-WARN: Jira not found. Commit: b55edde7071419410ea5bea4ce6462b980e48f5b Also update hadoop.version to 3.3.2
-...
-...
-...
-Found first commit hash after which git history is redundant. commit: fa4915fdbbbec434ab41786cb17b82938a613f16
-Exiting successfully
-Jira/Git commit message diff completed: ##############################################
-
-Any resolved Jira with fixVersion 3.3.2 but corresponding commit not present
-Starting diff: ##############################################
-HADOOP-18066 is marked resolved with fixVersion 3.3.2 but no corresponding commit found
-HADOOP-17936 is marked resolved with fixVersion 3.3.2 but no corresponding commit found
-Completed diff: ##############################################
-
-
-```
@@ -1,117 +0,0 @@
-#!/usr/bin/env python3
-############################################################################
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-############################################################################
-"""An application to assist Release Managers with ensuring that histories in
-Git and fixVersions in JIRA are in agreement. See README.md for a detailed
-explanation.
-"""
-
-
-import os
-import re
-import subprocess
-
-from jira import JIRA
-
-jira_project_name = input("JIRA Project Name (e.g HADOOP / OZONE etc): ") \
-    or "HADOOP"
-# Define project_jira_keys with - appended. e.g for HADOOP Jiras,
-# project_jira_keys should include HADOOP-, HDFS-, YARN-, MAPREDUCE-
-project_jira_keys = [jira_project_name + '-']
-if jira_project_name == 'HADOOP':
-    project_jira_keys.append('HDFS-')
-    project_jira_keys.append('YARN-')
-    project_jira_keys.append('MAPREDUCE-')
-
-first_exclude_commit_hash = input("First commit hash to start excluding commits from history: ")
-fix_version = input("Fix Version: ")
-
-jira_server_url = input(
-    "Jira server url (default: https://issues.apache.org/jira): ") \
-    or "https://issues.apache.org/jira"
-
-jira = JIRA(server=jira_server_url)
-
-local_project_dir = input("Path of project's working dir with release branch checked-in: ")
-os.chdir(local_project_dir)
-
-GIT_STATUS_MSG = subprocess.check_output(['git', 'status']).decode("utf-8")
-print('\nCheck git status output and verify expected branch\n')
-print(GIT_STATUS_MSG)
-
-print('\nJira/Git commit message diff starting: ##############################################')
-
-issue_set_from_commit_msg = set()
-
-for commit in subprocess.check_output(['git', 'log', '--pretty=oneline']).decode(
-        "utf-8").splitlines():
-    if commit.startswith(first_exclude_commit_hash):
-        print("Found first commit hash after which git history is redundant. commit: "
-              + first_exclude_commit_hash)
-        print("Exiting successfully")
-        break
-    if re.search('revert', commit, re.IGNORECASE):
-        print("Commit seems reverted. \t\t\t Commit: " + commit)
-        continue
-    ACTUAL_PROJECT_JIRA = None
-    matches = re.findall('|'.join(project_jira_keys), commit)
-    if matches:
-        ACTUAL_PROJECT_JIRA = matches[0]
-    if not ACTUAL_PROJECT_JIRA:
-        print("WARN: Jira not found. \t\t\t Commit: " + commit)
-        continue
-    JIRA_NUM = ''
-    for c in commit.split(ACTUAL_PROJECT_JIRA)[1]:
-        if c.isdigit():
-            JIRA_NUM = JIRA_NUM + c
-        else:
-            break
-    issue = jira.issue(ACTUAL_PROJECT_JIRA + JIRA_NUM)
-    EXPECTED_FIX_VERSION = False
-    for version in issue.fields.fixVersions:
-        if version.name == fix_version:
-            EXPECTED_FIX_VERSION = True
-            break
-    if not EXPECTED_FIX_VERSION:
-        print("Jira not present with version: " + fix_version + ". \t Commit: " + commit)
-        continue
-    if issue.fields.status is None or issue.fields.status.name not in ('Resolved', 'Closed'):
-        print("Jira is not resolved yet? \t\t Commit: " + commit)
-    else:
-        # This means Jira corresponding to current commit message is resolved with expected
-        # fixVersion.
-        # This is no-op by default, if needed, convert to print statement.
-        issue_set_from_commit_msg.add(ACTUAL_PROJECT_JIRA + JIRA_NUM)
-
-print('Jira/Git commit message diff completed: ##############################################')
-
-print('\nAny resolved Jira with fixVersion ' + fix_version
-      + ' but corresponding commit not present')
-print('Starting diff: ##############################################')
-all_issues_with_fix_version = jira.search_issues(
-    'project=' + jira_project_name + ' and status in (Resolved,Closed) and fixVersion='
-    + fix_version)
-
-for issue in all_issues_with_fix_version:
-    if issue.key not in issue_set_from_commit_msg:
-        print(issue.key + ' is marked resolved with fixVersion ' + fix_version
-              + ' but no corresponding commit found')
-
-print('Completed diff: ##############################################')
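Since the deleted script reads its five parameters via input(), it can also be driven non-interactively. A sketch using plain stdin redirection, reusing the values from the README example above (the blank line accepts the default Jira server URL; the technique is an assumption, not part of the script):

```
./venv/bin/python dev-support/git-jira-validation/git_jira_fix_version_check.py <<'EOF'
HADOOP
fa4915fdbbbec434ab41786cb17b82938a613f16
3.3.2

/Users/vjasani/Documents/src/hadoop-3.3/hadoop
EOF
```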
@@ -1,18 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-jira==3.1.1
@@ -1,204 +0,0 @@
-#!/usr/bin/env bash
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# This script is useful to perform basic sanity tests for the given
-# Hadoop RC. It checks for the Checksum, Signature, Rat check,
-# Build from source and building tarball from the source.
-
-set -e -o pipefail
-
-usage() {
-  SCRIPT=$(basename "${BASH_SOURCE[@]}")
-
-  cat << __EOF
-hadoop-vote. A script for standard vote which verifies the following items
-1. Checksum of sources and binaries
-2. Signature of sources and binaries
-3. Rat check
-4. Built from source
-5. Built tar from source
-
-Usage: ${SCRIPT} -s | --source <url> [-k | --key <signature>] [-f | --keys-file-url <url>] [-o | --output-dir </path/to/use>] [-D property[=value]] [-P profiles]
-       ${SCRIPT} -h | --help
-
-  -h | --help                   Show this screen.
-  -s | --source '<url>'         A URL pointing to the release candidate sources and binaries
-                                e.g. https://dist.apache.org/repos/dist/dev/hadoop/hadoop-<version>RC0/
-  -k | --key '<signature>'      A signature of the public key, e.g. 9AD2AE49
-  -f | --keys-file-url '<url>'  the URL of the key file, default is
-                                https://downloads.apache.org/hadoop/common/KEYS
-  -o | --output-dir '</path>'   directory which has the stdout and stderr of each verification target
-  -D |                          list of maven properties to set for the mvn invocations, e.g. <-D hbase.profile=2.0 -D skipTests> Defaults to unset
-  -P |                          list of maven profiles to set for the build from source, e.g. <-P native -P yarn-ui>
-__EOF
-}
-
-MVN_PROPERTIES=()
-MVN_PROFILES=()
-
-while ((${#})); do
-  case "${1}" in
-    -h | --help )
-      usage; exit 0 ;;
-    -s | --source )
-      SOURCE_URL="${2}"; shift 2 ;;
-    -k | --key )
-      SIGNING_KEY="${2}"; shift 2 ;;
-    -f | --keys-file-url )
-      KEY_FILE_URL="${2}"; shift 2 ;;
-    -o | --output-dir )
-      OUTPUT_DIR="${2}"; shift 2 ;;
-    -D )
-      MVN_PROPERTIES+=("-D ${2}"); shift 2 ;;
-    -P )
-      MVN_PROFILES+=("-P ${2}"); shift 2 ;;
-    * )
-      usage >&2; exit 1 ;;
-  esac
-done
-
-# Source url must be provided
-if [ -z "${SOURCE_URL}" ]; then
-  usage;
-  exit 1
-fi
-
-cat << __EOF
-Although This tool helps verifying Hadoop RC build and unit tests,
-operator may still consider verifying the following manually:
-1. Verify the API compatibility report
-2. Integration/performance/benchmark tests
-3. Object store specific Integration tests against an endpoint
-4. Verify overall unit test stability from Jenkins builds or locally
-5. Other concerns if any
-__EOF
-
-[[ "${SOURCE_URL}" != */ ]] && SOURCE_URL="${SOURCE_URL}/"
-HADOOP_RC_VERSION=$(tr "/" "\n" <<< "${SOURCE_URL}" | tail -n2)
-HADOOP_VERSION=$(echo "${HADOOP_RC_VERSION}" | sed -e 's/-RC[0-9]//g' | sed -e 's/hadoop-//g')
-JAVA_VERSION=$(java -version 2>&1 | cut -f3 -d' ' | head -n1 | sed -e 's/"//g')
-OUTPUT_DIR="${OUTPUT_DIR:-$(pwd)}"
-
-if [ ! -d "${OUTPUT_DIR}" ]; then
-  echo "Output directory ${OUTPUT_DIR} does not exist, please create it before running this script."
-  exit 1
-fi
-
-OUTPUT_PATH_PREFIX="${OUTPUT_DIR}"/"${HADOOP_RC_VERSION}"
-
-# default value for verification targets, 0 = failed
-SIGNATURE_PASSED=0
-CHECKSUM_PASSED=0
-RAT_CHECK_PASSED=0
-BUILD_FROM_SOURCE_PASSED=0
-BUILD_TAR_FROM_SOURCE_PASSED=0
-
-function download_and_import_keys() {
-  KEY_FILE_URL="${KEY_FILE_URL:-https://downloads.apache.org/hadoop/common/KEYS}"
-  echo "Obtain and import the publisher key(s) from ${KEY_FILE_URL}"
-  # download the keys file into file KEYS
-  wget -O KEYS "${KEY_FILE_URL}"
-  gpg --import KEYS
-  if [ -n "${SIGNING_KEY}" ]; then
-    gpg --list-keys "${SIGNING_KEY}"
-  fi
-}
-
-function download_release_candidate () {
-  # get all files from release candidate repo
-  wget -r -np -N -nH --cut-dirs 4 "${SOURCE_URL}"
-}
-
-function verify_signatures() {
-  rm -f "${OUTPUT_PATH_PREFIX}"_verify_signatures
-  for file in *.tar.gz; do
-    gpg --verify "${file}".asc "${file}" 2>&1 | tee -a "${OUTPUT_PATH_PREFIX}"_verify_signatures && SIGNATURE_PASSED=1 || SIGNATURE_PASSED=0
-  done
-}
-
-function verify_checksums() {
-  rm -f "${OUTPUT_PATH_PREFIX}"_verify_checksums
-  SHA_EXT=$(find . -name "*.sha*" | awk -F '.' '{ print $NF }' | head -n 1)
-  for file in *.tar.gz; do
-    sha512sum --tag "${file}" > "${file}"."${SHA_EXT}".tmp
-    diff "${file}"."${SHA_EXT}".tmp "${file}"."${SHA_EXT}" 2>&1 | tee -a "${OUTPUT_PATH_PREFIX}"_verify_checksums && CHECKSUM_PASSED=1 || CHECKSUM_PASSED=0
-    rm -f "${file}"."${SHA_EXT}".tmp
-  done
-}
-
-function unzip_from_source() {
-  tar -zxvf hadoop-"${HADOOP_VERSION}"-src.tar.gz
-  cd hadoop-"${HADOOP_VERSION}"-src
-}
-
-function rat_test() {
-  rm -f "${OUTPUT_PATH_PREFIX}"_rat_test
-  mvn clean apache-rat:check "${MVN_PROPERTIES[@]}" 2>&1 | tee "${OUTPUT_PATH_PREFIX}"_rat_test && RAT_CHECK_PASSED=1
-}
-
-function build_from_source() {
-  rm -f "${OUTPUT_PATH_PREFIX}"_build_from_source
-  # No unit test run.
-  mvn clean install "${MVN_PROPERTIES[@]}" -DskipTests "${MVN_PROFILES[@]}" 2>&1 | tee "${OUTPUT_PATH_PREFIX}"_build_from_source && BUILD_FROM_SOURCE_PASSED=1
-}
-
-function build_tar_from_source() {
-  rm -f "${OUTPUT_PATH_PREFIX}"_build_tar_from_source
-  # No unit test run.
-  mvn clean package "${MVN_PROPERTIES[@]}" -Pdist -DskipTests -Dtar -Dmaven.javadoc.skip=true 2>&1 | tee "${OUTPUT_PATH_PREFIX}"_build_tar_from_source && BUILD_TAR_FROM_SOURCE_PASSED=1
-}
-
-function execute() {
-  ${1} || print_when_exit
-}
-
-function print_when_exit() {
-  cat << __EOF
-        * Signature: $( ((SIGNATURE_PASSED)) && echo "ok" || echo "failed" )
-        * Checksum : $( ((CHECKSUM_PASSED)) && echo "ok" || echo "failed" )
-        * Rat check (${JAVA_VERSION}): $( ((RAT_CHECK_PASSED)) && echo "ok" || echo "failed" )
-         - mvn clean apache-rat:check ${MVN_PROPERTIES[@]}
-        * Built from source (${JAVA_VERSION}): $( ((BUILD_FROM_SOURCE_PASSED)) && echo "ok" || echo "failed" )
-         - mvn clean install ${MVN_PROPERTIES[@]} -DskipTests ${MVN_PROFILES[@]}
-        * Built tar from source (${JAVA_VERSION}): $( ((BUILD_TAR_FROM_SOURCE_PASSED)) && echo "ok" || echo "failed" )
-         - mvn clean package ${MVN_PROPERTIES[@]} -Pdist -DskipTests -Dtar -Dmaven.javadoc.skip=true
-__EOF
-  if ((CHECKSUM_PASSED)) && ((SIGNATURE_PASSED)) && ((RAT_CHECK_PASSED)) && ((BUILD_FROM_SOURCE_PASSED)) && ((BUILD_TAR_FROM_SOURCE_PASSED)) ; then
-    exit 0
-  fi
-  exit 1
-}
-
-pushd "${OUTPUT_DIR}"
-
-download_and_import_keys
-download_release_candidate
-
-pushd "${HADOOP_RC_VERSION}"
-
-execute verify_signatures
-execute verify_checksums
-execute unzip_from_source
-execute rat_test
-execute build_from_source
-execute build_tar_from_source
-
-popd
-popd
-
-print_when_exit
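An example invocation of the deleted vote helper, mirroring its own usage() text (the RC URL and key id are the placeholders from that text, not real release artifacts; the output directory must already exist, since the script checks for it):

```
mkdir -p /tmp/hadoop-rc-check
./hadoop-vote.sh --source https://dist.apache.org/repos/dist/dev/hadoop/hadoop-3.3.2-RC0/ \
  --key 9AD2AE49 --output-dir /tmp/hadoop-rc-check
```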
@@ -48,7 +48,7 @@ is_platform_change() {
   declare in_path
   in_path="${SOURCEDIR}"/"${1}"

-  for path in "${DOCKERFILE}" "${SOURCEDIR}"/dev-support/docker/pkg-resolver/*.json; do
+  for path in "${SOURCEDIR}"/dev-support/docker/Dockerfile* "${SOURCEDIR}"/dev-support/docker/pkg-resolver/*.json; do
     if [ "${in_path}" == "${path}" ]; then
       echo "Found C/C++ platform related changes in ${in_path}"
       return 0
@@ -114,26 +114,6 @@ function check_ci_run() {
 function run_ci() {
   TESTPATCHBIN="${WORKSPACE}/${YETUS}/precommit/src/main/shell/test-patch.sh"

-  if [[ "$IS_WINDOWS" && "$IS_WINDOWS" == 1 ]]; then
-    echo "Building in a Windows environment, skipping some Yetus related settings"
-  else
-    # run in docker mode and specifically point to our
-    # Dockerfile since we don't want to use the auto-pulled version.
-    YETUS_ARGS+=("--docker")
-    YETUS_ARGS+=("--dockerfile=${DOCKERFILE}")
-    YETUS_ARGS+=("--mvn-custom-repos")
-    YETUS_ARGS+=("--dockermemlimit=22g")
-
-    # test with Java 8 and 11
-    YETUS_ARGS+=("--java-home=/usr/lib/jvm/java-8-openjdk-amd64")
-    YETUS_ARGS+=("--multijdkdirs=/usr/lib/jvm/java-11-openjdk-amd64")
-    YETUS_ARGS+=("--multijdktests=compile")
-  fi
-
-  if [[ "$IS_NIGHTLY_BUILD" && "$IS_NIGHTLY_BUILD" == 1 ]]; then
-    YETUS_ARGS+=("--empty-patch")
-    YETUS_ARGS+=("--branch=${BRANCH_NAME}")
-  else
   # this must be clean for every run
   if [[ -d "${PATCHDIR}" ]]; then
     rm -rf "${PATCHDIR:?}"
@@ -152,11 +132,6 @@ function run_ci() {
     exit 0
   fi
-
-  # write Yetus report as GitHub comment (YETUS-1102)
-  YETUS_ARGS+=("--github-write-comment")
-  YETUS_ARGS+=("--github-use-emoji-vote")
-  fi

   YETUS_ARGS+=("--patch-dir=${PATCHDIR}")

   # where the source is located
@@ -181,6 +156,7 @@ function run_ci() {
   # changing these to higher values may cause problems
   # with other jobs on systemd-enabled machines
   YETUS_ARGS+=("--proclimit=5500")
+  YETUS_ARGS+=("--dockermemlimit=22g")

   # -1 spotbugs issues that show up prior to the patch being applied
   YETUS_ARGS+=("--spotbugs-strict-precheck")
@@ -199,15 +175,30 @@ function run_ci() {
   # much attention to them
   YETUS_ARGS+=("--tests-filter=checkstyle")

+  # run in docker mode and specifically point to our
+  # Dockerfile since we don't want to use the auto-pulled version.
+  YETUS_ARGS+=("--docker")
+  YETUS_ARGS+=("--dockerfile=${DOCKERFILE}")
+  YETUS_ARGS+=("--mvn-custom-repos")
+
   # effectively treat dev-suport as a custom maven module
   YETUS_ARGS+=("--skip-dirs=dev-support")

   # help keep the ASF boxes clean
   YETUS_ARGS+=("--sentinel")

+  # test with Java 8 and 11
+  YETUS_ARGS+=("--java-home=/usr/lib/jvm/java-8-openjdk-amd64")
+  YETUS_ARGS+=("--multijdkdirs=/usr/lib/jvm/java-11-openjdk-amd64")
+  YETUS_ARGS+=("--multijdktests=compile")
+
   # custom javadoc goals
   YETUS_ARGS+=("--mvn-javadoc-goals=process-sources,javadoc:javadoc-no-fork")

+  # write Yetus report as GitHub comment (YETUS-1102)
+  YETUS_ARGS+=("--github-write-comment")
+  YETUS_ARGS+=("--github-use-emoji-vote")
+
   "${TESTPATCHBIN}" "${YETUS_ARGS[@]}"
 }
@@ -98,6 +98,13 @@
           <createSourcesJar>true</createSourcesJar>
           <shadeSourcesContent>true</shadeSourcesContent>
         </configuration>
+        <dependencies>
+          <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-maven-plugins</artifactId>
+            <version>${project.version}</version>
+          </dependency>
+        </dependencies>
         <executions>
           <execution>
             <phase>package</phase>
@@ -154,9 +161,6 @@
               <!-- Exclude snappy-java -->
               <exclude>org/xerial/snappy/*</exclude>
               <exclude>org/xerial/snappy/**/*</exclude>
-              <!-- Exclude org.widlfly.openssl -->
-              <exclude>org/wildfly/openssl/*</exclude>
-              <exclude>org/wildfly/openssl/**/*</exclude>
             </excludes>
           </relocation>
           <relocation>
@@ -247,7 +251,8 @@
             </relocation>
           </relocations>
           <transformers>
-            <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
+            <!-- Needed until MSHADE-182 -->
+            <transformer implementation="org.apache.hadoop.maven.plugin.shade.resource.ServicesResourceTransformer"/>
             <transformer implementation="org.apache.maven.plugins.shade.resource.ApacheLicenseResourceTransformer"/>
             <transformer implementation="org.apache.maven.plugins.shade.resource.DontIncludeResourceTransformer">
               <resource>NOTICE.txt</resource>
@@ -56,7 +56,7 @@
         <dependency>
           <groupId>org.codehaus.mojo</groupId>
          <artifactId>extra-enforcer-rules</artifactId>
-          <version>1.5.1</version>
+          <version>1.0-beta-3</version>
         </dependency>
       </dependencies>
       <executions>
@@ -60,7 +60,7 @@
         <dependency>
           <groupId>org.codehaus.mojo</groupId>
           <artifactId>extra-enforcer-rules</artifactId>
-          <version>1.5.1</version>
+          <version>1.0-beta-3</version>
         </dependency>
       </dependencies>
       <executions>
@@ -184,12 +184,6 @@
       <artifactId>hadoop-hdfs</artifactId>
       <scope>test</scope>
       <type>test-jar</type>
-      <exclusions>
-        <exclusion>
-          <groupId>org.ow2.asm</groupId>
-          <artifactId>asm-commons</artifactId>
-        </exclusion>
-      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
@@ -332,10 +332,6 @@
           <groupId>org.apache.hadoop.thirdparty</groupId>
           <artifactId>hadoop-shaded-guava</artifactId>
         </exclusion>
-        <exclusion>
-          <groupId>org.ow2.asm</groupId>
-          <artifactId>asm-commons</artifactId>
-        </exclusion>
       </exclusions>
     </dependency>
     <!-- Add optional runtime dependency on the in-development timeline server module
@@ -423,26 +419,30 @@
       <optional>true</optional>
     </dependency>
     <dependency>
-      <groupId>com.github.pjfanning</groupId>
+      <groupId>com.sun.jersey</groupId>
       <artifactId>jersey-json</artifactId>
       <optional>true</optional>
       <exclusions>
-        <exclusion>
-          <groupId>com.fasterxml.jackson.core</groupId>
-          <artifactId>jackson-core</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>com.fasterxml.jackson.core</groupId>
-          <artifactId>jackson-databind</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>com.fasterxml.jackson.jaxrs</groupId>
-          <artifactId>jackson-jaxrs-json-provider</artifactId>
-        </exclusion>
         <exclusion>
           <groupId>javax.xml.bind</groupId>
           <artifactId>jaxb-api</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-core-asl</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-mapper-asl</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-jaxrs</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-xc</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
@@ -454,20 +454,6 @@
       <groupId>com.sun.jersey</groupId>
       <artifactId>jersey-servlet</artifactId>
       <optional>true</optional>
-      <exclusions>
-        <exclusion>
-          <groupId>javax.servlet</groupId>
-          <artifactId>servlet-api</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>javax.enterprise</groupId>
-          <artifactId>cdi-api</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>ch.qos.cal10n</groupId>
-          <artifactId>cal10n-api</artifactId>
-        </exclusion>
-      </exclusions>
     </dependency>
     <!-- skip org.apache.avro:avro-ipc because it doesn't look like hadoop-common actually uses it -->
     <dependency>
@@ -671,6 +657,13 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-shade-plugin</artifactId>
+        <dependencies>
+          <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-maven-plugins</artifactId>
+            <version>${project.version}</version>
+          </dependency>
+        </dependencies>
         <executions>
           <execution>
             <phase>package</phase>
@@ -697,6 +690,7 @@
                 <exclude>org.bouncycastle:*</exclude>
                 <!-- Leave snappy that includes native methods which cannot be relocated. -->
                 <exclude>org.xerial.snappy:*</exclude>
+                <exclude>javax.ws.rs:javax.ws.rs-api</exclude>
               </excludes>
             </artifactSet>
             <filters>
@@ -749,12 +743,6 @@
                   <exclude>META-INF/versions/11/module-info.class</exclude>
                 </excludes>
               </filter>
-              <filter>
-                <artifact>com.google.code.gson:gson</artifact>
-                <excludes>
-                  <exclude>META-INF/versions/9/module-info.class</exclude>
-                </excludes>
-              </filter>

               <!-- Mockito tries to include its own unrelocated copy of hamcrest. :( -->
               <filter>
@@ -1045,7 +1033,8 @@
             </relocation>
           </relocations>
           <transformers>
-            <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
+            <!-- Needed until MSHADE-182 -->
+            <transformer implementation="org.apache.hadoop.maven.plugin.shade.resource.ServicesResourceTransformer"/>
             <transformer implementation="org.apache.maven.plugins.shade.resource.ApacheLicenseResourceTransformer"/>
             <transformer implementation="org.apache.maven.plugins.shade.resource.DontIncludeResourceTransformer">
               <resources>
@@ -128,6 +128,13 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-shade-plugin</artifactId>
+        <dependencies>
+          <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-maven-plugins</artifactId>
+            <version>${project.version}</version>
+          </dependency>
+        </dependencies>
         <executions>
           <execution>
             <phase>package</phase>
@@ -148,7 +155,6 @@
                 <!-- Leave javax APIs that are stable -->
                 <!-- the jdk ships part of the javax.annotation namespace, so if we want to relocate this we'll have to care it out by class :( -->
                 <exclude>com.google.code.findbugs:jsr305</exclude>
-                <exclude>io.netty:*</exclude>
                 <exclude>io.dropwizard.metrics:metrics-core</exclude>
                 <exclude>org.eclipse.jetty:jetty-servlet</exclude>
                 <exclude>org.eclipse.jetty:jetty-security</exclude>
@@ -157,8 +163,7 @@
                 <exclude>org.bouncycastle:*</exclude>
                 <!-- Leave snappy that includes native methods which cannot be relocated. -->
                 <exclude>org.xerial.snappy:*</exclude>
-                <!-- leave out kotlin classes -->
-                <exclude>org.jetbrains.kotlin:*</exclude>
+                <exclude>javax.ws.rs:javax.ws.rs-api</exclude>
               </excludes>
             </artifactSet>
             <filters>
@@ -244,13 +249,6 @@
                   <exclude>META-INF/versions/11/module-info.class</exclude>
                 </excludes>
               </filter>
-              <filter>
-                <artifact>com.google.code.gson:gson</artifact>
-                <excludes>
-                  <exclude>META-INF/versions/9/module-info.class</exclude>
-                </excludes>
-              </filter>
-
             </filters>
             <relocations>
               <relocation>
@@ -393,7 +391,8 @@
               -->
             </relocations>
             <transformers>
-              <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
+              <!-- Needed until MSHADE-182 -->
+              <transformer implementation="org.apache.hadoop.maven.plugin.shade.resource.ServicesResourceTransformer"/>
               <transformer implementation="org.apache.maven.plugins.shade.resource.ApacheLicenseResourceTransformer"/>
               <transformer implementation="org.apache.maven.plugins.shade.resource.DontIncludeResourceTransformer">
                 <resources>
@@ -66,13 +66,9 @@
           <artifactId>jersey-core</artifactId>
         </exclusion>
         <exclusion>
-          <groupId>com.github.pjfanning</groupId>
+          <groupId>com.sun.jersey</groupId>
           <artifactId>jersey-json</artifactId>
         </exclusion>
-        <exclusion>
-          <groupId>org.codehaus.jettison</groupId>
-          <artifactId>jettison</artifactId>
-        </exclusion>
         <exclusion>
           <groupId>com.sun.jersey</groupId>
           <artifactId>jersey-server</artifactId>
@@ -118,18 +114,6 @@
           <groupId>org.eclipse.jetty</groupId>
           <artifactId>jetty-server</artifactId>
         </exclusion>
-        <exclusion>
-          <groupId>org.jetbrains.kotlin</groupId>
-          <artifactId>kotlin-stdlib</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.jetbrains.kotlin</groupId>
-          <artifactId>kotlin-stdlib-common</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>com.squareup.okhttp3</groupId>
-          <artifactId>okhttp</artifactId>
-        </exclusion>
         <exclusion>
           <groupId>com.sun.jersey</groupId>
           <artifactId>jersey-core</artifactId>
@@ -183,13 +167,9 @@
           <artifactId>jersey-core</artifactId>
         </exclusion>
         <exclusion>
-          <groupId>com.github.pjfanning</groupId>
+          <groupId>com.sun.jersey</groupId>
           <artifactId>jersey-json</artifactId>
         </exclusion>
-        <exclusion>
-          <groupId>org.codehaus.jettison</groupId>
-          <artifactId>jettison</artifactId>
-        </exclusion>
         <exclusion>
           <groupId>io.netty</groupId>
           <artifactId>netty</artifactId>
@@ -238,13 +218,9 @@
           <artifactId>jersey-server</artifactId>
         </exclusion>
         <exclusion>
-          <groupId>com.github.pjfanning</groupId>
+          <groupId>com.sun.jersey</groupId>
           <artifactId>jersey-json</artifactId>
         </exclusion>
-        <exclusion>
-          <groupId>org.codehaus.jettison</groupId>
-          <artifactId>jettison</artifactId>
-        </exclusion>
         <exclusion>
           <groupId>com.sun.jersey</groupId>
           <artifactId>jersey-servlet</artifactId>
@@ -299,13 +275,9 @@
           <artifactId>guice-servlet</artifactId>
         </exclusion>
         <exclusion>
-          <groupId>com.github.pjfanning</groupId>
+          <groupId>com.sun.jersey</groupId>
           <artifactId>jersey-json</artifactId>
         </exclusion>
-        <exclusion>
-          <groupId>org.codehaus.jettison</groupId>
-          <artifactId>jettison</artifactId>
-        </exclusion>
         <exclusion>
           <groupId>io.netty</groupId>
           <artifactId>netty</artifactId>
@@ -101,10 +101,6 @@
           <groupId>org.apache.zookeeper</groupId>
           <artifactId>zookeeper</artifactId>
         </exclusion>
-        <exclusion>
-          <groupId>org.projectlombok</groupId>
-          <artifactId>lombok</artifactId>
-        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
@@ -127,6 +123,11 @@
       <artifactId>hadoop-azure-datalake</artifactId>
       <scope>compile</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-openstack</artifactId>
+      <scope>compile</scope>
+    </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-cos</artifactId>
@@ -109,7 +109,7 @@
     <dependency>
       <groupId>com.qcloud</groupId>
       <artifactId>cos_api-bundle</artifactId>
-      <version>5.6.69</version>
+      <version>5.6.19</version>
       <scope>compile</scope>
     </dependency>

@@ -104,10 +104,6 @@
           <groupId>jdk.tools</groupId>
           <artifactId>jdk.tools</artifactId>
         </exclusion>
-        <exclusion>
-          <groupId>org.javassist</groupId>
-          <artifactId>javassist</artifactId>
-        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
@@ -165,14 +161,6 @@
           <artifactId>okio</artifactId>
           <groupId>com.squareup.okio</groupId>
         </exclusion>
-        <exclusion>
-          <artifactId>log4j-core</artifactId>
-          <groupId>org.apache.logging.log4j</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>log4j-api</artifactId>
-          <groupId>org.apache.logging.log4j</groupId>
-        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
@@ -116,7 +116,6 @@ public class RequestLoggerFilter implements Filter {
       public void addCookie(Cookie cookie) {
         super.addCookie(cookie);
         List<String> cookies = getHeaderValues("Set-Cookie", false);
-        cookies.addAll(getHeaderValues("set-cookie", false));
         cookies.add(cookie.getName() + "=" + cookie.getValue());
       }

@@ -110,8 +110,20 @@
           <groupId>org.bouncycastle</groupId>
           <artifactId>bcprov-jdk15on</artifactId>
         </exclusion>
+        <!-- HACK. Transitive dependency for nimbus-jose-jwt. Needed for
+             packaging. Please re-check this version when updating
+             nimbus-jose-jwt. Please read HADOOP-14903 for more details.
+          -->
+        <exclusion>
+          <groupId>net.minidev</groupId>
+          <artifactId>json-smart</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
+    <dependency>
+      <groupId>net.minidev</groupId>
+      <artifactId>json-smart</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.apache.zookeeper</groupId>
       <artifactId>zookeeper</artifactId>
@@ -92,9 +92,6 @@ public class AuthenticatedURL {
     @Override
     public void put(URI uri, Map<String, List<String>> responseHeaders) {
       List<String> headers = responseHeaders.get("Set-Cookie");
-      if (headers == null) {
-        headers = responseHeaders.get("set-cookie");
-      }
       if (headers != null) {
         for (String header : headers) {
           List<HttpCookie> cookies;
@@ -280,9 +280,6 @@ public class KerberosAuthenticator implements Authenticator {
     boolean negotiate = false;
     if (conn.getResponseCode() == HttpURLConnection.HTTP_UNAUTHORIZED) {
       String authHeader = conn.getHeaderField(WWW_AUTHENTICATE);
-      if (authHeader == null) {
-        authHeader = conn.getHeaderField(WWW_AUTHENTICATE.toLowerCase());
-      }
       negotiate = authHeader != null && authHeader.trim().startsWith(NEGOTIATE);
     }
     return negotiate;
@@ -391,9 +388,6 @@ public class KerberosAuthenticator implements Authenticator {
     int status = conn.getResponseCode();
     if (status == HttpURLConnection.HTTP_OK || status == HttpURLConnection.HTTP_UNAUTHORIZED) {
       String authHeader = conn.getHeaderField(WWW_AUTHENTICATE);
-      if (authHeader == null) {
-        authHeader = conn.getHeaderField(WWW_AUTHENTICATE.toLowerCase());
-      }
       if (authHeader == null || !authHeader.trim().startsWith(NEGOTIATE)) {
         throw new AuthenticationException("Invalid SPNEGO sequence, '" + WWW_AUTHENTICATE +
             "' header incorrect: " + authHeader);
@@ -616,9 +616,7 @@ public class AuthenticationFilter implements Filter {
           // present.. reset to 403 if not found..
           if ((errCode == HttpServletResponse.SC_UNAUTHORIZED)
               && (!httpResponse.containsHeader(
-                  KerberosAuthenticator.WWW_AUTHENTICATE)
-                  && !httpResponse.containsHeader(
-                  KerberosAuthenticator.WWW_AUTHENTICATE.toLowerCase()))) {
+                  KerberosAuthenticator.WWW_AUTHENTICATE))) {
             errCode = HttpServletResponse.SC_FORBIDDEN;
           }
           // After Jetty 9.4.21, sendError() no longer allows a custom message.
@@ -18,6 +18,7 @@
 package org.apache.hadoop.security.authentication.util;

 import java.io.ByteArrayInputStream;
+import java.io.UnsupportedEncodingException;
 import java.nio.charset.StandardCharsets;
 import java.security.PublicKey;
 import java.security.cert.CertificateException;
@@ -1,77 +0,0 @@
-/**
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License. See accompanying LICENSE file.
- */
-package org.apache.hadoop.security.authentication.util;
-
-import java.util.HashMap;
-import java.util.Map;
-import javax.security.auth.login.AppConfigurationEntry;
-import javax.security.auth.login.Configuration;
-
-
-/**
- * Creates a programmatic version of a jaas.conf file. This can be used
- * instead of writing a jaas.conf file and setting the system property,
- * "java.security.auth.login.config", to point to that file. It is meant to be
- * used for connecting to ZooKeeper.
- */
-public class JaasConfiguration extends Configuration {
-
-  private final javax.security.auth.login.Configuration baseConfig =
-      javax.security.auth.login.Configuration.getConfiguration();
-  private final AppConfigurationEntry[] entry;
-  private final String entryName;
-
-  /**
-   * Add an entry to the jaas configuration with the passed in name,
-   * principal, and keytab. The other necessary options will be set for you.
-   *
-   * @param entryName The name of the entry (e.g. "Client")
-   * @param principal The principal of the user
-   * @param keytab The location of the keytab
-   */
-  public JaasConfiguration(String entryName, String principal, String keytab) {
-    this.entryName = entryName;
-    Map<String, String> options = new HashMap<>();
-    options.put("keyTab", keytab);
-    options.put("principal", principal);
-    options.put("useKeyTab", "true");
-    options.put("storeKey", "true");
-    options.put("useTicketCache", "false");
-    options.put("refreshKrb5Config", "true");
-    String jaasEnvVar = System.getenv("HADOOP_JAAS_DEBUG");
-    if ("true".equalsIgnoreCase(jaasEnvVar)) {
-      options.put("debug", "true");
-    }
-    entry = new AppConfigurationEntry[]{
-        new AppConfigurationEntry(getKrb5LoginModuleName(),
-            AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
-            options)};
-  }
-
-  @Override
-  public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
-    return (entryName.equals(name)) ? entry : ((baseConfig != null)
-        ? baseConfig.getAppConfigurationEntry(name) : null);
-  }
-
-  private String getKrb5LoginModuleName() {
-    String krb5LoginModuleName;
-    if (System.getProperty("java.vendor").contains("IBM")) {
-      krb5LoginModuleName = "com.ibm.security.auth.module.Krb5LoginModule";
-    } else {
-      krb5LoginModuleName = "com.sun.security.auth.module.Krb5LoginModule";
-    }
-    return krb5LoginModuleName;
-  }
-}
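For orientation, a minimal usage sketch of the class deleted above (the revert moves the equivalent back into ZKSignerSecretProvider): install it as the process-wide JAAS configuration instead of pointing java.security.auth.login.config at a jaas.conf file. The entry name, principal, and keytab path below are placeholders:

    javax.security.auth.login.Configuration.setConfiguration(
        new JaasConfiguration("Client", "zk/host@EXAMPLE.COM",
            "/etc/security/keytabs/zk.service.keytab"));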
@@ -236,7 +236,7 @@ public class KerberosUtil {
    */
   static final String[] getPrincipalNames(String keytabFileName) throws IOException {
     Keytab keytab = Keytab.loadKeytab(new File(keytabFileName));
-    Set<String> principals = new HashSet<>();
+    Set<String> principals = new HashSet<String>();
     List<PrincipalName> entries = keytab.getPrincipals();
     for (PrincipalName entry : entries) {
       principals.add(entry.getName().replace("\\", "/"));
@@ -17,9 +17,12 @@ import org.apache.hadoop.classification.VisibleForTesting;
 import java.nio.ByteBuffer;
 import java.security.SecureRandom;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 import java.util.Properties;
 import java.util.Random;
+import javax.security.auth.login.AppConfigurationEntry;
 import javax.security.auth.login.Configuration;
 import javax.servlet.ServletContext;
 import org.apache.curator.RetryPolicy;
@@ -426,4 +429,62 @@ public class ZKSignerSecretProvider extends RolloverSignerSecretProvider {
       return saslACL;
     }
   }
+
+  /**
+   * Creates a programmatic version of a jaas.conf file. This can be used
+   * instead of writing a jaas.conf file and setting the system property,
+   * "java.security.auth.login.config", to point to that file. It is meant to be
+   * used for connecting to ZooKeeper.
+   */
+  @InterfaceAudience.Private
+  public static class JaasConfiguration extends Configuration {
+
+    private final javax.security.auth.login.Configuration baseConfig =
+        javax.security.auth.login.Configuration.getConfiguration();
+    private static AppConfigurationEntry[] entry;
+    private String entryName;
+
+    /**
+     * Add an entry to the jaas configuration with the passed in name,
+     * principal, and keytab. The other necessary options will be set for you.
+     *
+     * @param entryName The name of the entry (e.g. "Client")
+     * @param principal The principal of the user
+     * @param keytab The location of the keytab
+     */
+    public JaasConfiguration(String entryName, String principal, String keytab) {
+      this.entryName = entryName;
+      Map<String, String> options = new HashMap<String, String>();
+      options.put("keyTab", keytab);
+      options.put("principal", principal);
+      options.put("useKeyTab", "true");
+      options.put("storeKey", "true");
+      options.put("useTicketCache", "false");
+      options.put("refreshKrb5Config", "true");
+      String jaasEnvVar = System.getenv("HADOOP_JAAS_DEBUG");
+      if (jaasEnvVar != null && "true".equalsIgnoreCase(jaasEnvVar)) {
+        options.put("debug", "true");
+      }
+      entry = new AppConfigurationEntry[]{
+          new AppConfigurationEntry(getKrb5LoginModuleName(),
+              AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
+              options)};
+    }
+
+    @Override
+    public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
+      return (entryName.equals(name)) ? entry : ((baseConfig != null)
+          ? baseConfig.getAppConfigurationEntry(name) : null);
+    }
+
+    private String getKrb5LoginModuleName() {
+      String krb5LoginModuleName;
+      if (System.getProperty("java.vendor").contains("IBM")) {
+        krb5LoginModuleName = "com.ibm.security.auth.module.Krb5LoginModule";
+      } else {
+        krb5LoginModuleName = "com.sun.security.auth.module.Krb5LoginModule";
+      }
+      return krb5LoginModuleName;
+    }
+  }
 }
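A hedged sketch of how such a configuration is typically wired to the ZooKeeper client (the secret provider does the equivalent internally when Kerberos is configured); "Client" and zookeeper.sasl.clientconfig are standard ZooKeeper conventions, while the principal and keytab values are placeholders:

    javax.security.auth.login.Configuration.setConfiguration(
        new ZKSignerSecretProvider.JaasConfiguration(
            "Client", "http/host@EXAMPLE.COM", "/etc/security/keytabs/http.keytab"));
    System.setProperty("zookeeper.sasl.clientconfig", "Client");

Note that the restored inner class keeps entry static even though it is assigned per instance; that is faithful to the pre-patch source rather than a recommendation.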
@@ -18,10 +18,6 @@
 
 package org.apache.hadoop.util;
 
-import java.security.AccessController;
-import java.security.PrivilegedAction;
-import java.util.Arrays;
-
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
@@ -37,71 +33,21 @@ public class PlatformName {
    * per the java-vm.
    */
   public static final String PLATFORM_NAME =
-      (System.getProperty("os.name").startsWith("Windows") ?
-      System.getenv("os") : System.getProperty("os.name"))
-      + "-" + System.getProperty("os.arch") + "-"
-      + System.getProperty("sun.arch.data.model");
+      (System.getProperty("os.name").startsWith("Windows")
+      ? System.getenv("os") : System.getProperty("os.name"))
+      + "-" + System.getProperty("os.arch")
+      + "-" + System.getProperty("sun.arch.data.model");
 
   /**
    * The java vendor name used in this platform.
    */
   public static final String JAVA_VENDOR_NAME = System.getProperty("java.vendor");
 
-  /**
-   * Define a system class accessor that is open to changes in underlying implementations
-   * of the system class loader modules.
-   */
-  private static final class SystemClassAccessor extends ClassLoader {
-    public Class<?> getSystemClass(String className) throws ClassNotFoundException {
-      return findSystemClass(className);
-    }
-  }
-
   /**
    * A public static variable to indicate the current java vendor is
-   * IBM and the type is Java Technology Edition which provides its
-   * own implementations of many security packages and Cipher suites.
-   * Note that these are not provided in Semeru runtimes:
-   * See https://developer.ibm.com/languages/java/semeru-runtimes for details.
+   * IBM java or not.
    */
-  public static final boolean IBM_JAVA = JAVA_VENDOR_NAME.contains("IBM") &&
-      hasIbmTechnologyEditionModules();
-
-  private static boolean hasIbmTechnologyEditionModules() {
-    return Arrays.asList(
-        "com.ibm.security.auth.module.JAASLoginModule",
-        "com.ibm.security.auth.module.Win64LoginModule",
-        "com.ibm.security.auth.module.NTLoginModule",
-        "com.ibm.security.auth.module.AIX64LoginModule",
-        "com.ibm.security.auth.module.LinuxLoginModule",
-        "com.ibm.security.auth.module.Krb5LoginModule"
-    ).stream().anyMatch((module) -> isSystemClassAvailable(module));
-  }
-
-  /**
-   * In rare cases where different behaviour is performed based on the JVM vendor
-   * this method should be used to test for a unique JVM class provided by the
-   * vendor rather than using the vendor method. For example if on JVM provides a
-   * different Kerberos login module testing for that login module being loadable
-   * before configuring to use it is preferable to using the vendor data.
-   *
-   * @param className the name of a class in the JVM to test for
-   * @return true if the class is available, false otherwise.
-   */
-  private static boolean isSystemClassAvailable(String className) {
-    return AccessController.doPrivileged((PrivilegedAction<Boolean>) () -> {
-      try {
-        // Using ClassLoader.findSystemClass() instead of
-        // Class.forName(className, false, null) because Class.forName with a null
-        // ClassLoader only looks at the boot ClassLoader with Java 9 and above
-        // which doesn't look at all the modules available to the findSystemClass.
-        new SystemClassAccessor().getSystemClass(className);
-        return true;
-      } catch (Exception ignored) {
-        return false;
-      }
-    });
-  }
+  public static final boolean IBM_JAVA = JAVA_VENDOR_NAME.contains("IBM");
 
   public static void main(String[] args) {
     System.out.println(PLATFORM_NAME);
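The removed helper encodes a useful rule: when behaviour must vary by JVM, probe for a vendor-specific class instead of parsing java.vendor. A simplified sketch, assuming the system class loader suffices (the removed code used findSystemClass() so that JVM modules are also visible on Java 9+):

    static boolean classAvailable(String className) {
      try {
        // Resolve without initializing; only existence matters here.
        Class.forName(className, false, ClassLoader.getSystemClassLoader());
        return true;
      } catch (ClassNotFoundException | LinkageError e) {
        return false;
      }
    }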
@@ -24,7 +24,7 @@ This filter must be configured in front of all the web application resources tha
 
 The Hadoop Auth and dependent JAR files must be in the web application classpath (commonly the `WEB-INF/lib` directory).
 
-Hadoop Auth uses SLF4J-API for logging. Auth Maven POM dependencies define the SLF4J API dependency but it does not define the dependency on a concrete logging implementation, this must be addded explicitly to the web application. For example, if the web applicationan uses Log4j, the SLF4J-LOG4J12 and LOG4J jar files must be part of the web application classpath as well as the Log4j configuration file.
+Hadoop Auth uses SLF4J-API for logging. Auth Maven POM dependencies define the SLF4J API dependency but it does not define the dependency on a concrete logging implementation, this must be addded explicitly to the web application. For example, if the web applicationan uses Log4j, the SLF4J-LOG4J12 and LOG4J jar files must be part part of the web application classpath as well as the Log4j configuration file.
 
 ### Common Configuration parameters
@@ -108,9 +108,9 @@ public class KerberosTestUtils {
   public static <T> T doAs(String principal, final Callable<T> callable) throws Exception {
     LoginContext loginContext = null;
     try {
-      Set<Principal> principals = new HashSet<>();
+      Set<Principal> principals = new HashSet<Principal>();
       principals.add(new KerberosPrincipal(KerberosTestUtils.getClientPrincipal()));
-      Subject subject = new Subject(false, principals, new HashSet<>(), new HashSet<>());
+      Subject subject = new Subject(false, principals, new HashSet<Object>(), new HashSet<Object>());
       loginContext = new LoginContext("", subject, null, new KerberosConfiguration(principal));
       loginContext.login();
       subject = loginContext.getSubject();
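For context, the shape of the surrounding doAs() utility: log in through a programmatic JAAS configuration, then run the callable under the resulting Subject. A condensed sketch; KerberosConfiguration is the test's own Configuration subclass:

    LoginContext loginContext =
        new LoginContext("", subject, null, new KerberosConfiguration(principal));
    loginContext.login();
    return Subject.doAs(loginContext.getSubject(),
        (PrivilegedExceptionAction<T>) callable::call);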
@@ -89,44 +89,6 @@ public class TestAuthenticatedURL {
     }
   }
 
-  @Test
-  public void testExtractTokenCookieHeader() throws Exception {
-    HttpURLConnection conn = Mockito.mock(HttpURLConnection.class);
-
-    Mockito.when(conn.getResponseCode()).thenReturn(HttpURLConnection.HTTP_OK);
-
-    String tokenStr = "foo";
-    Map<String, List<String>> headers = new HashMap<>();
-    List<String> cookies = new ArrayList<>();
-    cookies.add(AuthenticatedURL.AUTH_COOKIE + "=" + tokenStr);
-    headers.put("Set-Cookie", cookies);
-    Mockito.when(conn.getHeaderFields()).thenReturn(headers);
-
-    AuthenticatedURL.Token token = new AuthenticatedURL.Token();
-    AuthenticatedURL.extractToken(conn, token);
-
-    Assert.assertTrue(token.isSet());
-  }
-
-  @Test
-  public void testExtractTokenLowerCaseCookieHeader() throws Exception {
-    HttpURLConnection conn = Mockito.mock(HttpURLConnection.class);
-
-    Mockito.when(conn.getResponseCode()).thenReturn(HttpURLConnection.HTTP_OK);
-
-    String tokenStr = "foo";
-    Map<String, List<String>> headers = new HashMap<>();
-    List<String> cookies = new ArrayList<>();
-    cookies.add(AuthenticatedURL.AUTH_COOKIE + "=" + tokenStr);
-    headers.put("set-cookie", cookies);
-    Mockito.when(conn.getHeaderFields()).thenReturn(headers);
-
-    AuthenticatedURL.Token token = new AuthenticatedURL.Token();
-    AuthenticatedURL.extractToken(conn, token);
-
-    Assert.assertTrue(token.isSet());
-  }
-
   @Test
   public void testConnectionConfigurator() throws Exception {
     HttpURLConnection conn = Mockito.mock(HttpURLConnection.class);
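A hedged usage sketch of the API the removed tests exercise: AuthenticatedURL performs SPNEGO on first contact and captures the signed auth cookie in the Token for reuse; the URL below is a placeholder:

    AuthenticatedURL.Token token = new AuthenticatedURL.Token();
    HttpURLConnection conn = new AuthenticatedURL()
        .openConnection(new URL("http://host:14000/webhdfs/v1/?op=LISTSTATUS"), token);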
@@ -21,13 +21,8 @@ import static org.apache.hadoop.security.authentication.server.KerberosAuthentic
 import static org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler.NAME_RULES;
 
 import java.io.IOException;
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
 import java.nio.charset.CharacterCodingException;
 import javax.security.sasl.AuthenticationException;
 
-import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.lang3.reflect.FieldUtils;
 import org.apache.hadoop.minikdc.KerberosSecurityTestcase;
 import org.apache.hadoop.security.authentication.KerberosTestUtils;
 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
@@ -37,12 +32,10 @@ import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHa
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
-import org.mockito.Mockito;
 
 import java.io.File;
 import java.net.HttpURLConnection;
 import java.net.URL;
-import java.util.Arrays;
 import java.util.Properties;
 import java.util.concurrent.Callable;
 
@@ -255,79 +248,4 @@ public class TestKerberosAuthenticator extends KerberosSecurityTestcase {
     Assert.assertTrue(ex.equals(ex2));
   }
 
-  @Test(timeout = 60000)
-  public void testNegotiate() throws NoSuchMethodException, InvocationTargetException,
-      IllegalAccessException, IOException {
-    KerberosAuthenticator kerberosAuthenticator = new KerberosAuthenticator();
-
-    HttpURLConnection conn = Mockito.mock(HttpURLConnection.class);
-    Mockito.when(conn.getHeaderField(KerberosAuthenticator.WWW_AUTHENTICATE)).
-        thenReturn(KerberosAuthenticator.NEGOTIATE);
-    Mockito.when(conn.getResponseCode()).thenReturn(HttpURLConnection.HTTP_UNAUTHORIZED);
-
-    Method method = KerberosAuthenticator.class.getDeclaredMethod("isNegotiate",
-        HttpURLConnection.class);
-    method.setAccessible(true);
-
-    Assert.assertTrue((boolean)method.invoke(kerberosAuthenticator, conn));
-  }
-
-  @Test(timeout = 60000)
-  public void testNegotiateLowerCase() throws NoSuchMethodException, InvocationTargetException,
-      IllegalAccessException, IOException {
-    KerberosAuthenticator kerberosAuthenticator = new KerberosAuthenticator();
-
-    HttpURLConnection conn = Mockito.mock(HttpURLConnection.class);
-    Mockito.when(conn.getHeaderField("www-authenticate"))
-        .thenReturn(KerberosAuthenticator.NEGOTIATE);
-    Mockito.when(conn.getResponseCode()).thenReturn(HttpURLConnection.HTTP_UNAUTHORIZED);
-
-    Method method = KerberosAuthenticator.class.getDeclaredMethod("isNegotiate",
-        HttpURLConnection.class);
-    method.setAccessible(true);
-
-    Assert.assertTrue((boolean)method.invoke(kerberosAuthenticator, conn));
-  }
-
-  @Test(timeout = 60000)
-  public void testReadToken() throws NoSuchMethodException, IOException, IllegalAccessException,
-      InvocationTargetException {
-    KerberosAuthenticator kerberosAuthenticator = new KerberosAuthenticator();
-    FieldUtils.writeField(kerberosAuthenticator, "base64", new Base64(), true);
-
-    Base64 base64 = new Base64();
-
-    HttpURLConnection conn = Mockito.mock(HttpURLConnection.class);
-    Mockito.when(conn.getResponseCode()).thenReturn(HttpURLConnection.HTTP_UNAUTHORIZED);
-    Mockito.when(conn.getHeaderField(KerberosAuthenticator.WWW_AUTHENTICATE))
-        .thenReturn(KerberosAuthenticator.NEGOTIATE + " " +
-            Arrays.toString(base64.encode("foobar".getBytes())));
-
-    Method method = KerberosAuthenticator.class.getDeclaredMethod("readToken",
-        HttpURLConnection.class);
-    method.setAccessible(true);
-
-    method.invoke(kerberosAuthenticator, conn); // expecting this not to throw an exception
-  }
-
-  @Test(timeout = 60000)
-  public void testReadTokenLowerCase() throws NoSuchMethodException, IOException,
-      IllegalAccessException, InvocationTargetException {
-    KerberosAuthenticator kerberosAuthenticator = new KerberosAuthenticator();
-    FieldUtils.writeField(kerberosAuthenticator, "base64", new Base64(), true);
-
-    Base64 base64 = new Base64();
-
-    HttpURLConnection conn = Mockito.mock(HttpURLConnection.class);
-    Mockito.when(conn.getResponseCode()).thenReturn(HttpURLConnection.HTTP_UNAUTHORIZED);
-    Mockito.when(conn.getHeaderField("www-authenticate"))
-        .thenReturn(KerberosAuthenticator.NEGOTIATE +
-            Arrays.toString(base64.encode("foobar".getBytes())));
-
-    Method method = KerberosAuthenticator.class.getDeclaredMethod("readToken",
-        HttpURLConnection.class);
-    method.setAccessible(true);
-
-    method.invoke(kerberosAuthenticator, conn); // expecting this not to throw an exception
-  }
 }
@@ -574,44 +574,6 @@ public class TestAuthenticationFilter {
     }
   }
 
-  @Test
-  public void testDoFilterNotAuthenticatedLowerCase() throws Exception {
-    AuthenticationFilter filter = new AuthenticationFilter();
-    try {
-      FilterConfig config = Mockito.mock(FilterConfig.class);
-      Mockito.when(config.getInitParameter("management.operation.return")).
-          thenReturn("true");
-      Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
-          DummyAuthenticationHandler.class.getName());
-      Mockito.when(config.getInitParameterNames()).thenReturn(
-          new Vector<>(
-              Arrays.asList(AuthenticationFilter.AUTH_TYPE,
-                  "management.operation.return")).elements());
-      getMockedServletContextWithStringSigner(config);
-      filter.init(config);
-
-      HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
-      Mockito.when(request.getRequestURL()).thenReturn(new StringBuffer("http://foo:8080/bar"));
-
-      HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
-
-      FilterChain chain = Mockito.mock(FilterChain.class);
-
-      Mockito.doAnswer((Answer<Object>) invocation -> {
-        Assert.fail();
-        return null;
-      }).when(chain).doFilter(any(), any());
-
-      Mockito.when(response.containsHeader("www-authenticate")).thenReturn(true);
-      filter.doFilter(request, response, chain);
-
-      Mockito.verify(response).sendError(
-          HttpServletResponse.SC_UNAUTHORIZED, "Authentication required");
-    } finally {
-      filter.destroy();
-    }
-  }
-
   private void _testDoFilterAuthentication(boolean withDomainPath,
                                            boolean invalidToken,
                                            boolean expired) throws Exception {
@@ -25,6 +25,7 @@ import java.security.interfaces.RSAPublicKey;
 import java.util.List;
 import java.util.ArrayList;
 import java.util.Properties;
+import java.util.Vector;
 import java.util.Date;
 
 import javax.servlet.ServletException;
@@ -13,6 +13,7 @@
  */
 package org.apache.hadoop.security.authentication.server;
 
+import org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.apache.hadoop.security.authentication.client.PseudoAuthenticator;
 import org.junit.Assert;
 import org.junit.Test;
@@ -18,6 +18,7 @@ import java.util.Properties;
 import javax.servlet.ServletContext;
 
 import org.apache.hadoop.classification.VisibleForTesting;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 
@@ -32,8 +32,8 @@ public class TestJaasConfiguration {
       krb5LoginModuleName = "com.sun.security.auth.module.Krb5LoginModule";
     }
 
-    JaasConfiguration jConf =
-        new JaasConfiguration("foo", "foo/localhost",
+    ZKSignerSecretProvider.JaasConfiguration jConf =
+        new ZKSignerSecretProvider.JaasConfiguration("foo", "foo/localhost",
             "/some/location/foo.keytab");
     AppConfigurationEntry[] entries = jConf.getAppConfigurationEntry("bar");
     Assert.assertNull(entries);
@@ -379,6 +379,21 @@
     <Bug code="JLM" />
   </Match>
 
+  <!--
+     OpenStack Swift FS module -closes streams in a different method
+     from where they are opened.
+  -->
+  <Match>
+    <Class name="org.apache.hadoop.fs.swift.snative.SwiftNativeOutputStream"/>
+    <Method name="uploadFileAttempt"/>
+    <Bug pattern="OBL_UNSATISFIED_OBLIGATION"/>
+  </Match>
+  <Match>
+    <Class name="org.apache.hadoop.fs.swift.snative.SwiftNativeOutputStream"/>
+    <Method name="uploadFilePartAttempt"/>
+    <Bug pattern="OBL_UNSATISFIED_OBLIGATION"/>
+  </Match>
+
   <!-- code from maven source, null value is checked at callee side. -->
   <Match>
     <Class name="org.apache.hadoop.util.ComparableVersion$ListItem" />
File diff suppressed because one or more lines are too long
@@ -141,53 +141,23 @@
       <groupId>com.sun.jersey</groupId>
       <artifactId>jersey-servlet</artifactId>
       <scope>compile</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>javax.enterprise</groupId>
-          <artifactId>cdi-api</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>javax.servlet</groupId>
-          <artifactId>servlet-api</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>ch.qos.cal10n</groupId>
-          <artifactId>cal10n-api</artifactId>
-        </exclusion>
-      </exclusions>
     </dependency>
     <dependency>
-      <groupId>com.github.pjfanning</groupId>
+      <!-- Used, even though 'mvn dependency:analyze' doesn't find it -->
+      <groupId>com.sun.jersey</groupId>
       <artifactId>jersey-json</artifactId>
       <scope>compile</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>com.fasterxml.jackson.core</groupId>
-          <artifactId>jackson-core</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>com.fasterxml.jackson.core</groupId>
-          <artifactId>jackson-databind</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>com.fasterxml.jackson.jaxrs</groupId>
-          <artifactId>jackson-jaxrs-json-provider</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <!--
-        adding jettison as direct dependency (as jersey-json's jettison dependency is vulnerable with verison 1.1),
-        so those who depends on hadoop-common externally will get the non-vulnerable jettison
-      -->
-      <groupId>org.codehaus.jettison</groupId>
-      <artifactId>jettison</artifactId>
     </dependency>
     <dependency>
       <groupId>com.sun.jersey</groupId>
       <artifactId>jersey-server</artifactId>
       <scope>compile</scope>
     </dependency>
+    <dependency>
+      <groupId>commons-logging</groupId>
+      <artifactId>commons-logging</artifactId>
+      <scope>compile</scope>
+    </dependency>
     <dependency>
       <groupId>log4j</groupId>
       <artifactId>log4j</artifactId>
@@ -203,6 +173,11 @@
       <artifactId>assertj-core</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.glassfish.grizzly</groupId>
+      <artifactId>grizzly-http-servlet</artifactId>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>commons-beanutils</groupId>
       <artifactId>commons-beanutils</artifactId>
@@ -212,12 +187,6 @@
       <groupId>org.apache.commons</groupId>
       <artifactId>commons-configuration2</artifactId>
       <scope>compile</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>javax.servlet</groupId>
-          <artifactId>servlet-api</artifactId>
-        </exclusion>
-      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.commons</groupId>
@@ -340,14 +309,6 @@
         </exclusion>
       </exclusions>
     </dependency>
-    <dependency>
-      <groupId>io.netty</groupId>
-      <artifactId>netty-handler</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>io.netty</groupId>
-      <artifactId>netty-transport-native-epoll</artifactId>
-    </dependency>
     <dependency>
       <groupId>io.dropwizard.metrics</groupId>
       <artifactId>metrics-core</artifactId>
@@ -389,11 +350,6 @@
       <artifactId>mockwebserver</artifactId>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>com.squareup.okio</groupId>
-      <artifactId>okio-jvm</artifactId>
-      <scope>test</scope>
-    </dependency>
     <dependency>
       <groupId>dnsjava</groupId>
       <artifactId>dnsjava</artifactId>
@@ -660,10 +616,9 @@
               <goal>exec</goal>
             </goals>
             <configuration>
-              <executable>${shell-executable}</executable>
+              <executable>${basedir}/../../dev-support/bin/shelldocs</executable>
               <workingDirectory>src/site/markdown</workingDirectory>
               <arguments>
-                <argument>${basedir}/../../dev-support/bin/shelldocs</argument>
                 <argument>--skipprnorep</argument>
                 <argument>--output</argument>
                 <argument>${basedir}/src/site/markdown/UnixShellAPI.md</argument>
@@ -853,36 +808,6 @@
           </execution>
         </executions>
       </plugin>
-      <plugin>
-        <!--Sets the skip.platformToolsetDetection to true if use.platformToolsetVersion is specified.
-        This implies that the automatic detection of which platform toolset to use will be skipped
-        and the one specified with use.platformToolsetVersion will be used.-->
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-antrun-plugin</artifactId>
-        <version>1.8</version>
-        <executions>
-          <execution>
-            <phase>validate</phase>
-            <goals>
-              <goal>run</goal>
-            </goals>
-            <configuration>
-              <exportAntProperties>true</exportAntProperties>
-              <target>
-                <condition property="skip.platformToolsetDetection" value="true" else="false">
-                  <isset property="use.platformToolsetVersion"/>
-                </condition>
-                <!--Unfortunately, Maven doesn't have a way to negate a flag, thus we declare a
-                property which holds the negated value of skip.platformToolsetDetection.-->
-                <condition property="skip.platformToolsetDetection.negated" value="false" else="true">
-                  <isset property="use.platformToolsetVersion"/>
-                </condition>
-                <echo>Skip platform toolset version detection = ${skip.platformToolsetDetection}</echo>
-              </target>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>exec-maven-plugin</artifactId>
@@ -894,7 +819,6 @@
               <goal>exec</goal>
             </goals>
             <configuration>
-              <skip>${skip.platformToolsetDetection}</skip>
               <executable>${basedir}\..\..\dev-support\bin\win-vs-upgrade.cmd</executable>
               <arguments>
                 <argument>${basedir}\src\main\winutils</argument>
@@ -909,7 +833,6 @@
               <goal>exec</goal>
             </goals>
             <configuration>
-              <skip>${skip.platformToolsetDetection}</skip>
               <executable>msbuild</executable>
               <arguments>
                 <argument>${basedir}/src/main/winutils/winutils.sln</argument>
@@ -922,27 +845,6 @@
               </arguments>
             </configuration>
           </execution>
-          <execution>
-            <id>compile-ms-winutils-using-build-tools</id>
-            <phase>compile</phase>
-            <goals>
-              <goal>exec</goal>
-            </goals>
-            <configuration>
-              <skip>${skip.platformToolsetDetection.negated}</skip>
-              <executable>msbuild</executable>
-              <arguments>
-                <argument>${basedir}/src/main/winutils/winutils.sln</argument>
-                <argument>/nologo</argument>
-                <argument>/p:Configuration=Release</argument>
-                <argument>/p:OutDir=${project.build.directory}/bin/</argument>
-                <argument>/p:IntermediateOutputPath=${project.build.directory}/winutils/</argument>
-                <argument>/p:WsceConfigDir=${wsce.config.dir}</argument>
-                <argument>/p:WsceConfigFile=${wsce.config.file}</argument>
-                <argument>/p:PlatformToolset=${use.platformToolsetVersion}</argument>
-              </arguments>
-            </configuration>
-          </execution>
           <execution>
             <id>convert-ms-native-dll</id>
             <phase>generate-sources</phase>
@@ -950,7 +852,6 @@
               <goal>exec</goal>
             </goals>
             <configuration>
-              <skip>${skip.platformToolsetDetection}</skip>
               <executable>${basedir}\..\..\dev-support\bin\win-vs-upgrade.cmd</executable>
               <arguments>
                 <argument>${basedir}\src\main\native</argument>
@@ -965,7 +866,6 @@
               <goal>exec</goal>
             </goals>
             <configuration>
-              <skip>${skip.platformToolsetDetection}</skip>
               <executable>msbuild</executable>
               <arguments>
                 <argument>${basedir}/src/main/native/native.sln</argument>
@@ -986,35 +886,6 @@
               </arguments>
             </configuration>
           </execution>
-          <execution>
-            <id>compile-ms-native-dll-using-build-tools</id>
-            <phase>compile</phase>
-            <goals>
-              <goal>exec</goal>
-            </goals>
-            <configuration>
-              <skip>${skip.platformToolsetDetection.negated}</skip>
-              <executable>msbuild</executable>
-              <arguments>
-                <argument>${basedir}/src/main/native/native.sln</argument>
-                <argument>/nologo</argument>
-                <argument>/p:Configuration=Release</argument>
-                <argument>/p:OutDir=${project.build.directory}/bin/</argument>
-                <argument>/p:CustomZstdPrefix=${zstd.prefix}</argument>
-                <argument>/p:CustomZstdLib=${zstd.lib}</argument>
-                <argument>/p:CustomZstdInclude=${zstd.include}</argument>
-                <argument>/p:RequireZstd=${require.zstd}</argument>
-                <argument>/p:CustomOpensslPrefix=${openssl.prefix}</argument>
-                <argument>/p:CustomOpensslLib=${openssl.lib}</argument>
-                <argument>/p:CustomOpensslInclude=${openssl.include}</argument>
-                <argument>/p:RequireOpenssl=${require.openssl}</argument>
-                <argument>/p:RequireIsal=${require.isal}</argument>
-                <argument>/p:CustomIsalPrefix=${isal.prefix}</argument>
-                <argument>/p:CustomIsalLib=${isal.lib}</argument>
-                <argument>/p:PlatformToolset=${use.platformToolsetVersion}</argument>
-              </arguments>
-            </configuration>
-          </execution>
         </executions>
       </plugin>
     </plugins>
@@ -1247,7 +1118,7 @@
             <id>src-test-compile-protoc-legacy</id>
             <phase>generate-test-sources</phase>
             <goals>
-              <goal>test-compile</goal>
+              <goal>compile</goal>
             </goals>
             <configuration>
               <skip>false</skip>
@@ -1256,7 +1127,7 @@
                 com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier}
               </protocArtifact>
               <includeDependenciesInDescriptorSet>false</includeDependenciesInDescriptorSet>
-              <protoTestSourceRoot>${basedir}/src/test/proto</protoTestSourceRoot>
+              <protoSourceRoot>${basedir}/src/test/proto</protoSourceRoot>
               <outputDirectory>${project.build.directory}/generated-test-sources/java</outputDirectory>
               <clearOutputDirectory>false</clearOutputDirectory>
               <includes>
@@ -1267,16 +1138,6 @@
           </execution>
         </executions>
       </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-javadoc-plugin</artifactId>
-        <configuration>
-          <sourceFileExcludes>
-            <sourceFileExclude>**/FSProtos.java</sourceFileExclude>
-          </sourceFileExcludes>
-          <excludePackageNames>*.proto:*.tracing:*.protobuf</excludePackageNames>
-        </configuration>
-      </plugin>
     </plugins>
   </build>
 </profile>
@@ -26,9 +26,9 @@ MYNAME="${BASH_SOURCE-$0}"
 function hadoop_usage
 {
   hadoop_add_option "buildpaths" "attempt to add class files from build tree"
-  hadoop_add_option "hostnames list[,of,host,names]" "hosts to use in worker mode"
+  hadoop_add_option "hostnames list[,of,host,names]" "hosts to use in slave mode"
   hadoop_add_option "loglevel level" "set the log4j level for this command"
-  hadoop_add_option "hosts filename" "list of hosts to use in worker mode"
+  hadoop_add_option "hosts filename" "list of hosts to use in slave mode"
   hadoop_add_option "workers" "turn on worker mode"
 
   hadoop_add_subcommand "checknative" client "check native Hadoop and compression libraries availability"
@@ -16,7 +16,7 @@
 # limitations under the License.
 
 
-# Run a Hadoop command on all worker hosts.
+# Run a Hadoop command on all slave hosts.
 
 function hadoop_usage
 {
@@ -53,10 +53,6 @@
 # variable is REQUIRED on ALL platforms except OS X!
 # export JAVA_HOME=
 
-# The language environment in which Hadoop runs. Use the English
-# environment to ensure that logs are printed as expected.
-export LANG=en_US.UTF-8
-
 # Location of Hadoop. By default, Hadoop will attempt to determine
 # this location based upon its execution path.
 # export HADOOP_HOME=
@@ -75,6 +75,14 @@ log4j.appender.console.target=System.err
 log4j.appender.console.layout=org.apache.log4j.PatternLayout
 log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
 
+#
+# TaskLog Appender
+#
+log4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender
+
+log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
+log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+
 #
 # HDFS block state change log from block manager
 #
@@ -243,45 +251,30 @@ log4j.appender.NMAUDIT.MaxBackupIndex=${nm.audit.log.maxbackupindex}
 #log4j.appender.HSAUDIT.DatePattern=.yyyy-MM-dd
 
 # Http Server Request Logs
-#log4j.appender.AccessNNDRFA=org.apache.log4j.DailyRollingFileAppender
-#log4j.appender.AccessNNDRFA.File=${hadoop.log.dir}/jetty-namenode.log
-#log4j.appender.AccessNNDRFA.DatePattern=.yyyy-MM-dd
-#log4j.appender.AccessNNDRFA.layout=org.apache.log4j.PatternLayout
-#log4j.appender.AccessNNDRFA.layout.ConversionPattern=%m%n
-
-#log4j.logger.http.requests.namenode=INFO,AccessNNDRFA
-
-#log4j.appender.AccessDNDRFA=org.apache.log4j.DailyRollingFileAppender
-#log4j.appender.AccessDNDRFA.File=${hadoop.log.dir}/jetty-datanode.log
-#log4j.appender.AccessDNDRFA.DatePattern=.yyyy-MM-dd
-#log4j.appender.AccessDNDRFA.layout=org.apache.log4j.PatternLayout
-#log4j.appender.AccessDNDRFA.layout.ConversionPattern=%m%n
-
-#log4j.logger.http.requests.datanode=INFO,AccessDNDRFA
-
-#log4j.appender.AccessRMDRFA=org.apache.log4j.DailyRollingFileAppender
-#log4j.appender.AccessRMDRFA.File=${hadoop.log.dir}/jetty-resourcemanager.log
-#log4j.appender.AccessRMDRFA.DatePattern=.yyyy-MM-dd
-#log4j.appender.AccessRMDRFA.layout=org.apache.log4j.PatternLayout
-#log4j.appender.AccessRMDRFA.layout.ConversionPattern=%m%n
-
-#log4j.logger.http.requests.resourcemanager=INFO,AccessRMDRFA
-
-#log4j.appender.AccessJHDRFA=org.apache.log4j.DailyRollingFileAppender
-#log4j.appender.AccessJHDRFA.File=${hadoop.log.dir}/jetty-jobhistory.log
-#log4j.appender.AccessJHDRFA.DatePattern=.yyyy-MM-dd
-#log4j.appender.AccessJHDRFA.layout=org.apache.log4j.PatternLayout
-#log4j.appender.AccessJHDRFA.layout.ConversionPattern=%m%n
-
-#log4j.logger.http.requests.jobhistory=INFO,AccessJHDRFA
-
-#log4j.appender.AccessNMDRFA=org.apache.log4j.DailyRollingFileAppender
-#log4j.appender.AccessNMDRFA.File=${hadoop.log.dir}/jetty-jobhistory.log
-#log4j.appender.AccessNMDRFA.DatePattern=.yyyy-MM-dd
-#log4j.appender.AccessNMDRFA.layout=org.apache.log4j.PatternLayout
-#log4j.appender.AccessNMDRFA.layout.ConversionPattern=%m%n
-
-#log4j.logger.http.requests.nodemanager=INFO,AccessNMDRFA
+#log4j.logger.http.requests.namenode=INFO,namenoderequestlog
+#log4j.appender.namenoderequestlog=org.apache.hadoop.http.HttpRequestLogAppender
+#log4j.appender.namenoderequestlog.Filename=${hadoop.log.dir}/jetty-namenode-yyyy_mm_dd.log
+#log4j.appender.namenoderequestlog.RetainDays=3
+
+#log4j.logger.http.requests.datanode=INFO,datanoderequestlog
+#log4j.appender.datanoderequestlog=org.apache.hadoop.http.HttpRequestLogAppender
+#log4j.appender.datanoderequestlog.Filename=${hadoop.log.dir}/jetty-datanode-yyyy_mm_dd.log
+#log4j.appender.datanoderequestlog.RetainDays=3
+
+#log4j.logger.http.requests.resourcemanager=INFO,resourcemanagerrequestlog
+#log4j.appender.resourcemanagerrequestlog=org.apache.hadoop.http.HttpRequestLogAppender
+#log4j.appender.resourcemanagerrequestlog.Filename=${hadoop.log.dir}/jetty-resourcemanager-yyyy_mm_dd.log
+#log4j.appender.resourcemanagerrequestlog.RetainDays=3
+
+#log4j.logger.http.requests.jobhistory=INFO,jobhistoryrequestlog
+#log4j.appender.jobhistoryrequestlog=org.apache.hadoop.http.HttpRequestLogAppender
+#log4j.appender.jobhistoryrequestlog.Filename=${hadoop.log.dir}/jetty-jobhistory-yyyy_mm_dd.log
+#log4j.appender.jobhistoryrequestlog.RetainDays=3
+
+#log4j.logger.http.requests.nodemanager=INFO,nodemanagerrequestlog
+#log4j.appender.nodemanagerrequestlog=org.apache.hadoop.http.HttpRequestLogAppender
+#log4j.appender.nodemanagerrequestlog.Filename=${hadoop.log.dir}/jetty-nodemanager-yyyy_mm_dd.log
+#log4j.appender.nodemanagerrequestlog.RetainDays=3
 
 # WebHdfs request log on datanodes
 # Specify -Ddatanode.webhdfs.logger=INFO,HTTPDRFA on datanode startup to
@@ -98,7 +98,7 @@ public class ConfServlet extends HttpServlet {
     if (FORMAT_JSON.equals(format)) {
       Configuration.dumpConfiguration(conf, propertyName, out);
     } else if (FORMAT_XML.equals(format)) {
-      conf.writeXml(propertyName, out, conf);
+      conf.writeXml(propertyName, out);
     } else {
       throw new BadFormatException("Bad format: " + format);
     }
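The overload being dropped threads the Configuration through so that its redaction rules apply when rendering XML (see the ConfigRedactor hunks below); the revert returns to the plain two-argument form. A hedged sketch of the surviving call shapes, with writer as a placeholder:

    conf.writeXml(null, writer);           // dump every property as XML
    conf.writeXml("fs.defaultFS", writer); // dump a single named property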
@@ -37,7 +37,6 @@ import org.apache.hadoop.util.StringUtils;
 public class ConfigRedactor {
 
   private static final String REDACTED_TEXT = "<redacted>";
-  private static final String REDACTED_XML = "******";
 
   private List<Pattern> compiledPatterns;
 
@@ -58,8 +57,8 @@ public class ConfigRedactor {
    * Given a key / value pair, decides whether or not to redact and returns
    * either the original value or text indicating it has been redacted.
    *
-   * @param key param key.
-   * @param value param value, will return if conditions permit.
+   * @param key
+   * @param value
    * @return Original value, or text indicating it has been redacted
    */
   public String redact(String key, String value) {
@@ -85,19 +84,4 @@ public class ConfigRedactor {
     }
     return false;
   }
-
-  /**
-   * Given a key / value pair, decides whether or not to redact and returns
-   * either the original value or text indicating it has been redacted.
-   *
-   * @param key param key.
-   * @param value param value, will return if conditions permit.
-   * @return Original value, or text indicating it has been redacted
-   */
-  public String redactXml(String key, String value) {
-    if (configIsSensitive(key)) {
-      return REDACTED_XML;
-    }
-    return value;
-  }
 }
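Usage sketch for the API that survives the revert: values whose keys match the patterns in hadoop.security.sensitive-config-keys come back as the redaction marker instead of the raw value. The key below is a placeholder:

    ConfigRedactor redactor = new ConfigRedactor(conf);
    String safe = redactor.redact("fs.s3a.secret.key", rawValue);
    // safe is "<redacted>" when the key matches a sensitive pattern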
@@ -24,6 +24,7 @@ import com.ctc.wstx.io.SystemId;
 import com.ctc.wstx.stax.WstxInputFactory;
 import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
+import org.apache.hadoop.classification.VisibleForTesting;
 
 import java.io.BufferedInputStream;
 import java.io.DataInput;
@@ -86,7 +87,6 @@ import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import org.apache.commons.collections.map.UnmodifiableMap;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@ -98,19 +98,18 @@ import org.apache.hadoop.security.UserGroupInformation;
|
||||||
import org.apache.hadoop.security.alias.CredentialProvider;
|
import org.apache.hadoop.security.alias.CredentialProvider;
|
||||||
import org.apache.hadoop.security.alias.CredentialProvider.CredentialEntry;
|
import org.apache.hadoop.security.alias.CredentialProvider.CredentialEntry;
|
||||||
import org.apache.hadoop.security.alias.CredentialProviderFactory;
|
import org.apache.hadoop.security.alias.CredentialProviderFactory;
|
||||||
import org.apache.hadoop.thirdparty.com.google.common.base.Strings;
|
|
||||||
import org.apache.hadoop.util.Preconditions;
|
|
||||||
import org.apache.hadoop.util.ReflectionUtils;
|
import org.apache.hadoop.util.ReflectionUtils;
|
||||||
import org.apache.hadoop.util.StringInterner;
|
import org.apache.hadoop.util.StringInterner;
|
||||||
import org.apache.hadoop.util.StringUtils;
|
import org.apache.hadoop.util.StringUtils;
|
||||||
import org.apache.hadoop.util.XMLUtils;
|
|
||||||
|
|
||||||
import org.codehaus.stax2.XMLStreamReader2;
|
import org.codehaus.stax2.XMLStreamReader2;
|
||||||
import org.slf4j.Logger;
|
import org.slf4j.Logger;
|
||||||
import org.slf4j.LoggerFactory;
|
import org.slf4j.LoggerFactory;
|
||||||
import org.w3c.dom.Document;
|
import org.w3c.dom.Document;
|
||||||
import org.w3c.dom.Element;
|
import org.w3c.dom.Element;
|
||||||
|
|
||||||
|
import org.apache.hadoop.util.Preconditions;
|
||||||
|
import org.apache.hadoop.thirdparty.com.google.common.base.Strings;
|
||||||
|
|
||||||
import static org.apache.commons.lang3.StringUtils.isBlank;
|
import static org.apache.commons.lang3.StringUtils.isBlank;
|
||||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||||
|
|
@@ -318,7 +317,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
   private boolean loadDefaults = true;
 
   /**
-   * Configuration objects.
+   * Configuration objects
    */
   private static final WeakHashMap<Configuration,Object> REGISTRY =
     new WeakHashMap<Configuration,Object>();
@@ -775,7 +774,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
   private void handleDeprecation() {
     LOG.debug("Handling deprecation for all properties in config...");
     DeprecationContext deprecations = deprecationContext.get();
-    Set<Object> keys = new HashSet<>();
+    Set<Object> keys = new HashSet<Object>();
     keys.addAll(getProps().keySet());
     for (Object item: keys) {
       LOG.debug("Handling deprecation for " + (String)item);
@@ -1909,7 +1908,6 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * @param name Property name
    * @param vStr The string value with time unit suffix to be converted.
    * @param unit Unit to convert the stored property, if it exists.
-   * @return time duration in given time unit.
    */
   public long getTimeDurationHelper(String name, String vStr, TimeUnit unit) {
     return getTimeDurationHelper(name, vStr, unit, unit);
@@ -1924,7 +1922,6 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * @param vStr The string value with time unit suffix to be converted.
    * @param defaultUnit Unit to convert the stored property, if it exists.
    * @param returnUnit Unit for the returned value.
-   * @return time duration in given time unit.
    */
   private long getTimeDurationHelper(String name, String vStr,
       TimeUnit defaultUnit, TimeUnit returnUnit) {
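For context, getTimeDurationHelper backs the public getTimeDuration lookups, which parse a time-unit suffix out of the stored string; a usage sketch (the key name is made up):

    Configuration conf = new Configuration();
    conf.set("example.refresh.interval", "30s");   // hypothetical key
    long millis = conf.getTimeDuration("example.refresh.interval",
        10, TimeUnit.MILLISECONDS);                // 30000: "30s" converted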
@@ -2209,7 +2206,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
   }
 
   /**
-   * Is the given value in the set of ranges.
+   * Is the given value in the set of ranges
    * @param value the value to check
    * @return is the value in the ranges?
    */
@@ -2266,7 +2263,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
   }
 
   /**
-   * Parse the given attribute as a set of integer ranges.
+   * Parse the given attribute as a set of integer ranges
    * @param name the attribute name
    * @param defaultValue the default value if it is not set
    * @return a new set of ranges from the configured value
@@ -2485,7 +2482,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
 
   /**
    * Fallback to clear text passwords in configuration.
-   * @param name the property name.
+   * @param name
    * @return clear text password or null
    */
   protected char[] getPasswordFromConfig(String name) {
@@ -2550,8 +2547,6 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
   /**
    * Set the socket address for the <code>name</code> property as
    * a <code>host:port</code>.
-   * @param name property name.
-   * @param addr inetSocketAddress addr.
    */
   public void setSocketAddr(String name, InetSocketAddress addr) {
     set(name, NetUtils.getHostPortString(addr));
@@ -2729,7 +2724,6 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * @param name the conf key name.
    * @param defaultValue default value.
    * @param xface the interface implemented by the named class.
-   * @param <U> Interface class type.
    * @return property value as a <code>Class</code>,
    *         or <code>defaultValue</code>.
    */
@@ -2759,7 +2753,6 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * @param name the property name.
    * @param xface the interface implemented by the classes named by
    * <code>name</code>.
-   * @param <U> Interface class type.
    * @return a <code>List</code> of objects implementing <code>xface</code>.
    */
   @SuppressWarnings("unchecked")
@@ -2801,7 +2794,6 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * @param dirsProp directory in which to locate the file.
    * @param path file-path.
    * @return local file under the directory with the given path.
-   * @throws IOException raised on errors performing I/O.
    */
   public Path getLocalPath(String dirsProp, String path)
     throws IOException {
@@ -2834,7 +2826,6 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * @param dirsProp directory in which to locate the file.
    * @param path file-path.
    * @return local file under the directory with the given path.
-   * @throws IOException raised on errors performing I/O.
    */
   public File getFile(String dirsProp, String path)
     throws IOException {
@@ -2987,15 +2978,13 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
     // methods that allow non-strings to be put into configurations are removed,
     // we could replace properties with a Map<String,String> and get rid of this
     // code.
-    Properties props = getProps();
-    Map<String, String> result = new HashMap<>();
-    synchronized (props) {
-      for (Map.Entry<Object, Object> item : props.entrySet()) {
-        if (item.getKey() instanceof String && item.getValue() instanceof String) {
+    Map<String,String> result = new HashMap<String,String>();
+    for(Map.Entry<Object,Object> item: getProps().entrySet()) {
+      if (item.getKey() instanceof String &&
+          item.getValue() instanceof String) {
         result.put((String) item.getKey(), (String) item.getValue());
       }
     }
-    }
     return result.entrySet().iterator();
   }
 
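The iterator() hunk above is more than javadoc churn: trunk copies the properties while holding the Properties monitor, which the revert drops. The point of the trunk version, reduced to a sketch:

    // java.util.Properties is a Hashtable; iterating its entrySet while
    // another thread mutates it can fail, so trunk snapshots under the lock.
    Properties props = getProps();                  // inside Configuration
    Map<String, String> snapshot = new HashMap<>();
    synchronized (props) {
      for (Map.Entry<Object, Object> e : props.entrySet()) {
        if (e.getKey() instanceof String && e.getValue() instanceof String) {
          snapshot.put((String) e.getKey(), (String) e.getValue());
        }
      }
    }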
@@ -3446,7 +3435,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
 
   /**
    * Add tags defined in HADOOP_TAGS_SYSTEM, HADOOP_TAGS_CUSTOM.
-   * @param prop properties.
+   * @param prop
    */
   public void addTags(Properties prop) {
     // Get all system tags
@@ -3547,7 +3536,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
 
   /**
    * Print a warning if a property with a given name already exists with a
-   * different value.
+   * different value
    */
   private void checkForOverride(Properties properties, String name, String attr, String value) {
     String propertyValue = properties.getProperty(attr);
@@ -3562,7 +3551,6 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * {@link OutputStream} using UTF-8 encoding.
    *
    * @param out the output stream to write to.
-   * @throws IOException raised on errors performing I/O.
    */
   public void writeXml(OutputStream out) throws IOException {
     writeXml(new OutputStreamWriter(out, "UTF-8"));
@@ -3592,20 +3580,16 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * the configuration, this method throws an {@link IllegalArgumentException}.
    * </li>
    * </ul>
-   * @param propertyName xml property name.
    * @param out the writer to write to.
-   * @param config configuration.
-   * @throws IOException raised on errors performing I/O.
    */
-  public void writeXml(@Nullable String propertyName, Writer out, Configuration config)
+  public void writeXml(@Nullable String propertyName, Writer out)
       throws IOException, IllegalArgumentException {
-    ConfigRedactor redactor = config != null ? new ConfigRedactor(this) : null;
-    Document doc = asXmlDocument(propertyName, redactor);
+    Document doc = asXmlDocument(propertyName);
 
     try {
       DOMSource source = new DOMSource(doc);
       StreamResult result = new StreamResult(out);
-      TransformerFactory transFactory = XMLUtils.newSecureTransformerFactory();
+      TransformerFactory transFactory = TransformerFactory.newInstance();
       Transformer transformer = transFactory.newTransformer();
 
       // Important to not hold Configuration log while writing result, since
@@ -3617,16 +3601,11 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
     }
   }
 
-  public void writeXml(@Nullable String propertyName, Writer out)
-      throws IOException, IllegalArgumentException {
-    writeXml(propertyName, out, null);
-  }
-
   /**
    * Return the XML DOM corresponding to this Configuration.
    */
-  private synchronized Document asXmlDocument(@Nullable String propertyName,
-      ConfigRedactor redactor) throws IOException, IllegalArgumentException {
+  private synchronized Document asXmlDocument(@Nullable String propertyName)
+      throws IOException, IllegalArgumentException {
     Document doc;
     try {
       doc = DocumentBuilderFactory
@@ -3649,13 +3628,13 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
             propertyName + " not found");
       } else {
         // given property is found, write single property
-        appendXMLProperty(doc, conf, propertyName, redactor);
+        appendXMLProperty(doc, conf, propertyName);
         conf.appendChild(doc.createTextNode("\n"));
       }
     } else {
       // append all elements
       for (Enumeration<Object> e = properties.keys(); e.hasMoreElements();) {
-        appendXMLProperty(doc, conf, (String)e.nextElement(), redactor);
+        appendXMLProperty(doc, conf, (String)e.nextElement());
         conf.appendChild(doc.createTextNode("\n"));
       }
     }
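The writeXml hunks remove the trunk-side three-argument overload that redacts sensitive values, along with the secure TransformerFactory. A usage sketch of the API being removed (assuming the trunk signature shown above):

    Configuration conf = new Configuration();
    StringWriter w = new StringWriter();
    conf.writeXml(null, w, conf);   // null propertyName = dump everything;
                                    // non-null config enables redaction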
@@ -3671,7 +3650,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * @param propertyName
    */
   private synchronized void appendXMLProperty(Document doc, Element conf,
-      String propertyName, ConfigRedactor redactor) {
+      String propertyName) {
     // skip writing if given property name is empty or null
     if (!Strings.isNullOrEmpty(propertyName)) {
       String value = properties.getProperty(propertyName);
@@ -3684,11 +3663,8 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
         propNode.appendChild(nameNode);
 
         Element valueNode = doc.createElement("value");
-        String propertyValue = properties.getProperty(propertyName);
-        if (redactor != null) {
-          propertyValue = redactor.redactXml(propertyName, propertyValue);
-        }
-        valueNode.appendChild(doc.createTextNode(propertyValue));
+        valueNode.appendChild(doc.createTextNode(
+            properties.getProperty(propertyName)));
         propNode.appendChild(valueNode);
 
         Element finalNode = doc.createElement("final");
@@ -3758,7 +3734,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * @param config the configuration
    * @param propertyName property name
    * @param out the Writer to write to
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
    * @throws IllegalArgumentException when property name is not
    *         empty and the property is not found in configuration
    **/
@@ -3805,7 +3781,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    *
    * @param config the configuration
    * @param out the Writer to write to
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
    */
   public static void dumpConfiguration(Configuration config,
       Writer out) throws IOException {
@@ -3834,7 +3810,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * @param jsonGen json writer
    * @param config configuration
    * @param name property name
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
    */
   private static void appendJSONProperty(JsonGenerator jsonGen,
       Configuration config, String name, ConfigRedactor redactor)
@@ -3916,10 +3892,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
     return this.quietmode;
   }
 
-  /** For debugging. List non-default properties to the terminal and exit.
-   * @param args the argument to be parsed.
-   * @throws Exception exception.
-   */
+  /** For debugging. List non-default properties to the terminal and exit. */
   public static void main(String[] args) throws Exception {
     new Configuration().writeXml(System.out);
   }
@@ -3953,8 +3926,8 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
   }
 
   /**
-   * get keys matching the the regex.
-   * @param regex the regex to match against.
+   * get keys matching the the regex
+   * @param regex
    * @return {@literal Map<String,String>} with matching keys
    */
   public Map<String,String> getValByRegex(String regex) {
@@ -3999,8 +3972,6 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
   /**
    * Returns whether or not a deprecated name has been warned. If the name is not
    * deprecated then always return false
-   * @param name proprties.
-   * @return true if name is a warned deprecation.
    */
   public static boolean hasWarnedDeprecation(String name) {
     DeprecationContext deprecations = deprecationContext.get();
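Illustration of getValByRegex, whose javadoc is trimmed above:

    Configuration conf = new Configuration();
    conf.set("fs.defaultFS", "hdfs://nn:8020");
    Map<String, String> fsProps = conf.getValByRegex("^fs\\..*");
    // returns every property whose key matches the pattern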
Configured.java
@@ -33,9 +33,7 @@ public class Configured implements Configurable {
     this(null);
   }
 
-  /** Construct a Configured.
-   * @param conf the Configuration object.
-   */
+  /** Construct a Configured. */
   public Configured(Configuration conf) {
     setConf(conf);
   }
 
Reconfigurable.java
@@ -33,9 +33,6 @@ public interface Reconfigurable extends Configurable {
    * (or null if it was not previously set). If newVal is null, set the property
    * to its default value;
    *
-   * @param property property name.
-   * @param newVal new value.
-   * @throws ReconfigurationException if there was an error applying newVal.
    * If the property cannot be changed, throw a
    * {@link ReconfigurationException}.
    */
@@ -48,14 +45,11 @@ public interface Reconfigurable extends Configurable {
    * If isPropertyReconfigurable returns true for a property,
    * then changeConf should not throw an exception when changing
    * this property.
-   * @param property property name.
-   * @return true if property reconfigurable; false if not.
    */
   boolean isPropertyReconfigurable(String property);
 
   /**
    * Return all the properties that can be changed at run time.
-   * @return reconfigurable propertys.
    */
   Collection<String> getReconfigurableProperties();
 }
ReconfigurableBase.java
@@ -79,7 +79,6 @@ public abstract class ReconfigurableBase
   /**
    * Construct a ReconfigurableBase with the {@link Configuration}
    * conf.
-   * @param conf configuration.
    */
   public ReconfigurableBase(Configuration conf) {
     super((conf == null) ? new Configuration() : conf);
@@ -92,7 +91,6 @@ public abstract class ReconfigurableBase
 
   /**
    * Create a new configuration.
-   * @return configuration.
    */
   protected abstract Configuration getNewConf();
 
@@ -164,7 +162,6 @@ public abstract class ReconfigurableBase
 
   /**
    * Start a reconfiguration task to reload configuration in background.
-   * @throws IOException raised on errors performing I/O.
    */
   public void startReconfigurationTask() throws IOException {
     synchronized (reconfigLock) {
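A minimal subclass sketch showing the contract these methods document (the class name, property, and the reconfigurePropertyImpl hook are illustrative of the base class, not part of this diff):

    public class ExampleServer extends ReconfigurableBase {
      public ExampleServer(Configuration conf) {
        super(conf);
      }

      @Override
      protected Configuration getNewConf() {
        return new Configuration();    // reload from default resources
      }

      @Override
      public Collection<String> getReconfigurableProperties() {
        return Collections.singletonList("example.refresh.interval");
      }

      @Override
      protected String reconfigurePropertyImpl(String property, String newVal)
          throws ReconfigurationException {
        // apply newVal; throw ReconfigurationException when it cannot change
        return newVal;
      }
    }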
ReconfigurationException.java
@@ -59,10 +59,6 @@ public class ReconfigurationException extends Exception {
 
   /**
    * Create a new instance of {@link ReconfigurationException}.
-   * @param property property name.
-   * @param newVal new value.
-   * @param oldVal old value.
-   * @param cause original exception.
    */
   public ReconfigurationException(String property,
       String newVal, String oldVal,
@@ -75,9 +71,6 @@ public class ReconfigurationException extends Exception {
 
   /**
    * Create a new instance of {@link ReconfigurationException}.
-   * @param property property name.
-   * @param newVal new value.
-   * @param oldVal old value.
    */
   public ReconfigurationException(String property,
       String newVal, String oldVal) {
@@ -89,7 +82,6 @@ public class ReconfigurationException extends Exception {
 
   /**
    * Get property that cannot be changed.
-   * @return property info.
    */
   public String getProperty() {
     return property;
@@ -97,7 +89,6 @@ public class ReconfigurationException extends Exception {
 
   /**
    * Get value to which property was supposed to be changed.
-   * @return new value.
    */
   public String getNewValue() {
     return newVal;
@@ -105,7 +96,6 @@ public class ReconfigurationException extends Exception {
 
   /**
    * Get old value of property that cannot be changed.
-   * @return old value.
    */
   public String getOldValue() {
     return oldVal;
ReconfigurationTaskStatus.java
@@ -42,8 +42,7 @@ public class ReconfigurationTaskStatus {
   /**
    * Return true if
    *   - A reconfiguration task has finished or
-   *   - an active reconfiguration task is running.
-   * @return true if startTime > 0; false if not.
+   *   - an active reconfiguration task is running
    */
   public boolean hasTask() {
     return startTime > 0;
@@ -52,7 +51,6 @@ public class ReconfigurationTaskStatus {
   /**
    * Return true if the latest reconfiguration task has finished and there is
    * no another active task running.
-   * @return true if endTime > 0; false if not.
    */
   public boolean stopped() {
     return endTime > 0;
CryptoCodec.java
@@ -146,17 +146,13 @@ public abstract class CryptoCodec implements Configurable, Closeable {
 
   /**
    * Create a {@link org.apache.hadoop.crypto.Encryptor}.
-   *
-   * @return Encryptor the encryptor.
-   * @throws GeneralSecurityException thrown if create encryptor error.
+   * @return Encryptor the encryptor
    */
   public abstract Encryptor createEncryptor() throws GeneralSecurityException;
 
   /**
    * Create a {@link org.apache.hadoop.crypto.Decryptor}.
-   *
    * @return Decryptor the decryptor
-   * @throws GeneralSecurityException thrown if create decryptor error.
    */
   public abstract Decryptor createDecryptor() throws GeneralSecurityException;
 
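How these factory methods are typically used (a sketch; the suite constant matches the checkCodec hunk further down):

    Configuration conf = new Configuration();
    CryptoCodec codec = CryptoCodec.getInstance(conf,
        CipherSuite.AES_CTR_NOPADDING);
    Encryptor encryptor = codec.createEncryptor();   // GeneralSecurityException
    Decryptor decryptor = codec.createDecryptor();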
CryptoInputStream.java
@@ -157,7 +157,7 @@ public class CryptoInputStream extends FilterInputStream implements
    * @param off the buffer offset.
    * @param len the maximum number of decrypted data bytes to read.
    * @return int the total number of decrypted data bytes read into the buffer.
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
    */
   @Override
   public int read(byte[] b, int off, int len) throws IOException {
CryptoOutputStream.java
@@ -146,7 +146,7 @@ public class CryptoOutputStream extends FilterOutputStream implements
    * @param b the data.
    * @param off the start offset in the data.
    * @param len the number of bytes to write.
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
    */
   @Override
   public synchronized void write(byte[] b, int off, int len) throws IOException {
@@ -240,16 +240,13 @@ public class CryptoOutputStream extends FilterOutputStream implements
     if (closed) {
       return;
     }
-    try {
       try {
         flush();
-      } finally {
       if (closeOutputStream) {
         super.close();
         codec.close();
       }
       freeBuffers();
-      }
     } finally {
       closed = true;
     }
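The close() hunk reverts a nesting change; the trunk shape being removed guarantees buffers are freed and the closed flag is set even when flush() throws. The pattern, reduced to a sketch:

    try {
      try {
        flush();                      // may throw
      } finally {
        releaseResources();           // hypothetical stand-in for the
      }                               // stream/codec cleanup in the diff
    } finally {
      closed = true;                  // always runs, even on failure
    }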
CryptoStreamUtils.java
@@ -39,11 +39,7 @@ public class CryptoStreamUtils {
   private static final Logger LOG =
       LoggerFactory.getLogger(CryptoStreamUtils.class);
 
-  /**
-   * Forcibly free the direct buffer.
-   *
-   * @param buffer buffer.
-   */
+  /** Forcibly free the direct buffer. */
   public static void freeDB(ByteBuffer buffer) {
     if (CleanerUtil.UNMAP_SUPPORTED) {
       try {
@@ -56,22 +52,13 @@ public class CryptoStreamUtils {
     }
   }
 
-  /**
-   * Read crypto buffer size.
-   *
-   * @param conf configuration.
-   * @return hadoop.security.crypto.buffer.size.
-   */
+  /** Read crypto buffer size */
   public static int getBufferSize(Configuration conf) {
     return conf.getInt(HADOOP_SECURITY_CRYPTO_BUFFER_SIZE_KEY,
         HADOOP_SECURITY_CRYPTO_BUFFER_SIZE_DEFAULT);
   }
 
-  /**
-   * AES/CTR/NoPadding or SM4/CTR/NoPadding is required.
-   *
-   * @param codec crypto codec.
-   */
+  /** AES/CTR/NoPadding or SM4/CTR/NoPadding is required. */
   public static void checkCodec(CryptoCodec codec) {
     if (codec.getCipherSuite() != CipherSuite.AES_CTR_NOPADDING &&
         codec.getCipherSuite() != CipherSuite.SM4_CTR_NOPADDING) {
@@ -80,13 +67,7 @@ public class CryptoStreamUtils {
     }
   }
 
-  /**
-   * Check and floor buffer size.
-   *
-   * @param codec crypto codec.
-   * @param bufferSize the size of the buffer to be used.
-   * @return calc buffer size.
-   */
+  /** Check and floor buffer size */
   public static int checkBufferSize(CryptoCodec codec, int bufferSize) {
     Preconditions.checkArgument(bufferSize >= MIN_BUFFER_SIZE,
         "Minimum value of buffer size is " + MIN_BUFFER_SIZE + ".");
@@ -97,10 +78,6 @@ public class CryptoStreamUtils {
   /**
    * If input stream is {@link org.apache.hadoop.fs.Seekable}, return it's
    * current position, otherwise return 0;
-   *
-   * @param in wrapper.
-   * @return current position, otherwise return 0.
-   * @throws IOException raised on errors performing I/O.
    */
   public static long getInputStreamOffset(InputStream in) throws IOException {
     if (in instanceof Seekable) {
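Usage sketch for getBufferSize, whose removed javadoc named the key it reads:

    Configuration conf = new Configuration();
    conf.setInt("hadoop.security.crypto.buffer.size", 16384);
    int size = CryptoStreamUtils.getBufferSize(conf);
    // falls back to HADOOP_SECURITY_CRYPTO_BUFFER_SIZE_DEFAULT when unset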
OpensslCipher.java
@@ -230,28 +230,29 @@ public final class OpensslCipher {
    * Finishes a multiple-part operation. The data is encrypted or decrypted,
    * depending on how this cipher was initialized.
    * <p>
+   *
    * The result is stored in the output buffer. Upon return, the output buffer's
    * position will have advanced by n, where n is the value returned by this
    * method; the output buffer's limit will not have changed.
-   * </p>
+   * <p>
+   *
    * If <code>output.remaining()</code> bytes are insufficient to hold the result,
    * a <code>ShortBufferException</code> is thrown.
    * <p>
+   *
    * Upon finishing, this method resets this cipher object to the state it was
    * in when previously initialized. That is, the object is available to encrypt
    * or decrypt more data.
-   * </p>
+   * <p>
+   *
    * If any exception is thrown, this cipher object need to be reset before it
    * can be used again.
    *
    * @param output the output ByteBuffer
    * @return int number of bytes stored in <code>output</code>
-   * @throws ShortBufferException if there is insufficient space in the output buffer.
-   * @throws IllegalBlockSizeException This exception is thrown when the length
-   *                                   of data provided to a block cipher is incorrect.
-   * @throws BadPaddingException This exception is thrown when a particular
-   *                             padding mechanism is expected for the input
-   *                             data but the data is not padded properly.
+   * @throws ShortBufferException
+   * @throws IllegalBlockSizeException
+   * @throws BadPaddingException
    */
   public int doFinal(ByteBuffer output) throws ShortBufferException,
       IllegalBlockSizeException, BadPaddingException {
KeyProvider.java
@@ -242,7 +242,7 @@ public abstract class KeyProvider implements Closeable {
     /**
      * Serialize the metadata to a set of bytes.
      * @return the serialized bytes
-     * @throws IOException raised on errors performing I/O.
+     * @throws IOException
      */
     protected byte[] serialize() throws IOException {
       ByteArrayOutputStream buffer = new ByteArrayOutputStream();
@@ -281,7 +281,7 @@ public abstract class KeyProvider implements Closeable {
     /**
      * Deserialize a new metadata object from a set of bytes.
      * @param bytes the serialized metadata
-     * @throws IOException raised on errors performing I/O.
+     * @throws IOException
      */
     protected Metadata(byte[] bytes) throws IOException {
       String cipher = null;
@@ -450,7 +450,7 @@ public abstract class KeyProvider implements Closeable {
    * when decrypting data.
    * @param versionName the name of a specific version of the key
    * @return the key material
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
    */
   public abstract KeyVersion getKeyVersion(String versionName
       ) throws IOException;
@@ -458,15 +458,14 @@ public abstract class KeyProvider implements Closeable {
   /**
    * Get the key names for all keys.
    * @return the list of key names
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
    */
   public abstract List<String> getKeys() throws IOException;
 
   /**
    * Get key metadata in bulk.
    * @param names the names of the keys to get
-   * @throws IOException raised on errors performing I/O.
-   * @return Metadata Array.
+   * @throws IOException
    */
   public Metadata[] getKeysMetadata(String... names) throws IOException {
     Metadata[] result = new Metadata[names.length];
@@ -478,10 +477,8 @@ public abstract class KeyProvider implements Closeable {
 
   /**
    * Get the key material for all versions of a specific key name.
-   *
-   * @param name the base name of the key.
    * @return the list of key material
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
    */
   public abstract List<KeyVersion> getKeyVersions(String name) throws IOException;
 
@@ -491,7 +488,7 @@ public abstract class KeyProvider implements Closeable {
    * @param name the base name of the key
    * @return the version name of the current version of the key or null if the
    *         key version doesn't exist
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
    */
   public KeyVersion getCurrentKey(String name) throws IOException {
     Metadata meta = getMetadata(name);
@@ -505,7 +502,7 @@ public abstract class KeyProvider implements Closeable {
    * Get metadata about the key.
    * @param name the basename of the key
    * @return the key's metadata or null if the key doesn't exist
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
    */
   public abstract Metadata getMetadata(String name) throws IOException;
 
@@ -515,7 +512,7 @@ public abstract class KeyProvider implements Closeable {
    * @param material the key material for the first version of the key.
    * @param options the options for the new key.
    * @return the version name of the first version of the key.
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
    */
   public abstract KeyVersion createKey(String name, byte[] material,
       Options options) throws IOException;
@@ -540,7 +537,7 @@ public abstract class KeyProvider implements Closeable {
    * @param size length of the key.
    * @param algorithm algorithm to use for generating the key.
    * @return the generated key.
-   * @throws NoSuchAlgorithmException no such algorithm exception.
+   * @throws NoSuchAlgorithmException
    */
   protected byte[] generateKey(int size, String algorithm)
       throws NoSuchAlgorithmException {
@@ -561,8 +558,8 @@ public abstract class KeyProvider implements Closeable {
    * @param name the base name of the key
    * @param options the options for the new key.
    * @return the version name of the first version of the key.
-   * @throws IOException raised on errors performing I/O.
-   * @throws NoSuchAlgorithmException no such algorithm exception.
+   * @throws IOException
+   * @throws NoSuchAlgorithmException
    */
   public KeyVersion createKey(String name, Options options)
       throws NoSuchAlgorithmException, IOException {
@@ -573,7 +570,7 @@ public abstract class KeyProvider implements Closeable {
   /**
    * Delete the given key.
    * @param name the name of the key to delete
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
    */
   public abstract void deleteKey(String name) throws IOException;
 
@@ -582,7 +579,7 @@ public abstract class KeyProvider implements Closeable {
    * @param name the basename of the key
    * @param material the new key material
    * @return the name of the new version of the key
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
   */
   public abstract KeyVersion rollNewVersion(String name,
       byte[] material
@@ -604,10 +601,7 @@ public abstract class KeyProvider implements Closeable {
    *
    * @param name the basename of the key
    * @return the name of the new version of the key
-   * @throws IOException raised on errors performing I/O.
-   * @throws NoSuchAlgorithmException This exception is thrown when a particular
-   *                                  cryptographic algorithm is requested
-   *                                  but is not available in the environment.
+   * @throws IOException
    */
   public KeyVersion rollNewVersion(String name) throws NoSuchAlgorithmException,
       IOException {
@@ -626,7 +620,7 @@ public abstract class KeyProvider implements Closeable {
    * version of the given key.
    *
    * @param name the basename of the key
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
   */
   public void invalidateCache(String name) throws IOException {
     // NOP
@@ -634,19 +628,18 @@ public abstract class KeyProvider implements Closeable {
 
   /**
    * Ensures that any changes to the keys are written to persistent store.
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
   */
   public abstract void flush() throws IOException;
 
   /**
-   * Split the versionName in to a base name. Converts "/aaa/bbb@3" to
+   * Split the versionName in to a base name. Converts "/aaa/bbb/3" to
    * "/aaa/bbb".
    * @param versionName the version name to split
    * @return the base name of the key
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
   */
   public static String getBaseName(String versionName) throws IOException {
-    Objects.requireNonNull(versionName, "VersionName cannot be null");
    int div = versionName.lastIndexOf('@');
    if (div == -1) {
      throw new IOException("No version in key path " + versionName);
@@ -667,11 +660,9 @@ public abstract class KeyProvider implements Closeable {
 
   /**
    * Find the provider with the given key.
-   *
    * @param providerList the list of providers
-   * @param keyName the key name we are looking for.
+   * @param keyName the key name we are looking for
    * @return the KeyProvider that has the key
-   * @throws IOException raised on errors performing I/O.
   */
   public static KeyProvider findProvider(List<KeyProvider> providerList,
       String keyName) throws IOException {
@@ -689,7 +680,7 @@ public abstract class KeyProvider implements Closeable {
    * means. If true, the password should be provided by the caller using
    * setPassword().
    * @return Whether or not the provider requires a password
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
   */
   public boolean needsPassword() throws IOException {
     return false;
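Worth noting on the getBaseName hunk: the '@' is what the code actually splits on, so trunk's "/aaa/bbb@3" wording matches the implementation and the reverted "/aaa/bbb/3" is the stale one:

    String base = KeyProvider.getBaseName("/aaa/bbb@3");   // "/aaa/bbb"
    KeyProvider.getBaseName("/aaa/bbb");   // throws IOException: no version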
KeyProviderCryptoExtension.java
@@ -25,6 +25,10 @@ import java.security.SecureRandom;
 import java.util.List;
 import java.util.ListIterator;
 
+import javax.crypto.Cipher;
+import javax.crypto.spec.IvParameterSpec;
+import javax.crypto.spec.SecretKeySpec;
+
 import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.crypto.CryptoCodec;
@@ -174,7 +178,6 @@ public class KeyProviderCryptoExtension extends
    * Calls to this method allows the underlying KeyProvider to warm-up any
    * implementation specific caches used to store the Encrypted Keys.
    * @param keyNames Array of Key Names
-   * @throws IOException thrown if the key material could not be encrypted.
    */
   public void warmUpEncryptedKeys(String... keyNames)
       throws IOException;
@@ -471,9 +474,8 @@ public class KeyProviderCryptoExtension extends
   /**
    * This constructor is to be used by sub classes that provide
    * delegating/proxying functionality to the {@link KeyProviderCryptoExtension}
-   *
-   * @param keyProvider key provider.
-   * @param extension crypto extension.
+   * @param keyProvider
+   * @param extension
   */
   protected KeyProviderCryptoExtension(KeyProvider keyProvider,
       CryptoExtension extension) {
@@ -484,7 +486,6 @@ public class KeyProviderCryptoExtension extends
    * Notifies the Underlying CryptoExtension implementation to warm up any
    * implementation specific caches for the specified KeyVersions
    * @param keyNames Arrays of key Names
-   * @throws IOException raised on errors performing I/O.
   */
   public void warmUpEncryptedKeys(String... keyNames)
       throws IOException {
@@ -556,7 +557,7 @@ public class KeyProviderCryptoExtension extends
    * Calls {@link CryptoExtension#drain(String)} for the given key name on the
    * underlying {@link CryptoExtension}.
    *
-   * @param keyName key name.
+   * @param keyName
   */
   public void drain(String keyName) {
     getExtension().drain(keyName);
KeyProviderDelegationTokenExtension.java
@@ -48,14 +48,14 @@ public class KeyProviderDelegationTokenExtension extends
    * Renews the given token.
    * @param token The token to be renewed.
    * @return The token's lifetime after renewal, or 0 if it can't be renewed.
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
   */
   long renewDelegationToken(final Token<?> token) throws IOException;
 
   /**
    * Cancels the given token.
    * @param token The token to be cancelled.
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
   */
   Void cancelDelegationToken(final Token<?> token) throws IOException;
 
KeyShell.java
@@ -75,7 +75,7 @@ public class KeyShell extends CommandShell {
    * </pre>
    * @param args Command line arguments.
    * @return 0 on success, 1 on failure.
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
   */
   @Override
   protected int init(String[] args) throws IOException {
@@ -547,7 +547,7 @@ public class KeyShell extends CommandShell {
    * success and 1 for failure.
    *
    * @param args Command line arguments.
-   * @throws Exception raised on errors performing I/O.
+   * @throws Exception
   */
   public static void main(String[] args) throws Exception {
     int res = ToolRunner.run(new Configuration(), new KeyShell(), args);
@ -21,7 +21,6 @@ package org.apache.hadoop.crypto.key.kms;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.io.InterruptedIOException;
|
import java.io.InterruptedIOException;
|
||||||
import java.net.ConnectException;
|
import java.net.ConnectException;
|
||||||
import java.net.SocketException;
|
|
||||||
import java.net.URI;
|
import java.net.URI;
|
||||||
import java.security.GeneralSecurityException;
|
import java.security.GeneralSecurityException;
|
||||||
import java.security.NoSuchAlgorithmException;
|
import java.security.NoSuchAlgorithmException;
|
||||||
|
@ -30,7 +29,7 @@ import java.util.Collections;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.concurrent.atomic.AtomicInteger;
|
import java.util.concurrent.atomic.AtomicInteger;
|
||||||
|
|
||||||
import javax.net.ssl.SSLException;
|
import javax.net.ssl.SSLHandshakeException;
|
||||||
|
|
||||||
import org.apache.hadoop.conf.Configuration;
|
import org.apache.hadoop.conf.Configuration;
|
||||||
import org.apache.hadoop.crypto.key.KeyProvider;
|
import org.apache.hadoop.crypto.key.KeyProvider;
|
||||||
|
@ -183,10 +182,10 @@ public class LoadBalancingKMSClientProvider extends KeyProvider implements
|
||||||
} catch (IOException ioe) {
|
} catch (IOException ioe) {
|
||||||
LOG.warn("KMS provider at [{}] threw an IOException: ",
|
LOG.warn("KMS provider at [{}] threw an IOException: ",
|
||||||
provider.getKMSUrl(), ioe);
|
provider.getKMSUrl(), ioe);
|
||||||
// SSLException can occur here because of lost connection
|
// SSLHandshakeException can occur here because of lost connection
|
||||||
// with the KMS server, creating a ConnectException from it,
|
// with the KMS server, creating a ConnectException from it,
|
||||||
// so that the FailoverOnNetworkExceptionRetry policy will retry
|
// so that the FailoverOnNetworkExceptionRetry policy will retry
|
||||||
if (ioe instanceof SSLException || ioe instanceof SocketException) {
|
if (ioe instanceof SSLHandshakeException) {
|
||||||
Exception cause = ioe;
|
Exception cause = ioe;
|
||||||
ioe = new ConnectException("SSLHandshakeException: "
|
ioe = new ConnectException("SSLHandshakeException: "
|
||||||
+ cause.getMessage());
|
+ cause.getMessage());
|
||||||
|
|
|
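Reviewer note: unlike the javadoc-only hunks around it, this LoadBalancingKMSClientProvider hunk changes behavior. Trunk treated any SSLException or SocketException as a retryable network failure; the revert narrows the check back to SSLHandshakeException only. The wrapping pattern itself, reduced to a minimal self-contained sketch (the surrounding failover/retry wiring is assumed, not shown):

import java.io.IOException;
import java.net.ConnectException;
import javax.net.ssl.SSLHandshakeException;

public final class KmsFailoverSketch {
  /**
   * Re-wrap an SSL handshake failure as a ConnectException so that a
   * network-exception retry policy treats it as a connectivity problem
   * and fails over to the next KMS instance instead of aborting.
   */
  static IOException asRetryable(IOException ioe) {
    if (ioe instanceof SSLHandshakeException) {
      return new ConnectException("SSLHandshakeException: " + ioe.getMessage());
    }
    return ioe; // anything else propagates unchanged
  }
}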
@@ -63,7 +63,7 @@ public class ValueQueue <E> {
    * @param keyName Key name
    * @param keyQueue Queue that needs to be filled
    * @param numValues number of Values to be added to the queue.
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
    */
   public void fillQueueForKey(String keyName,
       Queue<E> keyQueue, int numValues) throws IOException;

@@ -268,7 +268,7 @@ public class ValueQueue <E> {
    * Initializes the Value Queues for the provided keys by calling the
    * fill Method with "numInitValues" values
    * @param keyNames Array of key Names
-   * @throws ExecutionException executionException.
+   * @throws ExecutionException
    */
   public void initializeQueuesForKeys(String... keyNames)
       throws ExecutionException {

@@ -285,8 +285,8 @@ public class ValueQueue <E> {
    * function to add 1 value to Queue and then drain it.
    * @param keyName String key name
    * @return E the next value in the Queue
-   * @throws IOException raised on errors performing I/O.
-   * @throws ExecutionException executionException.
+   * @throws IOException
+   * @throws ExecutionException
    */
   public E getNext(String keyName)
       throws IOException, ExecutionException {

@@ -344,8 +344,8 @@ public class ValueQueue <E> {
    * @param keyName String key name
    * @param num Minimum number of values to return.
    * @return {@literal List<E>} values returned
-   * @throws IOException raised on errors performing I/O.
-   * @throws ExecutionException execution exception.
+   * @throws IOException
+   * @throws ExecutionException
    */
   public List<E> getAtMost(String keyName, int num) throws IOException,
       ExecutionException {
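The ValueQueue hunks above only trim javadoc, but the contract they document is the class's whole point: a refiller callback pushes numValues pre-computed entries into a per-key queue, getNext() pops one (scheduling an asynchronous refill), and getAtMost() returns up to num entries. A toy refiller matching the fillQueueForKey shape shown in the first hunk; the string values are hypothetical stand-ins for expensive objects such as encrypted key versions:

import java.io.IOException;
import java.util.Queue;

class CounterRefiller {
  // Same shape as fillQueueForKey(String, Queue<E>, int) above:
  // generate numValues fresh values for keyName and enqueue them.
  public void fillQueueForKey(String keyName, Queue<String> keyQueue,
      int numValues) throws IOException {
    for (int i = 0; i < numValues; i++) {
      keyQueue.add(keyName + "#" + i); // stand-in for an expensive fetch
    }
  }
}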
@@ -272,7 +272,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * @param supportedScheme the scheme supported by the implementor
    * @param authorityNeeded if true then theURI must have authority, if false
    *          then the URI must have null authority.
-   * @param defaultPort default port to use if port is not specified in the URI.
+   *
    * @throws URISyntaxException <code>uri</code> has syntax error
    */
   public AbstractFileSystem(final URI uri, final String supportedScheme,

@@ -283,10 +283,9 @@ public abstract class AbstractFileSystem implements PathCapabilities {
   }

   /**
-   * Check that the Uri's scheme matches.
-   *
-   * @param uri name URI of the FS.
-   * @param supportedScheme supported scheme.
+   * Check that the Uri's scheme matches
+   * @param uri
+   * @param supportedScheme
    */
   public void checkScheme(URI uri, String supportedScheme) {
     String scheme = uri.getScheme();

@@ -363,7 +362,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * If the path is fully qualified URI, then its scheme and authority
    * matches that of this file system. Otherwise the path must be
    * slash-relative name.
-   * @param path the path.
+   *
    * @throws InvalidPathException if the path is invalid
    */
   public void checkPath(Path path) {
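To make the checkPath() contract above concrete: paths are accepted either fully qualified against this filesystem or slash-relative. A hedged illustration, assuming an AbstractFileSystem bound to hdfs://nn1:8020 (outcomes follow the javadoc, not a tested run):

import org.apache.hadoop.fs.AbstractFileSystem;
import org.apache.hadoop.fs.Path;

class CheckPathExamples {
  void examples(AbstractFileSystem fs) {
    fs.checkPath(new Path("/user/alice"));                 // OK: slash-relative
    fs.checkPath(new Path("hdfs://nn1:8020/user/alice"));  // OK: matches this FS
    // fs.checkPath(new Path("s3a://bucket/key"));  // throws InvalidPathException
  }
}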
@@ -432,7 +431,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {

   /**
    * Make the path fully qualified to this file system
-   * @param path the path.
+   * @param path
    * @return the qualified path
    */
   public Path makeQualified(Path path) {

@@ -497,9 +496,9 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * through any internal symlinks or mount point
    * @param p path to be resolved
    * @return fully qualified path
-   * @throws FileNotFoundException when file not find throw.
-   * @throws AccessControlException when accees control error throw.
-   * @throws IOException raised on errors performing I/O.
+   * @throws FileNotFoundException
+   * @throws AccessControlException
+   * @throws IOException
    * @throws UnresolvedLinkException if symbolic link on path cannot be
    *           resolved internally
    */

@@ -514,18 +513,6 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * {@link FileContext#create(Path, EnumSet, Options.CreateOpts...)} except
    * that the Path f must be fully qualified and the permission is absolute
    * (i.e. umask has been applied).
-   *
-   * @param f the path.
-   * @param createFlag create_flag.
-   * @param opts create ops.
-   * @throws AccessControlException access controll exception.
-   * @throws FileAlreadyExistsException file already exception.
-   * @throws FileNotFoundException file not found exception.
-   * @throws ParentNotDirectoryException parent not dir exception.
-   * @throws UnsupportedFileSystemException unsupported file system exception.
-   * @throws UnresolvedLinkException unresolved link exception.
-   * @throws IOException raised on errors performing I/O.
-   * @return output stream.
    */
   public final FSDataOutputStream create(final Path f,
       final EnumSet<CreateFlag> createFlag, Options.CreateOpts... opts)

@@ -643,24 +630,6 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link #create(Path, EnumSet, Options.CreateOpts...)} except that the opts
    * have been declared explicitly.
-   *
-   * @param f the path.
-   * @param flag create flag.
-   * @param absolutePermission absolute permission.
-   * @param bufferSize buffer size.
-   * @param replication replications.
-   * @param blockSize block size.
-   * @param progress progress.
-   * @param checksumOpt check sum opt.
-   * @param createParent create parent.
-   * @throws AccessControlException access control exception.
-   * @throws FileAlreadyExistsException file already exists exception.
-   * @throws FileNotFoundException file not found exception.
-   * @throws ParentNotDirectoryException parent not directory exception.
-   * @throws UnsupportedFileSystemException unsupported filesystem exception.
-   * @throws UnresolvedLinkException unresolved link exception.
-   * @throws IOException raised on errors performing I/O.
-   * @return output stream.
    */
   public abstract FSDataOutputStream createInternal(Path f,
       EnumSet<CreateFlag> flag, FsPermission absolutePermission,

@@ -675,14 +644,6 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * {@link FileContext#mkdir(Path, FsPermission, boolean)} except that the Path
    * f must be fully qualified and the permission is absolute (i.e.
    * umask has been applied).
-   * @param dir directory.
-   * @param permission permission.
-   * @param createParent create parent flag.
-   * @throws AccessControlException access control exception.
-   * @throws FileAlreadyExistsException file already exists exception.
-   * @throws FileNotFoundException file not found exception.
-   * @throws UnresolvedLinkException unresolved link exception.
-   * @throws IOException raised on errors performing I/O.
    */
   public abstract void mkdir(final Path dir, final FsPermission permission,
       final boolean createParent) throws AccessControlException,

@@ -693,14 +654,6 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#delete(Path, boolean)} except that Path f must be for
    * this file system.
-   *
-   * @param f the path.
-   * @param recursive recursive flag.
-   * @throws AccessControlException access control exception.
-   * @throws FileNotFoundException file not found exception.
-   * @throws UnresolvedLinkException unresolved link exception.
-   * @throws IOException raised on errors performing I/O.
-   * @return if successfully deleted success true, not false.
    */
   public abstract boolean delete(final Path f, final boolean recursive)
       throws AccessControlException, FileNotFoundException,

@@ -710,13 +663,6 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#open(Path)} except that Path f must be for this
    * file system.
-   *
-   * @param f the path.
-   * @throws AccessControlException access control exception.
-   * @throws FileNotFoundException file not found exception.
-   * @throws UnresolvedLinkException unresolved link exception.
-   * @throws IOException raised on errors performing I/O.
-   * @return input stream.
    */
   public FSDataInputStream open(final Path f) throws AccessControlException,
       FileNotFoundException, UnresolvedLinkException, IOException {

@@ -727,14 +673,6 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#open(Path, int)} except that Path f must be for this
    * file system.
-   *
-   * @param f the path.
-   * @param bufferSize buffer size.
-   * @throws AccessControlException access control exception.
-   * @throws FileNotFoundException file not found exception.
-   * @throws UnresolvedLinkException unresolved link exception.
-   * @throws IOException raised on errors performing I/O.
-   * @return if successfully open success true, not false.
    */
   public abstract FSDataInputStream open(final Path f, int bufferSize)
       throws AccessControlException, FileNotFoundException,

@@ -744,14 +682,6 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#truncate(Path, long)} except that Path f must be for
    * this file system.
-   *
-   * @param f the path.
-   * @param newLength new length.
-   * @throws AccessControlException access control exception.
-   * @throws FileNotFoundException file not found exception.
-   * @throws UnresolvedLinkException unresolved link exception.
-   * @throws IOException raised on errors performing I/O.
-   * @return if successfully truncate success true, not false.
    */
   public boolean truncate(Path f, long newLength)
       throws AccessControlException, FileNotFoundException,

@@ -764,14 +694,6 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#setReplication(Path, short)} except that Path f must be
    * for this file system.
-   *
-   * @param f the path.
-   * @param replication replication.
-   * @return if successfully set replication success true, not false.
-   * @throws AccessControlException access control exception.
-   * @throws FileNotFoundException file not found exception.
-   * @throws UnresolvedLinkException unresolved link exception.
-   * @throws IOException raised on errors performing I/O.
    */
   public abstract boolean setReplication(final Path f,
       final short replication) throws AccessControlException,

@@ -781,16 +703,6 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#rename(Path, Path, Options.Rename...)} except that Path
    * f must be for this file system.
-   *
-   * @param src src.
-   * @param dst dst.
-   * @param options options.
-   * @throws AccessControlException access control exception.
-   * @throws FileAlreadyExistsException file already exists exception.
-   * @throws FileNotFoundException file not found exception.
-   * @throws ParentNotDirectoryException parent not directory exception.
-   * @throws UnresolvedLinkException unresolved link exception.
-   * @throws IOException raised on errors performing I/O.
    */
   public final void rename(final Path src, final Path dst,
       final Options.Rename... options) throws AccessControlException,

@@ -815,15 +727,6 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * File systems that do not have a built in overwrite need implement only this
    * method and can take advantage of the default impl of the other
    * {@link #renameInternal(Path, Path, boolean)}
-   *
-   * @param src src.
-   * @param dst dst.
-   * @throws AccessControlException access control exception.
-   * @throws FileAlreadyExistsException file already exists exception.
-   * @throws FileNotFoundException file not found exception.
-   * @throws ParentNotDirectoryException parent not directory exception.
-   * @throws UnresolvedLinkException unresolved link exception.
-   * @throws IOException raised on errors performing I/O.
    */
   public abstract void renameInternal(final Path src, final Path dst)
       throws AccessControlException, FileAlreadyExistsException,

@@ -834,16 +737,6 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#rename(Path, Path, Options.Rename...)} except that Path
    * f must be for this file system.
-   *
-   * @param src src.
-   * @param dst dst.
-   * @param overwrite overwrite flag.
-   * @throws AccessControlException access control exception.
-   * @throws FileAlreadyExistsException file already exists exception.
-   * @throws FileNotFoundException file not found exception.
-   * @throws ParentNotDirectoryException parent not directory exception.
-   * @throws UnresolvedLinkException unresolved link exception.
-   * @throws IOException raised on errors performing I/O.
    */
   public void renameInternal(final Path src, final Path dst,
       boolean overwrite) throws AccessControlException,

@@ -907,12 +800,6 @@ public abstract class AbstractFileSystem implements PathCapabilities {
   /**
    * The specification of this method matches that of
    * {@link FileContext#createSymlink(Path, Path, boolean)};
-   *
-   * @param target target.
-   * @param link link.
-   * @param createParent create parent.
-   * @throws IOException raised on errors performing I/O.
-   * @throws UnresolvedLinkException unresolved link exception.
    */
   public void createSymlink(final Path target, final Path link,
       final boolean createParent) throws IOException, UnresolvedLinkException {

@@ -923,8 +810,6 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * Partially resolves the path. This is used during symlink resolution in
    * {@link FSLinkResolver}, and differs from the similarly named method
    * {@link FileContext#getLinkTarget(Path)}.
-   * @param f the path.
-   * @return target path.
    * @throws IOException subclass implementations may throw IOException
    */
   public Path getLinkTarget(final Path f) throws IOException {

@@ -937,13 +822,6 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#setPermission(Path, FsPermission)} except that Path f
    * must be for this file system.
-   *
-   * @param f the path.
-   * @param permission permission.
-   * @throws AccessControlException access control exception.
-   * @throws FileNotFoundException file not found exception.
-   * @throws UnresolvedLinkException unresolved link exception.
-   * @throws IOException raised on errors performing I/O.
    */
   public abstract void setPermission(final Path f,
       final FsPermission permission) throws AccessControlException,

@@ -953,14 +831,6 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#setOwner(Path, String, String)} except that Path f must
    * be for this file system.
-   *
-   * @param f the path.
-   * @param username username.
-   * @param groupname groupname.
-   * @throws AccessControlException access control exception.
-   * @throws FileNotFoundException file not found exception.
-   * @throws UnresolvedLinkException unresolved link exception.
-   * @throws IOException raised on errors performing I/O.
    */
   public abstract void setOwner(final Path f, final String username,
       final String groupname) throws AccessControlException,

@@ -970,14 +840,6 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#setTimes(Path, long, long)} except that Path f must be
    * for this file system.
-   *
-   * @param f the path.
-   * @param mtime modify time.
-   * @param atime access time.
-   * @throws AccessControlException access control exception.
-   * @throws FileNotFoundException file not found exception.
-   * @throws UnresolvedLinkException unresolved link exception.
-   * @throws IOException raised on errors performing I/O.
    */
   public abstract void setTimes(final Path f, final long mtime,
       final long atime) throws AccessControlException, FileNotFoundException,

@@ -987,13 +849,6 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#getFileChecksum(Path)} except that Path f must be for
    * this file system.
-   *
-   * @param f the path.
-   * @throws AccessControlException access control exception.
-   * @throws FileNotFoundException file not found exception.
-   * @throws UnresolvedLinkException unresolved link exception.
-   * @throws IOException raised on errors performing I/O.
-   * @return File Check sum.
    */
   public abstract FileChecksum getFileChecksum(final Path f)
       throws AccessControlException, FileNotFoundException,

@@ -1004,13 +859,6 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * {@link FileContext#getFileStatus(Path)}
    * except that an UnresolvedLinkException may be thrown if a symlink is
    * encountered in the path.
-   *
-   * @param f the path.
-   * @throws AccessControlException access control exception.
-   * @throws FileNotFoundException file not found exception.
-   * @throws UnresolvedLinkException unresolved link exception.
-   * @throws IOException raised on errors performing I/O.
-   * @return File Status
    */
   public abstract FileStatus getFileStatus(final Path f)
       throws AccessControlException, FileNotFoundException,

@@ -1022,8 +870,8 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * In some FileSystem implementations such as HDFS metadata
    * synchronization is essential to guarantee consistency of read requests
    * particularly in HA setting.
-   * @throws IOException raised on errors performing I/O.
-   * @throws UnsupportedOperationException Unsupported Operation Exception.
+   * @throws IOException
+   * @throws UnsupportedOperationException
    */
   public void msync() throws IOException, UnsupportedOperationException {
     throw new UnsupportedOperationException(getClass().getCanonicalName() +
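The msync() hunk above is worth pausing on: in HDFS HA with observer reads, a client calls msync() so that a subsequent read observes metadata at least as fresh as its last write, and the default implementation simply throws. A hedged usage sketch reflecting exactly that contract:

import java.io.IOException;
import org.apache.hadoop.fs.AbstractFileSystem;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;

class MsyncSketch {
  FileStatus freshStatus(AbstractFileSystem fs, Path p) throws IOException {
    try {
      fs.msync(); // sync metadata view before reading (HDFS HA/observer case)
    } catch (UnsupportedOperationException ignored) {
      // most filesystems keep the default shown above; fall back to a plain read
    }
    return fs.getFileStatus(p);
  }
}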
@@ -1035,13 +883,6 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * {@link FileContext#access(Path, FsAction)}
    * except that an UnresolvedLinkException may be thrown if a symlink is
    * encountered in the path.
-   *
-   * @param path the path.
-   * @param mode fsaction mode.
-   * @throws AccessControlException access control exception.
-   * @throws FileNotFoundException file not found exception.
-   * @throws UnresolvedLinkException unresolved link exception.
-   * @throws IOException raised on errors performing I/O.
    */
   @InterfaceAudience.LimitedPrivate({"HDFS", "Hive"})
   public void access(Path path, FsAction mode) throws AccessControlException,

@@ -1056,13 +897,6 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * encountered in the path leading up to the final path component.
    * If the file system does not support symlinks then the behavior is
    * equivalent to {@link AbstractFileSystem#getFileStatus(Path)}.
-   *
-   * @param f the path.
-   * @throws AccessControlException access control exception.
-   * @throws FileNotFoundException file not found exception.
-   * @throws UnsupportedFileSystemException UnSupported File System Exception.
-   * @throws IOException raised on errors performing I/O.
-   * @return file status.
    */
   public FileStatus getFileLinkStatus(final Path f)
       throws AccessControlException, FileNotFoundException,

@@ -1074,15 +908,6 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#getFileBlockLocations(Path, long, long)} except that
    * Path f must be for this file system.
-   *
-   * @param f the path.
-   * @param start start.
-   * @param len length.
-   * @throws AccessControlException access control exception.
-   * @throws FileNotFoundException file not found exception.
-   * @throws UnresolvedLinkException unresolved link exception.
-   * @throws IOException raised on errors performing I/O.
-   * @return BlockLocation Array.
    */
   public abstract BlockLocation[] getFileBlockLocations(final Path f,
       final long start, final long len) throws AccessControlException,

@@ -1092,13 +917,6 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#getFsStatus(Path)} except that Path f must be for this
    * file system.
-   *
-   * @param f the path.
-   * @throws AccessControlException access control exception.
-   * @throws FileNotFoundException file not found exception.
-   * @throws UnresolvedLinkException unresolved link exception.
-   * @throws IOException raised on errors performing I/O.
-   * @return Fs Status.
    */
   public FsStatus getFsStatus(final Path f) throws AccessControlException,
       FileNotFoundException, UnresolvedLinkException, IOException {

@@ -1109,11 +927,6 @@ public abstract class AbstractFileSystem implements PathCapabilities {
   /**
    * The specification of this method matches that of
    * {@link FileContext#getFsStatus(Path)}.
-   *
-   * @throws AccessControlException access control exception.
-   * @throws FileNotFoundException file not found exception.
-   * @throws IOException raised on errors performing I/O.
-   * @return Fs Status.
    */
   public abstract FsStatus getFsStatus() throws AccessControlException,
       FileNotFoundException, IOException;

@@ -1122,13 +935,6 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#listStatus(Path)} except that Path f must be for this
    * file system.
-   *
-   * @param f path.
-   * @throws AccessControlException access control exception.
-   * @throws FileNotFoundException file not found exception.
-   * @throws UnresolvedLinkException unresolved link exception.
-   * @throws IOException raised on errors performing I/O.
-   * @return FileStatus Iterator.
    */
   public RemoteIterator<FileStatus> listStatusIterator(final Path f)
       throws AccessControlException, FileNotFoundException,

@@ -1161,13 +967,6 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * will have different formats for replicated and erasure coded file. Please
    * refer to {@link FileSystem#getFileBlockLocations(FileStatus, long, long)}
    * for more details.
-   *
-   * @param f the path.
-   * @throws AccessControlException access control exception.
-   * @throws FileNotFoundException file not found exception.
-   * @throws UnresolvedLinkException unresolved link exception.
-   * @throws IOException raised on errors performing I/O.
-   * @return FileStatus Iterator.
    */
   public RemoteIterator<LocatedFileStatus> listLocatedStatus(final Path f)
       throws AccessControlException, FileNotFoundException,

@@ -1200,12 +999,6 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext.Util#listStatus(Path)} except that Path f must be
    * for this file system.
-   * @param f the path.
-   * @throws AccessControlException access control exception.
-   * @throws FileNotFoundException file not found exception.
-   * @throws UnresolvedLinkException unresolved link exception.
-   * @throws IOException raised on errors performing I/O.
-   * @return FileStatus Iterator.
    */
   public abstract FileStatus[] listStatus(final Path f)
       throws AccessControlException, FileNotFoundException,

@@ -1214,8 +1007,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
   /**
    * @return an iterator over the corrupt files under the given path
    * (may contain duplicates if a file has more than one corrupt block)
-   * @param path the path.
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
    */
   public RemoteIterator<Path> listCorruptFileBlocks(Path path)
       throws IOException {

@@ -1228,10 +1020,6 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#setVerifyChecksum(boolean, Path)} except that Path f
    * must be for this file system.
-   *
-   * @param verifyChecksum verify check sum flag.
-   * @throws AccessControlException access control exception.
-   * @throws IOException raised on errors performing I/O.
    */
   public abstract void setVerifyChecksum(final boolean verifyChecksum)
       throws AccessControlException, IOException;

@@ -1253,7 +1041,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * @param renewer the account name that is allowed to renew the token.
    * @return List of delegation tokens.
    *         If delegation tokens not supported then return a list of size zero.
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
    */
   @InterfaceAudience.LimitedPrivate( { "HDFS", "MapReduce" })
   public List<Token<?>> getDelegationTokens(String renewer) throws IOException {

@@ -1353,7 +1141,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * @param path Path to modify
    * @param name xattr name.
    * @param value xattr value.
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
    */
   public void setXAttr(Path path, String name, byte[] value)
       throws IOException {

@@ -1372,7 +1160,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * @param name xattr name.
    * @param value xattr value.
    * @param flag xattr set flag
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
    */
   public void setXAttr(Path path, String name, byte[] value,
       EnumSet<XAttrSetFlag> flag) throws IOException {

@@ -1390,7 +1178,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * @param path Path to get extended attribute
    * @param name xattr name.
    * @return byte[] xattr value.
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
    */
   public byte[] getXAttr(Path path, String name) throws IOException {
     throw new UnsupportedOperationException(getClass().getSimpleName()

@@ -1408,7 +1196,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    *
    * @return {@literal Map<String, byte[]>} describing the XAttrs of the file
    * or directory
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
    */
   public Map<String, byte[]> getXAttrs(Path path) throws IOException {
     throw new UnsupportedOperationException(getClass().getSimpleName()

@@ -1426,7 +1214,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * @param names XAttr names.
    * @return {@literal Map<String, byte[]>} describing the XAttrs of the file
    * or directory
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
    */
   public Map<String, byte[]> getXAttrs(Path path, List<String> names)
       throws IOException {

@@ -1444,7 +1232,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * @param path Path to get extended attributes
    * @return {@literal Map<String, byte[]>} describing the XAttrs of the file
    * or directory
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
    */
   public List<String> listXAttrs(Path path)
       throws IOException {

@@ -1461,7 +1249,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    *
    * @param path Path to remove extended attribute
    * @param name xattr name
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
    */
   public void removeXAttr(Path path, String name) throws IOException {
     throw new UnsupportedOperationException(getClass().getSimpleName()
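All the xattr accessors above share one default: throw UnsupportedOperationException and let xattr-capable filesystems (for example HDFS) override. A hedged round-trip against the public FileSystem API; the path and attribute name are illustrative only:

import java.nio.charset.StandardCharsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class XAttrRoundTrip {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.get(new Configuration());
    Path p = new Path("/tmp/xattr-demo");
    fs.create(p).close();
    // xattr names are namespaced; "user." is the usual namespace for apps
    fs.setXAttr(p, "user.origin",
        "ingest-job-42".getBytes(StandardCharsets.UTF_8));
    byte[] v = fs.getXAttr(p, "user.origin");
    System.out.println(new String(v, StandardCharsets.UTF_8));
  }
}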
@@ -1471,11 +1259,6 @@ public abstract class AbstractFileSystem implements PathCapabilities {
   /**
    * The specification of this method matches that of
    * {@link FileContext#createSnapshot(Path, String)}.
-   *
-   * @param path the path.
-   * @param snapshotName snapshot name.
-   * @throws IOException raised on errors performing I/O.
-   * @return path.
    */
   public Path createSnapshot(final Path path, final String snapshotName)
       throws IOException {

@@ -1486,11 +1269,6 @@ public abstract class AbstractFileSystem implements PathCapabilities {
   /**
    * The specification of this method matches that of
    * {@link FileContext#renameSnapshot(Path, String, String)}.
-   *
-   * @param path the path.
-   * @param snapshotOldName snapshot old name.
-   * @param snapshotNewName snapshot new name.
-   * @throws IOException raised on errors performing I/O.
    */
   public void renameSnapshot(final Path path, final String snapshotOldName,
       final String snapshotNewName) throws IOException {

@@ -1501,10 +1279,6 @@ public abstract class AbstractFileSystem implements PathCapabilities {
   /**
    * The specification of this method matches that of
    * {@link FileContext#deleteSnapshot(Path, String)}.
-   *
-   * @param snapshotDir snapshot dir.
-   * @param snapshotName snapshot name.
-   * @throws IOException raised on errors performing I/O.
    */
   public void deleteSnapshot(final Path snapshotDir, final String snapshotName)
       throws IOException {

@@ -1515,7 +1289,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
   /**
    * Set the source path to satisfy storage policy.
    * @param path The source path referring to either a directory or a file.
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
    */
   public void satisfyStoragePolicy(final Path path) throws IOException {
     throw new UnsupportedOperationException(

@@ -1529,7 +1303,6 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * @param policyName the name of the target storage policy. The list
    *                   of supported Storage policies can be retrieved
    *                   via {@link #getAllStoragePolicies}.
-   * @throws IOException raised on errors performing I/O.
    */
   public void setStoragePolicy(final Path path, final String policyName)
       throws IOException {

@@ -1541,7 +1314,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
   /**
    * Unset the storage policy set for a given file or directory.
    * @param src file or directory path.
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
    */
   public void unsetStoragePolicy(final Path src) throws IOException {
     throw new UnsupportedOperationException(getClass().getSimpleName()

@@ -1553,7 +1326,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    *
    * @param src file or directory path.
    * @return storage policy for give file.
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
    */
   public BlockStoragePolicySpi getStoragePolicy(final Path src)
       throws IOException {

@@ -1565,7 +1338,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * Retrieve all the storage policies supported by this file system.
    *
    * @return all storage policies supported by this filesystem.
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
    */
   public Collection<? extends BlockStoragePolicySpi> getAllStoragePolicies()
       throws IOException {
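Likewise for the storage-policy methods just above: the base class throws, and HDFS supplies the real implementations. A hedged end-to-end sketch (the COLD policy name and the path are illustrative; on a non-HDFS filesystem these calls raise UnsupportedOperationException, as the hunks show):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class StoragePolicySketch {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.get(new Configuration());
    Path logs = new Path("/data/cold-logs");
    fs.setStoragePolicy(logs, "COLD"); // newly written blocks go to archive media
    System.out.println(fs.getStoragePolicy(logs).getName());
    fs.unsetStoragePolicy(logs);       // revert to the policy inherited from parent
  }
}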
@@ -25,10 +25,6 @@ import org.apache.avro.file.SeekableInput;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;

-import static org.apache.hadoop.fs.Options.OpenFileOptions.FS_OPTION_OPENFILE_READ_POLICY;
-import static org.apache.hadoop.fs.Options.OpenFileOptions.FS_OPTION_OPENFILE_READ_POLICY_SEQUENTIAL;
-import static org.apache.hadoop.util.functional.FutureIO.awaitFuture;

 /** Adapts an {@link FSDataInputStream} to Avro's SeekableInput interface. */
 @InterfaceAudience.Public
 @InterfaceStability.Stable

@@ -36,30 +32,17 @@ public class AvroFSInput implements Closeable, SeekableInput {
   private final FSDataInputStream stream;
   private final long len;

-  /**
-   * Construct given an {@link FSDataInputStream} and its length.
-   *
-   * @param in inputstream.
-   * @param len len.
-   */
+  /** Construct given an {@link FSDataInputStream} and its length. */
   public AvroFSInput(final FSDataInputStream in, final long len) {
     this.stream = in;
     this.len = len;
   }

-  /** Construct given a {@link FileContext} and a {@link Path}.
-   * @param fc filecontext.
-   * @param p the path.
-   * @throws IOException If an I/O error occurred.
-   * */
+  /** Construct given a {@link FileContext} and a {@link Path}. */
   public AvroFSInput(final FileContext fc, final Path p) throws IOException {
     FileStatus status = fc.getFileStatus(p);
     this.len = status.getLen();
-    this.stream = awaitFuture(fc.openFile(p)
-        .opt(FS_OPTION_OPENFILE_READ_POLICY,
-            FS_OPTION_OPENFILE_READ_POLICY_SEQUENTIAL)
-        .withFileStatus(status)
-        .build());
+    this.stream = fc.open(p);
   }

   @Override
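The AvroFSInput hunk is the second substantive change in this revert: trunk opened the file through the openFile() builder, declaring a sequential read policy and handing over the FileStatus it had already fetched (which lets object stores skip a second existence probe), while the revert returns to plain fc.open(p). The builder form being removed, restated as a self-contained sketch mirroring the trunk side of the hunk:

import static org.apache.hadoop.fs.Options.OpenFileOptions.FS_OPTION_OPENFILE_READ_POLICY;
import static org.apache.hadoop.fs.Options.OpenFileOptions.FS_OPTION_OPENFILE_READ_POLICY_SEQUENTIAL;
import static org.apache.hadoop.util.functional.FutureIO.awaitFuture;

import java.io.IOException;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;

class OpenFileSketch {
  static FSDataInputStream openSequential(FileContext fc, Path p)
      throws IOException {
    FileStatus status = fc.getFileStatus(p);
    // Builder-based open: hint a sequential read pattern and reuse the
    // status so the store does not re-issue a getFileStatus/HEAD call.
    return awaitFuture(fc.openFile(p)
        .opt(FS_OPTION_OPENFILE_READ_POLICY,
            FS_OPTION_OPENFILE_READ_POLICY_SEQUENTIAL)
        .withFileStatus(status)
        .build());
  }
}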
@@ -68,7 +68,6 @@ public abstract class BatchedRemoteIterator<K, E> implements RemoteIterator<E> {
    *
    * @param prevKey The key to send.
    * @return A list of replies.
-   * @throws IOException If an I/O error occurred.
    */
   public abstract BatchedEntries<E> makeRequest(K prevKey) throws IOException;

@@ -103,8 +102,6 @@ public abstract class BatchedRemoteIterator<K, E> implements RemoteIterator<E> {

   /**
    * Return the next list key associated with an element.
-   * @param element element.
-   * @return K Generics Type.
    */
   public abstract K elementToPrevKey(E element);

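The two abstract methods above define BatchedRemoteIterator's paging protocol: makeRequest(prevKey) fetches the batch that starts after prevKey, and elementToPrevKey(element) extracts the key to resume from. A hypothetical pager, assuming the nested BatchedListEntries helper and the single-argument constructor; real users are paged RPCs such as cache-pool listings:

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.fs.BatchedRemoteIterator;

// Iterates Long ids in fixed-size batches, resuming from the last id seen.
class IdPager extends BatchedRemoteIterator<Long, Long> {
  private final long max;

  IdPager(long max) {
    super(0L); // prevKey to start from
    this.max = max;
  }

  @Override
  public BatchedEntries<Long> makeRequest(Long prevKey) throws IOException {
    List<Long> batch = new ArrayList<>();
    for (long id = prevKey + 1; id <= max && batch.size() < 3; id++) {
      batch.add(id); // stand-in for one RPC page
    }
    return new BatchedListEntries<>(batch, !batch.isEmpty());
  }

  @Override
  public Long elementToPrevKey(Long element) {
    return element; // resume after the last element returned
  }
}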
@@ -85,7 +85,6 @@ public class BlockLocation implements Serializable {

  /**
   * Copy constructor.
-   * @param that blocklocation.
   */
  public BlockLocation(BlockLocation that) {
    this.hosts = that.hosts;

@@ -101,10 +100,6 @@ public class BlockLocation implements Serializable {

  /**
   * Constructor with host, name, offset and length.
-   * @param names names array.
-   * @param hosts host array.
-   * @param offset offset.
-   * @param length length.
   */
  public BlockLocation(String[] names, String[] hosts, long offset,
      long length) {

@@ -113,11 +108,6 @@ public class BlockLocation implements Serializable {

  /**
   * Constructor with host, name, offset, length and corrupt flag.
-   * @param names names.
-   * @param hosts hosts.
-   * @param offset offset.
-   * @param length length.
-   * @param corrupt corrupt.
   */
  public BlockLocation(String[] names, String[] hosts, long offset,
      long length, boolean corrupt) {

@@ -126,11 +116,6 @@ public class BlockLocation implements Serializable {

  /**
   * Constructor with host, name, network topology, offset and length.
-   * @param names names.
-   * @param hosts hosts.
-   * @param topologyPaths topologyPaths.
-   * @param offset offset.
-   * @param length length.
   */
  public BlockLocation(String[] names, String[] hosts, String[] topologyPaths,
      long offset, long length) {

@@ -140,12 +125,6 @@ public class BlockLocation implements Serializable {
  /**
   * Constructor with host, name, network topology, offset, length
   * and corrupt flag.
-   * @param names names.
-   * @param hosts hosts.
-   * @param topologyPaths topologyPaths.
-   * @param offset offset.
-   * @param length length.
-   * @param corrupt corrupt.
   */
  public BlockLocation(String[] names, String[] hosts, String[] topologyPaths,
      long offset, long length, boolean corrupt) {

@@ -198,8 +177,6 @@ public class BlockLocation implements Serializable {

  /**
   * Get the list of hosts (hostname) hosting this block.
-   * @return hosts array.
-   * @throws IOException If an I/O error occurred.
   */
  public String[] getHosts() throws IOException {
    return hosts;

@@ -207,7 +184,6 @@ public class BlockLocation implements Serializable {

  /**
   * Get the list of hosts (hostname) hosting a cached replica of the block.
-   * @return cached hosts.
   */
  public String[] getCachedHosts() {
    return cachedHosts;

@@ -215,8 +191,6 @@ public class BlockLocation implements Serializable {

  /**
   * Get the list of names (IP:xferPort) hosting this block.
-   * @return names array.
-   * @throws IOException If an I/O error occurred.
   */
  public String[] getNames() throws IOException {
    return names;

@@ -225,8 +199,6 @@ public class BlockLocation implements Serializable {
  /**
   * Get the list of network topology paths for each of the hosts.
   * The last component of the path is the "name" (IP:xferPort).
-   * @return topology paths.
-   * @throws IOException If an I/O error occurred.
   */
  public String[] getTopologyPaths() throws IOException {
    return topologyPaths;

@@ -234,7 +206,6 @@ public class BlockLocation implements Serializable {

  /**
   * Get the storageID of each replica of the block.
-   * @return storage ids.
   */
  public String[] getStorageIds() {
    return storageIds;

@@ -242,7 +213,6 @@ public class BlockLocation implements Serializable {

  /**
   * Get the storage type of each replica of the block.
-   * @return storage type of each replica of the block.
   */
  public StorageType[] getStorageTypes() {
    return storageTypes;

@@ -250,7 +220,6 @@ public class BlockLocation implements Serializable {

  /**
   * Get the start offset of file associated with this block.
-   * @return start offset of file associated with this block.
   */
  public long getOffset() {
    return offset;

@@ -258,7 +227,6 @@ public class BlockLocation implements Serializable {

  /**
   * Get the length of the block.
-   * @return length of the block.
   */
  public long getLength() {
    return length;

@@ -266,7 +234,6 @@ public class BlockLocation implements Serializable {

  /**
   * Get the corrupt flag.
-   * @return corrupt flag.
   */
  public boolean isCorrupt() {
    return corrupt;

@@ -274,7 +241,6 @@ public class BlockLocation implements Serializable {

  /**
   * Return true if the block is striped (erasure coded).
-   * @return if the block is striped true, not false.
   */
  public boolean isStriped() {
    return false;

@@ -282,7 +248,6 @@ public class BlockLocation implements Serializable {

  /**
   * Set the start offset of file associated with this block.
-   * @param offset start offset.
   */
  public void setOffset(long offset) {
    this.offset = offset;

@@ -290,7 +255,6 @@ public class BlockLocation implements Serializable {

  /**
   * Set the length of block.
-   * @param length length of block.
   */
  public void setLength(long length) {
    this.length = length;

@@ -298,7 +262,6 @@ public class BlockLocation implements Serializable {

  /**
   * Set the corrupt flag.
-   * @param corrupt corrupt flag.
   */
  public void setCorrupt(boolean corrupt) {
    this.corrupt = corrupt;

@@ -306,8 +269,6 @@ public class BlockLocation implements Serializable {

  /**
   * Set the hosts hosting this block.
-   * @param hosts hosts array.
-   * @throws IOException If an I/O error occurred.
   */
  public void setHosts(String[] hosts) throws IOException {
    if (hosts == null) {

@@ -319,7 +280,6 @@ public class BlockLocation implements Serializable {

  /**
   * Set the hosts hosting a cached replica of this block.
-   * @param cachedHosts cached hosts.
   */
  public void setCachedHosts(String[] cachedHosts) {
    if (cachedHosts == null) {

@@ -331,8 +291,6 @@ public class BlockLocation implements Serializable {

  /**
   * Set the names (host:port) hosting this block.
-   * @param names names.
-   * @throws IOException If an I/O error occurred.
   */
  public void setNames(String[] names) throws IOException {
    if (names == null) {

@@ -344,9 +302,6 @@ public class BlockLocation implements Serializable {

  /**
   * Set the network topology paths of the hosts.
-   *
-   * @param topologyPaths topology paths.
-   * @throws IOException If an I/O error occurred.
   */
  public void setTopologyPaths(String[] topologyPaths) throws IOException {
    if (topologyPaths == null) {
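Illustration of the constructor shapes shown in the hunks above: a BlockLocation pairs datanode names (IP:xferPort), hostnames and topology paths with the [offset, offset+length) slice of a file. The sample values are invented.

    import org.apache.hadoop.fs.BlockLocation;

    final class BlockLocationDemo {
      static BlockLocation sample() {
        return new BlockLocation(
            new String[] {"10.0.0.1:9866"},           // names
            new String[] {"node1.example.com"},       // hosts
            new String[] {"/rack1/10.0.0.1:9866"},    // topologyPaths
            0L, 134217728L);                          // offset, length: one 128 MB block
      }
    }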
@@ -1,4 +1,4 @@
-/*
+/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information

@@ -22,9 +22,6 @@ import java.io.EOFException;
 import java.io.FileDescriptor;
 import java.io.IOException;
 import java.util.StringJoiner;
-import java.nio.ByteBuffer;
-import java.util.List;
-import java.util.function.IntFunction;

 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;

@@ -165,20 +162,4 @@ public class BufferedFSInputStream extends BufferedInputStream
        .add("in=" + in)
        .toString();
  }
-
-  @Override
-  public int minSeekForVectorReads() {
-    return ((PositionedReadable) in).minSeekForVectorReads();
-  }
-
-  @Override
-  public int maxReadSizeForVectorReads() {
-    return ((PositionedReadable) in).maxReadSizeForVectorReads();
-  }
-
-  @Override
-  public void readVectored(List<? extends FileRange> ranges,
-      IntFunction<ByteBuffer> allocate) throws IOException {
-    ((PositionedReadable) in).readVectored(ranges, allocate);
-  }
 }
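A hedged usage sketch of the vectored-read API whose delegation this hunk removes: the caller hands in a list of ranges plus a buffer allocator, and each range's data arrives asynchronously. FileRange.createFileRange() and getData() are assumed from the current Hadoop source.

    import java.nio.ByteBuffer;
    import java.util.Arrays;
    import java.util.List;

    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileRange;

    final class VectoredReadDemo {
      static void readTwoRanges(FSDataInputStream in) throws Exception {
        List<FileRange> ranges = Arrays.asList(
            FileRange.createFileRange(0, 4096),          // first 4 KB
            FileRange.createFileRange(1 << 20, 4096));   // 4 KB at the 1 MB mark
        in.readVectored(ranges, ByteBuffer::allocate);   // one call, async fill
        for (FileRange r : ranges) {
          ByteBuffer data = r.getData().get();           // block until this range completes
          System.out.println(r.getOffset() + " -> " + data.remaining() + " bytes");
        }
      }
    }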
@@ -47,12 +47,6 @@ public final class ByteBufferUtil {

  /**
   * Perform a fallback read.
-   *
-   * @param stream input stream.
-   * @param bufferPool bufferPool.
-   * @param maxLength maxLength.
-   * @throws IOException raised on errors performing I/O.
-   * @return byte buffer.
   */
  public static ByteBuffer fallbackRead(
      InputStream stream, ByteBufferPool bufferPool, int maxLength)
@@ -19,7 +19,6 @@ package org.apache.hadoop.fs;

 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.classification.VisibleForTesting;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -53,9 +52,6 @@ public abstract class CachingGetSpaceUsed implements Closeable, GetSpaceUsed {
  /**
   * This is the constructor used by the builder.
   * All overriding classes should implement this.
-   *
-   * @param builder builder.
-   * @throws IOException raised on errors performing I/O.
   */
  public CachingGetSpaceUsed(CachingGetSpaceUsed.Builder builder)
      throws IOException {

@@ -93,19 +89,19 @@ public abstract class CachingGetSpaceUsed implements Closeable, GetSpaceUsed {
      if (!shouldFirstRefresh) {
        // Skip initial refresh operation, so we need to do first refresh
        // operation immediately in refresh thread.
-        initRefreshThread(true);
+        initRefeshThread(true);
        return;
      }
      refresh();
    }
-    initRefreshThread(false);
+    initRefeshThread(false);
  }

  /**
   * RunImmediately should set true, if we skip the first refresh.
   * @param runImmediately The param default should be false.
   */
-  private void initRefreshThread(boolean runImmediately) {
+  private void initRefeshThread (boolean runImmediately) {
    if (refreshInterval > 0) {
      refreshUsed = new Thread(new RefreshThread(this, runImmediately),
          "refreshUsed-" + dirPath);

@@ -143,8 +139,6 @@ public abstract class CachingGetSpaceUsed implements Closeable, GetSpaceUsed {

  /**
   * Increment the cached value of used space.
-   *
-   * @param value dfs used value.
   */
  public void incDfsUsed(long value) {
    used.addAndGet(value);

@@ -159,25 +153,11 @@ public abstract class CachingGetSpaceUsed implements Closeable, GetSpaceUsed {

  /**
   * How long in between runs of the background refresh.
-   *
-   * @return refresh interval.
   */
-  @VisibleForTesting
-  public long getRefreshInterval() {
+  long getRefreshInterval() {
    return refreshInterval;
  }

-  /**
-   * Randomize the refresh interval timing by this amount, the actual interval will be chosen
-   * uniformly between {@code interval-jitter} and {@code interval+jitter}.
-   *
-   * @return between interval-jitter and interval+jitter.
-   */
-  @VisibleForTesting
-  public long getJitter() {
-    return jitter;
-  }
-
  /**
   * Reset the current used data amount. This should be called
   * when the cached value is re-computed.
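A sketch of the jitter behaviour the removed javadoc describes: each sleep is drawn uniformly from [interval - jitter, interval + jitter] so that many refresh threads do not wake in lockstep. The names here are illustrative, not the class's actual fields.

    import java.util.concurrent.ThreadLocalRandom;

    final class JitterDemo {
      static long nextSleepMillis(long intervalMs, long jitterMs) {
        if (jitterMs <= 0) {
          return intervalMs;
        }
        // nextLong's bound is exclusive, hence the +1 to keep the range inclusive
        return ThreadLocalRandom.current()
            .nextLong(intervalMs - jitterMs, intervalMs + jitterMs + 1);
      }
    }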
@@ -22,24 +22,18 @@ import java.io.EOFException;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStream;
-import java.nio.ByteBuffer;
-import java.nio.IntBuffer;
 import java.nio.channels.ClosedChannelException;
-import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.EnumSet;
 import java.util.List;
 import java.util.concurrent.CompletableFuture;
-import java.util.concurrent.CompletionException;
-import java.util.function.IntFunction;
-import java.util.zip.CRC32;

 import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.impl.AbstractFSBuilderImpl;
-import org.apache.hadoop.fs.impl.CombinedFileRange;
 import org.apache.hadoop.fs.impl.FutureDataInputStreamBuilderImpl;
 import org.apache.hadoop.fs.impl.OpenFileParameters;
 import org.apache.hadoop.fs.permission.AclEntry;

@@ -51,10 +45,8 @@ import org.apache.hadoop.util.DataChecksum;
 import org.apache.hadoop.util.LambdaUtils;
 import org.apache.hadoop.util.Progressable;

-import static org.apache.hadoop.fs.Options.OpenFileOptions.FS_OPTION_OPENFILE_STANDARD_OPTIONS;
 import static org.apache.hadoop.fs.impl.PathCapabilitiesSupport.validatePathCapabilityArgs;
 import static org.apache.hadoop.fs.impl.StoreImplementationUtils.isProbeForSyncable;
-import static org.apache.hadoop.fs.VectoredReadUtils.sortRanges;

 /****************************************************************
  * Abstract Checksumed FileSystem.

@@ -110,44 +102,25 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
    return fs;
  }

-  /**
-   * Return the name of the checksum file associated with a file.
-   *
-   * @param file the file path.
-   * @return name of the checksum file associated with a file.
-   */
+  /** Return the name of the checksum file associated with a file.*/
  public Path getChecksumFile(Path file) {
    return new Path(file.getParent(), "." + file.getName() + ".crc");
  }

-  /**
-   * Return true if file is a checksum file name.
-   *
-   * @param file the file path.
-   * @return if file is a checksum file true, not false.
-   */
+  /** Return true iff file is a checksum file name.*/
  public static boolean isChecksumFile(Path file) {
    String name = file.getName();
    return name.startsWith(".") && name.endsWith(".crc");
  }

-  /**
-   * Return the length of the checksum file given the size of the
+  /** Return the length of the checksum file given the size of the
   * actual file.
-   *
-   * @param file the file path.
-   * @param fileSize file size.
-   * @return checksum length.
-   */
+   **/
  public long getChecksumFileLength(Path file, long fileSize) {
    return getChecksumLength(fileSize, getBytesPerSum());
  }

-  /**
-   * Return the bytes Per Checksum.
-   *
-   * @return bytes per check sum.
-   */
+  /** Return the bytes Per Checksum */
  public int getBytesPerSum() {
    return bytesPerChecksum;
  }

@@ -166,7 +139,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
   * It verifies that data matches checksums.
   *******************************************************/
  private static class ChecksumFSInputChecker extends FSInputChecker implements
-      IOStatisticsSource, StreamCapabilities {
+      IOStatisticsSource {
    private ChecksumFileSystem fs;
    private FSDataInputStream datas;
    private FSDataInputStream sums;

@@ -174,7 +147,6 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
    private static final int HEADER_LENGTH = 8;

    private int bytesPerSum = 1;
-    private long fileLen = -1L;

    public ChecksumFSInputChecker(ChecksumFileSystem fs, Path file)
      throws IOException {

@@ -198,8 +170,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
        if (!Arrays.equals(version, CHECKSUM_VERSION))
          throw new IOException("Not a checksum file: "+sumFile);
        this.bytesPerSum = sums.readInt();
-        set(fs.verifyChecksum, DataChecksum.newCrc32(), bytesPerSum,
-            FSInputChecker.CHECKSUM_SIZE);
+        set(fs.verifyChecksum, DataChecksum.newCrc32(), bytesPerSum, 4);
      } catch (IOException e) {
        // mincing the message is terrible, but java throws permission
        // exceptions as FNF because that's all the method signatures allow!

@@ -213,7 +184,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
    }

    private long getChecksumFilePos( long dataPos ) {
-      return HEADER_LENGTH + FSInputChecker.CHECKSUM_SIZE*(dataPos/bytesPerSum);
+      return HEADER_LENGTH + 4*(dataPos/bytesPerSum);
    }

    @Override

@@ -315,185 +286,6 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
    public IOStatistics getIOStatistics() {
      return IOStatisticsSupport.retrieveIOStatistics(datas);
    }
-
-    public static long findChecksumOffset(long dataOffset,
-        int bytesPerSum) {
-      return HEADER_LENGTH + (dataOffset/bytesPerSum) * FSInputChecker.CHECKSUM_SIZE;
-    }
-
-    /**
-     * Calculate length of file if not already cached.
-     * @return file length.
-     * @throws IOException any IOE.
-     */
-    private long getFileLength() throws IOException {
-      if (fileLen == -1L) {
-        fileLen = fs.getFileStatus(file).getLen();
-      }
-      return fileLen;
-    }
-
-    /**
-     * Find the checksum ranges that correspond to the given data ranges.
-     * @param dataRanges the input data ranges, which are assumed to be sorted
-     *     and non-overlapping
-     * @return a list of AsyncReaderUtils.CombinedFileRange that correspond to
-     *     the checksum ranges
-     */
-    public static List<CombinedFileRange> findChecksumRanges(
-        List<? extends FileRange> dataRanges,
-        int bytesPerSum,
-        int minSeek,
-        int maxSize) {
-      List<CombinedFileRange> result = new ArrayList<>();
-      CombinedFileRange currentCrc = null;
-      for(FileRange range: dataRanges) {
-        long crcOffset = findChecksumOffset(range.getOffset(), bytesPerSum);
-        long crcEnd = findChecksumOffset(range.getOffset() + range.getLength() +
-            bytesPerSum - 1, bytesPerSum);
-        if (currentCrc == null ||
-            !currentCrc.merge(crcOffset, crcEnd, range, minSeek, maxSize)) {
-          currentCrc = new CombinedFileRange(crcOffset, crcEnd, range);
-          result.add(currentCrc);
-        }
-      }
-      return result;
-    }
-
-    /**
-     * Check the data against the checksums.
-     * @param sumsBytes the checksum data
-     * @param sumsOffset where from the checksum file this buffer started
-     * @param data the file data
-     * @param dataOffset where the file data started (must be a multiple of
-     *    bytesPerSum)
-     * @param bytesPerSum how many bytes per a checksum
-     * @param file the path of the filename
-     * @return the data buffer
-     * @throws CompletionException if the checksums don't match
-     */
-    static ByteBuffer checkBytes(ByteBuffer sumsBytes,
-        long sumsOffset,
-        ByteBuffer data,
-        long dataOffset,
-        int bytesPerSum,
-        Path file) {
-      // determine how many bytes we need to skip at the start of the sums
-      int offset =
-          (int) (findChecksumOffset(dataOffset, bytesPerSum) - sumsOffset);
-      IntBuffer sums = sumsBytes.asIntBuffer();
-      sums.position(offset / FSInputChecker.CHECKSUM_SIZE);
-      ByteBuffer current = data.duplicate();
-      int numFullChunks = data.remaining() / bytesPerSum;
-      boolean partialChunk = ((data.remaining() % bytesPerSum) != 0);
-      int totalChunks = numFullChunks;
-      if (partialChunk) {
-        totalChunks++;
-      }
-      CRC32 crc = new CRC32();
-      // check each chunk to ensure they match
-      for(int c = 0; c < totalChunks; ++c) {
-        // set the buffer position to the start of every chunk.
-        current.position(c * bytesPerSum);
-
-        if (c == numFullChunks) {
-          // During last chunk, there may be less than chunk size
-          // data preset, so setting the limit accordingly.
-          int lastIncompleteChunk = data.remaining() % bytesPerSum;
-          current.limit((c * bytesPerSum) + lastIncompleteChunk);
-        } else {
-          // set the buffer limit to end of every chunk.
-          current.limit((c + 1) * bytesPerSum);
-        }
-
-        // compute the crc
-        crc.reset();
-        crc.update(current);
-        int expected = sums.get();
-        int calculated = (int) crc.getValue();
-
-        if (calculated != expected) {
-          // cast of c added to silence findbugs
-          long errPosn = dataOffset + (long) c * bytesPerSum;
-          throw new CompletionException(new ChecksumException(
-              "Checksum error: " + file + " at " + errPosn +
-                  " exp: " + expected + " got: " + calculated, errPosn));
-        }
-      }
-      // if everything matches, we return the data
-      return data;
-    }
-
-    /**
-     * Validates range parameters.
-     * In case of CheckSum FS, we already have calculated
-     * fileLength so failing fast here.
-     * @param ranges requested ranges.
-     * @param fileLength length of file.
-     * @throws EOFException end of file exception.
-     */
-    private void validateRangeRequest(List<? extends FileRange> ranges,
-        final long fileLength) throws EOFException {
-      for (FileRange range : ranges) {
-        VectoredReadUtils.validateRangeRequest(range);
-        if (range.getOffset() + range.getLength() > fileLength) {
-          final String errMsg = String.format("Requested range [%d, %d) is beyond EOF for path %s",
-              range.getOffset(), range.getLength(), file);
-          LOG.warn(errMsg);
-          throw new EOFException(errMsg);
-        }
-      }
-    }
-
-    @Override
-    public void readVectored(List<? extends FileRange> ranges,
-        IntFunction<ByteBuffer> allocate) throws IOException {
-      final long length = getFileLength();
-      validateRangeRequest(ranges, length);
-
-      // If the stream doesn't have checksums, just delegate.
-      if (sums == null) {
-        datas.readVectored(ranges, allocate);
-        return;
-      }
-      int minSeek = minSeekForVectorReads();
-      int maxSize = maxReadSizeForVectorReads();
-      List<CombinedFileRange> dataRanges =
-          VectoredReadUtils.mergeSortedRanges(Arrays.asList(sortRanges(ranges)), bytesPerSum,
-              minSeek, maxReadSizeForVectorReads());
-      // While merging the ranges above, they are rounded up based on the value of bytesPerSum
-      // which leads to some ranges crossing the EOF thus they need to be fixed else it will
-      // cause EOFException during actual reads.
-      for (CombinedFileRange range : dataRanges) {
-        if (range.getOffset() + range.getLength() > length) {
-          range.setLength((int) (length - range.getOffset()));
-        }
-      }
-      List<CombinedFileRange> checksumRanges = findChecksumRanges(dataRanges,
-          bytesPerSum, minSeek, maxSize);
-      sums.readVectored(checksumRanges, allocate);
-      datas.readVectored(dataRanges, allocate);
-      for(CombinedFileRange checksumRange: checksumRanges) {
-        for(FileRange dataRange: checksumRange.getUnderlying()) {
-          // when we have both the ranges, validate the checksum
-          CompletableFuture<ByteBuffer> result =
-              checksumRange.getData().thenCombineAsync(dataRange.getData(),
-                  (sumBuffer, dataBuffer) ->
-                      checkBytes(sumBuffer, checksumRange.getOffset(),
-                          dataBuffer, dataRange.getOffset(), bytesPerSum, file));
-          // Now, slice the read data range to the user's ranges
-          for(FileRange original: ((CombinedFileRange) dataRange).getUnderlying()) {
-            original.setData(result.thenApply(
-                (b) -> VectoredReadUtils.sliceTo(b, dataRange.getOffset(), original)));
-          }
-        }
-      }
-    }
-
-    @Override
-    public boolean hasCapability(String capability) {
-      return datas.hasCapability(capability);
-    }
  }

  private static class FSDataBoundedInputStream extends FSDataInputStream {

@@ -570,7 +362,6 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
   * Opens an FSDataInputStream at the indicated Path.
   * @param f the file name to open
   * @param bufferSize the size of the buffer to be used.
-   * @throws IOException if an I/O error occurs.
   */
  @Override
  public FSDataInputStream open(Path f, int bufferSize) throws IOException {

@@ -614,8 +405,8 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
  public static long getChecksumLength(long size, int bytesPerSum) {
    //the checksum length is equal to size passed divided by bytesPerSum +
    //bytes written in the beginning of the checksum file.
-    return ((size + bytesPerSum - 1) / bytesPerSum) * FSInputChecker.CHECKSUM_SIZE +
-        ChecksumFSInputChecker.HEADER_LENGTH;
+    return ((size + bytesPerSum - 1) / bytesPerSum) * 4 +
+        CHECKSUM_VERSION.length + 4;
  }

  /** This class provides an output stream for a checksummed file.

@@ -627,7 +418,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
    private static final float CHKSUM_AS_FRACTION = 0.01f;
    private boolean isClosed = false;

-    ChecksumFSOutputSummer(ChecksumFileSystem fs,
+    public ChecksumFSOutputSummer(ChecksumFileSystem fs,
      Path file,
      boolean overwrite,
      int bufferSize,

@@ -878,7 +669,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
   * Implement the abstract <tt>setReplication</tt> of <tt>FileSystem</tt>
   * @param src file name
   * @param replication new replication
-   * @throws IOException if an I/O error occurs.
+   * @throws IOException
   * @return true if successful;
   *         false if file does not exist or is a directory
   */

@@ -963,7 +754,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
   * @param f
   *          given path
   * @return the statuses of the files/directories in the given path
-   * @throws IOException if an I/O error occurs.
+   * @throws IOException
   */
  @Override
  public FileStatus[] listStatus(Path f) throws IOException {

@@ -984,7 +775,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
   * @param f
   *          given path
   * @return the statuses of the files/directories in the given patch
-   * @throws IOException if an I/O error occurs.
+   * @throws IOException
   */
  @Override
  public RemoteIterator<LocatedFileStatus> listLocatedStatus(Path f)

@@ -1020,10 +811,6 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
   * Copy it from FS control to the local dst name.
   * If src and dst are directories, the copyCrc parameter
   * determines whether to copy CRC files.
-   * @param src src path.
-   * @param dst dst path.
-   * @param copyCrc copy csc flag.
-   * @throws IOException if an I/O error occurs.
   */
  @SuppressWarnings("deprecation")
  public void copyToLocalFile(Path src, Path dst, boolean copyCrc)

@@ -1102,7 +889,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
      final OpenFileParameters parameters) throws IOException {
    AbstractFSBuilderImpl.rejectUnknownMandatoryKeys(
        parameters.getMandatoryKeys(),
-        FS_OPTION_OPENFILE_STANDARD_OPTIONS,
+        Collections.emptySet(),
        "for " + path);
    return LambdaUtils.eval(
        new CompletableFuture<>(),
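Worked example of the checksum-file layout arithmetic in the hunks above: the .crc file holds an 8-byte header (the version magic plus a bytesPerSum int) followed by one 4-byte CRC32 per bytesPerSum-sized chunk of data. The constants mirror HEADER_LENGTH and the CHECKSUM_SIZE the revert replaces with a literal 4.

    final class ChecksumMathDemo {
      static final int HEADER_LENGTH = 8;   // version magic + bytesPerSum int
      static final int CHECKSUM_SIZE = 4;   // one CRC32 entry

      // offset in the .crc file of the checksum covering dataPos
      static long checksumFilePos(long dataPos, int bytesPerSum) {
        return HEADER_LENGTH + CHECKSUM_SIZE * (dataPos / bytesPerSum);
      }

      // total .crc file length for a data file of `size` bytes
      static long checksumLength(long size, int bytesPerSum) {
        return ((size + bytesPerSum - 1) / bytesPerSum) * CHECKSUM_SIZE + HEADER_LENGTH;
      }

      public static void main(String[] args) {
        // with the default 512 bytes per checksum, a 1 MB file needs 2048 CRCs:
        // 8 + 2048 * 4 = 8200 bytes of checksum data
        System.out.println(checksumLength(1024 * 1024, 512)); // 8200
        System.out.println(checksumFilePos(1024, 512));       // 8 + 2 * 4 = 16
      }
    }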
@@ -70,53 +70,30 @@ public abstract class ChecksumFs extends FilterFs {
    this.verifyChecksum = inVerifyChecksum;
  }

-  /**
-   * get the raw file system.
-   *
-   * @return abstract file system.
-   */
+  /** get the raw file system. */
  public AbstractFileSystem getRawFs() {
    return getMyFs();
  }

-  /**
-   * Return the name of the checksum file associated with a file.
-   *
-   * @param file the file path.
-   * @return the checksum file associated with a file.
-   */
+  /** Return the name of the checksum file associated with a file.*/
  public Path getChecksumFile(Path file) {
    return new Path(file.getParent(), "." + file.getName() + ".crc");
  }

-  /**
-   * Return true iff file is a checksum file name.
-   *
-   * @param file the file path.
-   * @return if is checksum file true,not false.
-   */
+  /** Return true iff file is a checksum file name.*/
  public static boolean isChecksumFile(Path file) {
    String name = file.getName();
    return name.startsWith(".") && name.endsWith(".crc");
  }

-  /**
-   * Return the length of the checksum file given the size of the
+  /** Return the length of the checksum file given the size of the
   * actual file.
-   *
-   * @param file the file path.
-   * @param fileSize file size.
-   * @return check sum file length.
-   */
+   **/
  public long getChecksumFileLength(Path file, long fileSize) {
    return getChecksumLength(fileSize, getBytesPerSum());
  }

-  /**
-   * Return the bytes Per Checksum.
-   *
-   * @return bytes per sum.
-   */
+  /** Return the bytes Per Checksum. */
  public int getBytesPerSum() {
    return defaultBytesPerChecksum;
  }

@@ -456,7 +433,7 @@ public abstract class ChecksumFs extends FilterFs {
   * Implement the abstract <tt>setReplication</tt> of <tt>FileSystem</tt>
   * @param src file name
   * @param replication new replication
-   * @throws IOException if an I/O error occurs.
+   * @throws IOException
   * @return true if successful;
   *         false if file does not exist or is a directory
   */
@@ -399,12 +399,6 @@ public class CommonConfigurationKeys extends CommonConfigurationKeysPublic {
  public static final String ZK_ACL_DEFAULT = "world:anyone:rwcda";
  /** Authentication for the ZooKeeper ensemble. */
  public static final String ZK_AUTH = ZK_PREFIX + "auth";
-  /** Principal name for zookeeper servers. */
-  public static final String ZK_SERVER_PRINCIPAL = ZK_PREFIX + "server.principal";
-  /** Kerberos principal name for zookeeper connection. */
-  public static final String ZK_KERBEROS_PRINCIPAL = ZK_PREFIX + "kerberos.principal";
-  /** Kerberos keytab for zookeeper connection. */
-  public static final String ZK_KERBEROS_KEYTAB = ZK_PREFIX + "kerberos.keytab";

  /** Address of the ZooKeeper ensemble. */
  public static final String ZK_ADDRESS = ZK_PREFIX + "address";

@@ -417,14 +411,6 @@ public class CommonConfigurationKeys extends CommonConfigurationKeysPublic {
  /** How often to retry a ZooKeeper operation in milliseconds. */
  public static final String ZK_RETRY_INTERVAL_MS =
      ZK_PREFIX + "retry-interval-ms";
-  /** Keystore location for ZooKeeper client connection over SSL. */
-  public static final String ZK_SSL_KEYSTORE_LOCATION = ZK_PREFIX + "ssl.keystore.location";
-  /** Keystore password for ZooKeeper client connection over SSL. */
-  public static final String ZK_SSL_KEYSTORE_PASSWORD = ZK_PREFIX + "ssl.keystore.password";
-  /** Truststore location for ZooKeeper client connection over SSL. */
-  public static final String ZK_SSL_TRUSTSTORE_LOCATION = ZK_PREFIX + "ssl.truststore.location";
-  /** Truststore password for ZooKeeper client connection over SSL. */
-  public static final String ZK_SSL_TRUSTSTORE_PASSWORD = ZK_PREFIX + "ssl.truststore.password";
  public static final int ZK_RETRY_INTERVAL_MS_DEFAULT = 1000;
  /** Default domain name resolver for hadoop to use. */
  public static final String HADOOP_DOMAINNAME_RESOLVER_IMPL =

@@ -483,21 +469,4 @@ public class CommonConfigurationKeys extends CommonConfigurationKeysPublic {
   * default hadoop temp dir on local system: {@value}.
   */
  public static final String HADOOP_TMP_DIR = "hadoop.tmp.dir";
-
-  /**
-   * Thread-level IOStats Support.
-   * {@value}
-   */
-  public static final String IOSTATISTICS_THREAD_LEVEL_ENABLED =
-      "fs.iostatistics.thread.level.enabled";
-
-  /**
-   * Default value for Thread-level IOStats Support is true.
-   */
-  public static final boolean IOSTATISTICS_THREAD_LEVEL_ENABLED_DEFAULT =
-      true;
-
-  public static final String HADOOP_SECURITY_RESOLVER_IMPL =
-      "hadoop.security.resolver.impl";
-
 }
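A hedged usage sketch for the ZooKeeper client keys this revert removes. ZK_PREFIX is "hadoop.zk." in trunk, so ZK_SSL_KEYSTORE_LOCATION resolves to "hadoop.zk.ssl.keystore.location"; treat the literal key strings and paths below as assumptions for illustration.

    import org.apache.hadoop.conf.Configuration;

    final class ZkSslConfigDemo {
      static Configuration zkOverSsl() {
        Configuration conf = new Configuration();
        conf.set("hadoop.zk.address", "zk1.example.com:2181");
        conf.set("hadoop.zk.ssl.keystore.location", "/etc/security/zk-client.jks");
        conf.set("hadoop.zk.ssl.truststore.location", "/etc/security/zk-trust.jks");
        // passwords would normally come from a credential provider, not plain text
        return conf;
      }
    }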
@@ -169,11 +169,11 @@ public class CommonConfigurationKeysPublic {

  /**
   * Number of filesystems instances can be created in parallel.
-   * <p>
+   * <p></p>
   * A higher number here does not necessarily improve performance, especially
   * for object stores, where multiple threads may be attempting to create an FS
   * instance for the same URI.
-   * </p>
+   * <p></p>
   * Default value: {@value}.
   */
  public static final String FS_CREATION_PARALLEL_COUNT =

@@ -181,9 +181,8 @@ public class CommonConfigurationKeysPublic {

  /**
   * Default value for {@link #FS_CREATION_PARALLEL_COUNT}.
-   * <p>
+   * <p></p>
   * Default value: {@value}.
-   * </p>
   */
  public static final int FS_CREATION_PARALLEL_COUNT_DEFAULT =
      64;

@@ -1000,7 +999,6 @@ public class CommonConfigurationKeysPublic {
      String.join(",",
          "secret$",
          "password$",
-          "username$",
          "ssl.keystore.pass$",
          "fs.s3.*[Ss]ecret.?[Kk]ey",
          "fs.s3a.*.server-side-encryption.key",

@@ -1055,13 +1053,5 @@ public class CommonConfigurationKeysPublic {
  public static final String HADOOP_HTTP_IDLE_TIMEOUT_MS_KEY =
      "hadoop.http.idle_timeout.ms";
  public static final int HADOOP_HTTP_IDLE_TIMEOUT_MS_DEFAULT = 60000;
-
-  /**
-   * To configure scheduling of server metrics update thread. This config is used to indicate
-   * initial delay and delay between each execution of the metric update runnable thread.
-   */
-  public static final String IPC_SERVER_METRICS_UPDATE_RUNNER_INTERVAL =
-      "ipc.server.metrics.update.runner.interval";
-  public static final int IPC_SERVER_METRICS_UPDATE_RUNNER_INTERVAL_DEFAULT = 5000;
 }

@@ -163,11 +163,5 @@ public final class CommonPathCapabilities {
  public static final String ETAGS_PRESERVED_IN_RENAME =
      "fs.capability.etags.preserved.in.rename";

-  /**
-   * Does this Filesystem support lease recovery operations such as
-   * {@link LeaseRecoverable#recoverLease(Path)} and {@link LeaseRecoverable#isFileClosed(Path)}}?
-   * Value: {@value}.
-   */
-  public static final String LEASE_RECOVERABLE = "fs.capability.lease.recoverable";
-
 }
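Hedged sketch: path-capability constants such as the removed LEASE_RECOVERABLE are meant to be probed per path before relying on the feature. hasPathCapability() is the PathCapabilities probe available on FileSystem in Hadoop 3.3+; the literal capability string is copied from the removed line.

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    final class CapabilityProbeDemo {
      static boolean canRecoverLease(Path p) throws IOException {
        FileSystem fs = p.getFileSystem(new Configuration());
        return fs.hasPathCapability(p, "fs.capability.lease.recoverable");
      }
    }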
@@ -37,13 +37,7 @@ public class CompositeCrcFileChecksum extends FileChecksum {
  private DataChecksum.Type crcType;
  private int bytesPerCrc;

-  /**
-   * Create a CompositeCrcFileChecksum.
-   *
-   * @param crc crc.
-   * @param crcType crcType.
-   * @param bytesPerCrc bytesPerCrc.
-   */
+  /** Create a CompositeCrcFileChecksum. */
  public CompositeCrcFileChecksum(
      int crc, DataChecksum.Type crcType, int bytesPerCrc) {
    this.crc = crc;
@@ -149,31 +149,17 @@ public class ContentSummary extends QuotaUsage implements Writable{
  @Deprecated
  public ContentSummary() {}

-  /**
-   * Constructor, deprecated by ContentSummary.Builder
+  /** Constructor, deprecated by ContentSummary.Builder
   * This constructor implicitly set spaceConsumed the same as length.
   * spaceConsumed and length must be set explicitly with
-   * ContentSummary.Builder.
-   *
-   * @param length length.
-   * @param fileCount file count.
-   * @param directoryCount directory count.
+   * ContentSummary.Builder
   * */
  @Deprecated
  public ContentSummary(long length, long fileCount, long directoryCount) {
    this(length, fileCount, directoryCount, -1L, length, -1L);
  }

-  /**
-   * Constructor, deprecated by ContentSummary.Builder.
-   *
-   * @param length length.
-   * @param fileCount file count.
-   * @param directoryCount directory count.
-   * @param quota quota.
-   * @param spaceConsumed space consumed.
-   * @param spaceQuota space quota.
-   * */
+  /** Constructor, deprecated by ContentSummary.Builder */
  @Deprecated
  public ContentSummary(
      long length, long fileCount, long directoryCount, long quota,

@@ -186,11 +172,7 @@ public class ContentSummary extends QuotaUsage implements Writable{
    setSpaceQuota(spaceQuota);
  }

-  /**
-   * Constructor for ContentSummary.Builder.
-   *
-   * @param builder builder.
-   */
+  /** Constructor for ContentSummary.Builder*/
  private ContentSummary(Builder builder) {
    super(builder);
    this.length = builder.length;
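A hedged sketch of the Builder the deprecation notes above point to. The method names follow the current ContentSummary.Builder in trunk; treat them as assumptions rather than a frozen API, and the numbers are invented.

    import org.apache.hadoop.fs.ContentSummary;

    final class ContentSummaryDemo {
      static ContentSummary sample() {
        return new ContentSummary.Builder()
            .length(1024L)           // bytes of file data
            .fileCount(3L)
            .directoryCount(1L)
            .spaceConsumed(3072L)    // e.g. 3x replication
            .build();
      }
    }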
@@ -189,8 +189,6 @@ public enum CreateFlag {
  /**
   * Validate the CreateFlag for the append operation. The flag must contain
   * APPEND, and cannot contain OVERWRITE.
-   *
-   * @param flag enum set flag.
   */
  public static void validateForAppend(EnumSet<CreateFlag> flag) {
    validate(flag);
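Tiny usage sketch for the validation described above, assuming (per the javadoc) that an append flag set must include APPEND and must not include OVERWRITE; the exact exception type thrown is not shown in this diff, so the example catches broadly.

    import java.util.EnumSet;

    import org.apache.hadoop.fs.CreateFlag;

    final class CreateFlagDemo {
      public static void main(String[] args) {
        CreateFlag.validateForAppend(EnumSet.of(CreateFlag.APPEND));   // accepted
        try {
          CreateFlag.validateForAppend(
              EnumSet.of(CreateFlag.APPEND, CreateFlag.OVERWRITE));    // rejected
        } catch (RuntimeException e) {
          System.out.println("rejected: " + e.getMessage());
        }
      }
    }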
@@ -65,10 +65,7 @@ public class DF extends Shell {
    return dirPath;
  }

-  /**
-   * @return a string indicating which filesystem volume we're checking.
-   * @throws IOException raised on errors performing I/O.
-   */
+  /** @return a string indicating which filesystem volume we're checking. */
  public String getFilesystem() throws IOException {
    if (Shell.WINDOWS) {
      this.filesystem = dirFile.getCanonicalPath().substring(0, 2);

@@ -103,10 +100,7 @@ public class DF extends Shell {
    return (int) (used * 100.0 / cap);
  }

-  /**
-   * @return the filesystem mount point for the indicated volume.
-   * @throws IOException raised on errors performing I/O.
-   */
+  /** @return the filesystem mount point for the indicated volume */
  public String getMount() throws IOException {
    // Abort early if specified path does not exist
    if (!dirFile.exists()) {
@@ -47,11 +47,7 @@ public class DelegationTokenRenewer
    /** @return the renew token. */
    public Token<?> getRenewToken();

-    /**
-     * Set delegation token.
-     * @param <T> generic type T.
-     * @param token token.
-     */
+    /** Set delegation token. */
    public <T extends TokenIdentifier> void setDelegationToken(Token<T> token);
  }

@@ -176,11 +172,7 @@ public class DelegationTokenRenewer
  /** Queue to maintain the RenewActions to be processed by the {@link #run()} */
  private volatile DelayQueue<RenewAction<?>> queue = new DelayQueue<RenewAction<?>>();

-  /**
-   * For testing purposes.
-   *
-   * @return renew queue length.
-   */
+  /** For testing purposes */
  @VisibleForTesting
  protected int getRenewQueueLength() {
    return queue.size();

@@ -219,13 +211,7 @@ public class DelegationTokenRenewer
    }
  }

-  /**
-   * Add a renew action to the queue.
-   *
-   * @param <T> generic type T.
-   * @param fs file system.
-   * @return renew action.
-   * */
+  /** Add a renew action to the queue. */
  @SuppressWarnings("static-access")
  public <T extends FileSystem & Renewable> RenewAction<T> addRenewAction(final T fs) {
    synchronized (this) {

@@ -245,9 +231,7 @@ public class DelegationTokenRenewer
  /**
   * Remove the associated renew action from the queue
   *
-   * @param <T> generic type T.
-   * @param fs file system.
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException
   */
  public <T extends FileSystem & Renewable> void removeRenewAction(
      final T fs) throws IOException {

@@ -256,8 +240,9 @@ public class DelegationTokenRenewer
    try {
      action.cancel();
    } catch (InterruptedException ie) {
-      LOG.error("Interrupted while canceling token for {} filesystem.", fs.getUri());
-      LOG.debug("Exception in removeRenewAction.", ie);
+      LOG.error("Interrupted while canceling token for " + fs.getUri()
+          + "filesystem");
+      LOG.debug("Exception in removeRenewAction: {}", ie);
    }
  }
 }
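Side note on the logging change in the last hunk: the parameterized SLF4J form defers string building until the level is enabled, and passing the throwable as the final argument (with no matching {} placeholder) preserves the stack trace, which the reverted concatenation-and-placeholder style loses. A minimal sketch:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    final class LoggingStyleDemo {
      private static final Logger LOG = LoggerFactory.getLogger(LoggingStyleDemo.class);

      static void report(java.net.URI uri, InterruptedException ie) {
        LOG.error("Interrupted while canceling token for {} filesystem.", uri);
        LOG.debug("Exception in removeRenewAction.", ie); // throwable arg, not a {} parameter
      }
    }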
@@ -28,34 +28,6 @@ import org.apache.hadoop.classification.InterfaceStability;
  * The base interface which various FileSystem FileContext Builder
  * interfaces can extend, and which underlying implementations
  * will then implement.
- * <p>
- * HADOOP-16202 expanded the opt() and must() arguments with
- * operator overloading, but HADOOP-18724 identified mapping problems:
- * passing a long value in to {@code opt()} could end up invoking
- * {@code opt(string, double)}, which could then trigger parse failures.
- * <p>
- * To fix this without forcing existing code to break/be recompiled:
- * <ol>
- *   <li>A new method to explicitly set a long value is added:
- *   {@link #optLong(String, long)}
- *   </li>
- *   <li>A new method to explicitly set a double value is added:
- *   {@link #optDouble(String, double)}
- *   </li>
- *   <li>
- *     All of {@link #opt(String, long)}, {@link #opt(String, float)} and
- *     {@link #opt(String, double)} invoke {@link #optLong(String, long)}.
- *   </li>
- *   <li>
- *     The same changes have been applied to {@code must()} methods.
- *   </li>
- * </ol>
- * The forwarding of the existing double/float setters to the long setters
- * ensures that existing code will link, but it always sets a long value.
- * If you need code which works correctly with all Hadoop releases,
- * convert the option to a string explicitly and then call
- * {@link #opt(String, String)} or {@link #must(String, String)} as appropriate.
- *
  * @param <S> Return type on the {@link #build()} call.
  * @param <B> type of builder itself.
  */
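Note: the removed javadoc's portability advice can be shown concretely. A minimal sketch, assuming the Hadoop 3.3+ openFile() builder; PortableOpt and withKnownLength() are hypothetical names, while fs.option.openfile.length is a real option key:

import java.io.IOException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FutureDataInputStreamBuilder;
import org.apache.hadoop.fs.Path;

class PortableOpt {
  static FutureDataInputStreamBuilder withKnownLength(
      FileSystem fs, Path path, long len) throws IOException {
    FutureDataInputStreamBuilder builder = fs.openFile(path);
    // The string form binds to opt(String, String) on every release,
    // sidestepping the numeric overload resolution entirely.
    builder.opt("fs.option.openfile.length", Long.toString(len));
    return builder;
  }
}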
@@ -65,225 +37,87 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {

   /**
    * Set optional Builder parameter.
-   * @param key key.
-   * @param value value.
-   * @return generic type B.
    */
   B opt(@Nonnull String key, @Nonnull String value);

   /**
    * Set optional boolean parameter for the Builder.
-   * @param key key.
-   * @param value value.
-   * @return generic type B.
+   *
    * @see #opt(String, String)
    */
-  default B opt(@Nonnull String key, boolean value) {
-    return opt(key, Boolean.toString(value));
-  }
+  B opt(@Nonnull String key, boolean value);

   /**
    * Set optional int parameter for the Builder.
    *
-   * @param key key.
-   * @param value value.
-   * @return generic type B.
    * @see #opt(String, String)
    */
-  default B opt(@Nonnull String key, int value) {
-    return optLong(key, value);
-  }
+  B opt(@Nonnull String key, int value);

   /**
-   * This parameter is converted to a long and passed
-   * to {@link #optLong(String, long)} -all
-   * decimal precision is lost.
+   * Set optional float parameter for the Builder.
    *
-   * @param key key.
-   * @param value value.
-   * @return generic type B.
    * @see #opt(String, String)
-   * @deprecated use {@link #optDouble(String, double)}
    */
-  @Deprecated
-  default B opt(@Nonnull String key, float value) {
-    return optLong(key, (long) value);
-  }
-
-  /**
-   * Set optional long parameter for the Builder.
-   *
-   * @param key key.
-   * @param value value.
-   * @return generic type B.
-   * @deprecated use {@link #optLong(String, long)} where possible.
-   */
-  default B opt(@Nonnull String key, long value) {
-    return optLong(key, value);
-  }
-
-  /**
-   * Pass an optional double parameter for the Builder.
-   * This parameter is converted to a long and passed
-   * to {@link #optLong(String, long)} -all
-   * decimal precision is lost.
-   * @param key key.
-   * @param value value.
-   * @return generic type B.
-   * @see #opt(String, String)
-   * @deprecated use {@link #optDouble(String, double)}
-   */
-  @Deprecated
-  default B opt(@Nonnull String key, double value) {
-    return optLong(key, (long) value);
-  }
-
-  /**
-   * Set an array of string values as optional parameter for the Builder.
-   *
-   * @param key key.
-   * @param values values.
-   * @return generic type B.
-   * @see #opt(String, String)
-   */
-  B opt(@Nonnull String key, @Nonnull String... values);
+  B opt(@Nonnull String key, float value);

   /**
-   * Set optional long parameter for the Builder.
-   *
-   * @param key key.
-   * @param value value.
-   * @return generic type B.
-   * @see #opt(String, String)
-   */
-  default B optLong(@Nonnull String key, long value) {
-    return opt(key, Long.toString(value));
-  }
-
-  /**
    * Set optional double parameter for the Builder.
    *
-   * @param key key.
-   * @param value value.
-   * @return generic type B.
    * @see #opt(String, String)
    */
-  default B optDouble(@Nonnull String key, double value) {
-    return opt(key, Double.toString(value));
-  }
+  B opt(@Nonnull String key, double value);
+
+  /**
+   * Set an array of string values as optional parameter for the Builder.
+   *
+   * @see #opt(String, String)
+   */
+  B opt(@Nonnull String key, @Nonnull String... values);

   /**
    * Set mandatory option to the Builder.
    *
    * If the option is not supported or unavailable,
    * the client should expect {@link #build()} throws IllegalArgumentException.
-   *
-   * @param key key.
-   * @param value value.
-   * @return generic type B.
    */
   B must(@Nonnull String key, @Nonnull String value);

   /**
    * Set mandatory boolean option.
    *
-   * @param key key.
-   * @param value value.
-   * @return generic type B.
    * @see #must(String, String)
    */
-  default B must(@Nonnull String key, boolean value) {
-    return must(key, Boolean.toString(value));
-  }
+  B must(@Nonnull String key, boolean value);

   /**
    * Set mandatory int option.
    *
-   * @param key key.
-   * @param value value.
-   * @return generic type B.
    * @see #must(String, String)
    */
-  default B must(@Nonnull String key, int value) {
-    return mustLong(key, value);
-  }
+  B must(@Nonnull String key, int value);

   /**
-   * This parameter is converted to a long and passed
-   * to {@link #mustLong(String, long)} -all
-   * decimal precision is lost.
+   * Set mandatory float option.
    *
-   * @param key key.
-   * @param value value.
-   * @return generic type B.
-   * @deprecated use {@link #mustDouble(String, double)} to set floating point.
-   */
-  @Deprecated
-  default B must(@Nonnull String key, float value) {
-    return mustLong(key, (long) value);
-  }
-
-  /**
-   * Set mandatory long option.
-   *
-   * @param key key.
-   * @param value value.
-   * @return generic type B.
    * @see #must(String, String)
    */
-  @Deprecated
-  default B must(@Nonnull String key, long value) {
-    return mustLong(key, (long) value);
-  }
+  B must(@Nonnull String key, float value);

   /**
-   * Set mandatory long option, despite passing in a floating
-   * point value.
+   * Set mandatory double option.
    *
-   * @param key key.
-   * @param value value.
-   * @return generic type B.
    * @see #must(String, String)
    */
-  @Deprecated
-  default B must(@Nonnull String key, double value) {
-    return mustLong(key, (long) value);
-  }
+  B must(@Nonnull String key, double value);

   /**
    * Set a string array as mandatory option.
    *
-   * @param key key.
-   * @param values values.
-   * @return generic type B.
    * @see #must(String, String)
    */
   B must(@Nonnull String key, @Nonnull String... values);

-  /**
-   * Set mandatory long parameter for the Builder.
-   *
-   * @param key key.
-   * @param value value.
-   * @return generic type B.
-   * @see #opt(String, String)
-   */
-  default B mustLong(@Nonnull String key, long value) {
-    return must(key, Long.toString(value));
-  }
-
-  /**
-   * Set mandatory double parameter for the Builder.
-   *
-   * @param key key.
-   * @param value value.
-   * @return generic type B.
-   * @see #opt(String, String)
-   */
-  default B mustDouble(@Nonnull String key, double value) {
-    return must(key, Double.toString(value));
-  }
-
   /**
    * Instantiate the object which was being built.
    *
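Note: on the pre-revert (trunk) side shown above, the float/double opt() overloads cast to long before delegating to optLong(), so fractional values are silently truncated; optDouble() preserves them. A short sketch of that semantics, compiling only against the pre-revert API; PrecisionDemo and the demo.threshold key are illustrative, not real configuration:

import org.apache.hadoop.fs.FSBuilder;

class PrecisionDemo {
  static void demo(FSBuilder<?, ?> builder) {
    // Forwards to optLong(key, (long) 0.9): the stored value is "0".
    builder.opt("demo.threshold", 0.9);
    // Explicit double setter: the stored value is "0.9".
    builder.optDouble("demo.threshold", 0.9);
  }
}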
@@ -291,7 +125,6 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
    * @throws UnsupportedOperationException if the filesystem does not support
    * the specific operation.
    * @throws IOException on filesystem IO errors.
-   * @return generic type S.
    */
   S build() throws IllegalArgumentException,
       UnsupportedOperationException, IOException;
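Note: a hedged sketch of the build() contract above, using the openFile() builder as a concrete FSBuilder specialization: an unsupported must() key surfaces as IllegalArgumentException from build(), while IO failures may appear only when the returned future completes. BuildDemo is illustrative; fs.option.openfile.read.policy is a real option key:

import java.io.IOException;
import java.util.concurrent.ExecutionException;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

class BuildDemo {
  static FSDataInputStream open(FileSystem fs, Path path)
      throws IOException, InterruptedException, ExecutionException {
    return fs.openFile(path)
        // optional hint: silently ignored where unsupported
        .opt("fs.option.openfile.read.policy", "sequential")
        // build() is where an unsupported must() key would fail fast
        .build()
        // the open may complete asynchronously; get() waits for the stream
        .get();
  }
}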
Some files were not shown because too many files have changed in this diff.