Compare commits


1 commit

Author:  Inigo Goiri
SHA1:    be6a9b0a0d
Date:    2021-04-12 20:44:07 -07:00
Message: Revert "HDFS-15423 RBF: WebHDFS create shouldn't choose DN from all sub-clusters (#2605)"
         This reverts commit cb3ed32fe0.
5381 changed files with 153268 additions and 817512 deletions


@ -14,8 +14,6 @@
# limitations under the License.
github:
ghp_path: /
ghp_branch: gh-pages
enabled_merge_buttons:
squash: true
merge: false
@ -24,4 +22,4 @@ notifications:
commits: common-commits@hadoop.apache.org
issues: common-issues@hadoop.apache.org
pullrequests: common-issues@hadoop.apache.org
jira_options: comment link label
jira_options: link label worklog


@ -1,19 +1,6 @@
<!--
Thanks for sending a pull request!
1. If this is your first time, please read our contributor guidelines: https://cwiki.apache.org/confluence/display/HADOOP/How+To+Contribute
2. Make sure your PR title starts with JIRA issue id, e.g., 'HADOOP-17799. Your PR title ...'.
-->
### Description of PR
### How was this patch tested?
### For code changes:
- [ ] Does the title of this PR start with the corresponding JIRA issue id (e.g. 'HADOOP-17799. Your PR title ...')?
- [ ] Object storage: have the integration tests been executed and the endpoint declared according to the connector-specific documentation?
- [ ] If adding new dependencies to the code, are these dependencies licensed in a way that is compatible for inclusion under [ASF 2.0](http://www.apache.org/legal/resolved.html#category-a)?
- [ ] If applicable, have you updated the `LICENSE`, `LICENSE-binary`, `NOTICE-binary` files?
## NOTICE
Please create an issue in ASF JIRA before opening a pull request, and set the
title of the pull request so that it starts with the corresponding JIRA issue
number. (e.g. HADOOP-XXXXX. Fix a typo in YYY.)
For more details, please see https://cwiki.apache.org/confluence/display/HADOOP/How+To+Contribute


@ -1,59 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: website
# Controls when the action will run.
on:
push:
branches: [ trunk ]
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout Hadoop trunk
uses: actions/checkout@v3
with:
repository: apache/hadoop
- name: Set up JDK 8
uses: actions/setup-java@v3
with:
java-version: '8'
distribution: 'temurin'
- name: Cache local Maven repository
uses: actions/cache@v3
with:
path: ~/.m2/repository
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-maven-
- name: Build Hadoop maven plugins
run: cd hadoop-maven-plugins && mvn --batch-mode install
- name: Build Hadoop
run: mvn clean install -DskipTests -DskipShade
- name: Build document
run: mvn clean site
- name: Stage document
run: mvn site:stage -DstagingDirectory=${GITHUB_WORKSPACE}/staging/
- name: Deploy to GitHub Pages
uses: peaceiris/actions-gh-pages@v3
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: ./staging/hadoop-project
user_name: 'github-actions[bot]'
user_email: 'github-actions[bot]@users.noreply.github.com'


@ -1,17 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
dev-support/docker/Dockerfile_windows_10


@ -51,13 +51,14 @@ Known issues:
and run your IDE and Docker etc inside that VM.
----------------------------------------------------------------------------------
Installing required packages for a clean install of Ubuntu 18.04 LTS Desktop.
(For Ubuntu 20.04, gcc/g++ and cmake bundled with Ubuntu can be used.
Refer to dev-support/docker/Dockerfile):
Installing required packages for a clean install of Ubuntu 14.04 LTS Desktop:
* Open JDK 1.8
* Oracle JDK 1.8 (preferred)
$ sudo apt-get purge openjdk*
$ sudo apt-get install software-properties-common
$ sudo add-apt-repository ppa:webupd8team/java
$ sudo apt-get update
$ sudo apt-get -y install openjdk-8-jdk
$ sudo apt-get install oracle-java8-installer
* Maven
$ sudo apt-get -y install maven
* Native libraries
@ -85,13 +86,12 @@ Refer to dev-support/docker/Dockerfile):
$ curl -L https://sourceforge.net/projects/boost/files/boost/1.72.0/boost_1_72_0.tar.bz2/download > boost_1_72_0.tar.bz2
$ tar --bzip2 -xf boost_1_72_0.tar.bz2 && cd boost_1_72_0
$ ./bootstrap.sh --prefix=/usr/
$ ./b2 --without-python
$ sudo ./b2 --without-python install
$ ./b2 --without-python install
Optional packages:
* Snappy compression (only used for hadoop-mapreduce-client-nativetask)
$ sudo apt-get install libsnappy-dev
$ sudo apt-get install snappy libsnappy-dev
* Intel ISA-L library for erasure coding
Please refer to https://01.org/intel%C2%AE-storage-acceleration-library-open-source-version
(or https://github.com/01org/isa-l); a source-build sketch follows below.
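A minimal sketch of building ISA-L from source, assuming automake and yasm are already installed
and using the same version as dev-support/docker/Dockerfile elsewhere in this tree (consult the
ISA-L README for the authoritative steps):
$ git clone https://github.com/intel/isa-l.git && cd isa-l
$ git checkout v2.29.0
$ ./autogen.sh && ./configure
$ make && sudo make install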
@ -143,7 +143,7 @@ Maven build goals:
* Run checkstyle : mvn compile checkstyle:checkstyle
* Install JAR in M2 cache : mvn install
* Deploy JAR to Maven repo : mvn deploy
* Run clover : mvn test -Pclover
* Run clover : mvn test -Pclover [-DcloverLicenseLocation=${user.name}/.clover.license]
* Run Rat : mvn apache-rat:check
* Build javadocs : mvn javadoc:javadoc
* Build distribution : mvn package [-Pdist][-Pdocs][-Psrc][-Pnative][-Dtar][-Preleasedocs][-Pyarn-ui]
@ -326,35 +326,40 @@ to update SNAPSHOTs from external repos.
----------------------------------------------------------------------------------
Importing projects to eclipse
First, install artifacts including hadoop-maven-plugins at the top of the source tree.
When you import the project to eclipse, install hadoop-maven-plugins first.
$ mvn clean install -DskipTests -DskipShade
$ cd hadoop-maven-plugins
$ mvn install
Then, import to eclipse by specifying the root directory of the project via
[File] > [Import] > [Maven] > [Existing Maven Projects].
Then, generate eclipse project files.
$ mvn eclipse:eclipse -DskipTests
Finally, import to eclipse by specifying the root directory of the project via
[File] > [Import] > [Existing Projects into Workspace].
----------------------------------------------------------------------------------
Building distributions:
Create binary distribution without native code and without Javadocs:
Create binary distribution without native code and without documentation:
$ mvn package -Pdist -DskipTests -Dtar -Dmaven.javadoc.skip=true
Create binary distribution with native code:
Create binary distribution with native code and with documentation:
$ mvn package -Pdist,native -DskipTests -Dtar
$ mvn package -Pdist,native,docs -DskipTests -Dtar
Create source distribution:
$ mvn package -Psrc -DskipTests
Create source and binary distributions with native code:
Create source and binary distributions with native code and documentation:
$ mvn package -Pdist,native,src -DskipTests -Dtar
$ mvn package -Pdist,native,docs,src -DskipTests -Dtar
Create a local staging version of the website (in /tmp/hadoop-site)
$ mvn site site:stage -Preleasedocs,docs -DstagingDirectory=/tmp/hadoop-site
$ mvn clean site -Preleasedocs; mvn site:stage -DstagingDirectory=/tmp/hadoop-site
Note that the site needs to be built in a second pass after other artifacts.
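To preview the staged site locally, one option (not part of the Hadoop docs; assumes Python 3.7+
is installed) is to serve the staging directory with a simple static file server:
$ python3 -m http.server 8000 --directory /tmp/hadoop-site
and then browse to http://localhost:8000/.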
@ -492,66 +497,39 @@ Building on CentOS 8
----------------------------------------------------------------------------------
Building on Windows 10
Building on Windows
----------------------------------------------------------------------------------
Requirements:
* Windows 10
* Windows System
* JDK 1.8
* Maven 3.0 or later (maven.apache.org)
* Boost 1.72 (boost.org)
* Protocol Buffers 3.7.1 (https://github.com/protocolbuffers/protobuf/releases)
* CMake 3.19 or newer (cmake.org)
* Visual Studio 2019 (visualstudio.com)
* Windows SDK 8.1 (optional, if building CPU rate control for the container executor. Get this from
http://msdn.microsoft.com/en-us/windows/bg162891.aspx)
* Zlib (zlib.net, if building native code bindings for zlib)
* Git (preferably, get this from https://git-scm.com/download/win since the package also contains
Unix command-line tools that are needed during packaging).
* Python (python.org, for generation of docs using 'mvn site')
* Maven 3.0 or later
* Boost 1.72
* Protocol Buffers 3.7.1
* CMake 3.19 or newer
* Visual Studio 2010 Professional or Higher
* Windows SDK 8.1 (if building CPU rate control for the container executor)
* zlib headers (if building native code bindings for zlib)
* Internet connection for first build (to fetch all Maven and Hadoop dependencies)
* Unix command-line tools from GnuWin32: sh, mkdir, rm, cp, tar, gzip. These
tools must be present on your PATH.
* Python ( for generation of docs using 'mvn site')
Unix command-line tools are also included with the Windows Git package which
can be downloaded from http://git-scm.com/downloads
If using Visual Studio, it must be Professional level or higher.
Do not use Visual Studio Express. It does not support compiling for 64-bit,
which is problematic if running a 64-bit system.
The Windows SDK 8.1 is available to download at:
http://msdn.microsoft.com/en-us/windows/bg162891.aspx
Cygwin is not required.
----------------------------------------------------------------------------------
Building guidelines:
The Hadoop repository provides a Dockerfile for building Hadoop on Windows 10, located at
dev-support/docker/Dockerfile_windows_10. It is highly recommended to build the Docker image from
this file and use it for building Hadoop on Windows 10, since nothing other than Docker needs to
be installed and no additional steps are required to align the environment (paths etc.).
However, if you still prefer not to use Docker, Dockerfile_windows_10 remains very useful as a
step-by-step guide to everything involved in creating the environment needed to build Hadoop on
Windows 10.
Building using Docker:
We first need to build the Docker image for building Hadoop on Windows 10. Run this command from
the root of the Hadoop repository.
> docker build -t hadoop-windows-10-builder -f .\dev-support\docker\Dockerfile_windows_10 .\dev-support\docker\
Start the container with the image that we just built.
> docker run --rm -it hadoop-windows-10-builder
You can now clone the Hadoop repo inside this container and proceed with the build.
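For example (the clone location inside the container is up to you; C:\hadoop below is just an
illustration, and any non-mounted folder works, as explained in the NOTE below):
> git clone https://github.com/apache/hadoop.git C:\hadoop
> cd C:\hadoop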
NOTE:
While it may seem natural to mount the locally cloned (on the host filesystem) Hadoop
repository into the container (using the -v option), we have seen such builds fail because Maven
could not locate some files. Thus, we suggest cloning the Hadoop repository to a non-mounted
folder inside the container and proceeding with the build there. When the build is completed,
you may use the "docker cp" command to copy the built Hadoop tar.gz file from the Docker
container to the host filesystem. If you would still like to mount the Hadoop codebase, a
workaround is to copy the mounted Hadoop codebase into another folder (one that does not point to
a mount) in the container's filesystem and use that copy for building.
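For illustration, assuming the container was started with --name hadoop-builder and the source
was cloned to C:\hadoop inside it (both names are hypothetical), copying the build output back to
the host might look like:
> docker cp hadoop-builder:C:\hadoop\hadoop-dist\target D:\hadoop-dist-target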
However, we noticed no build issues when the Maven repository from the host filesystem was
mounted into the container, and doing so can greatly reduce the build time. Assuming that the
Maven repository is located at D:\Maven\Repository on the host filesystem, the following command
mounts it onto the default Maven repository location while launching the container.
> docker run --rm -v D:\Maven\Repository:C:\Users\ContainerAdministrator\.m2\repository -it hadoop-windows-10-builder
Building:
Keep the source code tree in a short path to avoid running into problems related
@ -567,24 +545,6 @@ configure the bit-ness of the build, and set several optional components.
Several tests require that the user have the Create Symbolic Links
privilege.
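One quick way to check whether the current user already holds this privilege (an illustrative
check, not part of the official build steps) is:
> whoami /priv | findstr SeCreateSymbolicLinkPrivilege
If the privilege is not listed, grant it via the Local Security Policy (User Rights Assignment)
or run the tests from an elevated prompt.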
To simplify the installation of the Boost, Protocol Buffers, OpenSSL and Zlib dependencies, we can
use vcpkg (https://github.com/Microsoft/vcpkg.git). Upon cloning the vcpkg repo, check out the
commit 7ffa425e1db8b0c3edf9c50f2f3a0f25a324541d to get the required versions of the dependencies
mentioned above.
> git clone https://github.com/Microsoft/vcpkg.git
> cd vcpkg
> git checkout 7ffa425e1db8b0c3edf9c50f2f3a0f25a324541d
> .\bootstrap-vcpkg.bat
> .\vcpkg.exe install boost:x64-windows
> .\vcpkg.exe install protobuf:x64-windows
> .\vcpkg.exe install openssl:x64-windows
> .\vcpkg.exe install zlib:x64-windows
Set the following environment variables -
(Assuming that vcpkg was checked out at C:\vcpkg)
> set PROTOBUF_HOME=C:\vcpkg\installed\x64-windows
> set MAVEN_OPTS=-Xmx2048M -Xss128M
All Maven goals are the same as described above with the exception that
native code is built by enabling the 'native-win' Maven profile. -Pnative-win
is enabled by default when building on Windows since the native components
@ -602,24 +562,6 @@ the zlib 1.2.7 source tree.
http://www.zlib.net/
Build command:
The following commands build all the modules in the Hadoop project and generate the tar.gz file in
hadoop-dist/target upon successful build. Run these commands from an
"x64 Native Tools Command Prompt for VS 2019" which can be found under "Visual Studio 2019" in the
Windows start menu. If you're using the Docker image from Dockerfile_windows_10, you'll be
logged into "x64 Native Tools Command Prompt for VS 2019" automatically when you start the
container.
> set classpath=
> set PROTOBUF_HOME=C:\vcpkg\installed\x64-windows
> mvn clean package -Dhttps.protocols=TLSv1.2 -DskipTests -DskipDocs -Pnative-win,dist^
-Drequire.openssl -Drequire.test.libhadoop -Pyarn-ui -Dshell-executable=C:\Git\bin\bash.exe^
-Dtar -Dopenssl.prefix=C:\vcpkg\installed\x64-windows^
-Dcmake.prefix.path=C:\vcpkg\installed\x64-windows^
-Dwindows.cmake.toolchain.file=C:\vcpkg\scripts\buildsystems\vcpkg.cmake -Dwindows.cmake.build.type=RelWithDebInfo^
-Dwindows.build.hdfspp.dll=off -Dwindows.no.sasl=on -Duse.platformToolsetVersion=v142
----------------------------------------------------------------------------------
Building distributions:


@ -210,28 +210,28 @@ hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/nvd3-1.8.5.* (css and js
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/checker/AbstractFuture.java
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/checker/TimeoutFuture.java
com.aliyun:aliyun-java-sdk-core:4.5.10
com.aliyun:aliyun-java-sdk-kms:2.11.0
com.aliyun:aliyun-java-sdk-ram:3.1.0
com.aliyun:aliyun-java-sdk-core:3.4.0
com.aliyun:aliyun-java-sdk-ecs:4.2.0
com.aliyun:aliyun-java-sdk-ram:3.0.0
com.aliyun:aliyun-java-sdk-sts:3.0.0
com.aliyun.oss:aliyun-sdk-oss:3.13.2
com.amazonaws:aws-java-sdk-bundle:1.12.316
com.aliyun.oss:aliyun-sdk-oss:3.4.1
com.amazonaws:aws-java-sdk-bundle:1.11.901
com.cedarsoftware:java-util:1.9.0
com.cedarsoftware:json-io:2.5.1
com.fasterxml.jackson.core:jackson-annotations:2.12.7
com.fasterxml.jackson.core:jackson-core:2.12.7
com.fasterxml.jackson.core:jackson-databind:2.12.7.1
com.fasterxml.jackson.jaxrs:jackson-jaxrs-base:2.12.7
com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider:2.12.7
com.fasterxml.jackson.module:jackson-module-jaxb-annotations:2.12.7
com.fasterxml.jackson.core:jackson-annotations:2.9.9
com.fasterxml.jackson.core:jackson-core:2.9.9
com.fasterxml.jackson.core:jackson-databind:2.9.9.2
com.fasterxml.jackson.jaxrs:jackson-jaxrs-base:2.9.9
com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider:2.9.9
com.fasterxml.jackson.module:jackson-module-jaxb-annotations:2.9.9
com.fasterxml.uuid:java-uuid-generator:3.1.4
com.fasterxml.woodstox:woodstox-core:5.4.0
com.fasterxml.woodstox:woodstox-core:5.3.0
com.github.davidmoten:rxjava-extras:0.8.0.17
com.github.stephenc.jcip:jcip-annotations:1.0-1
com.google:guice:4.0
com.google:guice-servlet:4.0
com.google.api.grpc:proto-google-common-protos:1.0.0
com.google.code.gson:2.9.0
com.google.code.gson:2.2.4
com.google.errorprone:error_prone_annotations:2.2.0
com.google.j2objc:j2objc-annotations:1.1
com.google.json-simple:json-simple:1.1.1
@ -240,17 +240,19 @@ com.google.guava:guava:20.0
com.google.guava:guava:27.0-jre
com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava
com.microsoft.azure:azure-storage:7.0.0
com.nimbusds:nimbus-jose-jwt:9.31
com.squareup.okhttp3:okhttp:4.10.0
com.squareup.okio:okio:3.2.0
com.zaxxer:HikariCP:4.0.3
commons-beanutils:commons-beanutils:1.9.4
commons-cli:commons-cli:1.5.0
com.nimbusds:nimbus-jose-jwt:4.41.1
com.squareup.okhttp:okhttp:2.7.5
com.squareup.okio:okio:1.6.0
com.zaxxer:HikariCP-java7:2.4.12
commons-beanutils:commons-beanutils:1.9.3
commons-cli:commons-cli:1.2
commons-codec:commons-codec:1.11
commons-collections:commons-collections:3.2.2
commons-daemon:commons-daemon:1.0.13
commons-io:commons-io:2.8.0
commons-net:commons-net:3.9.0
commons-io:commons-io:2.5
commons-lang:commons-lang:2.6
commons-logging:commons-logging:1.1.3
commons-net:commons-net:3.6
de.ruedigermoeller:fst:2.50
io.grpc:grpc-api:1.26.0
io.grpc:grpc-context:1.26.0
@ -259,36 +261,18 @@ io.grpc:grpc-netty:1.26.0
io.grpc:grpc-protobuf:1.26.0
io.grpc:grpc-protobuf-lite:1.26.0
io.grpc:grpc-stub:1.26.0
io.netty:netty-all:4.1.77.Final
io.netty:netty-buffer:4.1.77.Final
io.netty:netty-codec:4.1.77.Final
io.netty:netty-codec-dns:4.1.77.Final
io.netty:netty-codec-haproxy:4.1.77.Final
io.netty:netty-codec-http:4.1.77.Final
io.netty:netty-codec-http2:4.1.77.Final
io.netty:netty-codec-memcache:4.1.77.Final
io.netty:netty-codec-mqtt:4.1.77.Final
io.netty:netty-codec-redis:4.1.77.Final
io.netty:netty-codec-smtp:4.1.77.Final
io.netty:netty-codec-socks:4.1.77.Final
io.netty:netty-codec-stomp:4.1.77.Final
io.netty:netty-codec-xml:4.1.77.Final
io.netty:netty-common:4.1.77.Final
io.netty:netty-handler:4.1.77.Final
io.netty:netty-handler-proxy:4.1.77.Final
io.netty:netty-resolver:4.1.77.Final
io.netty:netty-resolver-dns:4.1.77.Final
io.netty:netty-transport:4.1.77.Final
io.netty:netty-transport-rxtx:4.1.77.Final
io.netty:netty-transport-sctp:4.1.77.Final
io.netty:netty-transport-udt:4.1.77.Final
io.netty:netty-transport-classes-epoll:4.1.77.Final
io.netty:netty-transport-native-unix-common:4.1.77.Final
io.netty:netty-transport-classes-kqueue:4.1.77.Final
io.netty:netty-resolver-dns-classes-macos:4.1.77.Final
io.netty:netty-transport-native-epoll:4.1.77.Final
io.netty:netty-transport-native-kqueue:4.1.77.Final
io.netty:netty-resolver-dns-native-macos:4.1.77.Final
io.netty:netty:3.10.6.Final
io.netty:netty-all:4.1.42.Final
io.netty:netty-buffer:4.1.27.Final
io.netty:netty-codec:4.1.27.Final
io.netty:netty-codec-http:4.1.27.Final
io.netty:netty-codec-http2:4.1.27.Final
io.netty:netty-codec-socks:4.1.27.Final
io.netty:netty-common:4.1.27.Final
io.netty:netty-handler:4.1.27.Final
io.netty:netty-handler-proxy:4.1.27.Final
io.netty:netty-resolver:4.1.27.Final
io.netty:netty-transport:4.1.27.Final
io.opencensus:opencensus-api:0.12.3
io.opencensus:opencensus-contrib-grpc-metrics:0.12.3
io.reactivex:rxjava:1.3.8
@ -299,71 +283,72 @@ javax.inject:javax.inject:1
log4j:log4j:1.2.17
net.java.dev.jna:jna:5.2.0
net.minidev:accessors-smart:1.2
org.apache.avro:avro:1.9.2
net.minidev:json-smart:2.3
org.apache.avro:avro:1.7.7
org.apache.commons:commons-collections4:4.2
org.apache.commons:commons-compress:1.21
org.apache.commons:commons-configuration2:2.8.0
org.apache.commons:commons-csv:1.9.0
org.apache.commons:commons-compress:1.19
org.apache.commons:commons-configuration2:2.1.1
org.apache.commons:commons-csv:1.0
org.apache.commons:commons-digester:1.8.1
org.apache.commons:commons-lang3:3.12.0
org.apache.commons:commons-math3:3.6.1
org.apache.commons:commons-text:1.10.0
org.apache.commons:commons-lang3:3.7
org.apache.commons:commons-math3:3.1.1
org.apache.commons:commons-text:1.4
org.apache.commons:commons-validator:1.6
org.apache.curator:curator-client:5.2.0
org.apache.curator:curator-framework:5.2.0
org.apache.curator:curator-recipes:5.2.0
org.apache.curator:curator-client:2.13.0
org.apache.curator:curator-framework:2.13.0
org.apache.curator:curator-recipes:2.13.0
org.apache.geronimo.specs:geronimo-jcache_1.0_spec:1.0-alpha-1
org.apache.hbase:hbase-annotations:1.7.1
org.apache.hbase:hbase-client:1.7.1
org.apache.hbase:hbase-common:1.7.1
org.apache.hbase:hbase-protocol:1.7.1
org.apache.hbase:hbase-annotations:1.4.8
org.apache.hbase:hbase-client:1.4.8
org.apache.hbase:hbase-common:1.4.8
org.apache.hbase:hbase-protocol:1.4.8
org.apache.htrace:htrace-core:3.1.0-incubating
org.apache.htrace:htrace-core4:4.1.0-incubating
org.apache.httpcomponents:httpclient:4.5.6
org.apache.httpcomponents:httpcore:4.4.10
org.apache.kafka:kafka-clients:2.8.2
org.apache.kerby:kerb-admin:2.0.3
org.apache.kerby:kerb-client:2.0.3
org.apache.kerby:kerb-common:2.0.3
org.apache.kerby:kerb-core:2.0.3
org.apache.kerby:kerb-crypto:2.0.3
org.apache.kerby:kerb-identity:2.0.3
org.apache.kerby:kerb-server:2.0.3
org.apache.kerby:kerb-simplekdc:2.0.3
org.apache.kerby:kerb-util:2.0.3
org.apache.kerby:kerby-asn1:2.0.3
org.apache.kerby:kerby-config:2.0.3
org.apache.kerby:kerby-pkix:2.0.3
org.apache.kerby:kerby-util:2.0.3
org.apache.kerby:kerby-xdr:2.0.3
org.apache.kerby:token-provider:2.0.3
org.apache.solr:solr-solrj:8.8.2
org.apache.kafka:kafka-clients:2.4.0
org.apache.kerby:kerb-admin:1.0.1
org.apache.kerby:kerb-client:1.0.1
org.apache.kerby:kerb-common:1.0.1
org.apache.kerby:kerb-core:1.0.1
org.apache.kerby:kerb-crypto:1.0.1
org.apache.kerby:kerb-identity:1.0.1
org.apache.kerby:kerb-server:1.0.1
org.apache.kerby:kerb-simplekdc:1.0.1
org.apache.kerby:kerb-util:1.0.1
org.apache.kerby:kerby-asn1:1.0.1
org.apache.kerby:kerby-config:1.0.1
org.apache.kerby:kerby-pkix:1.0.1
org.apache.kerby:kerby-util:1.0.1
org.apache.kerby:kerby-xdr:1.0.1
org.apache.kerby:token-provider:1.0.1
org.apache.yetus:audience-annotations:0.5.0
org.apache.zookeeper:zookeeper:3.6.3
org.codehaus.jettison:jettison:1.5.4
org.eclipse.jetty:jetty-annotations:9.4.51.v20230217
org.eclipse.jetty:jetty-http:9.4.51.v20230217
org.eclipse.jetty:jetty-io:9.4.51.v20230217
org.eclipse.jetty:jetty-jndi:9.4.51.v20230217
org.eclipse.jetty:jetty-plus:9.4.51.v20230217
org.eclipse.jetty:jetty-security:9.4.51.v20230217
org.eclipse.jetty:jetty-server:9.4.51.v20230217
org.eclipse.jetty:jetty-servlet:9.4.51.v20230217
org.eclipse.jetty:jetty-util:9.4.51.v20230217
org.eclipse.jetty:jetty-util-ajax:9.4.51.v20230217
org.eclipse.jetty:jetty-webapp:9.4.51.v20230217
org.eclipse.jetty:jetty-xml:9.4.51.v20230217
org.eclipse.jetty.websocket:javax-websocket-client-impl:9.4.51.v20230217
org.eclipse.jetty.websocket:javax-websocket-server-impl:9.4.51.v20230217
org.apache.zookeeper:zookeeper:3.4.13
org.codehaus.jackson:jackson-core-asl:1.9.13
org.codehaus.jackson:jackson-jaxrs:1.9.13
org.codehaus.jackson:jackson-mapper-asl:1.9.13
org.codehaus.jackson:jackson-xc:1.9.13
org.codehaus.jettison:jettison:1.1
org.eclipse.jetty:jetty-annotations:9.3.27.v20190418
org.eclipse.jetty:jetty-http:9.3.27.v20190418
org.eclipse.jetty:jetty-io:9.3.27.v20190418
org.eclipse.jetty:jetty-jndi:9.3.27.v20190418
org.eclipse.jetty:jetty-plus:9.3.27.v20190418
org.eclipse.jetty:jetty-security:9.3.27.v20190418
org.eclipse.jetty:jetty-server:9.3.27.v20190418
org.eclipse.jetty:jetty-servlet:9.3.27.v20190418
org.eclipse.jetty:jetty-util:9.3.27.v20190418
org.eclipse.jetty:jetty-util-ajax:9.3.27.v20190418
org.eclipse.jetty:jetty-webapp:9.3.27.v20190418
org.eclipse.jetty:jetty-xml:9.3.27.v20190418
org.eclipse.jetty.websocket:javax-websocket-client-impl:9.3.27.v20190418
org.eclipse.jetty.websocket:javax-websocket-server-impl:9.3.27.v20190418
org.ehcache:ehcache:3.3.1
org.ini4j:ini4j:0.5.4
org.jetbrains.kotlin:kotlin-stdlib:1.4.10
org.jetbrains.kotlin:kotlin-stdlib-common:1.4.10
org.lz4:lz4-java:1.7.1
org.lz4:lz4-java:1.6.0
org.objenesis:objenesis:2.6
org.xerial.snappy:snappy-java:1.0.5
org.yaml:snakeyaml:2.0
org.wildfly.openssl:wildfly-openssl:1.1.3.Final
org.yaml:snakeyaml:1.16
org.wildfly.openssl:wildfly-openssl:1.0.7.Final
--------------------------------------------------------------------------------
@ -379,7 +364,7 @@ hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/com
hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/fuse-dfs/util/tree.h
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/compat/{fstatat|openat|unlinkat}.h
com.github.luben:zstd-jni:1.4.9-1
com.github.luben:zstd-jni:1.4.3-1
dnsjava:dnsjava:2.1.7
org.codehaus.woodstox:stax2-api:4.2.1
@ -420,14 +405,14 @@ hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/dataTables.bootstrap.css
hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/dataTables.bootstrap.js
hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/dust-full-2.0.0.min.js
hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/dust-helpers-1.1.1.min.js
hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/jquery-3.6.0.min.js
hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/jquery-3.5.1.min.js
hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/jquery.dataTables.min.js
hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/moment.min.js
hadoop-tools/hadoop-sls/src/main/html/js/thirdparty/bootstrap.min.js
hadoop-tools/hadoop-sls/src/main/html/js/thirdparty/jquery.js
hadoop-tools/hadoop-sls/src/main/html/css/bootstrap.min.css
hadoop-tools/hadoop-sls/src/main/html/css/bootstrap-responsive.min.css
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/dt-1.11.5/*
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/dt-1.10.18/*
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jt/jquery.jstree.js
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/resources/TERMINAL
@ -435,7 +420,7 @@ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanage
bootstrap v3.3.6
broccoli-asset-rev v2.4.2
broccoli-funnel v1.0.1
datatables v1.11.5
datatables v1.10.8
em-helpers v0.5.13
em-table v0.1.6
ember v2.2.0
@ -499,12 +484,12 @@ org.slf4j:slf4j-log4j12:1.7.25
CDDL 1.1 + GPLv2 with classpath exception
-----------------------------------------
com.github.pjfanning:jersey-json:1.20
com.sun.jersey:jersey-client:1.19.4
com.sun.jersey:jersey-core:1.19.4
com.sun.jersey:jersey-guice:1.19.4
com.sun.jersey:jersey-server:1.19.4
com.sun.jersey:jersey-servlet:1.19.4
com.sun.jersey:jersey-client:1.19
com.sun.jersey:jersey-core:1.19
com.sun.jersey:jersey-guice:1.19
com.sun.jersey:jersey-json:1.19
com.sun.jersey:jersey-server:1.19
com.sun.jersey:jersey-servlet:1.19
com.sun.xml.bind:jaxb-impl:2.2.3-1
javax.annotation:javax.annotation-api:1.3.2
javax.servlet:javax.servlet-api:3.1.0
@ -517,21 +502,19 @@ javax.xml.bind:jaxb-api:2.2.11
Eclipse Public License 1.0
--------------------------
junit:junit:4.13.2
org.jacoco:org.jacoco.agent:0.8.5
junit:junit:4.12
HSQL License
------------
org.hsqldb:hsqldb:2.7.1
org.hsqldb:hsqldb:2.3.4
JDOM License
------------
org.jdom:jdom2:2.0.6.1
org.jdom:jdom:1.1
Public Domain


@ -245,14 +245,14 @@ hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/dataTables.bootstrap.css
hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/dataTables.bootstrap.js
hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/dust-full-2.0.0.min.js
hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/dust-helpers-1.1.1.min.js
hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/jquery-3.6.0.min.js
hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/jquery-3.5.1.min.js
hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/jquery.dataTables.min.js
hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/moment.min.js
hadoop-tools/hadoop-sls/src/main/html/js/thirdparty/bootstrap.min.js
hadoop-tools/hadoop-sls/src/main/html/js/thirdparty/jquery.js
hadoop-tools/hadoop-sls/src/main/html/css/bootstrap.min.css
hadoop-tools/hadoop-sls/src/main/html/css/bootstrap-responsive.min.css
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/dt-1.11.5/*
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/dt-1.10.18/*
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jt/jquery.jstree.js
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/resources/TERMINAL


@ -66,7 +66,7 @@ available from http://www.digip.org/jansson/.
AWS SDK for Java
Copyright 2010-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
This product includes software developed by
Amazon Technologies, Inc (http://www.amazon.com/).


@ -15,30 +15,6 @@
// specific language governing permissions and limitations
// under the License.
def getGithubCreds() {
return [usernamePassword(credentialsId: 'apache-hadoop-at-github.com',
passwordVariable: 'GITHUB_TOKEN',
usernameVariable: 'GITHUB_USER')]
}
// Publish JUnit results only if there are XML files under surefire-reports
def publishJUnitResults() {
def findCmdExitCode = sh script: "find ${SOURCEDIR} -wholename */target/surefire-reports/*.xml | egrep .", returnStatus: true
boolean surefireReportsExist = findCmdExitCode == 0
if (surefireReportsExist) {
echo "XML files found under surefire-reports, running junit"
// The path should be relative to WORKSPACE for the junit.
SRC = "${SOURCEDIR}/**/target/surefire-reports/*.xml".replace("$WORKSPACE/","")
try {
junit "${SRC}"
} catch(e) {
echo 'junit processing: ' + e.toString()
}
} else {
echo "No XML files found under surefire-reports, skipping junit"
}
}
pipeline {
agent {
@ -47,15 +23,19 @@ pipeline {
options {
buildDiscarder(logRotator(numToKeepStr: '5'))
timeout (time: 48, unit: 'HOURS')
timeout (time: 20, unit: 'HOURS')
timestamps()
checkoutToSubdirectory('src')
}
environment {
SOURCEDIR = 'src'
// will also need to change notification section below
PATCHDIR = 'out'
DOCKERFILE = "${SOURCEDIR}/dev-support/docker/Dockerfile"
YETUS='yetus'
// Branch or tag name. Yetus release tags are 'rel/X.Y.Z'
YETUS_VERSION='rel/0.14.0'
YETUS_VERSION='f9ba0170a5787a5f4662d3769804fef0226a182f'
}
parameters {
@ -77,260 +57,173 @@ pipeline {
}
}
// Set up the codebase so that each platform's build happens in its own exclusive copy of the
// codebase.
// This is primarily because YETUS messes up the git branch information and affects the
// subsequent optional stages after the first one.
stage ('setup sources') {
stage ('precommit-run') {
steps {
dir("${WORKSPACE}/centos-7") {
sh '''#!/usr/bin/env bash
withCredentials(
[usernamePassword(credentialsId: 'apache-hadoop-at-github.com',
passwordVariable: 'GITHUB_TOKEN',
usernameVariable: 'GITHUB_USER'),
usernamePassword(credentialsId: 'hadoopqa-at-asf-jira',
passwordVariable: 'JIRA_PASSWORD',
usernameVariable: 'JIRA_USER')]) {
sh '''#!/usr/bin/env bash
cp -Rp ${WORKSPACE}/src ${WORKSPACE}/centos-7
'''
}
set -e
dir("${WORKSPACE}/centos-8") {
sh '''#!/usr/bin/env bash
TESTPATCHBIN="${WORKSPACE}/${YETUS}/precommit/src/main/shell/test-patch.sh"
cp -Rp ${WORKSPACE}/src ${WORKSPACE}/centos-8
'''
}
# this must be clean for every run
if [[ -d "${WORKSPACE}/${PATCHDIR}" ]]; then
rm -rf "${WORKSPACE}/${PATCHDIR}"
fi
mkdir -p "${WORKSPACE}/${PATCHDIR}"
dir("${WORKSPACE}/debian-10") {
sh '''#!/usr/bin/env bash
# if given a JIRA issue, process it. If CHANGE_URL is set
# (e.g., Github Branch Source plugin), process it.
# otherwise exit, because we don't want Hadoop to do a
# full build. We wouldn't normally do this check for smaller
# projects. :)
if [[ -n "${JIRA_ISSUE_KEY}" ]]; then
YETUS_ARGS+=("${JIRA_ISSUE_KEY}")
elif [[ -z "${CHANGE_URL}" ]]; then
echo "Full build skipped" > "${WORKSPACE}/${PATCHDIR}/report.html"
exit 0
fi
cp -Rp ${WORKSPACE}/src ${WORKSPACE}/debian-10
'''
}
YETUS_ARGS+=("--patch-dir=${WORKSPACE}/${PATCHDIR}")
dir("${WORKSPACE}/ubuntu-focal") {
sh '''#!/usr/bin/env bash
# where the source is located
YETUS_ARGS+=("--basedir=${WORKSPACE}/${SOURCEDIR}")
cp -Rp ${WORKSPACE}/src ${WORKSPACE}/ubuntu-focal
'''
# our project defaults come from a personality file
YETUS_ARGS+=("--project=hadoop")
YETUS_ARGS+=("--personality=${WORKSPACE}/${SOURCEDIR}/dev-support/bin/hadoop.sh")
# lots of different output formats
YETUS_ARGS+=("--brief-report-file=${WORKSPACE}/${PATCHDIR}/brief.txt")
YETUS_ARGS+=("--console-report-file=${WORKSPACE}/${PATCHDIR}/console.txt")
YETUS_ARGS+=("--html-report-file=${WORKSPACE}/${PATCHDIR}/report.html")
# enable writing back to Github
YETUS_ARGS+=(--github-token="${GITHUB_TOKEN}")
# enable writing back to ASF JIRA
YETUS_ARGS+=(--jira-password="${JIRA_PASSWORD}")
YETUS_ARGS+=(--jira-user="${JIRA_USER}")
# auto-kill any surefire stragglers during unit test runs
YETUS_ARGS+=("--reapermode=kill")
# set relatively high limits for ASF machines
# changing these to higher values may cause problems
# with other jobs on systemd-enabled machines
YETUS_ARGS+=("--proclimit=5500")
YETUS_ARGS+=("--dockermemlimit=22g")
# -1 spotbugs issues that show up prior to the patch being applied
YETUS_ARGS+=("--spotbugs-strict-precheck")
# rsync these files back into the archive dir
YETUS_ARGS+=("--archive-list=checkstyle-errors.xml,spotbugsXml.xml")
# URL for user-side presentation in reports and such to our artifacts
# (needs to match the archive bits below)
YETUS_ARGS+=("--build-url-artifacts=artifact/out")
# plugins to enable
YETUS_ARGS+=("--plugins=all")
# don't let these tests cause -1s because we aren't really paying that
# much attention to them
YETUS_ARGS+=("--tests-filter=checkstyle")
# run in docker mode and specifically point to our
# Dockerfile since we don't want to use the auto-pulled version.
YETUS_ARGS+=("--docker")
YETUS_ARGS+=("--dockerfile=${DOCKERFILE}")
YETUS_ARGS+=("--mvn-custom-repos")
# effectively treat dev-support as a custom maven module
YETUS_ARGS+=("--skip-dirs=dev-support")
# help keep the ASF boxes clean
YETUS_ARGS+=("--sentinel")
# test with Java 8 and 11
YETUS_ARGS+=("--java-home=/usr/lib/jvm/java-8-openjdk-amd64")
YETUS_ARGS+=("--multijdkdirs=/usr/lib/jvm/java-11-openjdk-amd64")
YETUS_ARGS+=("--multijdktests=compile")
# custom javadoc goals
YETUS_ARGS+=("--mvn-javadoc-goals=process-sources,javadoc:javadoc-no-fork")
# write Yetus report as GitHub comment (YETUS-1102)
YETUS_ARGS+=("--github-write-comment")
YETUS_ARGS+=("--github-use-emoji-vote")
"${TESTPATCHBIN}" "${YETUS_ARGS[@]}"
'''
}
}
}
// This is an optional stage which runs only when there's a change in
// C++ code, the C++ build, or the platform.
// This stage serves as a means of cross-platform validation, which is
// really needed to ensure that any C++- or platform-related change doesn't
// break the Hadoop build on Centos 7.
stage ('precommit-run Centos 7') {
environment {
SOURCEDIR = "${WORKSPACE}/centos-7/src"
PATCHDIR = "${WORKSPACE}/centos-7/out"
DOCKERFILE = "${SOURCEDIR}/dev-support/docker/Dockerfile_centos_7"
IS_OPTIONAL = 1
}
steps {
withCredentials(getGithubCreds()) {
sh '''#!/usr/bin/env bash
chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
"${SOURCEDIR}/dev-support/jenkins.sh" run_ci
'''
}
}
post {
// Since this is an optional platform, we want to copy the artifacts
// and archive it only if the build fails, to help with debugging.
failure {
sh '''#!/usr/bin/env bash
cp -Rp "${WORKSPACE}/centos-7/out" "${WORKSPACE}"
'''
archiveArtifacts "out/**"
}
cleanup() {
script {
sh '''#!/usr/bin/env bash
chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
"${SOURCEDIR}/dev-support/jenkins.sh" cleanup_ci_proc
'''
}
}
}
}
// This is an optional stage which runs only when there's a change in
// C++ code, the C++ build, or the platform.
// This stage serves as a means of cross-platform validation, which is
// really needed to ensure that any C++- or platform-related change doesn't
// break the Hadoop build on Centos 8.
stage ('precommit-run Centos 8') {
environment {
SOURCEDIR = "${WORKSPACE}/centos-8/src"
PATCHDIR = "${WORKSPACE}/centos-8/out"
DOCKERFILE = "${SOURCEDIR}/dev-support/docker/Dockerfile_centos_8"
IS_OPTIONAL = 1
}
steps {
withCredentials(getGithubCreds()) {
sh '''#!/usr/bin/env bash
chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
"${SOURCEDIR}/dev-support/jenkins.sh" run_ci
'''
}
}
post {
// Since this is an optional platform, we want to copy the artifacts
// and archive it only if the build fails, to help with debugging.
failure {
sh '''#!/usr/bin/env bash
cp -Rp "${WORKSPACE}/centos-8/out" "${WORKSPACE}"
'''
archiveArtifacts "out/**"
}
cleanup() {
script {
sh '''#!/usr/bin/env bash
chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
"${SOURCEDIR}/dev-support/jenkins.sh" cleanup_ci_proc
'''
}
}
}
}
// This is an optional stage which runs only when there's a change in
// C++ code, the C++ build, or the platform.
// This stage serves as a means of cross-platform validation, which is
// really needed to ensure that any C++- or platform-related change doesn't
// break the Hadoop build on Debian 10.
stage ('precommit-run Debian 10') {
environment {
SOURCEDIR = "${WORKSPACE}/debian-10/src"
PATCHDIR = "${WORKSPACE}/debian-10/out"
DOCKERFILE = "${SOURCEDIR}/dev-support/docker/Dockerfile_debian_10"
IS_OPTIONAL = 1
}
steps {
withCredentials(getGithubCreds()) {
sh '''#!/usr/bin/env bash
chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
"${SOURCEDIR}/dev-support/jenkins.sh" run_ci
'''
}
}
post {
// Since this is an optional platform, we want to copy the artifacts
// and archive it only if the build fails, to help with debugging.
failure {
sh '''#!/usr/bin/env bash
cp -Rp "${WORKSPACE}/debian-10/out" "${WORKSPACE}"
'''
archiveArtifacts "out/**"
}
cleanup() {
script {
sh '''#!/usr/bin/env bash
chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
"${SOURCEDIR}/dev-support/jenkins.sh" cleanup_ci_proc
'''
}
}
}
}
// We want to use Ubuntu Focal as our main CI and thus, this stage
// isn't optional (runs for all the PRs).
stage ('precommit-run Ubuntu focal') {
environment {
SOURCEDIR = "${WORKSPACE}/ubuntu-focal/src"
PATCHDIR = "${WORKSPACE}/ubuntu-focal/out"
DOCKERFILE = "${SOURCEDIR}/dev-support/docker/Dockerfile"
IS_OPTIONAL = 0
}
steps {
withCredentials(getGithubCreds()) {
sh '''#!/usr/bin/env bash
chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
"${SOURCEDIR}/dev-support/jenkins.sh" run_ci
'''
}
}
post {
always {
script {
// Publish status if it was missed (YETUS-1059)
withCredentials(
[usernamePassword(credentialsId: '683f5dcf-5552-4b28-9fb1-6a6b77cf53dd',
passwordVariable: 'GITHUB_TOKEN',
usernameVariable: 'GITHUB_USER')]) {
sh '''#!/usr/bin/env bash
# Copy the artifacts of Ubuntu focal build to workspace
cp -Rp "${WORKSPACE}/ubuntu-focal/out" "${WORKSPACE}"
# Send Github status
chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
"${SOURCEDIR}/dev-support/jenkins.sh" github_status_recovery
'''
}
// YETUS output
archiveArtifacts "out/**"
// Publish the HTML report so that it can be looked at
// Has to be relative to WORKSPACE.
publishHTML (target: [
allowMissing: true,
keepAll: true,
alwaysLinkToLastBuild: true,
// Has to be relative to WORKSPACE
reportDir: "out",
reportFiles: 'report.html',
reportName: 'Yetus Report'
])
publishJUnitResults()
}
}
cleanup() {
script {
sh '''#!/usr/bin/env bash
chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
"${SOURCEDIR}/dev-support/jenkins.sh" cleanup_ci_proc
'''
}
}
}
}
}
post {
always {
script {
// Publish status if it was missed (YETUS-1059)
withCredentials(
[usernamePassword(credentialsId: '683f5dcf-5552-4b28-9fb1-6a6b77cf53dd',
passwordVariable: 'GITHUB_TOKEN',
usernameVariable: 'GITHUB_USER')]) {
sh '''#!/usr/bin/env bash
YETUS_ARGS+=("--github-token=${GITHUB_TOKEN}")
YETUS_ARGS+=("--patch-dir=${WORKSPACE}/${PATCHDIR}")
TESTPATCHBIN="${WORKSPACE}/${YETUS}/precommit/src/main/shell/github-status-recovery.sh"
/usr/bin/env bash "${TESTPATCHBIN}" "${YETUS_ARGS[@]}" ${EXTRA_ARGS} || true
'''
}
// Yetus output
archiveArtifacts "${env.PATCHDIR}/**"
// Publish the HTML report so that it can be looked at
// Has to be relative to WORKSPACE.
publishHTML (target: [
allowMissing: true,
keepAll: true,
alwaysLinkToLastBuild: true,
// Has to be relative to WORKSPACE
reportDir: "${env.PATCHDIR}",
reportFiles: 'report.html',
reportName: 'Yetus Report'
])
// Publish JUnit results
try {
junit "${env.SOURCEDIR}/**/target/surefire-reports/*.xml"
} catch(e) {
echo 'junit processing: ' + e.toString()
}
}
}
// Jenkins pipeline jobs fill slaves on PRs without this :(
cleanup() {
script {
sh '''#!/usr/bin/env bash
# See HADOOP-13951
chmod -R u+rxw "${WORKSPACE}"
'''
sh '''
# See YETUS-764
if [ -f "${WORKSPACE}/${PATCHDIR}/pidfile.txt" ]; then
echo "test-patch process appears to still be running: killing"
kill `cat "${WORKSPACE}/${PATCHDIR}/pidfile.txt"` || true
sleep 10
fi
if [ -f "${WORKSPACE}/${PATCHDIR}/cidfile.txt" ]; then
echo "test-patch container appears to still be running: killing"
docker kill `cat "${WORKSPACE}/${PATCHDIR}/cidfile.txt"` || true
fi
# See HADOOP-13951
chmod -R u+rxw "${WORKSPACE}"
'''
deleteDir()
}
}


@ -175,7 +175,7 @@ def run_java_acc(src_name, src_jars, dst_name, dst_jars, annotations):
if annotations is not None:
annotations_path = os.path.join(get_scratch_dir(), "annotations.txt")
with open(annotations_path, "w") as f:
with file(annotations_path, "w") as f:
for ann in annotations:
print(ann, file=f)
args += ["-annotations-list", annotations_path]


@ -293,7 +293,6 @@ function usage
echo "--security Emergency security release"
echo "--sign Use .gnupg dir to sign the artifacts and jars"
echo "--version=[version] Use an alternative version string"
echo "--mvnargs=[args] Extra Maven args to be provided when running mvn commands"
}
function option_parse
@ -348,9 +347,6 @@ function option_parse
--version=*)
HADOOP_VERSION=${i#*=}
;;
--mvnargs=*)
MVNEXTRAARGS=${i#*=}
;;
esac
done
@ -417,9 +413,6 @@ function option_parse
MVN_ARGS=("-Dmaven.repo.local=${MVNCACHE}")
fi
fi
if [ -n "$MVNEXTRAARGS" ]; then
MVN_ARGS+=("$MVNEXTRAARGS")
fi
if [[ "${SECURITYRELEASE}" = true ]]; then
if [[ ! -d "${BASEDIR}/hadoop-common-project/hadoop-common/src/site/markdown/release/${HADOOP_VERSION}" ]]; then
@ -521,7 +514,7 @@ function dockermode
echo "USER ${user_name}"
printf "\n\n"
) | docker build -t "${imgname}" -f - "${BASEDIR}"/dev-support/docker/
) | docker build -t "${imgname}" -
run docker run -i -t \
--privileged \
@ -542,10 +535,6 @@ function makearelease
big_console_header "Cleaning the Source Tree"
# Since CVE-2022-24765 in April 2022, git refuses to work in directories
# whose owner != the current user, unless explicitly told to trust it.
git config --global --add safe.directory /build/source
# git clean to clear any remnants from previous build
run "${GIT}" clean -xdf -e /patchprocess


@ -20,20 +20,6 @@
# Override these to match Apache Hadoop's requirements
personality_plugins "all,-ant,-gradle,-scalac,-scaladoc"
# These flags are needed to run Yetus against Hadoop on Windows.
WINDOWS_FLAGS="-Pnative-win
-Dhttps.protocols=TLSv1.2
-Drequire.openssl
-Drequire.test.libhadoop
-Dshell-executable=${BASH_EXECUTABLE}
-Dopenssl.prefix=${VCPKG_INSTALLED_PACKAGES}
-Dcmake.prefix.path=${VCPKG_INSTALLED_PACKAGES}
-Dwindows.cmake.toolchain.file=${CMAKE_TOOLCHAIN_FILE}
-Dwindows.cmake.build.type=RelWithDebInfo
-Dwindows.build.hdfspp.dll=off
-Dwindows.no.sasl=on
-Duse.platformToolsetVersion=v142"
## @description Globals specific to this personality
## @audience private
## @stability evolving
@ -101,30 +87,17 @@ function hadoop_order
echo "${hadoopm}"
}
## @description Retrieves the Hadoop project version defined in the root pom.xml
## @audience private
## @stability evolving
## @returns 0 on success, 1 on failure
function load_hadoop_version
{
if [[ -f "${BASEDIR}/pom.xml" ]]; then
HADOOP_VERSION=$(grep '<version>' "${BASEDIR}/pom.xml" \
| head -1 \
| "${SED}" -e 's|^ *<version>||' -e 's|</version>.*$||' \
| cut -f1 -d- )
return 0
else
return 1
fi
}
## @description Determine if it is safe to run parallel tests
## @audience private
## @stability evolving
## @param ordering
function hadoop_test_parallel
{
if load_hadoop_version; then
if [[ -f "${BASEDIR}/pom.xml" ]]; then
HADOOP_VERSION=$(grep '<version>' "${BASEDIR}/pom.xml" \
| head -1 \
| "${SED}" -e 's|^ *<version>||' -e 's|</version>.*$||' \
| cut -f1 -d- )
export HADOOP_VERSION
else
return 1
@ -289,10 +262,7 @@ function hadoop_native_flags
Windows_NT|CYGWIN*|MINGW*|MSYS*)
echo \
"${args[@]}" \
-Drequire.snappy \
-Pdist \
-Dtar \
"${WINDOWS_FLAGS}"
-Drequire.snappy -Drequire.openssl -Pnative-win
;;
*)
echo \
@ -385,7 +355,6 @@ function personality_modules
fi
;;
unit)
extra="-Dsurefire.rerunFailingTestsCount=2"
if [[ "${BUILDMODE}" = full ]]; then
ordering=mvnsrc
elif [[ "${CHANGED_MODULES[*]}" =~ \. ]]; then
@ -394,7 +363,7 @@ function personality_modules
if [[ ${TEST_PARALLEL} = "true" ]] ; then
if hadoop_test_parallel; then
extra="${extra} -Pparallel-tests"
extra="-Pparallel-tests"
if [[ -n ${TEST_THREADS:-} ]]; then
extra="${extra} -DtestsThreadCount=${TEST_THREADS}"
fi
@ -435,10 +404,7 @@ function personality_modules
extra="${extra} ${flags}"
fi
if [[ "$IS_WINDOWS" && "$IS_WINDOWS" == 1 ]]; then
extra="-Ptest-patch -Pdist -Dtar ${WINDOWS_FLAGS} ${extra}"
fi
extra="-Ptest-patch ${extra}"
for module in $(hadoop_order ${ordering}); do
# shellcheck disable=SC2086
personality_enqueue_module ${module} ${extra}
@ -546,7 +512,7 @@ function shadedclient_initialize
maven_add_install shadedclient
}
## @description build client facing shaded and non-shaded artifacts and test them
## @description build client facing shaded artifacts and test them
## @audience private
## @stability evolving
## @param repostatus
@ -579,30 +545,12 @@ function shadedclient_rebuild
return 0
fi
big_console_header "Checking client artifacts on ${repostatus} with shaded clients"
extra="-Dtest=NoUnitTests -Dmaven.javadoc.skip=true -Dcheckstyle.skip=true -Dspotbugs.skip=true"
if [[ "$IS_WINDOWS" && "$IS_WINDOWS" == 1 ]]; then
if load_hadoop_version; then
export HADOOP_HOME="${SOURCEDIR}/hadoop-dist/target/hadoop-${HADOOP_VERSION}-SNAPSHOT"
else
yetus_error "[WARNING] Unable to extract the Hadoop version and thus HADOOP_HOME is not set. Some tests may fail."
fi
extra="${WINDOWS_FLAGS} ${extra}"
fi
echo_and_redirect "${logfile}" \
"${MAVEN}" "${MAVEN_ARGS[@]}" verify -fae --batch-mode -am "${modules[@]}" "${extra}"
big_console_header "Checking client artifacts on ${repostatus} with non-shaded clients"
big_console_header "Checking client artifacts on ${repostatus}"
echo_and_redirect "${logfile}" \
"${MAVEN}" "${MAVEN_ARGS[@]}" verify -fae --batch-mode -am \
"${modules[@]}" \
-DskipShade -Dtest=NoUnitTests -Dmaven.javadoc.skip=true -Dcheckstyle.skip=true \
-Dspotbugs.skip=true "${extra}"
-Dtest=NoUnitTests -Dmaven.javadoc.skip=true -Dcheckstyle.skip=true -Dspotbugs.skip=true
count=$("${GREP}" -c '\[ERROR\]' "${logfile}")
if [[ ${count} -gt 0 ]]; then


@ -77,7 +77,7 @@ WANTED="$1"
shift
ARGV=("$@")
HADOOP_YETUS_VERSION=${HADOOP_YETUS_VERSION:-0.14.0}
HADOOP_YETUS_VERSION=${HADOOP_YETUS_VERSION:-0.13.0}
BIN=$(yetus_abs "${BASH_SOURCE-$0}")
BINDIR=$(dirname "${BIN}")
@ -171,17 +171,7 @@ if [[ -n "${GPGBIN}" && ! "${HADOOP_SKIP_YETUS_VERIFICATION}" = true ]]; then
fi
fi
if [[ "$IS_WINDOWS" && "$IS_WINDOWS" == 1 ]]; then
gunzip -c "${TARBALL}.gz" | tar xpf -
# One of the entries in the Yetus tarball is a symlink, qbt.sh.
# Creating the symlink fails on Windows unless this CI is run as Admin or Developer mode is
# enabled.
# Thus, we create the qbt.sh symlink ourselves and move it to the target.
YETUS_PRECOMMIT_DIR="${YETUS_PREFIX}-${HADOOP_YETUS_VERSION}/lib/precommit"
ln -s "${YETUS_PRECOMMIT_DIR}/test-patch.sh" qbt.sh
mv qbt.sh "${YETUS_PRECOMMIT_DIR}"
elif ! (gunzip -c "${TARBALL}.gz" | tar xpf -); then
if ! (gunzip -c "${TARBALL}.gz" | tar xpf -); then
yetus_error "ERROR: ${TARBALL}.gz is corrupt. Investigate and then remove ${HADOOP_PATCHPROCESS} to try again."
exit 1
fi


@ -1,75 +0,0 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<code_scheme name="Hadoop" version="173">
<option name="RIGHT_MARGIN" value="100" />
<JavaCodeStyleSettings>
<option name="CLASS_COUNT_TO_USE_IMPORT_ON_DEMAND" value="999" />
<option name="NAMES_COUNT_TO_USE_IMPORT_ON_DEMAND" value="999" />
<option name="PACKAGES_TO_USE_IMPORT_ON_DEMAND">
<value />
</option>
<option name="IMPORT_LAYOUT_TABLE">
<value>
<package name="java" withSubpackages="true" static="false" />
<package name="javax" withSubpackages="true" static="false" />
<emptyLine />
<package name="org.apache.hadoop.thirdparty" withSubpackages="true" static="false" />
<package name="" withSubpackages="true" static="false" />
<emptyLine />
<package name="org.apache" withSubpackages="true" static="false" />
<emptyLine />
<package name="" withSubpackages="true" static="true" />
</value>
</option>
</JavaCodeStyleSettings>
<codeStyleSettings language="JAVA">
<option name="RIGHT_MARGIN" value="100" />
<option name="KEEP_LINE_BREAKS" value="false" />
<option name="KEEP_FIRST_COLUMN_COMMENT" value="false" />
<option name="KEEP_CONTROL_STATEMENT_IN_ONE_LINE" value="false" />
<option name="KEEP_BLANK_LINES_IN_DECLARATIONS" value="1" />
<option name="KEEP_BLANK_LINES_IN_CODE" value="1" />
<option name="KEEP_BLANK_LINES_BEFORE_RBRACE" value="1" />
<option name="INDENT_CASE_FROM_SWITCH" value="false" />
<option name="ALIGN_MULTILINE_PARAMETERS" value="false" />
<option name="ALIGN_MULTILINE_RESOURCES" value="false" />
<option name="SPACE_BEFORE_ARRAY_INITIALIZER_LBRACE" value="true" />
<option name="CALL_PARAMETERS_WRAP" value="1" />
<option name="METHOD_PARAMETERS_WRAP" value="1" />
<option name="RESOURCE_LIST_WRAP" value="5" />
<option name="EXTENDS_LIST_WRAP" value="1" />
<option name="THROWS_LIST_WRAP" value="1" />
<option name="EXTENDS_KEYWORD_WRAP" value="1" />
<option name="THROWS_KEYWORD_WRAP" value="1" />
<option name="METHOD_CALL_CHAIN_WRAP" value="1" />
<option name="BINARY_OPERATION_WRAP" value="1" />
<option name="BINARY_OPERATION_SIGN_ON_NEXT_LINE" value="true" />
<option name="TERNARY_OPERATION_WRAP" value="5" />
<option name="ARRAY_INITIALIZER_WRAP" value="1" />
<option name="ASSIGNMENT_WRAP" value="1" />
<option name="METHOD_ANNOTATION_WRAP" value="2" />
<option name="CLASS_ANNOTATION_WRAP" value="2" />
<option name="FIELD_ANNOTATION_WRAP" value="2" />
<option name="VARIABLE_ANNOTATION_WRAP" value="2" />
<indentOptions>
<option name="INDENT_SIZE" value="2" />
<option name="CONTINUATION_INDENT_SIZE" value="4" />
<option name="TAB_SIZE" value="2" />
</indentOptions>
</codeStyleSettings>
</code_scheme>


@ -32,27 +32,59 @@ RUN echo APT::Install-Suggests "0"\; >> /etc/apt/apt.conf.d/10disableextras
ENV DEBIAN_FRONTEND noninteractive
ENV DEBCONF_TERSE true
######
# Platform package dependency resolver
######
COPY pkg-resolver pkg-resolver
RUN chmod a+x pkg-resolver/*.sh pkg-resolver/*.py \
&& chmod a+r pkg-resolver/*.json
######
# Install packages from apt
######
# hadolint ignore=DL3008,SC2046
# hadolint ignore=DL3008
RUN apt-get -q update \
&& apt-get -q install -y --no-install-recommends python3 \
&& apt-get -q install -y --no-install-recommends $(pkg-resolver/resolve.py ubuntu:focal) \
&& apt-get -q install -y --no-install-recommends \
ant \
apt-utils \
bats \
build-essential \
bzip2 \
clang \
cmake \
curl \
doxygen \
fuse \
g++ \
gcc \
git \
gnupg-agent \
hugo \
libbcprov-java \
libbz2-dev \
libcurl4-openssl-dev \
libfuse-dev \
libprotobuf-dev \
libprotoc-dev \
libsasl2-dev \
libsnappy-dev \
libssl-dev \
libtool \
libzstd-dev \
locales \
make \
maven \
nodejs \
node-yarn \
npm \
openjdk-11-jdk \
openjdk-8-jdk \
pinentry-curses \
pkg-config \
python3 \
python3-pip \
python3-pkg-resources \
python3-setuptools \
python3-wheel \
rsync \
shellcheck \
software-properties-common \
sudo \
valgrind \
zlib1g-dev \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
RUN locale-gen en_US.UTF-8
ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en' LC_ALL='en_US.UTF-8'
ENV PYTHONIOENCODING=utf-8
######
# Set env vars required to build Hadoop
######
@ -61,34 +93,97 @@ ENV MAVEN_HOME /usr
ENV JAVA_HOME /usr/lib/jvm/java-8-openjdk-amd64
#######
# Set env vars for SpotBugs 4.2.2
# Install SpotBugs 4.2.2
#######
RUN mkdir -p /opt/spotbugs \
&& curl -L -s -S https://github.com/spotbugs/spotbugs/releases/download/4.2.2/spotbugs-4.2.2.tgz \
-o /opt/spotbugs.tgz \
&& tar xzf /opt/spotbugs.tgz --strip-components 1 -C /opt/spotbugs \
&& chmod +x /opt/spotbugs/bin/*
ENV SPOTBUGS_HOME /opt/spotbugs
#######
# Set env vars for Google Protobuf 3.7.1
# Install Boost 1.72 (1.71 ships with Focal)
#######
# hadolint ignore=DL3003
RUN mkdir -p /opt/boost-library \
&& curl -L https://sourceforge.net/projects/boost/files/boost/1.72.0/boost_1_72_0.tar.bz2/download > boost_1_72_0.tar.bz2 \
&& mv boost_1_72_0.tar.bz2 /opt/boost-library \
&& cd /opt/boost-library \
&& tar --bzip2 -xf boost_1_72_0.tar.bz2 \
&& cd /opt/boost-library/boost_1_72_0 \
&& ./bootstrap.sh --prefix=/usr/ \
&& ./b2 --without-python install \
&& cd /root \
&& rm -rf /opt/boost-library
######
# Install Google Protobuf 3.7.1 (3.6.1 ships with Focal)
######
# hadolint ignore=DL3003
RUN mkdir -p /opt/protobuf-src \
&& curl -L -s -S \
https://github.com/protocolbuffers/protobuf/releases/download/v3.7.1/protobuf-java-3.7.1.tar.gz \
-o /opt/protobuf.tar.gz \
&& tar xzf /opt/protobuf.tar.gz --strip-components 1 -C /opt/protobuf-src \
&& cd /opt/protobuf-src \
&& ./configure --prefix=/opt/protobuf \
&& make "-j$(nproc)" \
&& make install \
&& cd /root \
&& rm -rf /opt/protobuf-src
ENV PROTOBUF_HOME /opt/protobuf
ENV PATH "${PATH}:/opt/protobuf/bin"
####
# Install pylint and python-dateutil
####
RUN pip3 install pylint==2.6.0 python-dateutil==2.8.1
####
# Install bower
####
# hadolint ignore=DL3008
RUN npm install -g bower@1.8.8
###
# Install hadolint
####
RUN curl -L -s -S \
https://github.com/hadolint/hadolint/releases/download/v1.11.1/hadolint-Linux-x86_64 \
-o /bin/hadolint \
&& chmod a+rx /bin/hadolint \
&& shasum -a 512 /bin/hadolint | \
awk '$1!="734e37c1f6619cbbd86b9b249e69c9af8ee1ea87a2b1ff71dccda412e9dac35e63425225a95d71572091a3f0a11e9a04c2fc25d9e91b840530c26af32b9891ca" {exit(1)}'
######
# Intel ISA-L 2.29.0
######
# hadolint ignore=DL3003,DL3008
RUN mkdir -p /opt/isa-l-src \
&& apt-get -q update \
&& apt-get install -y --no-install-recommends automake yasm \
&& apt-get clean \
&& curl -L -s -S \
https://github.com/intel/isa-l/archive/v2.29.0.tar.gz \
-o /opt/isa-l.tar.gz \
&& tar xzf /opt/isa-l.tar.gz --strip-components 1 -C /opt/isa-l-src \
&& cd /opt/isa-l-src \
&& ./autogen.sh \
&& ./configure \
&& make "-j$(nproc)" \
&& make install \
&& cd /root \
&& rm -rf /opt/isa-l-src
###
# Avoid out of memory errors in builds
###
ENV MAVEN_OPTS -Xms256m -Xmx3072m
ENV MAVEN_OPTS -Xms256m -Xmx1536m
# Skip gpg verification when downloading Yetus via yetus-wrapper
ENV HADOOP_SKIP_YETUS_VERIFICATION true
####
# Install packages
####
RUN pkg-resolver/install-common-pkgs.sh
RUN pkg-resolver/install-spotbugs.sh ubuntu:focal
RUN pkg-resolver/install-boost.sh ubuntu:focal
RUN pkg-resolver/install-protobuf.sh ubuntu:focal
RUN pkg-resolver/install-hadolint.sh ubuntu:focal
RUN pkg-resolver/install-intel-isa-l.sh ubuntu:focal
###
# Everything past this point is either not needed for testing or breaks Yetus.
# So tell Yetus not to read the rest of the file:


@ -33,26 +33,62 @@ ENV DEBIAN_FRONTEND noninteractive
ENV DEBCONF_TERSE true
######
# Platform package dependency resolver
# Install common dependencies from packages. Versions here are either
# sufficient or irrelevant.
######
COPY pkg-resolver pkg-resolver
RUN chmod a+x pkg-resolver/*.sh pkg-resolver/*.py \
&& chmod a+r pkg-resolver/*.json
######
# Install packages from apt
######
# hadolint ignore=DL3008,SC2046
# hadolint ignore=DL3008
RUN apt-get -q update \
&& apt-get -q install -y --no-install-recommends python3 \
&& apt-get -q install -y --no-install-recommends $(pkg-resolver/resolve.py ubuntu:focal::arch64) \
&& apt-get -q install -y --no-install-recommends \
ant \
apt-utils \
bats \
build-essential \
bzip2 \
clang \
cmake \
curl \
doxygen \
fuse \
g++ \
gcc \
git \
gnupg-agent \
hugo \
libbcprov-java \
libbz2-dev \
libcurl4-openssl-dev \
libfuse-dev \
libprotobuf-dev \
libprotoc-dev \
libsasl2-dev \
libsnappy-dev \
libssl-dev \
libtool \
libzstd-dev \
locales \
make \
maven \
nodejs \
node-yarn \
npm \
openjdk-11-jdk \
openjdk-8-jdk \
pinentry-curses \
pkg-config \
python3 \
python3-pip \
python3-pkg-resources \
python3-setuptools \
python3-wheel \
rsync \
shellcheck \
software-properties-common \
sudo \
valgrind \
zlib1g-dev \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
RUN locale-gen en_US.UTF-8
ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en' LC_ALL='en_US.UTF-8'
ENV PYTHONIOENCODING=utf-8
######
# Set env vars required to build Hadoop
######
@ -61,35 +97,78 @@ ENV MAVEN_HOME /usr
ENV JAVA_HOME /usr/lib/jvm/java-8-openjdk-arm64
#######
# Set env vars for SpotBugs 4.2.2
# Install SpotBugs 4.2.2
#######
RUN mkdir -p /opt/spotbugs \
&& curl -L -s -S https://github.com/spotbugs/spotbugs/releases/download/4.2.2/spotbugs-4.2.2.tgz \
-o /opt/spotbugs.tgz \
&& tar xzf /opt/spotbugs.tgz --strip-components 1 -C /opt/spotbugs \
&& chmod +x /opt/spotbugs/bin/*
ENV SPOTBUGS_HOME /opt/spotbugs
#######
# Set env vars for Google Protobuf 3.7.1
# Install Boost 1.72 (1.71 ships with Focal)
#######
# hadolint ignore=DL3003
RUN mkdir -p /opt/boost-library \
&& curl -L https://sourceforge.net/projects/boost/files/boost/1.72.0/boost_1_72_0.tar.bz2/download > boost_1_72_0.tar.bz2 \
&& mv boost_1_72_0.tar.bz2 /opt/boost-library \
&& cd /opt/boost-library \
&& tar --bzip2 -xf boost_1_72_0.tar.bz2 \
&& cd /opt/boost-library/boost_1_72_0 \
&& ./bootstrap.sh --prefix=/usr/ \
&& ./b2 --without-python install \
&& cd /root \
&& rm -rf /opt/boost-library
######
# Install Google Protobuf 3.7.1 (3.6.1 ships with Focal)
######
# hadolint ignore=DL3003
RUN mkdir -p /opt/protobuf-src \
&& curl -L -s -S \
https://github.com/protocolbuffers/protobuf/releases/download/v3.7.1/protobuf-java-3.7.1.tar.gz \
-o /opt/protobuf.tar.gz \
&& tar xzf /opt/protobuf.tar.gz --strip-components 1 -C /opt/protobuf-src \
&& cd /opt/protobuf-src \
&& ./configure --prefix=/opt/protobuf \
&& make "-j$(nproc)" \
&& make install \
&& cd /root \
&& rm -rf /opt/protobuf-src
ENV PROTOBUF_HOME /opt/protobuf
ENV PATH "${PATH}:/opt/protobuf/bin"
####
# Install pylint and python-dateutil
####
RUN pip3 install pylint==2.6.0 python-dateutil==2.8.1
####
# Install bower
####
# hadolint ignore=DL3008
RUN npm install -g bower@1.8.8
###
# Install phantomjs built for aarch64
####
RUN mkdir -p /opt/phantomjs \
&& curl -L -s -S \
https://github.com/liusheng/phantomjs/releases/download/2.1.1/phantomjs-2.1.1-linux-aarch64.tar.bz2 \
-o /opt/phantomjs/phantomjs-2.1.1-linux-aarch64.tar.bz2 \
&& tar xvjf /opt/phantomjs/phantomjs-2.1.1-linux-aarch64.tar.bz2 --strip-components 1 -C /opt/phantomjs \
&& cp /opt/phantomjs/bin/phantomjs /usr/bin/ \
&& rm -rf /opt/phantomjs
###
# Avoid out of memory errors in builds
###
ENV MAVEN_OPTS -Xms256m -Xmx3072m
ENV MAVEN_OPTS -Xms256m -Xmx1536m
# Skip gpg verification when downloading Yetus via yetus-wrapper
ENV HADOOP_SKIP_YETUS_VERIFICATION true
# Force PhantomJS to be in 'headless' mode, do not connect to Xwindow
ENV QT_QPA_PLATFORM offscreen
####
# Install packages
####
RUN pkg-resolver/install-common-pkgs.sh
RUN pkg-resolver/install-spotbugs.sh ubuntu:focal::arch64
RUN pkg-resolver/install-boost.sh ubuntu:focal::arch64
RUN pkg-resolver/install-protobuf.sh ubuntu:focal::arch64
###
# Everything past this point is either not needed for testing or breaks Yetus.
# So tell Yetus not to read the rest of the file:


@ -1,96 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Dockerfile for installing the necessary dependencies for building Hadoop.
# See BUILDING.txt.
FROM centos:7
WORKDIR /root
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
######
# Platform package dependency resolver
######
COPY pkg-resolver pkg-resolver
RUN chmod a+x pkg-resolver/*.sh pkg-resolver/*.py \
&& chmod a+r pkg-resolver/*.json
######
# Install packages from yum
######
# hadolint ignore=DL3008,SC2046
RUN yum update -y \
&& yum groupinstall -y "Development Tools" \
&& yum install -y \
centos-release-scl \
python3 \
&& yum install -y $(pkg-resolver/resolve.py centos:7)
# Set GCC 9 as the default C/C++ compiler
RUN echo "source /opt/rh/devtoolset-9/enable" >> /etc/bashrc
SHELL ["/bin/bash", "--login", "-c"]
######
# Set the environment variables needed for CMake
# to find and use GCC 9 for compilation
######
ENV GCC_HOME "/opt/rh/devtoolset-9"
ENV CC "${GCC_HOME}/root/usr/bin/gcc"
ENV CXX "${GCC_HOME}/root/usr/bin/g++"
ENV SHLVL 1
ENV LD_LIBRARY_PATH "${GCC_HOME}/root/usr/lib64:${GCC_HOME}/root/usr/lib:${GCC_HOME}/root/usr/lib64/dyninst:${GCC_HOME}/root/usr/lib/dyninst:${GCC_HOME}/root/usr/lib64:${GCC_HOME}/root/usr/lib:/usr/lib:/usr/lib64"
ENV PCP_DIR "${GCC_HOME}/root"
ENV MANPATH "${GCC_HOME}/root/usr/share/man:"
ENV PATH "${GCC_HOME}/root/usr/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
ENV PKG_CONFIG_PATH "${GCC_HOME}/root/usr/lib64/pkgconfig"
ENV INFOPATH "${GCC_HOME}/root/usr/share/info"
# TODO: Set locale
######
# Set env vars required to build Hadoop
######
ENV MAVEN_HOME /opt/maven
ENV PATH "${PATH}:${MAVEN_HOME}/bin"
# JAVA_HOME must be set in Maven >= 3.5.0 (MNG-6003)
ENV JAVA_HOME /usr/lib/jvm/java-1.8.0
#######
# Set env vars for SpotBugs
#######
ENV SPOTBUGS_HOME /opt/spotbugs
#######
# Set env vars for Google Protobuf
#######
ENV PROTOBUF_HOME /opt/protobuf
ENV PATH "${PATH}:/opt/protobuf/bin"
######
# Install packages
######
RUN pkg-resolver/install-maven.sh centos:7
RUN pkg-resolver/install-cmake.sh centos:7
RUN pkg-resolver/install-zstandard.sh centos:7
RUN pkg-resolver/install-yasm.sh centos:7
RUN pkg-resolver/install-protobuf.sh centos:7
RUN pkg-resolver/install-boost.sh centos:7
RUN pkg-resolver/install-spotbugs.sh centos:7
RUN pkg-resolver/install-nodejs.sh centos:7
RUN pkg-resolver/install-git.sh centos:7
RUN pkg-resolver/install-common-pkgs.sh


@ -1,118 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Dockerfile for installing the necessary dependencies for building Hadoop.
# See BUILDING.txt.
FROM centos:8
WORKDIR /root
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
######
# Platform package dependency resolver
######
COPY pkg-resolver pkg-resolver
RUN chmod a+x pkg-resolver/*.sh pkg-resolver/*.py \
&& chmod a+r pkg-resolver/*.json
######
# CentOS 8 has reached its EOL and its packages
# are no longer available on the mirror.centos.org site.
# Please see https://www.centos.org/centos-linux-eol/
######
RUN pkg-resolver/set-vault-as-baseurl-centos.sh centos:8
######
# Install packages from yum
######
# hadolint ignore=DL3008,SC2046
RUN yum update -y \
&& yum install -y python3 \
&& yum install -y $(pkg-resolver/resolve.py centos:8)
####
# Install EPEL
####
RUN pkg-resolver/install-epel.sh centos:8
RUN dnf --enablerepo=powertools install -y \
doxygen \
snappy-devel \
yasm
RUN dnf install -y \
bouncycastle \
gcc-toolset-9-gcc \
gcc-toolset-9-gcc-c++ \
libpmem-devel
# Set GCC 9 as the default C/C++ compiler
RUN echo "source /opt/rh/gcc-toolset-9/enable" >> /etc/bashrc
SHELL ["/bin/bash", "--login", "-c"]
######
# Set the environment variables needed for CMake
# to find and use GCC 9 for compilation
######
ENV GCC_HOME "/opt/rh/gcc-toolset-9"
ENV CC "${GCC_HOME}/root/usr/bin/gcc"
ENV CXX "${GCC_HOME}/root/usr/bin/g++"
ENV MODULES_RUN_QUARANTINE "LD_LIBRARY_PATH LD_PRELOAD"
ENV MODULES_CMD "/usr/share/Modules/libexec/modulecmd.tcl"
ENV SHLVL 1
ENV MODULEPATH "/etc/scl/modulefiles:/usr/share/Modules/modulefiles:/etc/modulefiles:/usr/share/modulefiles"
ENV MODULEPATH_modshare "/usr/share/modulefiles:1:/usr/share/Modules/modulefiles:1:/etc/modulefiles:1"
ENV MODULESHOME "/usr/share/Modules"
ENV LD_LIBRARY_PATH "${GCC_HOME}/root/usr/lib64:${GCC_HOME}/root/usr/lib:${GCC_HOME}/root/usr/lib64/dyninst:${GCC_HOME}/root/usr/lib/dyninst:${GCC_HOME}/root/usr/lib64:${GCC_HOME}/root/usr/lib:/usr/lib:/usr/lib64"
ENV PCP_DIR "${GCC_HOME}/root"
ENV MANPATH "${GCC_HOME}/root/usr/share/man::"
ENV PATH "${GCC_HOME}/root/usr/bin:/usr/share/Modules/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
ENV PKG_CONFIG_PATH "${GCC_HOME}/root/usr/lib64/pkgconfig"
ENV INFOPATH "${GCC_HOME}/root/usr/share/info"
# TODO: Set locale
######
# Set env vars required to build Hadoop
######
ENV MAVEN_HOME /opt/maven
ENV PATH "${PATH}:${MAVEN_HOME}/bin"
# JAVA_HOME must be set in Maven >= 3.5.0 (MNG-6003)
ENV JAVA_HOME /usr/lib/jvm/java-1.8.0
#######
# Set env vars for SpotBugs
#######
ENV SPOTBUGS_HOME /opt/spotbugs
#######
# Set env vars for Google Protobuf
#######
ENV PROTOBUF_HOME /opt/protobuf
ENV PATH "${PATH}:/opt/protobuf/bin"
######
# Install packages
######
RUN pkg-resolver/install-maven.sh centos:8
RUN pkg-resolver/install-cmake.sh centos:8
RUN pkg-resolver/install-boost.sh centos:8
RUN pkg-resolver/install-spotbugs.sh centos:8
RUN pkg-resolver/install-protobuf.sh centos:8
RUN pkg-resolver/install-zstandard.sh centos:8
RUN pkg-resolver/install-common-pkgs.sh


@ -1,102 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Dockerfile for installing the necessary dependencies for building Hadoop.
# See BUILDING.txt.
FROM debian:10
WORKDIR /root
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
#####
# Disable suggests/recommends
#####
RUN echo APT::Install-Recommends "0"\; > /etc/apt/apt.conf.d/10disableextras
RUN echo APT::Install-Suggests "0"\; >> /etc/apt/apt.conf.d/10disableextras
ENV DEBIAN_FRONTEND noninteractive
ENV DEBCONF_TERSE true
######
# Platform package dependency resolver
######
COPY pkg-resolver pkg-resolver
RUN chmod a+x pkg-resolver/install-pkg-resolver.sh
RUN pkg-resolver/install-pkg-resolver.sh debian:10
######
# Install packages from apt
######
# hadolint ignore=DL3008,SC2046
RUN apt-get -q update \
&& apt-get -q install -y --no-install-recommends $(pkg-resolver/resolve.py debian:10) \
&& echo 'deb http://deb.debian.org/debian bullseye main' >> /etc/apt/sources.list \
&& apt-get -q update \
&& apt-get -q install -y --no-install-recommends -t bullseye $(pkg-resolver/resolve.py --release=bullseye debian:10) \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
# TODO : Set locale
######
# Set env vars required to build Hadoop
######
ENV MAVEN_HOME /usr
# JAVA_HOME must be set in Maven >= 3.5.0 (MNG-6003)
ENV JAVA_HOME /usr/lib/jvm/java-11-openjdk-amd64
#######
# Set env vars for SpotBugs 4.2.2
#######
ENV SPOTBUGS_HOME /opt/spotbugs
#######
# Set env vars for Google Protobuf 3.7.1
#######
ENV PROTOBUF_HOME /opt/protobuf
ENV PATH "${PATH}:/opt/protobuf/bin"
###
# Avoid out of memory errors in builds
###
ENV MAVEN_OPTS -Xms256m -Xmx3072m
# Skip gpg verification when downloading Yetus via yetus-wrapper
ENV HADOOP_SKIP_YETUS_VERIFICATION true
####
# Install packages
####
RUN pkg-resolver/install-cmake.sh debian:10
RUN pkg-resolver/install-spotbugs.sh debian:10
RUN pkg-resolver/install-boost.sh debian:10
RUN pkg-resolver/install-protobuf.sh debian:10
RUN pkg-resolver/install-hadolint.sh debian:10
RUN pkg-resolver/install-intel-isa-l.sh debian:10
###
# Everything past this point is either not needed for testing or breaks Yetus.
# So tell Yetus not to read the rest of the file:
# YETUS CUT HERE
###
# Add a welcome message and environment checks.
COPY hadoop_env_checks.sh /root/hadoop_env_checks.sh
RUN chmod 755 /root/hadoop_env_checks.sh
# hadolint ignore=SC2016
RUN echo '${HOME}/hadoop_env_checks.sh' >> /root/.bashrc


@ -1,124 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Dockerfile for installing the necessary dependencies for building Hadoop.
# See BUILDING.txt.
FROM mcr.microsoft.com/windows:ltsc2019
# Disable the progress bar to speed up the downloads.
# hadolint ignore=SC2086
RUN powershell $Global:ProgressPreference = 'SilentlyContinue'
# Restore the default Windows shell for correct batch processing.
SHELL ["cmd", "/S", "/C"]
# Install Visual Studio 2019 Build Tools.
RUN curl -SL --output vs_buildtools.exe https://aka.ms/vs/16/release/vs_buildtools.exe \
&& (start /w vs_buildtools.exe --quiet --wait --norestart --nocache \
--installPath "%ProgramFiles(x86)%\Microsoft Visual Studio\2019\BuildTools" \
--add Microsoft.VisualStudio.Workload.VCTools \
--add Microsoft.VisualStudio.Component.VC.ASAN \
--add Microsoft.VisualStudio.Component.VC.Tools.x86.x64 \
--add Microsoft.VisualStudio.Component.Windows10SDK.19041 \
|| IF "%ERRORLEVEL%"=="3010" EXIT 0) \
&& del /q vs_buildtools.exe
# Install Chocolatey.
RUN powershell -NoProfile -ExecutionPolicy Bypass -Command "iex ((New-Object System.Net.WebClient).DownloadString('https://chocolatey.org/install.ps1'))"
RUN setx PATH "%PATH%;%ALLUSERSPROFILE%\chocolatey\bin"
# Install git.
RUN choco install git.install -y
RUN powershell Copy-Item -Recurse -Path 'C:\Program Files\Git' -Destination C:\Git
# Install vcpkg.
# hadolint ignore=DL3003
RUN powershell git clone https://github.com/microsoft/vcpkg.git \
&& cd vcpkg \
&& git checkout 7ffa425e1db8b0c3edf9c50f2f3a0f25a324541d \
&& .\bootstrap-vcpkg.bat
RUN powershell .\vcpkg\vcpkg.exe install boost:x64-windows
RUN powershell .\vcpkg\vcpkg.exe install protobuf:x64-windows
RUN powershell .\vcpkg\vcpkg.exe install openssl:x64-windows
RUN powershell .\vcpkg\vcpkg.exe install zlib:x64-windows
ENV PROTOBUF_HOME "C:\vcpkg\installed\x64-windows"
# Install Azul Java 8 JDK.
RUN powershell Invoke-WebRequest -URI https://cdn.azul.com/zulu/bin/zulu8.62.0.19-ca-jdk8.0.332-win_x64.zip -OutFile $Env:TEMP\zulu8.62.0.19-ca-jdk8.0.332-win_x64.zip
RUN powershell Expand-Archive -Path $Env:TEMP\zulu8.62.0.19-ca-jdk8.0.332-win_x64.zip -DestinationPath "C:\Java"
ENV JAVA_HOME "C:\Java\zulu8.62.0.19-ca-jdk8.0.332-win_x64"
RUN setx PATH "%PATH%;%JAVA_HOME%\bin"
# Install Apache Maven.
RUN powershell Invoke-WebRequest -URI https://archive.apache.org/dist/maven/maven-3/3.8.6/binaries/apache-maven-3.8.6-bin.zip -OutFile $Env:TEMP\apache-maven-3.8.6-bin.zip
RUN powershell Expand-Archive -Path $Env:TEMP\apache-maven-3.8.6-bin.zip -DestinationPath "C:\Maven"
RUN setx PATH "%PATH%;C:\Maven\apache-maven-3.8.6\bin"
ENV MAVEN_OPTS '-Xmx2048M -Xss128M'
# Install CMake 3.19.0.
RUN powershell Invoke-WebRequest -URI https://cmake.org/files/v3.19/cmake-3.19.0-win64-x64.zip -OutFile $Env:TEMP\cmake-3.19.0-win64-x64.zip
RUN powershell Expand-Archive -Path $Env:TEMP\cmake-3.19.0-win64-x64.zip -DestinationPath "C:\CMake"
RUN setx PATH "%PATH%;C:\CMake\cmake-3.19.0-win64-x64\bin"
# Install zstd 1.5.4.
RUN powershell Invoke-WebRequest -Uri https://github.com/facebook/zstd/releases/download/v1.5.4/zstd-v1.5.4-win64.zip -OutFile $Env:TEMP\zstd-v1.5.4-win64.zip
RUN powershell Expand-Archive -Path $Env:TEMP\zstd-v1.5.4-win64.zip -DestinationPath "C:\ZStd"
RUN setx PATH "%PATH%;C:\ZStd"
# Install libopenssl 3.1.0 needed for rsync 3.2.7.
RUN powershell Invoke-WebRequest -Uri https://repo.msys2.org/msys/x86_64/libopenssl-3.1.0-1-x86_64.pkg.tar.zst -OutFile $Env:TEMP\libopenssl-3.1.0-1-x86_64.pkg.tar.zst
RUN powershell zstd -d $Env:TEMP\libopenssl-3.1.0-1-x86_64.pkg.tar.zst -o $Env:TEMP\libopenssl-3.1.0-1-x86_64.pkg.tar
RUN powershell mkdir "C:\LibOpenSSL"
RUN powershell tar -xvf $Env:TEMP\libopenssl-3.1.0-1-x86_64.pkg.tar -C "C:\LibOpenSSL"
# Install libxxhash 0.8.1 needed for rsync 3.2.7.
RUN powershell Invoke-WebRequest -Uri https://repo.msys2.org/msys/x86_64/libxxhash-0.8.1-1-x86_64.pkg.tar.zst -OutFile $Env:TEMP\libxxhash-0.8.1-1-x86_64.pkg.tar.zst
RUN powershell zstd -d $Env:TEMP\libxxhash-0.8.1-1-x86_64.pkg.tar.zst -o $Env:TEMP\libxxhash-0.8.1-1-x86_64.pkg.tar
RUN powershell mkdir "C:\LibXXHash"
RUN powershell tar -xvf $Env:TEMP\libxxhash-0.8.1-1-x86_64.pkg.tar -C "C:\LibXXHash"
# Install libzstd 1.5.4 needed for rsync 3.2.7.
RUN powershell Invoke-WebRequest -Uri https://repo.msys2.org/msys/x86_64/libzstd-1.5.4-1-x86_64.pkg.tar.zst -OutFile $Env:TEMP\libzstd-1.5.4-1-x86_64.pkg.tar.zst
RUN powershell zstd -d $Env:TEMP\libzstd-1.5.4-1-x86_64.pkg.tar.zst -o $Env:TEMP\libzstd-1.5.4-1-x86_64.pkg.tar
RUN powershell mkdir "C:\LibZStd"
RUN powershell tar -xvf $Env:TEMP\libzstd-1.5.4-1-x86_64.pkg.tar -C "C:\LibZStd"
# Install rsync 3.2.7.
RUN powershell Invoke-WebRequest -Uri https://repo.msys2.org/msys/x86_64/rsync-3.2.7-2-x86_64.pkg.tar.zst -OutFile $Env:TEMP\rsync-3.2.7-2-x86_64.pkg.tar.zst
RUN powershell zstd -d $Env:TEMP\rsync-3.2.7-2-x86_64.pkg.tar.zst -o $Env:TEMP\rsync-3.2.7-2-x86_64.pkg.tar
RUN powershell mkdir "C:\RSync"
RUN powershell tar -xvf $Env:TEMP\rsync-3.2.7-2-x86_64.pkg.tar -C "C:\RSync"
# Copy the dependencies of rsync 3.2.7.
RUN powershell Copy-Item -Path "C:\LibOpenSSL\usr\bin\*.dll" -Destination "C:\Program` Files\Git\usr\bin"
RUN powershell Copy-Item -Path "C:\LibXXHash\usr\bin\*.dll" -Destination "C:\Program` Files\Git\usr\bin"
RUN powershell Copy-Item -Path "C:\LibZStd\usr\bin\*.dll" -Destination "C:\Program` Files\Git\usr\bin"
RUN powershell Copy-Item -Path "C:\RSync\usr\bin\*" -Destination "C:\Program` Files\Git\usr\bin"
# Install Python 3.10.11.
RUN powershell Invoke-WebRequest -Uri https://www.python.org/ftp/python/3.10.11/python-3.10.11-embed-amd64.zip -OutFile $Env:TEMP\python-3.10.11-embed-amd64.zip
RUN powershell Expand-Archive -Path $Env:TEMP\python-3.10.11-embed-amd64.zip -DestinationPath "C:\Python3"
RUN powershell New-Item -ItemType HardLink -Value "C:\Python3\python.exe" -Path "C:\Python3\python3.exe"
RUN setx path "%PATH%;C:\Python3"
# We get strange Javadoc errors without this.
RUN setx classpath ""
RUN git config --global core.longpaths true
RUN setx PATH "%PATH%;C:\Program Files\Git\usr\bin"
# Define the entry point for the docker container.
ENTRYPOINT ["C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools\\VC\\Auxiliary\\Build\\vcvars64.bat", "&&", "cmd.exe"]


@ -1,114 +0,0 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
# Docker images for building Hadoop
This folder contains the Dockerfiles for building Hadoop on various platforms.
# Dependency management
The mode of installation of the dependencies needed for building Hadoop varies from one platform to
another, since different platforms ship different toolchains. Package availability also differs: a
package that is readily available in one platform's toolchain is often missing from another, in which
case we resort to building and installing it from source, duplicating those build steps across the
Dockerfiles of all the platforms. We therefore need a way to track a dependency per package, per
platform, and optionally per release. `pkg-resolver` exists to manage exactly that diversity.
## Supported platforms
`pkg-resolver/platforms.json` contains a list of the supported platforms for dependency management.
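For reference, the `pkg-resolver/platforms.json` that appears later in this diff listed the following platforms:
```json
[
    "ubuntu:focal",
    "ubuntu:focal::arch64",
    "centos:7",
    "centos:8",
    "debian:10"
]
```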
## Package dependencies
`pkg-resolver/packages.json` maps each dependency to the package(s) that provide it on a given platform. Here's the schema of this JSON.
```json
{
"dependency_1": {
"platform_1": "package_1",
"platform_2": [
"package_1",
"package_2"
]
},
"dependency_2": {
"platform_1": [
"package_1",
"package_2",
"package_3"
]
},
"dependency_3": {
"platform_1": {
"release_1": "package_1_1_1",
"release_2": [
"package_1_2_1",
"package_1_2_2"
]
},
"platform_2": [
"package_2_1",
{
"release_1": "package_2_1_1"
}
]
}
}
```
The root JSON element contains unique _dependency_ children. Each dependency in turn maps _platform_
names to the list of _packages_ to be installed for that platform. To interpret the above JSON:
1. For `dependency_1`, `package_1` needs to be installed for `platform_1`.
2. For `dependency_1`, `package_1` and `package_2` need to be installed for `platform_2`.
3. For `dependency_2`, `package_1`, `package_2` and `package_3` need to be installed for
   `platform_1`.
4. For `dependency_3`, `package_1_1_1` gets installed only if `release_1` has been specified
   for `platform_1`.
5. For `dependency_3`, the packages `package_1_2_1` and `package_1_2_2` get installed only
   if `release_2` has been specified for `platform_1`.
6. For `dependency_3`, for `platform_2`, `package_2_1` is always installed, but `package_2_1_1` gets
   installed only if `release_1` has been specified.
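As a concrete instance of this schema, here is the `gcc` entry from the `pkg-resolver/packages.json` that appears later in this diff (excerpted; the `ubuntu:focal::arch64` mapping is identical to `ubuntu:focal`):
```json
{
    "gcc": {
        "debian:10": {
            "bullseye": [
                "gcc",
                "g++"
            ]
        },
        "ubuntu:focal": [
            "gcc",
            "g++"
        ],
        "centos:7": [
            "centos-release-scl",
            "devtoolset-9"
        ]
    }
}
```
Resolving `debian:10` without a release label thus yields no gcc packages; they are only returned when `--release=bullseye` is passed, which is why the Debian 10 Dockerfile shown earlier in this diff invokes `resolve.py` twice, the second time with `--release=bullseye` and `apt-get -t bullseye`.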
### Tool help
```shell
$ pkg-resolver/resolve.py -h
usage: resolve.py [-h] [-r RELEASE] platform
Platform package dependency resolver for building Apache Hadoop
positional arguments:
platform The name of the platform to resolve the dependencies for
optional arguments:
-h, --help show this help message and exit
-r RELEASE, --release RELEASE
The release label to filter the packages for the given platform
```
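The Dockerfiles earlier in this diff consume the resolver's output directly inside their package-install step, for example (taken from the Ubuntu Focal aarch64 and Debian 10 Dockerfiles):
```shell
# Ubuntu Focal (aarch64): install every package mapped to this platform
apt-get -q install -y --no-install-recommends $(pkg-resolver/resolve.py ubuntu:focal::arch64)

# Debian 10: a second pass installs the packages filtered for the bullseye release
apt-get -q install -y --no-install-recommends -t bullseye $(pkg-resolver/resolve.py --release=bullseye debian:10)
```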
## Standalone packages
Some packages are not available in the toolchains of all platforms and have to be built and installed
from source. Repeating those build steps in every Dockerfile would duplicate code and make maintenance
a hassle, so the steps live in a `pkg-resolver/install-<package>.sh` script that the Dockerfiles
invoke, as sketched below.
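Each `install-<package>.sh` script takes the platform as its first argument and an optional version as its second; an unrecognised version falls back to the script's default with a warning. A minimal sketch of the calling convention, based on the installer scripts and the Dockerfile invocations elsewhere in this diff (the explicit version argument is illustrative):
```shell
# Install the default Boost (1.72.0) for Ubuntu Focal
pkg-resolver/install-boost.sh ubuntu:focal

# Explicitly request SpotBugs 4.2.2 on the aarch64 variant of Ubuntu Focal
pkg-resolver/install-spotbugs.sh ubuntu:focal::arch64 4.2.2
```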


@ -1,50 +0,0 @@
#!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Checks whether the given platform is supported for building Apache Hadoop
"""
import json
import sys
def get_platforms():
"""
:return: A list of the supported platforms managed by pkg-resolver.
"""
with open('pkg-resolver/platforms.json', encoding='utf-8', mode='r') as platforms_file:
return json.loads(platforms_file.read())
def is_supported_platform(platform):
"""
:param platform: The name of the platform
:return: Whether the platform is supported
"""
return platform in get_platforms()
if __name__ == '__main__':
if len(sys.argv) != 2:
print('ERROR: Expecting 1 argument, {} were provided'.format(len(sys.argv) - 1),
file=sys.stderr)
sys.exit(1)
sys.exit(0 if is_supported_platform(sys.argv[1]) else 1)


@ -1,56 +0,0 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
if [ $# -lt 1 ]; then
echo "ERROR: Need at least 1 argument, $# were provided"
exit 1
fi
pkg-resolver/check_platform.py "$1"
if [ $? -eq 1 ]; then
echo "ERROR: Unsupported platform $1"
exit 1
fi
default_version="1.72.0"
version_to_install=$default_version
if [ -n "$2" ]; then
version_to_install="$2"
fi
if [ "$version_to_install" != "1.72.0" ]; then
echo "WARN: Don't know how to install version $version_to_install, installing the default version $default_version instead"
version_to_install=$default_version
fi
if [ "$version_to_install" == "1.72.0" ]; then
# hadolint ignore=DL3003
mkdir -p /opt/boost-library &&
curl -L https://sourceforge.net/projects/boost/files/boost/1.72.0/boost_1_72_0.tar.bz2/download >boost_1_72_0.tar.bz2 &&
mv boost_1_72_0.tar.bz2 /opt/boost-library &&
cd /opt/boost-library &&
tar --bzip2 -xf boost_1_72_0.tar.bz2 &&
cd /opt/boost-library/boost_1_72_0 &&
./bootstrap.sh --prefix=/usr/ &&
./b2 --without-python install &&
cd /root &&
rm -rf /opt/boost-library
else
echo "ERROR: Don't know how to install version $version_to_install"
exit 1
fi


@ -1,53 +0,0 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
if [ $# -lt 1 ]; then
echo "ERROR: Need at least 1 argument, $# were provided"
exit 1
fi
pkg-resolver/check_platform.py "$1"
if [ $? -eq 1 ]; then
echo "ERROR: Unsupported platform $1"
exit 1
fi
default_version="3.19.0"
version_to_install=$default_version
if [ -n "$2" ]; then
version_to_install="$2"
fi
if [ "$version_to_install" != "3.19.0" ]; then
echo "WARN: Don't know how to install version $version_to_install, installing the default version $default_version instead"
version_to_install=$default_version
fi
if [ "$version_to_install" == "3.19.0" ]; then
# hadolint ignore=DL3003
mkdir -p /tmp/cmake /opt/cmake &&
curl -L -s -S https://cmake.org/files/v3.19/cmake-3.19.0.tar.gz -o /tmp/cmake/cmake-3.19.0.tar.gz &&
tar xzf /tmp/cmake/cmake-3.19.0.tar.gz --strip-components 1 -C /opt/cmake &&
cd /opt/cmake || exit && ./bootstrap &&
make "-j$(nproc)" &&
make install &&
cd /root || exit
else
echo "ERROR: Don't know how to install version $version_to_install"
exit 1
fi


@ -1,22 +0,0 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
######
# Install pylint and python-dateutil
######
pip3 install pylint==2.6.0 python-dateutil==2.8.1


@ -1,49 +0,0 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
if [ $# -lt 1 ]; then
echo "ERROR: Need at least 1 argument, $# were provided"
exit 1
fi
pkg-resolver/check_platform.py "$1"
if [ $? -eq 1 ]; then
echo "ERROR: Unsupported platform $1"
exit 1
fi
default_version="8"
version_to_install=$default_version
if [ -n "$2" ]; then
version_to_install="$2"
fi
if [ "$version_to_install" != "8" ]; then
echo "WARN: Don't know how to install version $version_to_install, installing the default version $default_version instead"
version_to_install=$default_version
fi
if [ "$version_to_install" == "8" ]; then
mkdir -p /tmp/epel &&
curl -L -s -S https://download-ib01.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm \
-o /tmp/epel/epel-release-latest-8.noarch.rpm &&
rpm -Uvh /tmp/epel/epel-release-latest-8.noarch.rpm
else
echo "ERROR: Don't know how to install version $version_to_install"
exit 1
fi


@ -1,55 +0,0 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
if [ $# -lt 1 ]; then
echo "ERROR: Need at least 1 argument, $# were provided"
exit 1
fi
pkg-resolver/check_platform.py "$1"
if [ $? -eq 1 ]; then
echo "ERROR: Unsupported platform $1"
exit 1
fi
default_version="2.9.5"
version_to_install=$default_version
if [ -n "$2" ]; then
version_to_install="$2"
fi
if [ "$version_to_install" != "2.9.5" ]; then
echo "WARN: Don't know how to install version $version_to_install, installing the default version $default_version instead"
version_to_install=$default_version
fi
if [ "$version_to_install" == "2.9.5" ]; then
# hadolint ignore=DL3003
mkdir -p /tmp/git /opt/git &&
curl -L -s -S https://mirrors.edge.kernel.org/pub/software/scm/git/git-2.9.5.tar.gz >/tmp/git/git-2.9.5.tar.gz &&
tar xzf /tmp/git/git-2.9.5.tar.gz --strip-components 1 -C /opt/git &&
cd /opt/git || exit &&
make configure &&
./configure --prefix=/usr/local &&
make "-j$(nproc)" &&
make install &&
cd /root || exit
else
echo "ERROR: Don't know how to install version $version_to_install"
exit 1
fi


@ -1,35 +0,0 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
if [ $# -lt 1 ]; then
echo "ERROR: Need at least 1 argument, $# were provided"
exit 1
fi
pkg-resolver/check_platform.py "$1"
if [ $? -eq 1 ]; then
echo "ERROR: Unsupported platform $1"
exit 1
fi
curl -L -s -S \
https://github.com/hadolint/hadolint/releases/download/v1.11.1/hadolint-Linux-x86_64 \
-o /bin/hadolint &&
chmod a+rx /bin/hadolint &&
shasum -a 512 /bin/hadolint |
awk '$1!="734e37c1f6619cbbd86b9b249e69c9af8ee1ea87a2b1ff71dccda412e9dac35e63425225a95d71572091a3f0a11e9a04c2fc25d9e91b840530c26af32b9891ca" {exit(1)}'


@ -1,58 +0,0 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
if [ $# -lt 1 ]; then
echo "ERROR: Need at least 1 argument, $# were provided"
exit 1
fi
pkg-resolver/check_platform.py "$1"
if [ $? -eq 1 ]; then
echo "ERROR: Unsupported platform $1"
exit 1
fi
default_version="2.29.0"
version_to_install=$default_version
if [ -n "$2" ]; then
version_to_install="$2"
fi
if [ "$version_to_install" != "2.29.0" ]; then
echo "WARN: Don't know how to install version $version_to_install, installing the default version $default_version instead"
version_to_install=$default_version
fi
if [ "$version_to_install" == "2.29.0" ]; then
# hadolint ignore=DL3003,DL3008
mkdir -p /opt/isa-l-src &&
curl -L -s -S \
https://github.com/intel/isa-l/archive/v2.29.0.tar.gz \
-o /opt/isa-l.tar.gz &&
tar xzf /opt/isa-l.tar.gz --strip-components 1 -C /opt/isa-l-src &&
cd /opt/isa-l-src &&
./autogen.sh &&
./configure &&
make "-j$(nproc)" &&
make install &&
cd /root &&
rm -rf /opt/isa-l-src
else
echo "ERROR: Don't know how to install version $version_to_install"
exit 1
fi


@ -1,49 +0,0 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
if [ $# -lt 1 ]; then
echo "ERROR: Need at least 1 argument, $# were provided"
exit 1
fi
pkg-resolver/check_platform.py "$1"
if [ $? -eq 1 ]; then
echo "ERROR: Unsupported platform $1"
exit 1
fi
default_version="3.6.3"
version_to_install=$default_version
if [ -n "$2" ]; then
version_to_install="$2"
fi
if [ "$version_to_install" != "3.6.3" ]; then
echo "WARN: Don't know how to install version $version_to_install, installing the default version $default_version instead"
version_to_install=$default_version
fi
if [ "$version_to_install" == "3.6.3" ]; then
mkdir -p /opt/maven /tmp/maven &&
curl -L -s -S https://dlcdn.apache.org/maven/maven-3/3.6.3/binaries/apache-maven-3.6.3-bin.tar.gz \
-o /tmp/maven/apache-maven-3.6.3-bin.tar.gz &&
tar xzf /tmp/maven/apache-maven-3.6.3-bin.tar.gz --strip-components 1 -C /opt/maven
else
echo "ERROR: Don't know how to install version $version_to_install"
exit 1
fi


@ -1,54 +0,0 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
if [ $# -lt 1 ]; then
echo "ERROR: Need at least 1 argument, $# were provided"
exit 1
fi
pkg-resolver/check_platform.py "$1"
if [ $? -eq 1 ]; then
echo "ERROR: Unsupported platform $1"
exit 1
fi
default_version="14.16.1"
version_to_install=$default_version
if [ -n "$2" ]; then
version_to_install="$2"
fi
if [ "$version_to_install" != "14.16.1" ]; then
echo "WARN: Don't know how to install version $version_to_install, installing the default version $default_version instead"
version_to_install=$default_version
fi
if [ "$version_to_install" == "14.16.1" ]; then
# hadolint ignore=DL3003
mkdir -p /tmp/node &&
curl -L -s -S https://nodejs.org/dist/v14.16.1/node-v14.16.1.tar.gz -o /tmp/node-v14.16.1.tar.gz &&
tar xzf /tmp/node-v14.16.1.tar.gz --strip-components 1 -C /tmp/node &&
cd /tmp/node || exit &&
./configure &&
make "-j$(nproc)" &&
make install &&
cd /root || exit
else
echo "ERROR: Don't know how to install version $version_to_install"
exit 1
fi


@ -1,39 +0,0 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
if [ $# -lt 1 ]; then
echo "ERROR: No platform specified, please specify one"
exit 1
fi
chmod a+x pkg-resolver/*.sh pkg-resolver/*.py
chmod a+r pkg-resolver/*.json
if [ "$1" == "debian:10" ]; then
apt-get -q update
apt-get -q install -y --no-install-recommends python3 \
python3-pip \
python3-pkg-resources \
python3-setuptools \
python3-wheel
pip3 install pylint==2.6.0 python-dateutil==2.8.1
else
# Need to add the code for the rest of the platforms - HADOOP-17920
echo "ERROR: The given platform $1 is not yet supported or is invalid"
exit 1
fi


@ -1,57 +0,0 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
if [ $# -lt 1 ]; then
echo "ERROR: Need at least 1 argument, $# were provided"
exit 1
fi
pkg-resolver/check_platform.py "$1"
if [ $? -eq 1 ]; then
echo "ERROR: Unsupported platform $1"
exit 1
fi
default_version="3.7.1"
version_to_install=$default_version
if [ -n "$2" ]; then
version_to_install="$2"
fi
if [ "$version_to_install" != "3.7.1" ]; then
echo "WARN: Don't know how to install version $version_to_install, installing the default version $default_version instead"
version_to_install=$default_version
fi
if [ "$version_to_install" == "3.7.1" ]; then
# hadolint ignore=DL3003
mkdir -p /opt/protobuf-src &&
curl -L -s -S \
https://github.com/protocolbuffers/protobuf/releases/download/v3.7.1/protobuf-java-3.7.1.tar.gz \
-o /opt/protobuf.tar.gz &&
tar xzf /opt/protobuf.tar.gz --strip-components 1 -C /opt/protobuf-src &&
cd /opt/protobuf-src &&
./configure --prefix=/opt/protobuf &&
make "-j$(nproc)" &&
make install &&
cd /root &&
rm -rf /opt/protobuf-src
else
echo "ERROR: Don't know how to install version $version_to_install"
exit 1
fi


@ -1,50 +0,0 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
if [ $# -lt 1 ]; then
echo "ERROR: Need at least 1 argument, $# were provided"
exit 1
fi
pkg-resolver/check_platform.py "$1"
if [ $? -eq 1 ]; then
echo "ERROR: Unsupported platform $1"
exit 1
fi
default_version="4.2.2"
version_to_install=$default_version
if [ -n "$2" ]; then
version_to_install="$2"
fi
if [ "$version_to_install" != "4.2.2" ]; then
echo "WARN: Don't know how to install version $version_to_install, installing the default version $default_version instead"
version_to_install=$default_version
fi
if [ "$version_to_install" == "4.2.2" ]; then
mkdir -p /opt/spotbugs &&
curl -L -s -S https://github.com/spotbugs/spotbugs/releases/download/4.2.2/spotbugs-4.2.2.tgz \
-o /opt/spotbugs.tgz &&
tar xzf /opt/spotbugs.tgz --strip-components 1 -C /opt/spotbugs &&
chmod +x /opt/spotbugs/bin/*
else
echo "ERROR: Don't know how to install version $version_to_install"
exit 1
fi


@ -1,49 +0,0 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
if [ $# -lt 1 ]; then
echo "ERROR: Need at least 1 argument, $# were provided"
exit 1
fi
pkg-resolver/check_platform.py "$1"
if [ $? -eq 1 ]; then
echo "ERROR: Unsupported platform $1"
exit 1
fi
default_version="1.2.0-4"
version_to_install=$default_version
if [ -n "$2" ]; then
version_to_install="$2"
fi
if [ "$version_to_install" != "1.2.0-4" ]; then
echo "WARN: Don't know how to install version $version_to_install, installing the default version $default_version instead"
version_to_install=$default_version
fi
if [ "$version_to_install" == "1.2.0-4" ]; then
mkdir -p /tmp/yasm &&
curl -L -s -S https://download-ib01.fedoraproject.org/pub/epel/7/x86_64/Packages/y/yasm-1.2.0-4.el7.x86_64.rpm \
-o /tmp/yasm-1.2.0-4.el7.x86_64.rpm &&
rpm -Uvh /tmp/yasm-1.2.0-4.el7.x86_64.rpm
else
echo "ERROR: Don't know how to install version $version_to_install"
exit 1
fi


@ -1,53 +0,0 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
if [ $# -lt 1 ]; then
echo "ERROR: Need at least 1 argument, $# were provided"
exit 1
fi
pkg-resolver/check_platform.py "$1"
if [ $? -eq 1 ]; then
echo "ERROR: Unsupported platform $1"
exit 1
fi
default_version="1.4.9"
version_to_install=$default_version
if [ -n "$2" ]; then
version_to_install="$2"
fi
if [ "$version_to_install" != "1.4.9" ]; then
echo "WARN: Don't know how to install version $version_to_install, installing the default version $default_version instead"
version_to_install=$default_version
fi
if [ "$version_to_install" == "1.4.9" ]; then
# hadolint ignore=DL3003
mkdir -p /opt/zstd /tmp/zstd &&
curl -L -s -S https://github.com/facebook/zstd/archive/refs/tags/v1.4.9.tar.gz -o /tmp/zstd/v1.4.9.tar.gz &&
tar xzf /tmp/zstd/v1.4.9.tar.gz --strip-components 1 -C /opt/zstd &&
cd /opt/zstd || exit &&
make "-j$(nproc)" &&
make install &&
cd /root || exit
else
echo "ERROR: Don't know how to install version $version_to_install"
exit 1
fi


@ -1,360 +0,0 @@
{
"ant": {
"debian:10": "ant",
"ubuntu:focal": "ant",
"ubuntu:focal::arch64": "ant",
"centos:7": "ant",
"centos:8": "ant"
},
"apt-utils": {
"debian:10": "apt-utils",
"ubuntu:focal": "apt-utils",
"ubuntu:focal::arch64": "apt-utils"
},
"automake": {
"debian:10": "automake",
"ubuntu:focal": "automake",
"ubuntu:focal::arch64": "automake",
"centos:7": "automake",
"centos:8": "automake"
},
"autoconf": {
"centos:7": "autoconf"
},
"bats": {
"debian:10": "bats",
"ubuntu:focal": "bats",
"ubuntu:focal::arch64": "bats"
},
"build-essential": {
"debian:10": "build-essential",
"ubuntu:focal": "build-essential",
"ubuntu:focal::arch64": "build-essential",
"centos:7": "build-essential"
},
"bzip2": {
"debian:10": [
"bzip2",
"libbz2-dev"
],
"ubuntu:focal": [
"bzip2",
"libbz2-dev"
],
"ubuntu:focal::arch64": [
"bzip2",
"libbz2-dev"
],
"centos:7": [
"bzip2",
"bzip2-devel"
],
"centos:8": [
"bzip2",
"bzip2-devel"
]
},
"clang": {
"debian:10": "clang",
"ubuntu:focal": "clang",
"ubuntu:focal::arch64": "clang",
"centos:7": "clang",
"centos:8": "clang"
},
"cmake": {
"ubuntu:focal": "cmake",
"ubuntu:focal::arch64": "cmake"
},
"curl": {
"debian:10": [
"curl",
"libcurl4-openssl-dev"
],
"ubuntu:focal": [
"curl",
"libcurl4-openssl-dev"
],
"ubuntu:focal::arch64": [
"curl",
"libcurl4-openssl-dev"
],
"centos:7": [
"curl",
"libcurl-devel"
],
"centos:8": [
"curl",
"libcurl-devel"
]
},
"doxygen": {
"debian:10": "doxygen",
"ubuntu:focal": "doxygen",
"ubuntu:focal::arch64": "doxygen",
"centos:7": "doxygen"
},
"dnf": {
"centos:8": "dnf"
},
"fuse": {
"debian:10": [
"fuse",
"libfuse-dev"
],
"ubuntu:focal": [
"fuse",
"libfuse-dev"
],
"ubuntu:focal::arch64": [
"fuse",
"libfuse-dev"
],
"centos:7": [
"fuse",
"fuse-libs",
"fuse-devel"
],
"centos:8": [
"fuse",
"fuse-libs",
"fuse-devel"
]
},
"gcc": {
"debian:10": {
"bullseye": [
"gcc",
"g++"
]
},
"ubuntu:focal": [
"gcc",
"g++"
],
"ubuntu:focal::arch64": [
"gcc",
"g++"
],
"centos:7": [
"centos-release-scl",
"devtoolset-9"
]
},
"gettext": {
"centos:7": "gettext-devel"
},
"git": {
"debian:10": "git",
"ubuntu:focal": "git",
"ubuntu:focal::arch64": "git",
"centos:8": "git"
},
"gnupg-agent": {
"debian:10": "gnupg-agent",
"ubuntu:focal": "gnupg-agent",
"ubuntu:focal::arch64": "gnupg-agent"
},
"hugo": {
"debian:10": "hugo",
"ubuntu:focal": "hugo",
"ubuntu:focal::arch64": "hugo"
},
"libbcprov-java": {
"debian:10": "libbcprov-java",
"ubuntu:focal": "libbcprov-java",
"ubuntu:focal::arch64": "libbcprov-java"
},
"libtool": {
"debian:10": "libtool",
"ubuntu:focal": "libtool",
"ubuntu:focal::arch64": "libtool",
"centos:7": "libtool",
"centos:8": "libtool"
},
"openssl": {
"debian:10": "libssl-dev",
"ubuntu:focal": "libssl-dev",
"ubuntu:focal::arch64": "libssl-dev",
"centos:7": "openssl-devel",
"centos:8": "openssl-devel"
},
"perl": {
"centos:7": [
"perl-CPAN",
"perl-devel"
]
},
"protocol-buffers": {
"debian:10": [
"libprotobuf-dev",
"libprotoc-dev"
],
"ubuntu:focal": [
"libprotobuf-dev",
"libprotoc-dev"
],
"ubuntu:focal::arch64": [
"libprotobuf-dev",
"libprotoc-dev"
]
},
"sasl": {
"debian:10": "libsasl2-dev",
"ubuntu:focal": "libsasl2-dev",
"ubuntu:focal::arch64": "libsasl2-dev",
"centos:7": "cyrus-sasl-devel",
"centos:8": "cyrus-sasl-devel"
},
"snappy": {
"debian:10": "libsnappy-dev",
"ubuntu:focal": "libsnappy-dev",
"ubuntu:focal::arch64": "libsnappy-dev",
"centos:7": "snappy-devel"
},
"zlib": {
"debian:10": [
"libzstd-dev",
"zlib1g-dev"
],
"ubuntu:focal": [
"libzstd-dev",
"zlib1g-dev"
],
"ubuntu:focal::arch64": [
"libzstd-dev",
"zlib1g-dev"
],
"centos:7": [
"zlib-devel",
"lz4-devel"
],
"centos:8": [
"zlib-devel",
"lz4-devel"
]
},
"locales": {
"debian:10": "locales",
"ubuntu:focal": "locales",
"ubuntu:focal::arch64": "locales"
},
"libtirpc-devel": {
"centos:7": "libtirpc-devel",
"centos:8": "libtirpc-devel"
},
"libpmem": {
"centos:7": "libpmem-devel"
},
"make": {
"debian:10": "make",
"ubuntu:focal": "make",
"ubuntu:focal::arch64": "make",
"centos:7": "make",
"centos:8": "make"
},
"maven": {
"debian:10": "maven",
"ubuntu:focal": "maven",
"ubuntu:focal::arch64": "maven"
},
"java": {
"debian:10": "openjdk-11-jdk",
"ubuntu:focal": [
"openjdk-8-jdk",
"openjdk-11-jdk"
],
"ubuntu:focal::arch64": [
"openjdk-8-jdk",
"openjdk-11-jdk"
]
},
"pinentry-curses": {
"debian:10": "pinentry-curses",
"ubuntu:focal": "pinentry-curses",
"ubuntu:focal::arch64": "pinentry-curses",
"centos:7": "pinentry-curses",
"centos:8": "pinentry-curses"
},
"pkg-config": {
"debian:10": "pkg-config",
"ubuntu:focal": "pkg-config",
"ubuntu:focal::arch64": "pkg-config",
"centos:8": "pkg-config"
},
"python": {
"debian:10": [
"python3",
"python3-pip",
"python3-pkg-resources",
"python3-setuptools",
"python3-wheel"
],
"ubuntu:focal": [
"python3",
"python3-pip",
"python3-pkg-resources",
"python3-setuptools",
"python3-wheel"
],
"ubuntu:focal::arch64": [
"python2.7",
"python3",
"python3-pip",
"python3-pkg-resources",
"python3-setuptools",
"python3-wheel"
],
"centos:7": [
"python3",
"python3-pip",
"python3-setuptools",
"python3-wheel"
],
"centos:8": [
"python3",
"python3-pip",
"python3-setuptools",
"python3-wheel"
]
},
"rsync": {
"debian:10": "rsync",
"ubuntu:focal": "rsync",
"ubuntu:focal::arch64": "rsync",
"centos:7": "rsync",
"centos:8": "rsync"
},
"shellcheck": {
"debian:10": "shellcheck",
"ubuntu:focal": "shellcheck",
"ubuntu:focal::arch64": "shellcheck"
},
"shasum": {
"centos:7": "perl-Digest-SHA",
"centos:8": "perl-Digest-SHA"
},
"software-properties-common": {
"debian:10": "software-properties-common",
"ubuntu:focal": "software-properties-common",
"ubuntu:focal::arch64": "software-properties-common"
},
"sudo": {
"debian:10": "sudo",
"ubuntu:focal": "sudo",
"ubuntu:focal::arch64": "sudo",
"centos:7": "sudo",
"centos:8": "sudo"
},
"valgrind": {
"debian:10": "valgrind",
"ubuntu:focal": "valgrind",
"ubuntu:focal::arch64": "valgrind",
"centos:7": "valgrind",
"centos:8": "valgrind"
},
"yasm": {
"debian:10": "yasm",
"ubuntu:focal": "yasm",
"ubuntu:focal::arch64": "yasm"
}
}


@ -1,7 +0,0 @@
[
"ubuntu:focal",
"ubuntu:focal::arch64",
"centos:7",
"centos:8",
"debian:10"
]


@ -1,98 +0,0 @@
#!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Platform package dependency resolver for building Apache Hadoop.
"""
import argparse
import json
import sys
from check_platform import is_supported_platform
def get_packages(platform, release=None):
"""
Resolve and get the list of packages to install for the given platform.
:param platform: The platform for which the packages need to be resolved.
:param release: An optional parameter that filters the packages of the given platform for the
specified release.
:return: A list of resolved packages to install.
"""
with open('pkg-resolver/packages.json', encoding='utf-8', mode='r') as pkg_file:
pkgs = json.loads(pkg_file.read())
packages = []
def process_package(package, in_release=False):
"""
Processes the given package object that belongs to a platform and adds it to the packages
list variable in the parent scope.
In essence, this method recursively traverses the JSON structure defined in packages.json
and performs the core filtering.
:param package: The package object to process.
:param in_release: A boolean that indicates whether the current traversal belongs to a package
that needs to be filtered for the given release label.
"""
if isinstance(package, list):
for entry in package:
process_package(entry, in_release)
elif isinstance(package, dict):
if release is None:
return
for entry in package.get(release, []):
process_package(entry, in_release=True)
elif isinstance(package, str):
# Filter out the package that doesn't belong to this release,
# if a release label has been specified.
if release is not None and not in_release:
return
packages.append(package)
else:
raise Exception('Unknown package of type: {}'.format(type(package)))
for platforms in filter(lambda x: x.get(platform) is not None, pkgs.values()):
process_package(platforms.get(platform))
return packages
if __name__ == '__main__':
if len(sys.argv) < 2:
print('ERROR: Need at least 1 argument, {} were provided'.format(len(sys.argv) - 1),
file=sys.stderr)
sys.exit(1)
arg_parser = argparse.ArgumentParser(
description='Platform package dependency resolver for building Apache Hadoop')
arg_parser.add_argument('-r', '--release', nargs=1, type=str,
help='The release label to filter the packages for the given platform')
arg_parser.add_argument('platform', nargs=1, type=str,
help='The name of the platform to resolve the dependencies for')
args = arg_parser.parse_args()
if not is_supported_platform(args.platform[0]):
print(
'ERROR: The given platform {} is not supported. '
'Please refer to platforms.json for a list of supported platforms'.format(
args.platform), file=sys.stderr)
sys.exit(1)
packages_to_install = get_packages(args.platform[0],
args.release[0] if args.release is not None else None)
print(' '.join(packages_to_install))
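As a rough illustration of the filtering rules above, here is a minimal, self-contained sketch. The nested structure and the release label are invented for the example and are not taken from packages.json; only the traversal logic mirrors the resolver: without a release label, plain entries are returned and release-keyed dictionaries are skipped, while with a release label, only the entries nested under that label are returned.
```
# Illustrative sketch only -- the data below is made up and is not packages.json.
SAMPLE_PKGS = {
    "ant": {"ubuntu:focal": "ant"},
    "gcc": {"ubuntu:focal": ["gcc", "g++"]},
    "extras": {"ubuntu:focal": [{"my-release": ["libfoo-dev"]}]},
}

def resolve(platform, release=None):
    resolved = []

    def walk(pkg, in_release=False):
        if isinstance(pkg, list):
            for entry in pkg:
                walk(entry, in_release)
        elif isinstance(pkg, dict):
            # Release-specific entries are nested under the release label.
            if release is not None:
                for entry in pkg.get(release, []):
                    walk(entry, in_release=True)
        elif isinstance(pkg, str):
            # With a release label, keep only the release-specific entries.
            if release is None or in_release:
                resolved.append(pkg)

    for per_platform in SAMPLE_PKGS.values():
        if per_platform.get(platform) is not None:
            walk(per_platform[platform])
    return resolved

print(resolve("ubuntu:focal"))                       # ['ant', 'gcc', 'g++']
print(resolve("ubuntu:focal", release="my-release")) # ['libfoo-dev']
```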

View File

@ -1,33 +0,0 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
if [ $# -lt 1 ]; then
echo "ERROR: Need at least 1 argument, $# were provided"
exit 1
fi
if [ "$1" == "centos:7" ] || [ "$1" == "centos:8" ]; then
cd /etc/yum.repos.d/ || exit &&
sed -i 's/mirrorlist/#mirrorlist/g' /etc/yum.repos.d/CentOS-* &&
sed -i 's|#baseurl=http://mirror.centos.org|baseurl=http://vault.centos.org|g' /etc/yum.repos.d/CentOS-* &&
yum update -y &&
cd /root || exit
else
echo "ERROR: Setting the archived baseurl is only supported for centos 7 and 8 environments"
exit 1
fi

View File

@ -1,134 +0,0 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
Apache Hadoop Git/Jira FixVersion validation
============================================================
Git commits in Apache Hadoop contain a Jira number of the format
HADOOP-XXXX, HDFS-XXXX, YARN-XXXX or MAPREDUCE-XXXX.
While creating a release candidate, we also include a changelist,
and this changelist can be identified based on Fixed/Closed Jiras
with the correct fix versions. However, we sometimes face a few
inconsistencies between the fixed Jiras and the Git commit messages.
The git_jira_fix_version_check.py script takes care of
identifying all git commits whose commit
messages have any of these issues:
1. commit is reverted as per commit message
2. commit does not contain Jira number format in message
3. Jira does not have expected fixVersion
4. Jira has expected fixVersion, but it is not yet resolved
Moreover, this script also finds any resolved Jira with expected
fixVersion but without any corresponding commit present.
This should be useful as part of RC preparation.
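The core of the per-commit check can be sketched as follows. This is a simplified illustration only; the helper names here are invented, and the full script that this sketch paraphrases appears later in this changeset.
```
import re
from jira import JIRA

PROJECT_JIRA_KEYS = ('HADOOP-', 'HDFS-', 'YARN-', 'MAPREDUCE-')

def jira_key_of(commit_message):
    """Return e.g. 'HDFS-16333' extracted from a commit message, or None."""
    match = re.search('(' + '|'.join(PROJECT_JIRA_KEYS) + r')(\d+)', commit_message)
    return match.group(1) + match.group(2) if match else None

def has_fix_version(jira_client, issue_key, fix_version):
    """True if the Jira issue carries the expected fixVersion."""
    issue = jira_client.issue(issue_key)
    return any(version.name == fix_version for version in issue.fields.fixVersions)

# Usage (needs network access to the Jira server):
# jira_client = JIRA(server='https://issues.apache.org/jira')
# key = jira_key_of('HDFS-16333. fix balancer bug when transfer an EC block (#3679)')
# print(key, has_fix_version(jira_client, key, '3.3.2'))
```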
git_jira_fix_version_check supports python3 and requires
installation of the jira package:
```
$ python3 --version
Python 3.9.7
$ python3 -m venv ./venv
$ ./venv/bin/pip install -r dev-support/git-jira-validation/requirements.txt
$ ./venv/bin/python dev-support/git-jira-validation/git_jira_fix_version_check.py
```
The script also requires the following inputs:
```
1. First commit hash to start excluding commits from history:
Usually we can provide latest commit hash from last tagged release
so that the script will only loop through all commits in git commit
history before this commit hash. e.g for 3.3.2 release, we can provide
git hash: fa4915fdbbbec434ab41786cb17b82938a613f16
because this commit bumps up hadoop pom versions to 3.3.2:
https://github.com/apache/hadoop/commit/fa4915fdbbbec434ab41786cb17b82938a613f16
2. Fix Version:
Exact fixVersion that we would like to compare all Jira's fixVersions
with. e.g for 3.3.2 release, it should be 3.3.2.
3. JIRA Project Name:
The exact name of the project, case-sensitive, e.g. HADOOP / OZONE
4. Path of project's working dir with release branch checked-in:
Path of the project from which we want to compare git hashes. The local fork
of the project should be up to date with upstream, and the expected release
branch should be checked-in.
5. Jira server url (default url: https://issues.apache.org/jira):
Default value of server points to ASF Jiras but this script can be
used outside of ASF Jira too.
```
Example of script execution:
```
JIRA Project Name (e.g HADOOP / OZONE etc): HADOOP
First commit hash to start excluding commits from history: fa4915fdbbbec434ab41786cb17b82938a613f16
Fix Version: 3.3.2
Jira server url (default: https://issues.apache.org/jira):
Path of project's working dir with release branch checked-in: /Users/vjasani/Documents/src/hadoop-3.3/hadoop
Check git status output and verify expected branch
On branch branch-3.3.2
Your branch is up to date with 'origin/branch-3.3.2'.
nothing to commit, working tree clean
Jira/Git commit message diff starting: ##############################################
Jira not present with version: 3.3.2. Commit: 8cd8e435fb43a251467ca74fadcb14f21a3e8163 HADOOP-17198. Support S3 Access Points (#3260) (branch-3.3.2) (#3955)
WARN: Jira not found. Commit: 8af28b7cca5c6020de94e739e5373afc69f399e5 Updated the index as per 3.3.2 release
WARN: Jira not found. Commit: e42e483d0085aa46543ebcb1196dd155ddb447d0 Make upstream aware of 3.3.1 release
Commit seems reverted. Commit: 6db1165380cd308fb74c9d17a35c1e57174d1e09 Revert "HDFS-14099. Unknown frame descriptor when decompressing multiple frames (#3836)"
Commit seems reverted. Commit: 1e3f94fa3c3d4a951d4f7438bc13e6f008f228f4 Revert "HDFS-16333. fix balancer bug when transfer an EC block (#3679)"
Jira not present with version: 3.3.2. Commit: ce0bc7b473a62a580c1227a4de6b10b64b045d3a HDFS-16344. Improve DirectoryScanner.Stats#toString (#3695)
Jira not present with version: 3.3.2. Commit: 30f0629d6e6f735c9f4808022f1a1827c5531f75 HDFS-16339. Show the threshold when mover threads quota is exceeded (#3689)
Jira not present with version: 3.3.2. Commit: e449daccf486219e3050254d667b74f92e8fc476 YARN-11007. Correct words in YARN documents (#3680)
Commit seems reverted. Commit: 5c189797828e60a3329fd920ecfb99bcbccfd82d Revert "HDFS-16336. Addendum: De-flake TestRollingUpgrade#testRollback (#3686)"
Jira not present with version: 3.3.2. Commit: 544dffd179ed756bc163e4899e899a05b93d9234 HDFS-16171. De-flake testDecommissionStatus (#3280)
Jira not present with version: 3.3.2. Commit: c6914b1cb6e4cab8263cd3ae5cc00bc7a8de25de HDFS-16350. Datanode start time should be set after RPC server starts successfully (#3711)
Jira not present with version: 3.3.2. Commit: 328d3b84dfda9399021ccd1e3b7afd707e98912d HDFS-16336. Addendum: De-flake TestRollingUpgrade#testRollback (#3686)
Jira not present with version: 3.3.2. Commit: 3ae8d4ccb911c9ababd871824a2fafbb0272c016 HDFS-16336. De-flake TestRollingUpgrade#testRollback (#3686)
Jira not present with version: 3.3.2. Commit: 15d3448e25c797b7d0d401afdec54683055d4bb5 HADOOP-17975. Fallback to simple auth does not work for a secondary DistributedFileSystem instance. (#3579)
Jira not present with version: 3.3.2. Commit: dd50261219de71eaa0a1ad28529953e12dfb92e0 YARN-10991. Fix to ignore the grouping "[]" for resourcesStr in parseResourcesString method (#3592)
Jira not present with version: 3.3.2. Commit: ef462b21bf03b10361d2f9ea7b47d0f7360e517f HDFS-16332. Handle invalid token exception in sasl handshake (#3677)
WARN: Jira not found. Commit: b55edde7071419410ea5bea4ce6462b980e48f5b Also update hadoop.version to 3.3.2
...
...
...
Found first commit hash after which git history is redundant. commit: fa4915fdbbbec434ab41786cb17b82938a613f16
Exiting successfully
Jira/Git commit message diff completed: ##############################################
Any resolved Jira with fixVersion 3.3.2 but corresponding commit not present
Starting diff: ##############################################
HADOOP-18066 is marked resolved with fixVersion 3.3.2 but no corresponding commit found
HADOOP-17936 is marked resolved with fixVersion 3.3.2 but no corresponding commit found
Completed diff: ##############################################
```

View File

@ -1,117 +0,0 @@
#!/usr/bin/env python3
############################################################################
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
############################################################################
"""An application to assist Release Managers with ensuring that histories in
Git and fixVersions in JIRA are in agreement. See README.md for a detailed
explanation.
"""
import os
import re
import subprocess
from jira import JIRA
jira_project_name = input("JIRA Project Name (e.g HADOOP / OZONE etc): ") \
or "HADOOP"
# Define project_jira_keys with - appended. e.g for HADOOP Jiras,
# project_jira_keys should include HADOOP-, HDFS-, YARN-, MAPREDUCE-
project_jira_keys = [jira_project_name + '-']
if jira_project_name == 'HADOOP':
project_jira_keys.append('HDFS-')
project_jira_keys.append('YARN-')
project_jira_keys.append('MAPREDUCE-')
first_exclude_commit_hash = input("First commit hash to start excluding commits from history: ")
fix_version = input("Fix Version: ")
jira_server_url = input(
"Jira server url (default: https://issues.apache.org/jira): ") \
or "https://issues.apache.org/jira"
jira = JIRA(server=jira_server_url)
local_project_dir = input("Path of project's working dir with release branch checked-in: ")
os.chdir(local_project_dir)
GIT_STATUS_MSG = subprocess.check_output(['git', 'status']).decode("utf-8")
print('\nCheck git status output and verify expected branch\n')
print(GIT_STATUS_MSG)
print('\nJira/Git commit message diff starting: ##############################################')
issue_set_from_commit_msg = set()
for commit in subprocess.check_output(['git', 'log', '--pretty=oneline']).decode(
"utf-8").splitlines():
if commit.startswith(first_exclude_commit_hash):
print("Found first commit hash after which git history is redundant. commit: "
+ first_exclude_commit_hash)
print("Exiting successfully")
break
if re.search('revert', commit, re.IGNORECASE):
print("Commit seems reverted. \t\t\t Commit: " + commit)
continue
ACTUAL_PROJECT_JIRA = None
matches = re.findall('|'.join(project_jira_keys), commit)
if matches:
ACTUAL_PROJECT_JIRA = matches[0]
if not ACTUAL_PROJECT_JIRA:
print("WARN: Jira not found. \t\t\t Commit: " + commit)
continue
JIRA_NUM = ''
for c in commit.split(ACTUAL_PROJECT_JIRA)[1]:
if c.isdigit():
JIRA_NUM = JIRA_NUM + c
else:
break
issue = jira.issue(ACTUAL_PROJECT_JIRA + JIRA_NUM)
EXPECTED_FIX_VERSION = False
for version in issue.fields.fixVersions:
if version.name == fix_version:
EXPECTED_FIX_VERSION = True
break
if not EXPECTED_FIX_VERSION:
print("Jira not present with version: " + fix_version + ". \t Commit: " + commit)
continue
if issue.fields.status is None or issue.fields.status.name not in ('Resolved', 'Closed'):
print("Jira is not resolved yet? \t\t Commit: " + commit)
else:
# This means Jira corresponding to current commit message is resolved with expected
# fixVersion.
# This is no-op by default, if needed, convert to print statement.
issue_set_from_commit_msg.add(ACTUAL_PROJECT_JIRA + JIRA_NUM)
print('Jira/Git commit message diff completed: ##############################################')
print('\nAny resolved Jira with fixVersion ' + fix_version
+ ' but corresponding commit not present')
print('Starting diff: ##############################################')
all_issues_with_fix_version = jira.search_issues(
'project=' + jira_project_name + ' and status in (Resolved,Closed) and fixVersion='
+ fix_version)
for issue in all_issues_with_fix_version:
if issue.key not in issue_set_from_commit_msg:
print(issue.key + ' is marked resolved with fixVersion ' + fix_version
+ ' but no corresponding commit found')
print('Completed diff: ##############################################')

View File

@ -1,18 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
jira==3.1.1

View File

@ -1,204 +0,0 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# This script is useful to perform basic sanity tests for the given
# Hadoop RC. It checks for the Checksum, Signature, Rat check,
# Build from source and building tarball from the source.
set -e -o pipefail
usage() {
SCRIPT=$(basename "${BASH_SOURCE[@]}")
cat << __EOF
hadoop-vote. A script for a standard release vote which verifies the following items:
1. Checksum of sources and binaries
2. Signature of sources and binaries
3. Rat check
4. Built from source
5. Built tar from source
Usage: ${SCRIPT} -s | --source <url> [-k | --key <signature>] [-f | --keys-file-url <url>] [-o | --output-dir </path/to/use>] [-D property[=value]] [-P profiles]
${SCRIPT} -h | --help
-h | --help Show this screen.
-s | --source '<url>' A URL pointing to the release candidate sources and binaries
e.g. https://dist.apache.org/repos/dist/dev/hadoop/hadoop-<version>RC0/
-k | --key '<signature>' A signature of the public key, e.g. 9AD2AE49
-f | --keys-file-url '<url>' the URL of the key file, default is
https://downloads.apache.org/hadoop/common/KEYS
-o | --output-dir '</path>' directory which has the stdout and stderr of each verification target
-D | list of maven properties to set for the mvn invocations, e.g. <-D hbase.profile=2.0 -D skipTests> Defaults to unset
-P | list of maven profiles to set for the build from source, e.g. <-P native -P yarn-ui>
__EOF
}
MVN_PROPERTIES=()
MVN_PROFILES=()
while ((${#})); do
case "${1}" in
-h | --help )
usage; exit 0 ;;
-s | --source )
SOURCE_URL="${2}"; shift 2 ;;
-k | --key )
SIGNING_KEY="${2}"; shift 2 ;;
-f | --keys-file-url )
KEY_FILE_URL="${2}"; shift 2 ;;
-o | --output-dir )
OUTPUT_DIR="${2}"; shift 2 ;;
-D )
MVN_PROPERTIES+=("-D ${2}"); shift 2 ;;
-P )
MVN_PROFILES+=("-P ${2}"); shift 2 ;;
* )
usage >&2; exit 1 ;;
esac
done
# Source url must be provided
if [ -z "${SOURCE_URL}" ]; then
usage;
exit 1
fi
cat << __EOF
Although this tool helps verify the Hadoop RC build and unit tests,
operators may still consider verifying the following manually:
1. Verify the API compatibility report
2. Integration/performance/benchmark tests
3. Object store specific Integration tests against an endpoint
4. Verify overall unit test stability from Jenkins builds or locally
5. Other concerns if any
__EOF
[[ "${SOURCE_URL}" != */ ]] && SOURCE_URL="${SOURCE_URL}/"
HADOOP_RC_VERSION=$(tr "/" "\n" <<< "${SOURCE_URL}" | tail -n2)
HADOOP_VERSION=$(echo "${HADOOP_RC_VERSION}" | sed -e 's/-RC[0-9]//g' | sed -e 's/hadoop-//g')
JAVA_VERSION=$(java -version 2>&1 | cut -f3 -d' ' | head -n1 | sed -e 's/"//g')
OUTPUT_DIR="${OUTPUT_DIR:-$(pwd)}"
if [ ! -d "${OUTPUT_DIR}" ]; then
echo "Output directory ${OUTPUT_DIR} does not exist, please create it before running this script."
exit 1
fi
OUTPUT_PATH_PREFIX="${OUTPUT_DIR}"/"${HADOOP_RC_VERSION}"
# default value for verification targets, 0 = failed
SIGNATURE_PASSED=0
CHECKSUM_PASSED=0
RAT_CHECK_PASSED=0
BUILD_FROM_SOURCE_PASSED=0
BUILD_TAR_FROM_SOURCE_PASSED=0
function download_and_import_keys() {
KEY_FILE_URL="${KEY_FILE_URL:-https://downloads.apache.org/hadoop/common/KEYS}"
echo "Obtain and import the publisher key(s) from ${KEY_FILE_URL}"
# download the keys file into file KEYS
wget -O KEYS "${KEY_FILE_URL}"
gpg --import KEYS
if [ -n "${SIGNING_KEY}" ]; then
gpg --list-keys "${SIGNING_KEY}"
fi
}
function download_release_candidate () {
# get all files from release candidate repo
wget -r -np -N -nH --cut-dirs 4 "${SOURCE_URL}"
}
function verify_signatures() {
rm -f "${OUTPUT_PATH_PREFIX}"_verify_signatures
for file in *.tar.gz; do
gpg --verify "${file}".asc "${file}" 2>&1 | tee -a "${OUTPUT_PATH_PREFIX}"_verify_signatures && SIGNATURE_PASSED=1 || SIGNATURE_PASSED=0
done
}
function verify_checksums() {
rm -f "${OUTPUT_PATH_PREFIX}"_verify_checksums
SHA_EXT=$(find . -name "*.sha*" | awk -F '.' '{ print $NF }' | head -n 1)
for file in *.tar.gz; do
sha512sum --tag "${file}" > "${file}"."${SHA_EXT}".tmp
diff "${file}"."${SHA_EXT}".tmp "${file}"."${SHA_EXT}" 2>&1 | tee -a "${OUTPUT_PATH_PREFIX}"_verify_checksums && CHECKSUM_PASSED=1 || CHECKSUM_PASSED=0
rm -f "${file}"."${SHA_EXT}".tmp
done
}
function unzip_from_source() {
tar -zxvf hadoop-"${HADOOP_VERSION}"-src.tar.gz
cd hadoop-"${HADOOP_VERSION}"-src
}
function rat_test() {
rm -f "${OUTPUT_PATH_PREFIX}"_rat_test
mvn clean apache-rat:check "${MVN_PROPERTIES[@]}" 2>&1 | tee "${OUTPUT_PATH_PREFIX}"_rat_test && RAT_CHECK_PASSED=1
}
function build_from_source() {
rm -f "${OUTPUT_PATH_PREFIX}"_build_from_source
# No unit test run.
mvn clean install "${MVN_PROPERTIES[@]}" -DskipTests "${MVN_PROFILES[@]}" 2>&1 | tee "${OUTPUT_PATH_PREFIX}"_build_from_source && BUILD_FROM_SOURCE_PASSED=1
}
function build_tar_from_source() {
rm -f "${OUTPUT_PATH_PREFIX}"_build_tar_from_source
# No unit test run.
mvn clean package "${MVN_PROPERTIES[@]}" -Pdist -DskipTests -Dtar -Dmaven.javadoc.skip=true 2>&1 | tee "${OUTPUT_PATH_PREFIX}"_build_tar_from_source && BUILD_TAR_FROM_SOURCE_PASSED=1
}
function execute() {
${1} || print_when_exit
}
function print_when_exit() {
cat << __EOF
* Signature: $( ((SIGNATURE_PASSED)) && echo "ok" || echo "failed" )
* Checksum : $( ((CHECKSUM_PASSED)) && echo "ok" || echo "failed" )
* Rat check (${JAVA_VERSION}): $( ((RAT_CHECK_PASSED)) && echo "ok" || echo "failed" )
- mvn clean apache-rat:check ${MVN_PROPERTIES[@]}
* Built from source (${JAVA_VERSION}): $( ((BUILD_FROM_SOURCE_PASSED)) && echo "ok" || echo "failed" )
- mvn clean install ${MVN_PROPERTIES[@]} -DskipTests ${MVN_PROFILES[@]}
* Built tar from source (${JAVA_VERSION}): $( ((BUILD_TAR_FROM_SOURCE_PASSED)) && echo "ok" || echo "failed" )
- mvn clean package ${MVN_PROPERTIES[@]} -Pdist -DskipTests -Dtar -Dmaven.javadoc.skip=true
__EOF
if ((CHECKSUM_PASSED)) && ((SIGNATURE_PASSED)) && ((RAT_CHECK_PASSED)) && ((BUILD_FROM_SOURCE_PASSED)) && ((BUILD_TAR_FROM_SOURCE_PASSED)) ; then
exit 0
fi
exit 1
}
pushd "${OUTPUT_DIR}"
download_and_import_keys
download_release_candidate
pushd "${HADOOP_RC_VERSION}"
execute verify_signatures
execute verify_checksums
execute unzip_from_source
execute rat_test
execute build_from_source
execute build_tar_from_source
popd
popd
print_when_exit

View File

@ -1,259 +0,0 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script is called from the Jenkinsfile, which ultimately runs
# the CI through Yetus.
# We use Ubuntu Focal as the main platform for building Hadoop, thus
# it runs for all the PRs. Additionally, we also ensure that
# Hadoop builds across the supported platforms whenever there's a change
# in any of the C/C++ files, C/C++ build files or platform files.
## @description Check if the given extension is related to C/C++
## @param seeking
## @return 0 if yes
## @return 1 if no
is_c_cpp_extension() {
local c_cpp_extension=("c" "cc" "cpp" "h" "hpp")
local seeking=$1
for element in "${c_cpp_extension[@]}"; do
if [[ $element == "$seeking" ]]; then
return 0
fi
done
return 1
}
## @description Check if the given relative path corresponds to
## change in platform files
## @param in_path
## @return 0 if yes
## @return 1 if no
is_platform_change() {
declare in_path
in_path="${SOURCEDIR}"/"${1}"
for path in "${DOCKERFILE}" "${SOURCEDIR}"/dev-support/docker/pkg-resolver/*.json; do
if [ "${in_path}" == "${path}" ]; then
echo "Found C/C++ platform related changes in ${in_path}"
return 0
fi
done
return 1
}
## @description Checks if the given path corresponds to a change
## in C/C++ files or related to C/C++ build system
## @param path
## @return 0 if yes
## @return 1 if no
is_c_cpp_change() {
shopt -s nocasematch
local path=$1
declare filename
filename=$(basename -- "${path}")
extension=${filename##*.}
if is_c_cpp_extension "${extension}"; then
echo "Found C/C++ changes in ${path}"
return 0
fi
if [[ $filename =~ CMakeLists\.txt ]]; then
echo "Found C/C++ build related changes in ${path}"
return 0
fi
return 1
}
## @description Check if the CI needs to be run - CI will always run if
## IS_OPTIONAL is 0, or if there's any change in
## C/C++ files or C/C++ build or platform
## @return 0 if yes
## @return 1 if no
function check_ci_run() {
# Get the first commit of this PR relative to the trunk branch
firstCommitOfThisPr=$(git --git-dir "${SOURCEDIR}/.git" rev-parse origin/trunk)
# Loop over the paths of all the changed files and check if the criteria
# to run the CI has been satisfied
for path in $(git --git-dir "${SOURCEDIR}/.git" diff --name-only "${firstCommitOfThisPr}" HEAD); do
if is_c_cpp_change "${path}"; then
return 0
fi
if is_platform_change "${path}"; then
return 0
fi
done
# We must run the CI if it's not optional
if [ "$IS_OPTIONAL" -eq 0 ]; then
return 0
fi
return 1
}
## @description Run the CI using YETUS
function run_ci() {
TESTPATCHBIN="${WORKSPACE}/${YETUS}/precommit/src/main/shell/test-patch.sh"
if [[ "$IS_WINDOWS" && "$IS_WINDOWS" == 1 ]]; then
echo "Building in a Windows environment, skipping some Yetus related settings"
else
# run in docker mode and specifically point to our
# Dockerfile since we don't want to use the auto-pulled version.
YETUS_ARGS+=("--docker")
YETUS_ARGS+=("--dockerfile=${DOCKERFILE}")
YETUS_ARGS+=("--mvn-custom-repos")
YETUS_ARGS+=("--dockermemlimit=22g")
# test with Java 8 and 11
YETUS_ARGS+=("--java-home=/usr/lib/jvm/java-8-openjdk-amd64")
YETUS_ARGS+=("--multijdkdirs=/usr/lib/jvm/java-11-openjdk-amd64")
YETUS_ARGS+=("--multijdktests=compile")
fi
if [[ "$IS_NIGHTLY_BUILD" && "$IS_NIGHTLY_BUILD" == 1 ]]; then
YETUS_ARGS+=("--empty-patch")
YETUS_ARGS+=("--branch=${BRANCH_NAME}")
else
# this must be clean for every run
if [[ -d "${PATCHDIR}" ]]; then
rm -rf "${PATCHDIR:?}"
fi
mkdir -p "${PATCHDIR}"
# if given a JIRA issue, process it. If CHANGE_URL is set
# (e.g., Github Branch Source plugin), process it.
# otherwise exit, because we don't want Hadoop to do a
# full build. We wouldn't normally do this check for smaller
# projects. :)
if [[ -n "${JIRA_ISSUE_KEY}" ]]; then
YETUS_ARGS+=("${JIRA_ISSUE_KEY}")
elif [[ -z "${CHANGE_URL}" ]]; then
echo "Full build skipped" >"${PATCHDIR}/report.html"
exit 0
fi
# write Yetus report as GitHub comment (YETUS-1102)
YETUS_ARGS+=("--github-write-comment")
YETUS_ARGS+=("--github-use-emoji-vote")
fi
YETUS_ARGS+=("--patch-dir=${PATCHDIR}")
# where the source is located
YETUS_ARGS+=("--basedir=${SOURCEDIR}")
# our project defaults come from a personality file
YETUS_ARGS+=("--project=hadoop")
YETUS_ARGS+=("--personality=${SOURCEDIR}/dev-support/bin/hadoop.sh")
# lots of different output formats
YETUS_ARGS+=("--brief-report-file=${PATCHDIR}/brief.txt")
YETUS_ARGS+=("--console-report-file=${PATCHDIR}/console.txt")
YETUS_ARGS+=("--html-report-file=${PATCHDIR}/report.html")
# enable writing back to Github
YETUS_ARGS+=("--github-token=${GITHUB_TOKEN}")
# auto-kill any surefire stragglers during unit test runs
YETUS_ARGS+=("--reapermode=kill")
# set relatively high limits for ASF machines
# changing these to higher values may cause problems
# with other jobs on systemd-enabled machines
YETUS_ARGS+=("--proclimit=5500")
# -1 spotbugs issues that show up prior to the patch being applied
YETUS_ARGS+=("--spotbugs-strict-precheck")
# rsync these files back into the archive dir
YETUS_ARGS+=("--archive-list=checkstyle-errors.xml,spotbugsXml.xml")
# URL for user-side presentation in reports and such to our artifacts
# (needs to match the archive bits below)
YETUS_ARGS+=("--build-url-artifacts=artifact/out")
# plugins to enable
YETUS_ARGS+=("--plugins=all,-jira")
# don't let these tests cause -1s because we aren't really paying that
# much attention to them
YETUS_ARGS+=("--tests-filter=checkstyle")
# effectively treat dev-support as a custom maven module
YETUS_ARGS+=("--skip-dirs=dev-support")
# help keep the ASF boxes clean
YETUS_ARGS+=("--sentinel")
# custom javadoc goals
YETUS_ARGS+=("--mvn-javadoc-goals=process-sources,javadoc:javadoc-no-fork")
"${TESTPATCHBIN}" "${YETUS_ARGS[@]}"
}
## @description Cleans up the processes started by YETUS
function cleanup_ci_proc() {
# See YETUS-764
if [ -f "${PATCHDIR}/pidfile.txt" ]; then
echo "test-patch process appears to still be running: killing"
kill "$(cat "${PATCHDIR}/pidfile.txt")" || true
sleep 10
fi
if [ -f "${PATCHDIR}/cidfile.txt" ]; then
echo "test-patch container appears to still be running: killing"
docker kill "$(cat "${PATCHDIR}/cidfile.txt")" || true
fi
}
## @description Invokes github_status_recovery in YETUS's precommit
function github_status_recovery() {
YETUS_ARGS+=("--github-token=${GITHUB_TOKEN}")
YETUS_ARGS+=("--patch-dir=${PATCHDIR}")
TESTPATCHBIN="${WORKSPACE}/${YETUS}/precommit/src/main/shell/github-status-recovery.sh"
/usr/bin/env bash "${TESTPATCHBIN}" "${YETUS_ARGS[@]}" "${EXTRA_ARGS}" || true
}
if [ -z "$1" ]; then
echo "Must specify an argument for jenkins.sh"
echo "run_ci - Runs the CI based on platform image as defined by DOCKERFILE"
echo "cleanup_ci_proc - Cleans up the processes spawned for running the CI"
echo "github_status_recovery - Sends Github status (refer to YETUS precommit for more details)"
exit 1
fi
# Process arguments to jenkins.sh
if [ "$1" == "run_ci" ]; then
# Check if the CI needs to be run, if so, do so :)
if check_ci_run; then
run_ci
else
echo "No C/C++ file or C/C++ build or platform changes found, will not run CI for this platform"
fi
elif [ "$1" == "cleanup_ci_proc" ]; then
cleanup_ci_proc
elif [ "$1" == "github_status_recovery" ]; then
github_status_recovery
else
echo "Don't know how to process $1"
exit 1
fi

View File

@ -69,9 +69,7 @@
<!-- Checks for line length violations. -->
<!-- See https://checkstyle.sourceforge.io/config_sizes.html#LineLength -->
<module name="LineLength">
<property name="max" value="100"/>
</module>
<module name="LineLength"/>
<module name="TreeWalker">
@ -122,8 +120,9 @@
<!-- Checks for imports -->
<!-- See http://checkstyle.sf.net/config_import.html -->
<module name="IllegalImport">
<property name="regexp" value="true"/>
<property name="illegalPkgs" value="sun"/>
<property name="regexp" value="true"/>
<property name="illegalPkgs" value="sun, com\.google\.common"/>
<property name="illegalClasses" value="^org\.apache\.hadoop\.thirdparty\.com\.google\.common\.io\.BaseEncoding, ^org\.apache\.hadoop\.thirdparty\.com\.google\.common\.base\.(Optional|Function|Predicate|Supplier), ^org\.apache\.hadoop\.thirdparty\.com\.google\.common\.collect\.(ImmutableListMultimap)"/>
</module>
<module name="RedundantImport"/>
<module name="UnusedImports"/>
@ -159,9 +158,7 @@
<!-- Checks for blocks. You know, those {}'s -->
<!-- See http://checkstyle.sf.net/config_blocks.html -->
<module name="AvoidNestedBlocks">
<property name="allowInSwitchCase" value="true"/>
</module>
<module name="AvoidNestedBlocks"/>
<module name="EmptyBlock"/>
<module name="LeftCurly"/>
<module name="NeedBraces"/>

View File

@ -67,13 +67,6 @@
</exclusion>
</exclusions>
</dependency>
<!-- snappy-java is native library and cannot be relocated. So we explicitly exclude it
from shaded jar to prevent possible conflict. Make it as transitive dependency to
make the downstream pull it. -->
<dependency>
<groupId>org.xerial.snappy</groupId>
<artifactId>snappy-java</artifactId>
</dependency>
</dependencies>
<profiles>
<profile>
@ -94,10 +87,13 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<configuration>
<createSourcesJar>true</createSourcesJar>
<shadeSourcesContent>true</shadeSourcesContent>
</configuration>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-maven-plugins</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
<executions>
<execution>
<phase>package</phase>
@ -109,10 +105,6 @@
<includes>
<include>org.apache.hadoop:*</include>
</includes>
<excludes>
<!-- Leave snappy that includes native methods which cannot be relocated. -->
<exclude>org.xerial.snappy:*</exclude>
</excludes>
</artifactSet>
<filters>
<!-- We get these package level classes from various yarn api jars -->
@ -151,12 +143,6 @@
<exclude>org/xml/sax/**/*</exclude>
<exclude>org/bouncycastle/*</exclude>
<exclude>org/bouncycastle/**/*</exclude>
<!-- Exclude snappy-java -->
<exclude>org/xerial/snappy/*</exclude>
<exclude>org/xerial/snappy/**/*</exclude>
<!-- Exclude org.widlfly.openssl -->
<exclude>org/wildfly/openssl/*</exclude>
<exclude>org/wildfly/openssl/**/*</exclude>
</excludes>
</relocation>
<relocation>
@ -175,8 +161,6 @@
<exclude>com/sun/security/**/*</exclude>
<exclude>com/sun/jndi/**/*</exclude>
<exclude>com/sun/management/**/*</exclude>
<exclude>com/ibm/security/*</exclude>
<exclude>com/ibm/security/**/*</exclude>
</excludes>
</relocation>
<relocation>
@ -235,9 +219,6 @@
<!-- Exclude config keys for Hadoop that look like package names -->
<exclude>net/topology/*</exclude>
<exclude>net/topology/**/*</exclude>
<!-- Exclude lz4-java -->
<exclude>net/jpountz/*</exclude>
<exclude>net/jpountz/**/*</exclude>
</excludes>
</relocation>
<!-- okio declares a top level package instead of nested -->
@ -247,7 +228,8 @@
</relocation>
</relocations>
<transformers>
<transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
<!-- Needed until MSHADE-182 -->
<transformer implementation="org.apache.hadoop.maven.plugin.shade.resource.ServicesResourceTransformer"/>
<transformer implementation="org.apache.maven.plugins.shade.resource.ApacheLicenseResourceTransformer"/>
<transformer implementation="org.apache.maven.plugins.shade.resource.DontIncludeResourceTransformer">
<resource>NOTICE.txt</resource>

View File

@ -56,7 +56,7 @@
<dependency>
<groupId>org.codehaus.mojo</groupId>
<artifactId>extra-enforcer-rules</artifactId>
<version>1.5.1</version>
<version>1.0-beta-3</version>
</dependency>
</dependencies>
<executions>
@ -90,8 +90,6 @@
<exclude>com.google.code.findbugs:jsr305</exclude>
<!-- Leave bouncycastle unshaded because it's signed with a special Oracle certificate so it can be a custom JCE security provider -->
<exclude>org.bouncycastle:*</exclude>
<!-- Leave snappy that includes native methods which cannot be relocated. -->
<exclude>org.xerial.snappy:*</exclude>
</excludes>
</banTransitiveDependencies>
<banDuplicateClasses>

View File

@ -67,8 +67,6 @@ allowed_expr+="|^krb5_udp-template.conf$"
# Jetty uses this style sheet for directory listings. TODO ensure our
# internal use of jetty disallows directory listings and remove this.
allowed_expr+="|^jetty-dir.css$"
# Snappy java is native library. We cannot relocate it to under org/apache/hadoop.
allowed_expr+="|^org/xerial/"
allowed_expr+=")"
declare -i bad_artifacts=0

View File

@ -60,7 +60,7 @@
<dependency>
<groupId>org.codehaus.mojo</groupId>
<artifactId>extra-enforcer-rules</artifactId>
<version>1.5.1</version>
<version>1.0-beta-3</version>
</dependency>
</dependencies>
<executions>
@ -98,8 +98,6 @@
<exclude>com.google.code.findbugs:jsr305</exclude>
<!-- Leave bouncycastle unshaded because it's signed with a special Oracle certificate so it can be a custom JCE security provider -->
<exclude>org.bouncycastle:*</exclude>
<!-- Leave snappy that includes native methods which cannot be relocated. -->
<exclude>org.xerial.snappy:*</exclude>
</excludes>
</banTransitiveDependencies>
<banDuplicateClasses>

View File

@ -52,11 +52,6 @@
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.lz4</groupId>
<artifactId>lz4-java</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<profiles>
<profile>
@ -184,12 +179,6 @@
<artifactId>hadoop-hdfs</artifactId>
<scope>test</scope>
<type>test-jar</type>
<exclusions>
<exclusion>
<groupId>org.ow2.asm</groupId>
<artifactId>asm-commons</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
@ -197,12 +186,6 @@
<scope>test</scope>
<type>test-jar</type>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<scope>test</scope>
<type>test-jar</type>
</dependency>
</dependencies>
</profile>
</profiles>

View File

@ -1,144 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.hadoop.example;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import java.io.*;
import java.util.Arrays;
import java.util.Random;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.RandomDatum;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionInputStream;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.zlib.ZlibFactory;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Ensure that we can perform codec operations given the API and runtime jars
* by performing some simple smoke tests.
*/
public class ITUseHadoopCodecs {
private static final Logger LOG = LoggerFactory.getLogger(ITUseHadoopCodecs.class);
private Configuration haddopConf = new Configuration();
private int dataCount = 100;
private int dataSeed = new Random().nextInt();
@Test
public void testGzipCodec() throws IOException {
ZlibFactory.setNativeZlibLoaded(false);
assertFalse(ZlibFactory.isNativeZlibLoaded(haddopConf));
codecTest(haddopConf, dataSeed, 0, "org.apache.hadoop.io.compress.GzipCodec");
codecTest(haddopConf, dataSeed, dataCount, "org.apache.hadoop.io.compress.GzipCodec");
}
@Test
public void testSnappyCodec() throws IOException {
codecTest(haddopConf, dataSeed, 0, "org.apache.hadoop.io.compress.SnappyCodec");
codecTest(haddopConf, dataSeed, dataCount, "org.apache.hadoop.io.compress.SnappyCodec");
}
@Test
public void testLz4Codec() {
Arrays.asList(false, true).forEach(config -> {
haddopConf.setBoolean(
CommonConfigurationKeys.IO_COMPRESSION_CODEC_LZ4_USELZ4HC_KEY,
config);
try {
codecTest(haddopConf, dataSeed, 0, "org.apache.hadoop.io.compress.Lz4Codec");
codecTest(haddopConf, dataSeed, dataCount, "org.apache.hadoop.io.compress.Lz4Codec");
} catch (IOException e) {
throw new RuntimeException("failed when running codecTest", e);
}
});
}
private void codecTest(Configuration conf, int seed, int count, String codecClass)
throws IOException {
// Create the codec
CompressionCodec codec = null;
try {
codec = (CompressionCodec)
ReflectionUtils.newInstance(conf.getClassByName(codecClass), conf);
} catch (ClassNotFoundException cnfe) {
throw new IOException("Illegal codec!");
}
LOG.info("Created a Codec object of type: " + codecClass);
// Generate data
DataOutputBuffer data = new DataOutputBuffer();
RandomDatum.Generator generator = new RandomDatum.Generator(seed);
for(int i = 0; i < count; ++i) {
generator.next();
RandomDatum key = generator.getKey();
RandomDatum value = generator.getValue();
key.write(data);
value.write(data);
}
LOG.info("Generated " + count + " records");
// Compress data
DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
try (CompressionOutputStream deflateFilter =
codec.createOutputStream(compressedDataBuffer);
DataOutputStream deflateOut =
new DataOutputStream(new BufferedOutputStream(deflateFilter))) {
deflateOut.write(data.getData(), 0, data.getLength());
deflateOut.flush();
deflateFilter.finish();
}
// De-compress data
DataInputBuffer deCompressedDataBuffer = new DataInputBuffer();
deCompressedDataBuffer.reset(compressedDataBuffer.getData(), 0,
compressedDataBuffer.getLength());
DataInputBuffer originalData = new DataInputBuffer();
originalData.reset(data.getData(), 0, data.getLength());
try (CompressionInputStream inflateFilter =
codec.createInputStream(deCompressedDataBuffer);
DataInputStream originalIn =
new DataInputStream(new BufferedInputStream(originalData))) {
// Check
int expected;
do {
expected = originalIn.read();
assertEquals("Inflated stream read by byte does not match",
expected, inflateFilter.read());
} while (expected != -1);
}
LOG.info("SUCCESS! Completed checking " + count + " records");
}
}

View File

@ -40,12 +40,6 @@
<artifactId>hadoop-client-api</artifactId>
<scope>runtime</scope>
</dependency>
<!-- This is the api's compile dependency, but we don't want it to be compile dependency here too. -->
<dependency>
<groupId>org.xerial.snappy</groupId>
<artifactId>snappy-java</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client-runtime</artifactId>
@ -332,10 +326,6 @@
<groupId>org.apache.hadoop.thirdparty</groupId>
<artifactId>hadoop-shaded-guava</artifactId>
</exclusion>
<exclusion>
<groupId>org.ow2.asm</groupId>
<artifactId>asm-commons</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Add optional runtime dependency on the in-development timeline server module
@ -407,8 +397,8 @@
<!-- Skip commons-logging:commons-logging-api because it looks like nothing actually included it -->
<!-- Skip jetty-util because it's in client -->
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
<optional>true</optional>
<exclusions>
<exclusion>
@ -423,26 +413,30 @@
<optional>true</optional>
</dependency>
<dependency>
<groupId>com.github.pjfanning</groupId>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-json</artifactId>
<optional>true</optional>
<exclusions>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
</exclusion>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</exclusion>
<exclusion>
<groupId>com.fasterxml.jackson.jaxrs</groupId>
<artifactId>jackson-jaxrs-json-provider</artifactId>
</exclusion>
<exclusion>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-jaxrs</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-xc</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
@ -451,23 +445,9 @@
<optional>true</optional>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-servlet</artifactId>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-servlet</artifactId>
<optional>true</optional>
<exclusions>
<exclusion>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
</exclusion>
<exclusion>
<groupId>javax.enterprise</groupId>
<artifactId>cdi-api</artifactId>
</exclusion>
<exclusion>
<groupId>ch.qos.cal10n</groupId>
<artifactId>cal10n-api</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- skip org.apache.avro:avro-ipc because it doesn't look like hadoop-common actually uses it -->
<dependency>
@ -671,6 +651,13 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-maven-plugins</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
<executions>
<execution>
<phase>package</phase>
@ -695,8 +682,6 @@
<!-- We need a filter that matches just those things that are included in the above artifacts -->
<!-- Leave bouncycastle unshaded because it's signed with a special Oracle certificate so it can be a custom JCE security provider -->
<exclude>org.bouncycastle:*</exclude>
<!-- Leave snappy that includes native methods which cannot be relocated. -->
<exclude>org.xerial.snappy:*</exclude>
</excludes>
</artifactSet>
<filters>
@ -743,18 +728,6 @@
<exclude>testdata/*</exclude>
</excludes>
</filter>
<filter>
<artifact>com.fasterxml.jackson.*:*</artifact>
<excludes>
<exclude>META-INF/versions/11/module-info.class</exclude>
</excludes>
</filter>
<filter>
<artifact>com.google.code.gson:gson</artifact>
<excludes>
<exclude>META-INF/versions/9/module-info.class</exclude>
</excludes>
</filter>
<!-- Mockito tries to include its own unrelocated copy of hamcrest. :( -->
<filter>
@ -910,9 +883,6 @@
<exclude>org/xml/sax/**/*</exclude>
<exclude>org/bouncycastle/*</exclude>
<exclude>org/bouncycastle/**/*</exclude>
<!-- Exclude snappy-java -->
<exclude>org/xerial/snappy/*</exclude>
<exclude>org/xerial/snappy/**/*</exclude>
</excludes>
</relocation>
<relocation>
@ -938,8 +908,6 @@
<exclude>com/sun/security/**/*</exclude>
<exclude>com/sun/jndi/**/*</exclude>
<exclude>com/sun/management/**/*</exclude>
<exclude>com/ibm/security/*</exclude>
<exclude>com/ibm/security/**/*</exclude>
</excludes>
</relocation>
<relocation>
@ -1033,9 +1001,6 @@
<!-- Exclude config keys for Hadoop that look like package names -->
<exclude>net/topology/*</exclude>
<exclude>net/topology/**/*</exclude>
<!-- Exclude lz4-java -->
<exclude>net/jpountz/*</exclude>
<exclude>net/jpountz/**/*</exclude>
</excludes>
</relocation>
<!-- okio declares a top level package instead of nested -->
@ -1045,7 +1010,8 @@
</relocation>
</relocations>
<transformers>
<transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
<!-- Needed until MSHADE-182 -->
<transformer implementation="org.apache.hadoop.maven.plugin.shade.resource.ServicesResourceTransformer"/>
<transformer implementation="org.apache.maven.plugins.shade.resource.ApacheLicenseResourceTransformer"/>
<transformer implementation="org.apache.maven.plugins.shade.resource.DontIncludeResourceTransformer">
<resources>

View File

@ -60,12 +60,6 @@
<artifactId>hadoop-client-api</artifactId>
<scope>runtime</scope>
</dependency>
<!-- This is the api's compile dependency, but we don't want it to be compile dependency here too. -->
<dependency>
<groupId>org.xerial.snappy</groupId>
<artifactId>snappy-java</artifactId>
<scope>runtime</scope>
</dependency>
<!-- This comes from our parent pom. If we don't expressly change it here to get included,
downstream will get warnings at compile time. -->
<dependency>
@ -128,6 +122,13 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-maven-plugins</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
<executions>
<execution>
<phase>package</phase>
@ -148,17 +149,12 @@
<!-- Leave javax APIs that are stable -->
<!-- the jdk ships part of the javax.annotation namespace, so if we want to relocate this we'll have to care it out by class :( -->
<exclude>com.google.code.findbugs:jsr305</exclude>
<exclude>io.netty:*</exclude>
<exclude>io.dropwizard.metrics:metrics-core</exclude>
<exclude>org.eclipse.jetty:jetty-servlet</exclude>
<exclude>org.eclipse.jetty:jetty-security</exclude>
<exclude>org.ow2.asm:*</exclude>
<!-- Leave bouncycastle unshaded because it's signed with a special Oracle certificate so it can be a custom JCE security provider -->
<exclude>org.bouncycastle:*</exclude>
<!-- Leave snappy that includes native methods which cannot be relocated. -->
<exclude>org.xerial.snappy:*</exclude>
<!-- leave out kotlin classes -->
<exclude>org.jetbrains.kotlin:*</exclude>
</excludes>
</artifactSet>
<filters>
@ -238,19 +234,6 @@
<exclude>google/protobuf/**/*.proto</exclude>
</excludes>
</filter>
<filter>
<artifact>com.fasterxml.jackson.*:*</artifact>
<excludes>
<exclude>META-INF/versions/11/module-info.class</exclude>
</excludes>
</filter>
<filter>
<artifact>com.google.code.gson:gson</artifact>
<excludes>
<exclude>META-INF/versions/9/module-info.class</exclude>
</excludes>
</filter>
</filters>
<relocations>
<relocation>
@ -276,9 +259,6 @@
<exclude>org/xml/sax/**/*</exclude>
<exclude>org/bouncycastle/*</exclude>
<exclude>org/bouncycastle/**/*</exclude>
<!-- Exclude snappy-java -->
<exclude>org/xerial/snappy/*</exclude>
<exclude>org/xerial/snappy/**/*</exclude>
</excludes>
</relocation>
<relocation>
@ -297,8 +277,6 @@
<exclude>com/sun/security/**/*</exclude>
<exclude>com/sun/jndi/**/*</exclude>
<exclude>com/sun/management/**/*</exclude>
<exclude>com/ibm/security/*</exclude>
<exclude>com/ibm/security/**/*</exclude>
</excludes>
</relocation>
<relocation>
@ -371,9 +349,6 @@
<!-- Exclude config keys for Hadoop that look like package names -->
<exclude>net/topology/*</exclude>
<exclude>net/topology/**/*</exclude>
<!-- Exclude lz4-java -->
<exclude>net/jpountz/*</exclude>
<exclude>net/jpountz/**/*</exclude>
</excludes>
</relocation>
<!-- okio declares a top level package instead of nested -->
@ -393,7 +368,8 @@
-->
</relocations>
<transformers>
<transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
<!-- Needed until MSHADE-182 -->
<transformer implementation="org.apache.hadoop.maven.plugin.shade.resource.ServicesResourceTransformer"/>
<transformer implementation="org.apache.maven.plugins.shade.resource.ApacheLicenseResourceTransformer"/>
<transformer implementation="org.apache.maven.plugins.shade.resource.DontIncludeResourceTransformer">
<resources>

View File

@ -66,13 +66,9 @@
<artifactId>jersey-core</artifactId>
</exclusion>
<exclusion>
<groupId>com.github.pjfanning</groupId>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-json</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jettison</groupId>
<artifactId>jettison</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-server</artifactId>
@ -118,18 +114,6 @@
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-server</artifactId>
</exclusion>
<exclusion>
<groupId>org.jetbrains.kotlin</groupId>
<artifactId>kotlin-stdlib</artifactId>
</exclusion>
<exclusion>
<groupId>org.jetbrains.kotlin</groupId>
<artifactId>kotlin-stdlib-common</artifactId>
</exclusion>
<exclusion>
<groupId>com.squareup.okhttp3</groupId>
<artifactId>okhttp</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
@ -183,13 +167,9 @@
<artifactId>jersey-core</artifactId>
</exclusion>
<exclusion>
<groupId>com.github.pjfanning</groupId>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-json</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jettison</groupId>
<artifactId>jettison</artifactId>
</exclusion>
<exclusion>
<groupId>io.netty</groupId>
<artifactId>netty</artifactId>
@ -238,13 +218,9 @@
<artifactId>jersey-server</artifactId>
</exclusion>
<exclusion>
<groupId>com.github.pjfanning</groupId>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-json</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jettison</groupId>
<artifactId>jettison</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-servlet</artifactId>
@ -299,13 +275,9 @@
<artifactId>guice-servlet</artifactId>
</exclusion>
<exclusion>
<groupId>com.github.pjfanning</groupId>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-json</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jettison</groupId>
<artifactId>jettison</artifactId>
</exclusion>
<exclusion>
<groupId>io.netty</groupId>
<artifactId>netty</artifactId>

View File

@ -101,10 +101,6 @@
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
</exclusion>
<exclusion>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
@ -127,6 +123,11 @@
<artifactId>hadoop-azure-datalake</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-openstack</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-cos</artifactId>

View File

@ -109,7 +109,7 @@
<dependency>
<groupId>com.qcloud</groupId>
<artifactId>cos_api-bundle</artifactId>
<version>5.6.69</version>
<version>5.6.19</version>
<scope>compile</scope>
</dependency>

View File

@ -24,7 +24,7 @@ import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
import com.qcloud.cos.auth.AnonymousCOSCredentials;
import com.qcloud.cos.auth.COSCredentials;
import com.qcloud.cos.auth.COSCredentialsProvider;

View File

@ -60,9 +60,10 @@
<build>
<plugins>
<plugin>
<groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs-maven-plugin</artifactId>
<groupId>org.codehaus.mojo</groupId>
<artifactId>findbugs-maven-plugin</artifactId>
<configuration>
<findbugsXmlOutput>true</findbugsXmlOutput>
<xmlOutput>true</xmlOutput>
<excludeFilterFile>${basedir}/dev-support/findbugs-exclude.xml
</excludeFilterFile>
@ -100,14 +101,10 @@
<artifactId>hadoop-common</artifactId>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>jdk.tools</groupId>
<artifactId>jdk.tools</artifactId>
</exclusion>
<exclusion>
<groupId>org.javassist</groupId>
<artifactId>javassist</artifactId>
</exclusion>
<exclusion>
<groupId>jdk.tools</groupId>
<artifactId>jdk.tools</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
@ -165,14 +162,6 @@
<artifactId>okio</artifactId>
<groupId>com.squareup.okio</groupId>
</exclusion>
<exclusion>
<artifactId>log4j-core</artifactId>
<groupId>org.apache.logging.log4j</groupId>
</exclusion>
<exclusion>
<artifactId>log4j-api</artifactId>
<groupId>org.apache.logging.log4j</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
@ -188,4 +177,4 @@
<scope>test</scope>
</dependency>
</dependencies>
</project>
</project>

View File

@ -18,8 +18,8 @@
package org.apache.hadoop.fs.obs;
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.Futures;
import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ListenableFuture;
import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ListeningExecutorService;

View File

@ -18,7 +18,8 @@
package org.apache.hadoop.fs.obs;
import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
import com.obs.services.ObsClient;
import com.obs.services.exception.ObsException;
import com.obs.services.model.AbortMultipartUploadRequest;
@ -52,7 +53,6 @@ import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathIOException;
import org.apache.hadoop.security.ProviderUtils;
import org.apache.hadoop.util.Lists;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@ -18,8 +18,8 @@
package org.apache.hadoop.fs.obs;
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSExceptionMessages;

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.fs.obs;
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
import com.obs.services.ObsClient;
import com.obs.services.exception.ObsException;
import com.obs.services.model.AccessControlList;

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.fs.obs;
import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
import com.obs.services.exception.ObsException;
import java.io.IOException;

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.fs.obs;
import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
import com.obs.services.ObsClient;
import com.obs.services.exception.ObsException;
import com.obs.services.model.GetObjectRequest;

View File

@ -55,7 +55,6 @@ import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedList;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.ExecutionException;
@ -873,12 +872,8 @@ final class OBSObjectBucketUtils {
directories.add(p.toString());
}
while (p.compareTo(sourcePath) > 0) {
Optional<Path> parent = p.getOptionalParentPath();
if (!parent.isPresent()) {
break;
}
p = parent.get();
if (p.compareTo(sourcePath) == 0) {
p = p.getParent();
if (p.isRoot() || p.compareTo(sourcePath) == 0) {
break;
}
directories.add(p.toString());

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.fs.obs;
import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
import com.obs.services.ObsClient;
import com.obs.services.exception.ObsException;
import com.obs.services.model.AbortMultipartUploadRequest;

View File

@ -1,43 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.classification;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Annotates a program element that exists, or is more widely visible than
* otherwise necessary, specifically for use in test code.
* More precisely <i>test code within the hadoop-* modules</i>.
* Moreover, this gives the implicit scope and stability of:
* <pre>
* {@link InterfaceAudience.Private}
* {@link InterfaceStability.Unstable}
* </pre>
* If external modules need to access/override these methods, then
* they MUST be re-scoped as public/limited private.
*/
@Retention(RetentionPolicy.CLASS)
@Target({ ElementType.TYPE, ElementType.METHOD, ElementType.FIELD, ElementType.CONSTRUCTOR })
@Documented
public @interface VisibleForTesting {
}
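
For readers skimming this hunk, a minimal sketch of how the annotation defined above is typically applied; the class and method below are hypothetical and not part of this change:

```java
import org.apache.hadoop.classification.VisibleForTesting;

public class BlockCache {                       // hypothetical example class
  private final java.util.Map<String, byte[]> blocks = new java.util.HashMap<>();

  // Widened from private to package-private purely so unit tests in the same
  // module can inspect internal state; production callers must not rely on it.
  @VisibleForTesting
  int cachedBlockCount() {
    return blocks.size();
  }
}
```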

View File

@ -116,7 +116,6 @@ public class RequestLoggerFilter implements Filter {
public void addCookie(Cookie cookie) {
super.addCookie(cookie);
List<String> cookies = getHeaderValues("Set-Cookie", false);
cookies.addAll(getHeaderValues("set-cookie", false));
cookies.add(cookie.getName() + "=" + cookie.getValue());
}

View File

@ -110,21 +110,24 @@
<groupId>org.bouncycastle</groupId>
<artifactId>bcprov-jdk15on</artifactId>
</exclusion>
<!-- HACK. Transitive dependency for nimbus-jose-jwt. Needed for
packaging. Please re-check this version when updating
nimbus-jose-jwt. Please read HADOOP-14903 for more details.
-->
<exclusion>
<groupId>net.minidev</groupId>
<artifactId>json-smart</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>net.minidev</groupId>
<artifactId>json-smart</artifactId>
</dependency>
<dependency>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
</dependency>
<dependency>
<groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-core</artifactId>
</dependency>
<dependency>
<groupId>org.xerial.snappy</groupId>
<artifactId>snappy-java</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-framework</artifactId>

View File

@ -92,9 +92,6 @@ public class AuthenticatedURL {
@Override
public void put(URI uri, Map<String, List<String>> responseHeaders) {
List<String> headers = responseHeaders.get("Set-Cookie");
if (headers == null) {
headers = responseHeaders.get("set-cookie");
}
if (headers != null) {
for (String header : headers) {
List<HttpCookie> cookies;

View File

@ -13,7 +13,7 @@
*/
package org.apache.hadoop.security.authentication.client;
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
import java.lang.reflect.Constructor;
import org.apache.commons.codec.binary.Base64;
import org.apache.hadoop.security.authentication.server.HttpConstants;
@ -280,9 +280,6 @@ public class KerberosAuthenticator implements Authenticator {
boolean negotiate = false;
if (conn.getResponseCode() == HttpURLConnection.HTTP_UNAUTHORIZED) {
String authHeader = conn.getHeaderField(WWW_AUTHENTICATE);
if (authHeader == null) {
authHeader = conn.getHeaderField(WWW_AUTHENTICATE.toLowerCase());
}
negotiate = authHeader != null && authHeader.trim().startsWith(NEGOTIATE);
}
return negotiate;
@ -391,9 +388,6 @@ public class KerberosAuthenticator implements Authenticator {
int status = conn.getResponseCode();
if (status == HttpURLConnection.HTTP_OK || status == HttpURLConnection.HTTP_UNAUTHORIZED) {
String authHeader = conn.getHeaderField(WWW_AUTHENTICATE);
if (authHeader == null) {
authHeader = conn.getHeaderField(WWW_AUTHENTICATE.toLowerCase());
}
if (authHeader == null || !authHeader.trim().startsWith(NEGOTIATE)) {
throw new AuthenticationException("Invalid SPNEGO sequence, '" + WWW_AUTHENTICATE +
"' header incorrect: " + authHeader);

View File

@ -237,8 +237,8 @@ public class AuthenticationFilter implements Filter {
provider.init(config, ctx, validity);
} catch (Exception e) {
if (!disallowFallbackToRandomSecretProvider) {
LOG.warn("Unable to initialize FileSignerSecretProvider, " +
"falling back to use random secrets. Reason: " + e.getMessage());
LOG.info("Unable to initialize FileSignerSecretProvider, " +
"falling back to use random secrets.");
provider = new RandomSignerSecretProvider();
provider.init(config, ctx, validity);
} else {
@ -616,9 +616,7 @@ public class AuthenticationFilter implements Filter {
// present.. reset to 403 if not found..
if ((errCode == HttpServletResponse.SC_UNAUTHORIZED)
&& (!httpResponse.containsHeader(
KerberosAuthenticator.WWW_AUTHENTICATE)
&& !httpResponse.containsHeader(
KerberosAuthenticator.WWW_AUTHENTICATE.toLowerCase()))) {
KerberosAuthenticator.WWW_AUTHENTICATE))) {
errCode = HttpServletResponse.SC_FORBIDDEN;
}
// After Jetty 9.4.21, sendError() no longer allows a custom message.

View File

@ -20,6 +20,8 @@ import static org.apache.hadoop.security.authentication.server.HttpConstants.DIG
import java.util.Locale;
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
/**
* This is a utility class designed to provide functionality related to
* {@link AuthenticationHandler}.
@ -42,10 +44,8 @@ public final class AuthenticationHandlerUtil {
* @return an instance of AuthenticationHandler implementation.
*/
public static String getAuthenticationHandlerClassName(String authHandler) {
if (authHandler == null) {
throw new NullPointerException();
}
String handlerName = authHandler.toLowerCase(Locale.ENGLISH);
String handlerName =
Preconditions.checkNotNull(authHandler).toLowerCase(Locale.ENGLISH);
String authHandlerClassName = null;
@ -98,14 +98,8 @@ public final class AuthenticationHandlerUtil {
* specified authentication scheme false Otherwise.
*/
public static boolean matchAuthScheme(String scheme, String auth) {
if (scheme == null) {
throw new NullPointerException();
}
scheme = scheme.trim();
if (auth == null) {
throw new NullPointerException();
}
auth = auth.trim();
scheme = Preconditions.checkNotNull(scheme).trim();
auth = Preconditions.checkNotNull(auth).trim();
return auth.regionMatches(true, 0, scheme, 0, scheme.length());
}
}
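
The hunks in this file trade hand-rolled null checks against the shaded Guava Preconditions helpers; a minimal, standalone sketch of the equivalence, using a hypothetical method and class name:

```java
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;

public final class NullCheckDemo {              // hypothetical demo class
  // Explicit style: bare NullPointerException, no message, two extra lines.
  static String normalizeExplicit(String scheme) {
    if (scheme == null) {
      throw new NullPointerException();
    }
    return scheme.trim();
  }

  // Preconditions style: checkNotNull throws NullPointerException (here with
  // a message) and returns its argument, so the call can be chained.
  static String normalizeChecked(String scheme) {
    return Preconditions.checkNotNull(scheme, "scheme must not be null").trim();
  }
}
```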

View File

@ -28,7 +28,7 @@ import java.text.ParseException;
import java.security.interfaces.RSAPublicKey;
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.security.authentication.client.AuthenticationException;
import org.apache.hadoop.security.authentication.util.CertificateUtil;
import org.slf4j.Logger;

View File

@ -13,7 +13,7 @@
*/
package org.apache.hadoop.security.authentication.server;
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.security.authentication.client.AuthenticationException;
import org.apache.hadoop.security.authentication.client.KerberosAuthenticator;
import org.apache.commons.codec.binary.Base64;

View File

@ -38,7 +38,8 @@ import org.apache.hadoop.security.authentication.client.AuthenticationException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
/**
* The {@link LdapAuthenticationHandler} implements the BASIC authentication
@ -143,20 +144,15 @@ public class LdapAuthenticationHandler implements AuthenticationHandler {
this.enableStartTls =
Boolean.valueOf(config.getProperty(ENABLE_START_TLS, "false"));
if (this.providerUrl == null) {
throw new NullPointerException("The LDAP URI can not be null");
}
if (!((this.baseDN == null)
^ (this.ldapDomain == null))) {
throw new IllegalArgumentException(
"Either LDAP base DN or LDAP domain value needs to be specified");
}
Preconditions
.checkNotNull(this.providerUrl, "The LDAP URI can not be null");
Preconditions.checkArgument((this.baseDN == null)
^ (this.ldapDomain == null),
"Either LDAP base DN or LDAP domain value needs to be specified");
if (this.enableStartTls) {
String tmp = this.providerUrl.toLowerCase();
if (tmp.startsWith("ldaps")) {
throw new IllegalArgumentException(
"Can not use ldaps and StartTLS option at the same time");
}
Preconditions.checkArgument(!tmp.startsWith("ldaps"),
"Can not use ldaps and StartTLS option at the same time");
}
}

View File

@ -30,6 +30,7 @@ import org.apache.hadoop.security.authentication.client.AuthenticationException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.base.Splitter;
/**
@ -113,10 +114,10 @@ public class MultiSchemeAuthenticationHandler implements
}
this.types.clear();
if (config.getProperty(SCHEMES_PROPERTY) == null) {
throw new NullPointerException(SCHEMES_PROPERTY + " system property is not specified.");
}
String schemesProperty = config.getProperty(SCHEMES_PROPERTY);
String schemesProperty =
Preconditions.checkNotNull(config.getProperty(SCHEMES_PROPERTY),
"%s system property is not specified.", SCHEMES_PROPERTY);
for (String scheme : STR_SPLITTER.split(schemesProperty)) {
scheme = AuthenticationHandlerUtil.checkAuthScheme(scheme);
if (schemeToAuthHandlerMapping.containsKey(scheme)) {
@ -127,10 +128,8 @@ public class MultiSchemeAuthenticationHandler implements
String authHandlerPropName =
String.format(AUTH_HANDLER_PROPERTY, scheme).toLowerCase();
String authHandlerName = config.getProperty(authHandlerPropName);
if (authHandlerName == null) {
throw new NullPointerException(
"No auth handler configured for scheme " + scheme);
}
Preconditions.checkNotNull(authHandlerName,
"No auth handler configured for scheme %s.", scheme);
String authHandlerClassName =
AuthenticationHandlerUtil
@ -146,9 +145,7 @@ public class MultiSchemeAuthenticationHandler implements
protected AuthenticationHandler initializeAuthHandler(
String authHandlerClassName, Properties config) throws ServletException {
try {
if (authHandlerClassName == null) {
throw new NullPointerException();
}
Preconditions.checkNotNull(authHandlerClassName);
logger.debug("Initializing Authentication handler of type "
+ authHandlerClassName);
Class<?> klass =

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.security.authentication.util;
import java.io.ByteArrayInputStream;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.security.PublicKey;
import java.security.cert.CertificateException;

View File

@ -13,15 +13,15 @@
*/
package org.apache.hadoop.security.authentication.util;
import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.security.authentication.util.SignerSecretProvider;
import javax.servlet.ServletContext;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.charset.Charset;
import java.util.Properties;
/**
@ -43,24 +43,29 @@ public class FileSignerSecretProvider extends SignerSecretProvider {
String signatureSecretFile = config.getProperty(
AuthenticationFilter.SIGNATURE_SECRET_FILE, null);
Reader reader = null;
if (signatureSecretFile != null) {
try (Reader reader = new InputStreamReader(Files.newInputStream(
Paths.get(signatureSecretFile)), StandardCharsets.UTF_8)) {
try {
StringBuilder sb = new StringBuilder();
reader = new InputStreamReader(
new FileInputStream(signatureSecretFile), Charsets.UTF_8);
int c = reader.read();
while (c > -1) {
sb.append((char) c);
c = reader.read();
}
secret = sb.toString().getBytes(StandardCharsets.UTF_8);
if (secret.length == 0) {
throw new RuntimeException("No secret in signature secret file: "
+ signatureSecretFile);
}
secret = sb.toString().getBytes(Charset.forName("UTF-8"));
} catch (IOException ex) {
throw new RuntimeException("Could not read signature secret file: " +
signatureSecretFile);
} finally {
if (reader != null) {
try {
reader.close();
} catch (IOException e) {
// nothing to do
}
}
}
}

View File

@ -1,77 +0,0 @@
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.security.authentication.util;
import java.util.HashMap;
import java.util.Map;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.Configuration;
/**
* Creates a programmatic version of a jaas.conf file. This can be used
* instead of writing a jaas.conf file and setting the system property,
* "java.security.auth.login.config", to point to that file. It is meant to be
* used for connecting to ZooKeeper.
*/
public class JaasConfiguration extends Configuration {
private final javax.security.auth.login.Configuration baseConfig =
javax.security.auth.login.Configuration.getConfiguration();
private final AppConfigurationEntry[] entry;
private final String entryName;
/**
* Add an entry to the jaas configuration with the passed in name,
* principal, and keytab. The other necessary options will be set for you.
*
* @param entryName The name of the entry (e.g. "Client")
* @param principal The principal of the user
* @param keytab The location of the keytab
*/
public JaasConfiguration(String entryName, String principal, String keytab) {
this.entryName = entryName;
Map<String, String> options = new HashMap<>();
options.put("keyTab", keytab);
options.put("principal", principal);
options.put("useKeyTab", "true");
options.put("storeKey", "true");
options.put("useTicketCache", "false");
options.put("refreshKrb5Config", "true");
String jaasEnvVar = System.getenv("HADOOP_JAAS_DEBUG");
if ("true".equalsIgnoreCase(jaasEnvVar)) {
options.put("debug", "true");
}
entry = new AppConfigurationEntry[]{
new AppConfigurationEntry(getKrb5LoginModuleName(),
AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
options)};
}
@Override
public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
return (entryName.equals(name)) ? entry : ((baseConfig != null)
? baseConfig.getAppConfigurationEntry(name) : null);
}
private String getKrb5LoginModuleName() {
String krb5LoginModuleName;
if (System.getProperty("java.vendor").contains("IBM")) {
krb5LoginModuleName = "com.ibm.security.auth.module.Krb5LoginModule";
} else {
krb5LoginModuleName = "com.sun.security.auth.module.Krb5LoginModule";
}
return krb5LoginModuleName;
}
}
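
A minimal sketch of how a programmatic JAAS configuration like the class above is usually installed, so a ZooKeeper client can pick it up without a jaas.conf file; the entry name, principal, and keytab path are placeholders, and after this revert the equivalent constructor lives on the nested ZKSignerSecretProvider.JaasConfiguration instead of this standalone class:

```java
import javax.security.auth.login.Configuration;
import org.apache.hadoop.security.authentication.util.JaasConfiguration;

public class JaasSetupDemo {                    // hypothetical demo class
  public static void main(String[] args) {
    // Register the programmatic configuration instead of pointing the
    // java.security.auth.login.config system property at a jaas.conf file.
    Configuration.setConfiguration(new JaasConfiguration(
        "Client", "zkclient/host@EXAMPLE.COM",
        "/etc/security/keytabs/zkclient.keytab"));
  }
}
```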

View File

@ -26,7 +26,7 @@ import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.slf4j.Logger;

View File

@ -79,16 +79,11 @@ public class KerberosUtil {
*
* @return Oid instance
* @param oidName The oid Name
* @throws ClassNotFoundException for backward compatibility.
* @throws GSSException for backward compatibility.
* @throws NoSuchFieldException if the input is not supported.
* @throws IllegalAccessException for backward compatibility.
*
*/
@Deprecated
public static Oid getOidInstance(String oidName)
throws ClassNotFoundException, GSSException, NoSuchFieldException,
IllegalAccessException {
throws NoSuchFieldException {
switch (oidName) {
case "GSS_SPNEGO_MECH_OID":
return GSS_SPNEGO_MECH_OID;
@ -236,7 +231,7 @@ public class KerberosUtil {
*/
static final String[] getPrincipalNames(String keytabFileName) throws IOException {
Keytab keytab = Keytab.loadKeytab(new File(keytabFileName));
Set<String> principals = new HashSet<>();
Set<String> principals = new HashSet<String>();
List<PrincipalName> entries = keytab.getPrincipals();
for (PrincipalName entry : entries) {
principals.add(entry.getName().replace("\\", "/"));

View File

@ -13,7 +13,7 @@
*/
package org.apache.hadoop.security.authentication.util;
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
import java.security.SecureRandom;
import java.util.Random;

View File

@ -18,7 +18,7 @@ import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import javax.servlet.ServletContext;
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.slf4j.Logger;

View File

@ -13,13 +13,16 @@
*/
package org.apache.hadoop.security.authentication.util;
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
import java.nio.ByteBuffer;
import java.security.SecureRandom;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Random;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.Configuration;
import javax.servlet.ServletContext;
import org.apache.curator.RetryPolicy;
@ -426,4 +429,62 @@ public class ZKSignerSecretProvider extends RolloverSignerSecretProvider {
return saslACL;
}
}
/**
* Creates a programmatic version of a jaas.conf file. This can be used
* instead of writing a jaas.conf file and setting the system property,
* "java.security.auth.login.config", to point to that file. It is meant to be
* used for connecting to ZooKeeper.
*/
@InterfaceAudience.Private
public static class JaasConfiguration extends Configuration {
private final javax.security.auth.login.Configuration baseConfig =
javax.security.auth.login.Configuration.getConfiguration();
private static AppConfigurationEntry[] entry;
private String entryName;
/**
* Add an entry to the jaas configuration with the passed in name,
* principal, and keytab. The other necessary options will be set for you.
*
* @param entryName The name of the entry (e.g. "Client")
* @param principal The principal of the user
* @param keytab The location of the keytab
*/
public JaasConfiguration(String entryName, String principal, String keytab) {
this.entryName = entryName;
Map<String, String> options = new HashMap<String, String>();
options.put("keyTab", keytab);
options.put("principal", principal);
options.put("useKeyTab", "true");
options.put("storeKey", "true");
options.put("useTicketCache", "false");
options.put("refreshKrb5Config", "true");
String jaasEnvVar = System.getenv("HADOOP_JAAS_DEBUG");
if (jaasEnvVar != null && "true".equalsIgnoreCase(jaasEnvVar)) {
options.put("debug", "true");
}
entry = new AppConfigurationEntry[]{
new AppConfigurationEntry(getKrb5LoginModuleName(),
AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
options)};
}
@Override
public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
return (entryName.equals(name)) ? entry : ((baseConfig != null)
? baseConfig.getAppConfigurationEntry(name) : null);
}
private String getKrb5LoginModuleName() {
String krb5LoginModuleName;
if (System.getProperty("java.vendor").contains("IBM")) {
krb5LoginModuleName = "com.ibm.security.auth.module.Krb5LoginModule";
} else {
krb5LoginModuleName = "com.sun.security.auth.module.Krb5LoginModule";
}
return krb5LoginModuleName;
}
}
}

View File

@ -18,10 +18,6 @@
package org.apache.hadoop.util;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.Arrays;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@ -37,71 +33,21 @@ public class PlatformName {
* per the java-vm.
*/
public static final String PLATFORM_NAME =
(System.getProperty("os.name").startsWith("Windows") ?
System.getenv("os") : System.getProperty("os.name"))
+ "-" + System.getProperty("os.arch") + "-"
+ System.getProperty("sun.arch.data.model");
(System.getProperty("os.name").startsWith("Windows")
? System.getenv("os") : System.getProperty("os.name"))
+ "-" + System.getProperty("os.arch")
+ "-" + System.getProperty("sun.arch.data.model");
/**
* The java vendor name used in this platform.
*/
public static final String JAVA_VENDOR_NAME = System.getProperty("java.vendor");
/**
* Define a system class accessor that is open to changes in underlying implementations
* of the system class loader modules.
*/
private static final class SystemClassAccessor extends ClassLoader {
public Class<?> getSystemClass(String className) throws ClassNotFoundException {
return findSystemClass(className);
}
}
/**
* A public static variable to indicate the current java vendor is
* IBM and the type is Java Technology Edition which provides its
* own implementations of many security packages and Cipher suites.
* Note that these are not provided in Semeru runtimes:
* See https://developer.ibm.com/languages/java/semeru-runtimes for details.
* IBM java or not.
*/
public static final boolean IBM_JAVA = JAVA_VENDOR_NAME.contains("IBM") &&
hasIbmTechnologyEditionModules();
private static boolean hasIbmTechnologyEditionModules() {
return Arrays.asList(
"com.ibm.security.auth.module.JAASLoginModule",
"com.ibm.security.auth.module.Win64LoginModule",
"com.ibm.security.auth.module.NTLoginModule",
"com.ibm.security.auth.module.AIX64LoginModule",
"com.ibm.security.auth.module.LinuxLoginModule",
"com.ibm.security.auth.module.Krb5LoginModule"
).stream().anyMatch((module) -> isSystemClassAvailable(module));
}
/**
* In rare cases where different behaviour is performed based on the JVM vendor
* this method should be used to test for a unique JVM class provided by the
* vendor rather than using the vendor method. For example, if one JVM provides a
* different Kerberos login module, testing that the login module is loadable
* before configuring it is preferable to relying on the vendor data.
*
* @param className the name of a class in the JVM to test for
* @return true if the class is available, false otherwise.
*/
private static boolean isSystemClassAvailable(String className) {
return AccessController.doPrivileged((PrivilegedAction<Boolean>) () -> {
try {
// Using ClassLoader.findSystemClass() instead of
// Class.forName(className, false, null) because Class.forName with a null
// ClassLoader only looks at the boot ClassLoader with Java 9 and above
// which doesn't look at all the modules available to the findSystemClass.
new SystemClassAccessor().getSystemClass(className);
return true;
} catch (Exception ignored) {
return false;
}
});
}
public static final boolean IBM_JAVA = JAVA_VENDOR_NAME.contains("IBM");
public static void main(String[] args) {
System.out.println(PLATFORM_NAME);

View File

@ -24,7 +24,7 @@ This filter must be configured in front of all the web application resources tha
The Hadoop Auth and dependent JAR files must be in the web application classpath (commonly the `WEB-INF/lib` directory).
Hadoop Auth uses SLF4J-API for logging. The Auth Maven POM dependencies define the SLF4J API dependency but do not define the dependency on a concrete logging implementation; this must be added explicitly to the web application. For example, if the web application uses Log4j, the SLF4J-LOG4J12 and LOG4J jar files must be part of the web application classpath as well as the Log4j configuration file.
Hadoop Auth uses SLF4J-API for logging. The Auth Maven POM dependencies define the SLF4J API dependency but do not define the dependency on a concrete logging implementation; this must be added explicitly to the web application. For example, if the web application uses Log4j, the SLF4J-LOG4J12 and LOG4J jar files must be part part of the web application classpath as well as the Log4j configuration file.
### Common Configuration parameters

View File

@ -108,9 +108,9 @@ public class KerberosTestUtils {
public static <T> T doAs(String principal, final Callable<T> callable) throws Exception {
LoginContext loginContext = null;
try {
Set<Principal> principals = new HashSet<>();
Set<Principal> principals = new HashSet<Principal>();
principals.add(new KerberosPrincipal(KerberosTestUtils.getClientPrincipal()));
Subject subject = new Subject(false, principals, new HashSet<>(), new HashSet<>());
Subject subject = new Subject(false, principals, new HashSet<Object>(), new HashSet<Object>());
loginContext = new LoginContext("", subject, null, new KerberosConfiguration(principal));
loginContext.login();
subject = loginContext.getSubject();

View File

@ -89,44 +89,6 @@ public class TestAuthenticatedURL {
}
}
@Test
public void testExtractTokenCookieHeader() throws Exception {
HttpURLConnection conn = Mockito.mock(HttpURLConnection.class);
Mockito.when(conn.getResponseCode()).thenReturn(HttpURLConnection.HTTP_OK);
String tokenStr = "foo";
Map<String, List<String>> headers = new HashMap<>();
List<String> cookies = new ArrayList<>();
cookies.add(AuthenticatedURL.AUTH_COOKIE + "=" + tokenStr);
headers.put("Set-Cookie", cookies);
Mockito.when(conn.getHeaderFields()).thenReturn(headers);
AuthenticatedURL.Token token = new AuthenticatedURL.Token();
AuthenticatedURL.extractToken(conn, token);
Assert.assertTrue(token.isSet());
}
@Test
public void testExtractTokenLowerCaseCookieHeader() throws Exception {
HttpURLConnection conn = Mockito.mock(HttpURLConnection.class);
Mockito.when(conn.getResponseCode()).thenReturn(HttpURLConnection.HTTP_OK);
String tokenStr = "foo";
Map<String, List<String>> headers = new HashMap<>();
List<String> cookies = new ArrayList<>();
cookies.add(AuthenticatedURL.AUTH_COOKIE + "=" + tokenStr);
headers.put("set-cookie", cookies);
Mockito.when(conn.getHeaderFields()).thenReturn(headers);
AuthenticatedURL.Token token = new AuthenticatedURL.Token();
AuthenticatedURL.extractToken(conn, token);
Assert.assertTrue(token.isSet());
}
@Test
public void testConnectionConfigurator() throws Exception {
HttpURLConnection conn = Mockito.mock(HttpURLConnection.class);

View File

@ -21,13 +21,8 @@ import static org.apache.hadoop.security.authentication.server.KerberosAuthentic
import static org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler.NAME_RULES;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.nio.charset.CharacterCodingException;
import javax.security.sasl.AuthenticationException;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.lang3.reflect.FieldUtils;
import org.apache.hadoop.minikdc.KerberosSecurityTestcase;
import org.apache.hadoop.security.authentication.KerberosTestUtils;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
@ -37,12 +32,10 @@ import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHa
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import java.io.File;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Arrays;
import java.util.Properties;
import java.util.concurrent.Callable;
@ -255,79 +248,4 @@ public class TestKerberosAuthenticator extends KerberosSecurityTestcase {
Assert.assertTrue(ex.equals(ex2));
}
@Test(timeout = 60000)
public void testNegotiate() throws NoSuchMethodException, InvocationTargetException,
IllegalAccessException, IOException {
KerberosAuthenticator kerberosAuthenticator = new KerberosAuthenticator();
HttpURLConnection conn = Mockito.mock(HttpURLConnection.class);
Mockito.when(conn.getHeaderField(KerberosAuthenticator.WWW_AUTHENTICATE)).
thenReturn(KerberosAuthenticator.NEGOTIATE);
Mockito.when(conn.getResponseCode()).thenReturn(HttpURLConnection.HTTP_UNAUTHORIZED);
Method method = KerberosAuthenticator.class.getDeclaredMethod("isNegotiate",
HttpURLConnection.class);
method.setAccessible(true);
Assert.assertTrue((boolean)method.invoke(kerberosAuthenticator, conn));
}
@Test(timeout = 60000)
public void testNegotiateLowerCase() throws NoSuchMethodException, InvocationTargetException,
IllegalAccessException, IOException {
KerberosAuthenticator kerberosAuthenticator = new KerberosAuthenticator();
HttpURLConnection conn = Mockito.mock(HttpURLConnection.class);
Mockito.when(conn.getHeaderField("www-authenticate"))
.thenReturn(KerberosAuthenticator.NEGOTIATE);
Mockito.when(conn.getResponseCode()).thenReturn(HttpURLConnection.HTTP_UNAUTHORIZED);
Method method = KerberosAuthenticator.class.getDeclaredMethod("isNegotiate",
HttpURLConnection.class);
method.setAccessible(true);
Assert.assertTrue((boolean)method.invoke(kerberosAuthenticator, conn));
}
@Test(timeout = 60000)
public void testReadToken() throws NoSuchMethodException, IOException, IllegalAccessException,
InvocationTargetException {
KerberosAuthenticator kerberosAuthenticator = new KerberosAuthenticator();
FieldUtils.writeField(kerberosAuthenticator, "base64", new Base64(), true);
Base64 base64 = new Base64();
HttpURLConnection conn = Mockito.mock(HttpURLConnection.class);
Mockito.when(conn.getResponseCode()).thenReturn(HttpURLConnection.HTTP_UNAUTHORIZED);
Mockito.when(conn.getHeaderField(KerberosAuthenticator.WWW_AUTHENTICATE))
.thenReturn(KerberosAuthenticator.NEGOTIATE + " " +
Arrays.toString(base64.encode("foobar".getBytes())));
Method method = KerberosAuthenticator.class.getDeclaredMethod("readToken",
HttpURLConnection.class);
method.setAccessible(true);
method.invoke(kerberosAuthenticator, conn); // expecting this not to throw an exception
}
@Test(timeout = 60000)
public void testReadTokenLowerCase() throws NoSuchMethodException, IOException,
IllegalAccessException, InvocationTargetException {
KerberosAuthenticator kerberosAuthenticator = new KerberosAuthenticator();
FieldUtils.writeField(kerberosAuthenticator, "base64", new Base64(), true);
Base64 base64 = new Base64();
HttpURLConnection conn = Mockito.mock(HttpURLConnection.class);
Mockito.when(conn.getResponseCode()).thenReturn(HttpURLConnection.HTTP_UNAUTHORIZED);
Mockito.when(conn.getHeaderField("www-authenticate"))
.thenReturn(KerberosAuthenticator.NEGOTIATE +
Arrays.toString(base64.encode("foobar".getBytes())));
Method method = KerberosAuthenticator.class.getDeclaredMethod("readToken",
HttpURLConnection.class);
method.setAccessible(true);
method.invoke(kerberosAuthenticator, conn); // expecting this not to throw an exception
}
}

View File

@ -305,34 +305,6 @@ public class TestAuthenticationFilter {
filter.destroy();
}
}
@Test
public void testEmptySecretFileFallbacksToRandomSecret() throws Exception {
AuthenticationFilter filter = new AuthenticationFilter();
try {
FilterConfig config = Mockito.mock(FilterConfig.class);
Mockito.when(config.getInitParameter(
AuthenticationFilter.AUTH_TYPE)).thenReturn("simple");
File secretFile = File.createTempFile("test_empty_secret", ".txt");
secretFile.deleteOnExit();
Assert.assertTrue(secretFile.exists());
Mockito.when(config.getInitParameter(
AuthenticationFilter.SIGNATURE_SECRET_FILE))
.thenReturn(secretFile.getAbsolutePath());
Mockito.when(config.getInitParameterNames()).thenReturn(
new Vector<>(Arrays.asList(AuthenticationFilter.AUTH_TYPE,
AuthenticationFilter.SIGNATURE_SECRET_FILE)).elements());
ServletContext context = Mockito.mock(ServletContext.class);
Mockito.when(context.getAttribute(
AuthenticationFilter.SIGNER_SECRET_PROVIDER_ATTRIBUTE))
.thenReturn(null);
Mockito.when(config.getServletContext()).thenReturn(context);
filter.init(config);
Assert.assertTrue(filter.isRandomSecret());
} finally {
filter.destroy();
}
}
@Test
public void testInitCaseSensitivity() throws Exception {
@ -574,44 +546,6 @@ public class TestAuthenticationFilter {
}
}
@Test
public void testDoFilterNotAuthenticatedLowerCase() throws Exception {
AuthenticationFilter filter = new AuthenticationFilter();
try {
FilterConfig config = Mockito.mock(FilterConfig.class);
Mockito.when(config.getInitParameter("management.operation.return")).
thenReturn("true");
Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
DummyAuthenticationHandler.class.getName());
Mockito.when(config.getInitParameterNames()).thenReturn(
new Vector<>(
Arrays.asList(AuthenticationFilter.AUTH_TYPE,
"management.operation.return")).elements());
getMockedServletContextWithStringSigner(config);
filter.init(config);
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
Mockito.when(request.getRequestURL()).thenReturn(new StringBuffer("http://foo:8080/bar"));
HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
FilterChain chain = Mockito.mock(FilterChain.class);
Mockito.doAnswer((Answer<Object>) invocation -> {
Assert.fail();
return null;
}).when(chain).doFilter(any(), any());
Mockito.when(response.containsHeader("www-authenticate")).thenReturn(true);
filter.doFilter(request, response, chain);
Mockito.verify(response).sendError(
HttpServletResponse.SC_UNAUTHORIZED, "Authentication required");
} finally {
filter.destroy();
}
}
private void _testDoFilterAuthentication(boolean withDomainPath,
boolean invalidToken,
boolean expired) throws Exception {

View File

@ -25,6 +25,7 @@ import java.security.interfaces.RSAPublicKey;
import java.util.List;
import java.util.ArrayList;
import java.util.Properties;
import java.util.Vector;
import java.util.Date;
import javax.servlet.ServletException;

View File

@ -13,6 +13,7 @@
*/
package org.apache.hadoop.security.authentication.server;
import org.apache.hadoop.security.authentication.client.AuthenticationException;
import org.apache.hadoop.security.authentication.client.PseudoAuthenticator;
import org.junit.Assert;
import org.junit.Test;

View File

@ -17,7 +17,8 @@ import java.nio.charset.Charset;
import java.util.Properties;
import javax.servlet.ServletContext;
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;

View File

@ -13,7 +13,7 @@
*/
package org.apache.hadoop.security.authentication.util;
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.classification.InterfaceStability;
/**

View File

@ -16,16 +16,12 @@ package org.apache.hadoop.security.authentication.util;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.junit.Assert;
import org.junit.Test;
import org.junit.function.ThrowingRunnable;
import java.io.File;
import java.io.FileWriter;
import java.io.Writer;
import java.util.Properties;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;
public class TestFileSignerSecretProvider {
@Test
@ -52,27 +48,4 @@ public class TestFileSignerSecretProvider {
Assert.assertEquals(1, allSecrets.length);
Assert.assertArrayEquals(secretValue.getBytes(), allSecrets[0]);
}
@Test
public void testEmptySecretFileThrows() throws Exception {
File secretFile = File.createTempFile("test_empty_secret", ".txt");
assertTrue(secretFile.exists());
FileSignerSecretProvider secretProvider
= new FileSignerSecretProvider();
Properties secretProviderProps = new Properties();
secretProviderProps.setProperty(
AuthenticationFilter.SIGNATURE_SECRET_FILE,
secretFile.getAbsolutePath());
Exception exception =
assertThrows(RuntimeException.class, new ThrowingRunnable() {
@Override
public void run() throws Throwable {
secretProvider.init(secretProviderProps, null, -1);
}
});
assertTrue(exception.getMessage().startsWith(
"No secret in signature secret file:"));
}
}

View File

@ -32,8 +32,8 @@ public class TestJaasConfiguration {
krb5LoginModuleName = "com.sun.security.auth.module.Krb5LoginModule";
}
JaasConfiguration jConf =
new JaasConfiguration("foo", "foo/localhost",
ZKSignerSecretProvider.JaasConfiguration jConf =
new ZKSignerSecretProvider.JaasConfiguration("foo", "foo/localhost",
"/some/location/foo.keytab");
AppConfigurationEntry[] entries = jConf.getAppConfigurationEntry("bar");
Assert.assertNull(entries);

View File

@ -379,6 +379,21 @@
<Bug code="JLM" />
</Match>
<!--
OpenStack Swift FS module -closes streams in a different method
from where they are opened.
-->
<Match>
<Class name="org.apache.hadoop.fs.swift.snative.SwiftNativeOutputStream"/>
<Method name="uploadFileAttempt"/>
<Bug pattern="OBL_UNSATISFIED_OBLIGATION"/>
</Match>
<Match>
<Class name="org.apache.hadoop.fs.swift.snative.SwiftNativeOutputStream"/>
<Method name="uploadFilePartAttempt"/>
<Bug pattern="OBL_UNSATISFIED_OBLIGATION"/>
</Match>
<!-- code from maven source, null value is checked at callee side. -->
<Match>
<Class name="org.apache.hadoop.util.ComparableVersion$ListItem" />

File diff suppressed because one or more lines are too long

Some files were not shown because too many files have changed in this diff.