HDDS-1255. Refactor ozone acceptance test to allow run in secure mode

Ajay Kumar 2019-04-01 17:04:28 +02:00 committed by Márton Elek
parent 0df949202a
commit 5f951ea2e3
No known key found for this signature in database
GPG Key ID: D51EA8F00EE79B28
9 changed files with 184 additions and 184 deletions

View File

@ -108,6 +108,7 @@ LOG4J2.PROPERTIES_rootLogger.appenderRefs=stdout
LOG4J2.PROPERTIES_rootLogger.appenderRef.stdout.ref=STDOUT
OZONE_DATANODE_SECURE_USER=root
SECURITY_ENABLED=true
KEYTAB_DIR=/etc/security/keytabs
KERBEROS_KEYTABS=dn om scm HTTP testuser s3g
KERBEROS_KEYSTORES=hadoop
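
For context, a minimal sketch (not part of this diff) of how a Robot suite consumes the SECURITY_ENABLED flag exported above; it simply reuses the %{SECURITY_ENABLED} variable and the conditional Kinit pattern that commonlib.robot introduces further down in this change:

*** Settings ***
Resource            commonlib.robot
Suite Setup         Run Keyword if    '${SECURITY_ENABLED}' == 'true'    Kinit test user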

View File

@ -0,0 +1,18 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
*** Settings ***
Documentation Smoketest ozone secure cluster
Resource commonlib.robot
Suite Setup Run Keyword if '${SECURITY_ENABLED}' == 'true' Kinit test user

View File

@ -16,10 +16,20 @@
*** Settings ***
Documentation Smoketest ozone cluster startup
Library OperatingSystem
Library BuiltIn
Resource ../commonlib.robot
*** Variables ***
${user} hadoop
${count} 4
*** Keywords ***
Set username
${hostname} = Execute hostname
Set Suite Variable ${user} testuser/${hostname}@EXAMPLE.COM
[return] ${user}
*** Test Cases ***
Initiating freon to generate data
${result} = Execute ozone freon randomkeys --numOfVolumes 5 --numOfBuckets 5 --numOfKeys 5 --numOfThreads 1
Wait Until Keyword Succeeds 3min 10sec Should contain ${result} Number of Keys added: 125
@ -31,10 +41,11 @@ Testing audit parser
${result} = Execute ozone auditparser /opt/hadoop/audit.db template top5cmds
Should Contain ${result} ALLOCATE_KEY
${result} = Execute ozone auditparser /opt/hadoop/audit.db template top5users
Should Contain ${result} hadoop
Run Keyword If '${SECURITY_ENABLED}' == 'true' Set username
Should Contain ${result} ${user}
${result} = Execute ozone auditparser /opt/hadoop/audit.db query "select count(*) from audit where op='CREATE_VOLUME' and RESULT='SUCCESS'"
Should Contain ${result} 5
${result} = Convert To Number ${result}
Should be true ${result}>${count}
${result} = Execute ozone auditparser /opt/hadoop/audit.db query "select count(*) from audit where op='CREATE_BUCKET' and RESULT='SUCCESS'"
Should Contain ${result} 5
${result} = Execute ozone auditparser /opt/hadoop/audit.db query "select count(*) from audit where RESULT='FAILURE'"
Should Contain ${result} 0
${result} = Convert To Number ${result}
Should be true ${result}>${count}

View File

@ -13,9 +13,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
*** Settings ***
Library OperatingSystem
Library String
Library BuiltIn
*** Variables ***
${SECURITY_ENABLED} %{SECURITY_ENABLED}
*** Keywords ***
Execute
[arguments] ${command}
${rc} ${output} = Run And Return Rc And Output ${command}
@ -35,3 +41,14 @@ Compare files
${checksumbefore} = Execute md5sum ${file1} | awk '{print $1}'
${checksumafter} = Execute md5sum ${file2} | awk '{print $1}'
Should Be Equal ${checksumbefore} ${checksumafter}
Install aws cli
${rc} ${output} = Run And Return Rc And Output which apt-get
Run Keyword if '${rc}' == '0' Install aws cli s3 debian
${rc} ${output} = Run And Return Rc And Output yum --help
Run Keyword if '${rc}' == '0' Install aws cli s3 centos
Kinit test user
${hostname} = Execute hostname
Set Suite Variable ${TEST_USER} testuser/${hostname}@EXAMPLE.COM
Execute kinit -k ${TEST_USER} -t /etc/security/keytabs/testuser.keytab

View File

@ -15,58 +15,66 @@
*** Settings ***
Resource ../commonlib.robot
*** Variables ***
${OZONE_S3_HEADER_VERSION} v4
${OZONE_S3_SET_CREDENTIALS} true
${BUCKET} bucket-999
*** Keywords ***
Execute AWSS3APICli
[Arguments] ${command}
${output} = Execute aws s3api --endpoint-url ${ENDPOINT_URL} ${command}
[return] ${output}
Execute AWSS3APICli and checkrc
[Arguments] ${command} ${expected_error_code}
${output} = Execute and checkrc aws s3api --endpoint-url ${ENDPOINT_URL} ${command} ${expected_error_code}
[return] ${output}
Execute AWSS3Cli
[Arguments] ${command}
${output} = Execute aws s3 --endpoint-url ${ENDPOINT_URL} ${command}
[return] ${output}
Install aws cli
${rc} ${output} = Run And Return Rc And Output which apt-get
Run Keyword if '${rc}' == '0' Install aws cli s3 debian
${rc} ${output} = Run And Return Rc And Output yum --help
Run Keyword if '${rc}' == '0' Install aws cli s3 centos
Install aws cli s3 centos
Execute sudo yum install -y awscli
Install aws cli s3 debian
Execute sudo apt-get install -y awscli
Setup v2 headers
Set Environment Variable AWS_ACCESS_KEY_ID ANYID
Set Environment Variable AWS_SECRET_ACCESS_KEY ANYKEY
Setup v4 headers
${result} = Execute ozone s3 getsecret
${accessKey} = Get Regexp Matches ${result} (?<=awsAccessKey=).*
${accessKey} = Get Variable Value ${accessKey} sdsdasaasdasd
${secret} = Get Regexp Matches ${result} (?<=awsSecret=).*
${len}= Get Length ${accessKey}
${accessKey}= Set Variable If ${len} > 0 ${accessKey[0]} kljdfslff
${len}= Get Length ${secret}
${secret}= Set Variable If ${len} > 0 ${secret[0]} dhafldhlf
Execute aws configure set default.s3.signature_version s3v4
Execute aws configure set aws_access_key_id default1
Execute aws configure set aws_secret_access_key defaultsecret
Execute aws configure set aws_access_key_id ${accessKey}
Execute aws configure set aws_secret_access_key ${secret}
Execute aws configure set region us-west-1
Setup incorrect credentials for S3
Execute aws configure set default.s3.signature_version s3v4
Execute aws configure set aws_access_key_id dlfknslnfslf
Execute aws configure set aws_secret_access_key dlfknslnfslf
Execute aws configure set region us-west-1
Create bucket
${postfix} = Generate Random String 5 [NUMBERS]
Set Suite Variable ${BUCKET} bucket-${postfix}
Execute AWSS3APICli create-bucket --bucket ${BUCKET}
Setup credentials
Run Keyword if '${OZONE_S3_HEADER_VERSION}' == 'v4' Setup v4 headers
Run Keyword if '${OZONE_S3_HEADER_VERSION}' != 'v4' Setup v2 headers
Setup s3 tests
Run Keyword Install aws cli
Run Keyword if '${OZONE_S3_SET_CREDENTIALS}' == 'true' Setup credentials
Run Keyword if '${OZONE_S3_SET_CREDENTIALS}' == 'true' Setup v4 headers
Run Keyword if '${BUCKET}' == 'generated' Create bucket
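
For reference, a hypothetical suite header (not part of this change, paths assumed for a suite living next to commonawslib.robot) showing how these keywords are meant to be wired together: import the resource, point ENDPOINT_URL at the s3 gateway, and let Setup s3 tests install the CLI, configure credentials, and create a bucket when ${BUCKET} is set to 'generated':

*** Settings ***
Resource            commonawslib.robot
Suite Setup         Setup s3 tests

*** Variables ***
${ENDPOINT_URL}     http://s3g:9878
${BUCKET}           generated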

View File

@ -0,0 +1,49 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
*** Settings ***
Documentation Smoke test for ozone shell and ozone fs on a secure (Kerberos-enabled) cluster
Library OperatingSystem
Library String
Library BuiltIn
Resource ../commonlib.robot
*** Variables ***
${ENDPOINT_URL} http://s3g:9878
*** Keywords ***
Setup volume names
${random} Generate Random String 2 [NUMBERS]
Set Suite Variable ${volume1} fstest${random}
Set Suite Variable ${volume2} fstest2${random}
*** Test Cases ***
Create volume and bucket with wrong credentials
Execute kdestroy
${rc} ${output} = Run And Return Rc And Output ozone sh volume create o3://om/fstest --user bilbo --quota 100TB --root
Should contain ${output} Client cannot authenticate via
Create volume and bucket with valid credentials
# Authenticate testuser
Run Keyword Kinit test user
Run Keyword Setup volume names
Execute ozone sh volume create o3://om/${volume1} --user bilbo --quota 100TB --root
Execute ozone sh volume create o3://om/${volume2} --user bilbo --quota 100TB --root
Execute ozone sh bucket create o3://om/${volume1}/bucket1
Execute ozone sh bucket create o3://om/${volume1}/bucket2
Execute ozone sh bucket create o3://om/${volume2}/bucket3
Check volume from ozonefs
${result} = Execute ozone fs -ls o3fs://bucket1.${volume1}/

View File

@ -0,0 +1,44 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
*** Settings ***
Documentation Smoke test for the S3 gateway on a secure (Kerberos-enabled) cluster
Library OperatingSystem
Library String
Library BuiltIn
Resource ../commonlib.robot
Resource ../s3/commonawslib.robot
*** Variables ***
${ENDPOINT_URL} http://s3g:9878
*** Keywords ***
Setup volume names
${random} Generate Random String 2 [NUMBERS]
Set Suite Variable ${volume1} fstest${random}
Set Suite Variable ${volume2} fstest2${random}
*** Test Cases ***
Secure S3 test Success
Run Keyword Setup s3 tests
${output} = Execute aws s3api --endpoint-url ${ENDPOINT_URL} create-bucket --bucket bucket-test123
${output} = Execute aws s3api --endpoint-url ${ENDPOINT_URL} list-buckets
Should contain ${output} bucket-test123
Secure S3 test Failure
Run Keyword Setup incorrect credentials for S3
${rc} ${result} = Run And Return Rc And Output aws s3api --endpoint-url ${ENDPOINT_URL} create-bucket --bucket bucket-test123
Should Be True ${rc} > 0

View File

@ -1,154 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
*** Settings ***
Documentation Smoke test to start cluster with docker-compose environments.
Library OperatingSystem
Library String
Resource ../commonlib.robot
*** Variables ***
${ENDPOINT_URL} http://s3g:9878
*** Keywords ***
Install aws cli s3 centos
Execute sudo yum install -y awscli
Install aws cli s3 debian
Execute sudo apt-get install -y awscli
Install aws cli
${rc} ${output} = Run And Return Rc And Output which apt-get
Run Keyword if '${rc}' == '0' Install aws cli s3 debian
${rc} ${output} = Run And Return Rc And Output yum --help
Run Keyword if '${rc}' == '0' Install aws cli s3 centos
Setup credentials
${hostname}= Execute hostname
Execute kinit -k testuser/${hostname}@EXAMPLE.COM -t /etc/security/keytabs/testuser.keytab
${result} = Execute ozone s3 getsecret
${accessKey} = Get Regexp Matches ${result} (?<=awsAccessKey=).*
${secret} = Get Regexp Matches ${result} (?<=awsSecret=).*
Execute aws configure set default.s3.signature_version s3v4
Execute aws configure set aws_access_key_id ${accessKey[0]}
Execute aws configure set aws_secret_access_key ${secret[0]}
Execute aws configure set region us-west-1
*** Test Cases ***
Create volume and bucket
${rc} ${output} = Run And Return Rc And Output ozone sh volume create o3://om/fstest --user bilbo --quota 100TB --root
Should contain ${output} Client cannot authenticate via
# Authenticate testuser
${hostname}= Execute hostname
Execute kinit -k testuser/${hostname}@EXAMPLE.COM -t /etc/security/keytabs/testuser.keytab
Execute ozone sh volume create o3://om/fstest --user bilbo --quota 100TB --root
Execute ozone sh volume create o3://om/fstest2 --user bilbo --quota 100TB --root
Execute ozone sh bucket create o3://om/fstest/bucket1
Execute ozone sh bucket create o3://om/fstest/bucket2
Execute ozone sh bucket create o3://om/fstest2/bucket3
Check volume from ozonefs
${result} = Execute ozone fs -ls o3fs://bucket1.fstest/
Run ozoneFS tests
Execute ozone fs -mkdir -p o3fs://bucket1.fstest/testdir/deep
${result} = Execute ozone sh key list o3://om/fstest/bucket1 | grep -v WARN | jq -r '.[].keyName'
Should contain ${result} testdir/deep
Execute ozone fs -copyFromLocal NOTICE.txt o3fs://bucket1.fstest/testdir/deep/
${result} = Execute ozone sh key list o3://om/fstest/bucket1 | grep -v WARN | jq -r '.[].keyName'
Should contain ${result} NOTICE.txt
Execute ozone fs -put NOTICE.txt o3fs://bucket1.fstest/testdir/deep/PUTFILE.txt
${result} = Execute ozone sh key list o3://om/fstest/bucket1 | grep -v WARN | jq -r '.[].keyName'
Should contain ${result} PUTFILE.txt
${result} = Execute ozone fs -ls o3fs://bucket1.fstest/testdir/deep/
Should contain ${result} NOTICE.txt
Should contain ${result} PUTFILE.txt
Execute ozone fs -mv o3fs://bucket1.fstest/testdir/deep/NOTICE.txt o3fs://bucket1.fstest/testdir/deep/MOVED.TXT
${result} = Execute ozone sh key list o3://om/fstest/bucket1 | grep -v WARN | jq -r '.[].keyName'
Should contain ${result} MOVED.TXT
Should not contain ${result} NOTICE.txt
Execute ozone fs -mkdir -p o3fs://bucket1.fstest/testdir/deep/subdir1
Execute ozone fs -cp o3fs://bucket1.fstest/testdir/deep/MOVED.TXT o3fs://bucket1.fstest/testdir/deep/subdir1/NOTICE.txt
${result} = Execute ozone sh key list o3://om/fstest/bucket1 | grep -v WARN | jq -r '.[].keyName'
Should contain ${result} subdir1/NOTICE.txt
${result} = Execute ozone fs -ls o3fs://bucket1.fstest/testdir/deep/subdir1/
Should contain ${result} NOTICE.txt
Execute ozone fs -cat o3fs://bucket1.fstest/testdir/deep/subdir1/NOTICE.txt
Should not contain ${result} Failed
Execute ozone fs -rm o3fs://bucket1.fstest/testdir/deep/subdir1/NOTICE.txt
${result} = Execute ozone sh key list o3://om/fstest/bucket1 | grep -v WARN | jq -r '.[].keyName'
Should not contain ${result} NOTICE.txt
${result} = Execute ozone fs -rmdir o3fs://bucket1.fstest/testdir/deep/subdir1/
${result} = Execute ozone sh key list o3://om/fstest/bucket1 | grep -v WARN | jq -r '.[].keyName'
Should not contain ${result} subdir1
Execute ozone fs -touch o3fs://bucket1.fstest/testdir/TOUCHFILE.txt
${result} = Execute ozone sh key list o3://om/fstest/bucket1 | grep -v WARN | jq -r '.[].keyName'
Should contain ${result} TOUCHFILE.txt
Execute ozone fs -rm -r o3fs://bucket1.fstest/testdir/
${result} = Execute ozone sh key list o3://om/fstest/bucket1 | grep -v WARN | jq -r '.[].keyName'
Should not contain ${result} testdir
Execute rm -Rf localdir1
Execute mkdir localdir1
Execute cp NOTICE.txt localdir1/LOCAL.txt
Execute ozone fs -mkdir -p o3fs://bucket1.fstest/testdir1
Execute ozone fs -copyFromLocal localdir1 o3fs://bucket1.fstest/testdir1/
Execute ozone fs -put NOTICE.txt o3fs://bucket1.fstest/testdir1/NOTICE.txt
${result} = Execute ozone fs -ls -R o3fs://bucket1.fstest/testdir1/
Should contain ${result} localdir1/LOCAL.txt
Should contain ${result} testdir1/NOTICE.txt
Execute ozone fs -mkdir -p o3fs://bucket2.fstest/testdir2
Execute ozone fs -mkdir -p o3fs://bucket3.fstest2/testdir3
Execute ozone fs -cp o3fs://bucket1.fstest/testdir1/localdir1 o3fs://bucket2.fstest/testdir2/
Execute ozone fs -cp o3fs://bucket1.fstest/testdir1/localdir1 o3fs://bucket3.fstest2/testdir3/
Execute ozone sh key put o3://om/fstest/bucket1/KEY.txt NOTICE.txt
${result} = Execute ozone fs -ls o3fs://bucket1.fstest/KEY.txt
Should contain ${result} KEY.txt
${rc} ${result} = Run And Return Rc And Output ozone fs -copyFromLocal NOTICE.txt o3fs://bucket1.fstest/KEY.txt
Should Be Equal As Integers ${rc} 1
Should contain ${result} File exists
Execute rm -Rf GET.txt
Execute ozone fs -get o3fs://bucket1.fstest/KEY.txt GET.txt
Execute ls -l GET.txt
${rc} ${result} = Run And Return Rc And Output ozone fs -ls o3fs://abcde.pqrs/
Should Be Equal As Integers ${rc} 1
Should contain ${result} not found
Secure S3 test Failure
Run Keyword Install aws cli
${rc} ${result} = Run And Return Rc And Output aws s3api --endpoint-url ${ENDPOINT_URL} create-bucket --bucket bucket-test123
Should Be True ${rc} > 0
Secure S3 test Success
Run Keyword Setup credentials
${output} = Execute aws s3api --endpoint-url ${ENDPOINT_URL} create-bucket --bucket bucket-test123
${output} = Execute aws s3api --endpoint-url ${ENDPOINT_URL} list-buckets
Should contain ${output} bucket-test123

View File

@ -69,6 +69,12 @@ execute_tests(){
echo " Output dir: $DIR/$RESULT_DIR"
echo " Command to rerun: ./test.sh --keep --env $COMPOSE_DIR $TESTS"
echo "-------------------------------------------------"
if [ "${COMPOSE_DIR}" == "ozonesecure" ]; then
SECURITY_ENABLED="true"
else
SECURITY_ENABLED="false"
fi
docker-compose -f "$COMPOSE_FILE" down
docker-compose -f "$COMPOSE_FILE" up -d --scale datanode=3
wait_for_datanodes "$COMPOSE_FILE"
@ -78,7 +84,7 @@ execute_tests(){
TITLE="Ozone $TEST tests with $COMPOSE_DIR cluster"
set +e
OUTPUT_NAME="$COMPOSE_DIR-${TEST//\//_}"
docker-compose -f "$COMPOSE_FILE" exec -T om python -m robot --log NONE --report NONE "${OZONE_ROBOT_OPTS[@]}" --output "smoketest/$RESULT_DIR/robot-$OUTPUT_NAME.xml" --logtitle "$TITLE" --reporttitle "$TITLE" "smoketest/$TEST"
docker-compose -f "$COMPOSE_FILE" exec -e SECURITY_ENABLED="${SECURITY_ENABLED}" -T om python -m robot --log NONE --report NONE "${OZONE_ROBOT_OPTS[@]}" --output "smoketest/$RESULT_DIR/robot-$OUTPUT_NAME.xml" --logtitle "$TITLE" --reporttitle "$TITLE" "smoketest/$TEST"
set -e
docker-compose -f "$COMPOSE_FILE" logs > "$DIR/$RESULT_DIR/docker-$OUTPUT_NAME.log"
done
@ -152,7 +158,7 @@ if [ "$RUN_ALL" = true ]; then
TESTS=("s3")
execute_tests ozones3 "${TESTS[@]}"
TESTS=("security")
execute_tests ozonesecure "${TESTS[@]}"
execute_tests ozonesecure .
else
execute_tests "$DOCKERENV" "${POSITIONAL[@]}"
fi
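
Based on the rerun hint the script itself prints ("Command to rerun: ./test.sh --keep --env $COMPOSE_DIR $TESTS"), the secure suite can also be run on its own against the ozonesecure compose environment with something like the following (an illustrative invocation, not part of this diff):

# flags and arguments follow the "Command to rerun" hint printed by execute_tests;
# run from the directory containing test.sh
./test.sh --keep --env ozonesecure security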