HDDS-1424. Support multi-container robot test execution

Closes #726
Márton Elek 2019-05-07 17:48:10 +02:00
parent 49e1292ea3
commit 1a696cc761
14 changed files with 473 additions and 146 deletions


@ -122,3 +122,6 @@ run cp -p -r "${ROOT}/hadoop-ozone/dist/src/main/smoketest" .
run cp -p -r "${ROOT}/hadoop-ozone/dist/src/main/blockade" . run cp -p -r "${ROOT}/hadoop-ozone/dist/src/main/blockade" .
run cp -p -r "${ROOT}/hadoop-ozone/dist/target/k8s" kubernetes run cp -p -r "${ROOT}/hadoop-ozone/dist/target/k8s" kubernetes
run cp -p -r "${ROOT}/hadoop-ozone/dist/src/main/Dockerfile" . run cp -p -r "${ROOT}/hadoop-ozone/dist/src/main/Dockerfile" .
#workaround for https://issues.apache.org/jira/browse/MRESOURCES-236
find ./compose -name "*.sh" -exec chmod 755 {} \;


@ -140,7 +140,7 @@
<version>3.1.0</version>
<executions>
<execution>
<id>copy-compose</id>
<id>copy-compose-files</id>
<phase>compile</phase>
<goals>
<goal>copy-resources</goal>


@ -0,0 +1,35 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
COMPOSE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
export COMPOSE_DIR
# shellcheck source=/dev/null
source "$COMPOSE_DIR/../testlib.sh"
start_docker_env
#Due to a limitation of the current auditparser test, it must be the
#first test executed against a clean cluster.
execute_robot_test scm auditparser
execute_robot_test scm basic/basic.robot
stop_docker_env
generate_report


@ -0,0 +1,39 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
COMPOSE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
export COMPOSE_DIR
# shellcheck source=/dev/null
source "$COMPOSE_DIR/../testlib.sh"
start_docker_env
execute_robot_test scm ozonefs/ozonefs.robot
## TODO: the hadoopo3fs tests below are currently unstable.
export OZONE_HOME=/opt/ozone
#execute_robot_test hadoop32 ozonefs/hadoopo3fs.robot
#execute_robot_test hadoop31 ozonefs/hadoopo3fs.robot
stop_docker_env
generate_report


@ -0,0 +1,30 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
COMPOSE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
export COMPOSE_DIR
# shellcheck source=/dev/null
source "$COMPOSE_DIR/../testlib.sh"
start_docker_env
execute_robot_test scm basic/basic.robot
stop_docker_env
generate_report


@ -0,0 +1,32 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
COMPOSE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
export COMPOSE_DIR
# shellcheck source=/dev/null
source "$COMPOSE_DIR/../testlib.sh"
start_docker_env
execute_robot_test scm basic/basic.robot
execute_robot_test scm s3
stop_docker_env
generate_report


@ -0,0 +1,40 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
COMPOSE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
export COMPOSE_DIR
# shellcheck source=/dev/null
source "$COMPOSE_DIR/../testlib.sh"
export SECURITY_ENABLED=true
start_docker_env
execute_robot_test scm kinit.robot
execute_robot_test scm basic
execute_robot_test scm security
execute_robot_test scm ozonefs/ozonefs.robot
execute_robot_test scm s3
stop_docker_env
generate_report


@ -0,0 +1,47 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Test executor that runs all the compose/*/test.sh test scripts.
#
SCRIPT_DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )
ALL_RESULT_DIR="$SCRIPT_DIR/result"
mkdir -p "$ALL_RESULT_DIR"
rm "$ALL_RESULT_DIR/*"
RESULT=0
IFS=$'\n'
# shellcheck disable=SC2044
for test in $(find "$SCRIPT_DIR" -name test.sh); do
echo "Executing test in $(dirname "$test")"
#required to read the .env file from the right location
cd "$(dirname "$test")" || continue
$test
ret=$?
if [[ $ret -ne 0 ]]; then
RESULT=-1
fi
RESULT_DIR="$(dirname "$test")/result"
cp "$RESULT_DIR"/robot-*.xml "$ALL_RESULT_DIR"
done
docker run --rm -v "$SCRIPT_DIR/result:/opt/result" apache/hadoop-runner rebot -N "smoketests" -d "/opt/result" "/opt/result/robot-*.xml"
exit $RESULT
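A typical invocation runs from the compose directory of the distribution; the `log.html` / `report.html` names below are just rebot's default outputs, not anything fixed by this script:
```
cd compose
./test-all.sh
# each environment's robot-*.xml is copied into ./result and merged by rebot,
# which also writes its usual log.html / report.html next to them
```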


@ -0,0 +1,53 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Single test executor: starts a single robot test in any running container.
#
COMPOSE_DIR="$PWD"
export COMPOSE_DIR
if [[ ! -f "$COMPOSE_DIR/docker-compose.yaml" ]]; then
echo "docker-compose.yaml is missing from the current dir. Please run this command from a docker-compose environment."
exit 1
fi
if (( $# != 2 )); then
cat << EOF
Single test executor
Usage:
../test-single.sh <container> <robot_test>
container: Name of the running docker-compose container (docker-compose.yaml is required in the current directory)
robot_test: Name of the robot test file or directory, relative to the smoketest dir.
EOF
exit 1
fi
# shellcheck source=testlib.sh
source "$COMPOSE_DIR/../testlib.sh"
execute_robot_test "$1" "$2"
generate_report

hadoop-ozone/dist/src/main/compose/testlib.sh (new executable file)

@ -0,0 +1,98 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -e
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
COMPOSE_ENV_NAME=$(basename "$COMPOSE_DIR")
COMPOSE_FILE=$COMPOSE_DIR/docker-compose.yaml
RESULT_DIR="$COMPOSE_DIR/result"
RESULT_DIR_INSIDE="${OZONE_DIR:-/opt/hadoop}/compose/$(basename "$COMPOSE_ENV_NAME")/result"
SMOKETEST_DIR_INSIDE="${OZONE_DIR:-/opt/hadoop}/smoketest"
#delete previous results
rm -rf "$RESULT_DIR"
mkdir -p "$RESULT_DIR"
#Must be writable from the docker containers, where the user may be different.
chmod ogu+w "$RESULT_DIR"
## @description wait until 3 datanodes are up (or give up after 90 seconds)
## @param the docker-compose file
wait_for_datanodes(){
#Reset the timer
SECONDS=0
#Don't give up until 90 seconds have elapsed
while [[ $SECONDS -lt 90 ]]; do
#Check the number of HEALTHY datanodes registered in SCM via the JMX HTTP servlet
datanodes=$(docker-compose -f "$1" exec -T scm curl -s 'http://localhost:9876/jmx?qry=Hadoop:service=SCMNodeManager,name=SCMNodeManagerInfo' | jq -r '.beans[0].NodeCount[] | select(.key=="HEALTHY") | .value')
if [[ "$datanodes" == "3" ]]; then
#It's up and running. Let's return from the function.
echo "$datanodes datanodes are up and registered to the scm"
return
else
#Print it only if it's a number; it may be empty if SCM has not started yet
if [[ "$datanodes" ]]; then
echo "$datanodes datanode is up and healthy (until now)"
fi
fi
sleep 2
done
echo "WARNING! Datanodes are not started successfully. Please check the docker-compose files"
}
## @description Starts a docker-compose based test environment
start_docker_env(){
docker-compose -f "$COMPOSE_FILE" down
docker-compose -f "$COMPOSE_FILE" up -d --scale datanode=3
wait_for_datanodes "$COMPOSE_FILE"
sleep 10
}
## @description Execute robot tests in a specific container.
## @param Name of the container in the docker-compose file
## @param robot test file or directory relative to the smoketest dir
execute_robot_test(){
CONTAINER="$1"
TEST="$2"
TEST_NAME=$(basename "$TEST")
TEST_NAME=${TEST_NAME%.*}
set +e
OUTPUT_NAME="$COMPOSE_ENV_NAME-$TEST_NAME-$CONTAINER"
docker-compose -f "$COMPOSE_FILE" exec -e SECURITY_ENABLED="${SECURITY_ENABLED}" -T "$CONTAINER" python -m robot --log NONE -N "$TEST_NAME" --report NONE "${OZONE_ROBOT_OPTS[@]}" --output "$RESULT_DIR_INSIDE/robot-$OUTPUT_NAME.xml" "$SMOKETEST_DIR_INSIDE/$TEST"
set -e
}
## @description Stops a docker-compose based test environment (after saving the logs)
stop_docker_env(){
docker-compose -f "$COMPOSE_FILE" logs > "$RESULT_DIR/docker-$OUTPUT_NAME.log"
if [ "${KEEP_RUNNING:-false}" = false ]; then
docker-compose -f "$COMPOSE_FILE" down
fi
}
## @description Generate robot framework reports based on the saved results.
generate_report(){
#Generate the combined output and return with the right exit code (note: robot = execute test, rebot = generate output)
docker run --rm -v "$DIR/..:${OZONE_DIR:-/opt/hadoop}" apache/hadoop-runner rebot -d "$RESULT_DIR_INSIDE" "$RESULT_DIR_INSIDE/robot-*.xml"
}
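For reference, a new compose environment can reuse this library with a test.sh of the same shape as the ones above. A minimal sketch (the compose/foo directory name and the chosen suite are placeholders, not part of this change):
```
#!/usr/bin/env bash
# Minimal per-environment test.sh built on testlib.sh (hypothetical compose/foo directory,
# assumed to contain a docker-compose.yaml next to this script).
COMPOSE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
export COMPOSE_DIR

# shellcheck source=/dev/null
source "$COMPOSE_DIR/../testlib.sh"

start_docker_env                           # docker-compose up -d with 3 datanodes
execute_robot_test scm basic/basic.robot   # run a suite inside the scm container
stop_docker_env                            # save logs, then down (unless KEEP_RUNNING=true)
generate_report                            # merge the robot-*.xml results with rebot
```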


@ -27,4 +27,37 @@ The argument of the `robot` could be any robot file or directory.
The current configuration in the robot files (hostnames, ports) is adjusted for the docker-based setup, but you can easily modify it for any environment.
The `./test.sh` in this directory can start multiple types of clusters (ozone standalone or ozone + hdfs) and execute the test framework with all of the clusters.
# Run tests in docker environment
In the ./compose folder there are additional test scripts that make it easy to run all the tests, or a specific test, in a docker environment.
## Test one environment
Go to the directory of the chosen compose environment and execute `test.sh` from there:
```
cd compose/ozone
./test.sh
```
The results will be saved to the `compose/ozone/result` directory.
## Run all the tests
```
cd compose
./test-all.sh
```
The combined results will be written to the `compose/result` folder.
## Run one specific test case
Start the compose environment and execute the test:
```
cd compose/ozone
docker-compose up -d
#wait....
../test-single.sh scm basic/basic.robot
```
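The helper library also honours a few environment variables; for example, `KEEP_RUNNING` (checked by `stop_docker_env` in `testlib.sh`) keeps the cluster alive for further ad-hoc runs. A possible workflow, assuming it is started from `compose/ozone`:
```
cd compose/ozone
# keep the containers running after the suite finishes
KEEP_RUNNING=true ./test.sh
# re-run an individual suite against the still-running cluster
../test-single.sh scm basic/basic.robot
```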


@ -0,0 +1,25 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
*** Settings ***
Documentation       Kinit test user
Library             OperatingSystem
Resource            commonlib.robot
Test Timeout        2 minute

*** Test Cases ***
Kinit
    Kinit test user


@ -0,0 +1,32 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
*** Settings ***
Documentation       Test ozone fs with hadoopfs
Library             OperatingSystem
Library             String
Resource            ../commonlib.robot

*** Variables ***
${DATANODE_HOST}    datanode
${PREFIX}           ozone

*** Test cases ***
Test hadoop dfs
    ${random} =    Generate Random String    5    [NUMBERS]
    ${result} =    Execute    hdfs dfs -put /opt/hadoop/NOTICE.txt o3fs://bucket1.vol1/${PREFIX}-${random}
    ${result} =    Execute    hdfs dfs -ls o3fs://bucket1.vol1/
    Should contain    ${result}    ${PREFIX}-${random}


@ -14,154 +14,14 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -e
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )" DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
RESULT_DIR=result RESULT_DIR=result
#delete previous results #delete previous results
rm -rf "${DIR:?}/$RESULT_DIR" rm -rf "${DIR:?}/$RESULT_DIR"
mkdir -p "$DIR/$RESULT_DIR"
#Should be writeable from the docker containers where user is different.
chmod ogu+w "$DIR/$RESULT_DIR"
## @description wait until 3 datanodes are up (or 30 seconds) REPLACEMENT="$DIR/../compose/test-all.sh"
## @param the docker-compose file echo "THIS SCRIPT IS DEPRECATED. Please use $REPLACEMENT instead."
wait_for_datanodes(){
#Reset the timer ${REPLACEMENT}
SECONDS=0
#Don't give it up until 30 seconds cp -r "$DIR/../compose/result" "$DIR"
while [[ $SECONDS -lt 30 ]]; do
#This line checks the number of HEALTHY datanodes registered in scm over the
# jmx HTTP servlet
datanodes=$(docker-compose -f "$1" exec -T scm curl -s 'http://localhost:9876/jmx?qry=Hadoop:service=SCMNodeManager,name=SCMNodeManagerInfo' | jq -r '.beans[0].NodeCount[] | select(.key=="HEALTHY") | .value')
if [[ "$datanodes" == "3" ]]; then
#It's up and running. Let's return from the function.
echo "$datanodes datanodes are up and registered to the scm"
return
else
#Print it only if a number. Could be not a number if scm is not yet started
if [[ "$datanodes" ]]; then
echo "$datanodes datanode is up and healthy (until now)"
fi
fi
sleep 2
done
echo "WARNING! Datanodes are not started successfully. Please check the docker-compose files"
}
## @description Execute selected test suites in a specified docker-compose environment
## @param the name of the docker-compose env relative to ../compose
## @param the name of the tests (array of subdir names of the dir of this script)
execute_tests(){
COMPOSE_DIR=$1
COMPOSE_FILE=$DIR/../compose/$COMPOSE_DIR/docker-compose.yaml
TESTS=$2
echo "-------------------------------------------------"
echo "Executing test(s): [${TESTS[*]}]"
echo ""
echo " Cluster type: $COMPOSE_DIR"
echo " Compose file: $COMPOSE_FILE"
echo " Output dir: $DIR/$RESULT_DIR"
echo " Command to rerun: ./test.sh --keep --env $COMPOSE_DIR $TESTS"
echo "-------------------------------------------------"
if [ ${COMPOSE_DIR} == "ozonesecure" ]; then
SECURITY_ENABLED="true"
else
SECURITY_ENABLED="false"
fi
docker-compose -f "$COMPOSE_FILE" down
docker-compose -f "$COMPOSE_FILE" up -d --scale datanode=3
wait_for_datanodes "$COMPOSE_FILE"
#TODO: we need to wait for the OM here
sleep 10
for TEST in "${TESTS[@]}"; do
TITLE="Ozone $TEST tests with $COMPOSE_DIR cluster"
set +e
OUTPUT_NAME="$COMPOSE_DIR-${TEST//\//_}"
docker-compose -f "$COMPOSE_FILE" exec -e SECURITY_ENABLED="${SECURITY_ENABLED}" -T om python -m robot --log NONE --report NONE "${OZONE_ROBOT_OPTS[@]}" --output "smoketest/$RESULT_DIR/robot-$OUTPUT_NAME.xml" --logtitle "$TITLE" --reporttitle "$TITLE" "smoketest/$TEST"
set -e
docker-compose -f "$COMPOSE_FILE" logs > "$DIR/$RESULT_DIR/docker-$OUTPUT_NAME.log"
done
if [ "$KEEP_RUNNING" = false ]; then
docker-compose -f "$COMPOSE_FILE" down
fi
}
RUN_ALL=true
KEEP_RUNNING=false
POSITIONAL=()
while [[ $# -gt 0 ]]
do
key="$1"
case $key in
--env)
DOCKERENV="$2"
RUN_ALL=false
shift # past argument
shift # past value
;;
--keep)
KEEP_RUNNING=true
shift # past argument
;;
--help|-h|-help)
cat << EOF
Acceptance test executor for ozone.
This is a lightweight test executor for ozone.
You can run it with
./test.sh
Which executes all the tests in all the available environments.
Or you can run manually one test with
./test.sh --keep --env ozone-hdfs basic
--keep means that docker cluster won't be stopped after the test (optional)
--env defines the subdirectory under the compose dir
The remaining parameters define the test suites under smoketest dir.
Could be any directory or robot file relative to the smoketest dir.
EOF
exit 0
;;
*)
POSITIONAL+=("$1") # save it in an array for later
shift # past argument
;;
esac
done
if [ "$RUN_ALL" = true ]; then
#
# This is the definition of the ozone acceptance test suite
#
# We select the test suites and execute them on multiple type of clusters
#
TESTS=("basic")
execute_tests ozone "${TESTS[@]}"
TESTS=("auditparser")
execute_tests ozone "${TESTS[@]}"
TESTS=("ozonefs")
execute_tests ozonefs "${TESTS[@]}"
TESTS=("basic")
execute_tests ozone-hdfs "${TESTS[@]}"
TESTS=("s3")
execute_tests ozones3 "${TESTS[@]}"
TESTS=("security")
execute_tests ozonesecure .
else
execute_tests "$DOCKERENV" "${POSITIONAL[@]}"
fi
#Generate the combined output and return with the right exit code (note: robot = execute test, rebot = generate output)
docker run --rm -v "$DIR/..:/opt/hadoop" apache/hadoop-runner rebot -d "smoketest/$RESULT_DIR" "smoketest/$RESULT_DIR/robot-*.xml"