#!/bin/bash -e

# author: Ole Schuett

function run_test {
  TEST_COMMAND=("$@")
  echo -en "Running \"${TEST_COMMAND[*]}\"... "
  if "${TEST_COMMAND[@]}" &> test.out; then
    echo "done."
  else
    echo -e "failed.\n\n"
    tail -n 100 test.out
    mkdir -p /workspace/artifacts/
    cp test.out /workspace/artifacts/
    echo -e "\nSummary: Test \"${TEST_COMMAND[*]}\" failed."
    echo -e "Status: FAILED\n"
    exit 0
  fi
}
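
# Example (illustrative, not executed): "run_test true" would print
#   Running "true"... done.
# whereas a failing command dumps the tail of test.out, copies it to
# /workspace/artifacts/, and exits with code 0, presumably so that the
# "Summary:"/"Status:" lines rather than the exit code report the result.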
#===============================================================================
cd /opt/cp2k
echo "Using $(python3 --version) and the following packages:"
pip3 freeze
echo ""
# prepare inputs for minimax_to_fortran_source.py
unzip -q -d ./tools/minimax_tools/1_xData 1_xData.zip
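# (1_xData.zip is assumed to hold the tabulated minimax approximation data from
#  which minimax_to_fortran_source.py generates its Fortran source; the --check
#  run below then presumably verifies the committed source is up to date.)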
run_test ./tools/precommit/format_fortran_test.py
run_test ./tools/minimax_tools/minimax_to_fortran_source.py --check
run_test ./tools/docker/generate_dockerfiles.py --check

# Test pao-ml training: train a model for H on the example data, retrain it, and
# validate it; the last two runs validate the pre-trained models shipped with the
# QS/regtest-pao-5 regression tests.
run_test ./tools/pao-ml/pao-train.py --kind=H --epochs=200 ./tools/pao-ml/example.pao
run_test ./tools/pao-ml/pao-retrain.py --model="DZVP-MOLOPT-GTH-PAO4-H.pt" --epochs=200 ./tools/pao-ml/example.pao
run_test ./tools/pao-ml/pao-validate.py --threshold=1e-1 --model="DZVP-MOLOPT-GTH-PAO4-H.pt" ./tools/pao-ml/example.pao
run_test ./tools/pao-ml/pao-validate.py --threshold=1e-6 --model="tests/QS/regtest-pao-5/DZVP-MOLOPT-GTH-PAO4-H.pt" ./tools/pao-ml/example.pao
run_test ./tools/pao-ml/pao-validate.py --threshold=1e-5 --model="tests/QS/regtest-pao-5/DZVP-MOLOPT-GTH-PAO4-O.pt" ./tools/pao-ml/example.pao
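
# Type-check the Python tool scripts with mypy in strict mode.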
run_test mypy --strict ./tools/pao-ml/
run_test mypy --strict ./tools/minimax_tools/minimax_to_fortran_source.py
run_test mypy --strict ./tools/dashboard/generate_dashboard.py
run_test mypy --strict ./tools/dashboard/generate_regtest_survey.py
run_test mypy --strict ./tools/regtesting/optimize_test_dirs.py
run_test mypy --strict ./tools/precommit/precommit.py
run_test mypy --strict ./tools/precommit/check_file_properties.py
run_test mypy --strict ./tools/precommit/format_makefile.py
run_test mypy --strict ./tools/precommit/format_input_file.py
run_test mypy --strict ./tools/docker/generate_dockerfiles.py
run_test mypy --strict ./tools/conventions/analyze_gfortran_ast.py
run_test mypy --strict ./tests/do_regtest.py
run_test mypy --strict ./docs/generate_input_reference.py
run_test mypy --strict ./docs/fix_github_links.py

# TODO: Find a way to test generate_dashboard.py without git repository.
#
# # Test generate_dashboard.py. Running it twice to also execute its caching.
# mkdir -p /workspace/artifacts/dashboard
# for _ in {1..2}; do
#   run_test ./tools/dashboard/generate_dashboard.py \
#     ./tools/dashboard/dashboard.conf \
#     /workspace/artifacts/dashboard/status.pickle \
#     /workspace/artifacts/dashboard/
# done
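
# Run the CMake configure step with CP2K's consistency checks enabled; this
# presumably verifies that the CMake build metadata is in sync with the source tree.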
run_test cmake -DCP2K_ENABLE_CONSISTENCY_CHECKS=ON .
echo ""
echo "Summary: Miscellaneous tests passed"
echo "Status: OK"
#EOF