
Commit 407df4e

github actions utest (ufs-community#169)

* add ci-related files. update utest
* modify to auto-run different cases based on ci.test
* Improve CI workflow
* Separate builds and tests into different jobs
* Separate input data from build image; instead, use it as a volume before running tests
* Make ci subdirectory to contain ci-related files
* add clean-up after utest run
* add workflow manage files
* move parsing in main.yml to a separate script file
1 parent 3fb6556 commit 407df4e

21 files changed (+2279 −789 lines)
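
The new workflow drives everything through two entry points into tests/ci/ci.sh, both visible in main.yml below: -b builds the Docker image for one build set, and -r runs one set of unit tests against that image. A minimal usage sketch; the case and set names are hypothetical placeholders, since the real ones come from tests/ci/ci.test:

cd tests/ci
./ci.sh -n control -b gnu_debug   # build job: build the image for one build set (names are assumed examples)
./ci.sh -n control -r thr         # utest job: run one unit-test set against the loaded image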

.dockerignore (new file, +5 lines)

@@ -0,0 +1,5 @@
+Dockerfile
+.git
+.gitignore
+tests/*.log
+tests/log_ut_linux.gnu

.github/workflows/main.yml (new file, +121 lines)

@@ -0,0 +1,121 @@
+name: Pull Request Tests
+
+on:
+  push:
+    branches:
+      - develop
+  pull_request:
+    branches:
+      - develop
+
+jobs:
+  setup:
+    name: Set up
+    runs-on: ubuntu-latest
+
+    outputs:
+      tn: ${{ steps.parse.outputs.tn }}
+      bld: ${{ steps.parse.outputs.bld }}
+      test: ${{ steps.parse.outputs.test }}
+      img: ${{ steps.parse.outputs.img }}
+
+    steps:
+      - name: Checkout codes
+        uses: actions/checkout@v2
+
+      - name: Parse cases
+        id: parse
+        run: |
+          cd ${GITHUB_WORKSPACE}/tests/ci
+          parsed_output=( $(./parse.sh) )
+          name_=${parsed_output[0]}
+          bld_=${parsed_output[1]}
+          test_=${parsed_output[2]}
+          img_=${parsed_output[3]}
+
+          echo "::set-output name=tn::$name_"
+          echo "::set-output name=bld::$bld_"
+          echo "::set-output name=test::$test_"
+          echo "::set-output name=img::$img_"
+
+          echo "test name : $name_"
+          echo "build set : $bld_"
+          echo "test set  : $test_"
+          echo "image name: $img_"
+
+  build:
+    name: Build (${{ matrix.bld_set }})
+    needs: setup
+    runs-on: ubuntu-latest
+
+    strategy:
+      fail-fast: false
+      matrix: ${{ fromJson(needs.setup.outputs.bld) }}
+
+    steps:
+      - name: Checkout codes
+        uses: actions/checkout@v2
+        with:
+          submodules: recursive
+
+      - name: Build
+        run: |
+          printf '{\n "experimental": true\n}' | sudo tee /etc/docker/daemon.json >/dev/null
+          sudo systemctl restart docker
+          sleep 10
+          cd tests/ci && ./ci.sh -n ${{ needs.setup.outputs.tn }} -b ${{ matrix.bld_set }}
+
+      - name: Free up disk space
+        run: |
+          sudo docker rmi $(sudo docker image ls | grep -E -m1 '<none>' | awk '{ print $3 }')
+          sudo docker rmi $(sudo docker image ls | awk '/ci-test-base/ { print $3 }')
+
+      - name: Prepare artifacts
+        run: |
+          cd tests/ci
+          sudo docker save ${{ needs.setup.outputs.img }} | gzip >${{ needs.setup.outputs.img }}.tar.gz
+          tar cvjf artifact.tar.bz2 ${{ needs.setup.outputs.img }}.tar.gz ci.sh ci.test
+
+      - name: Upload artifacts
+        uses: actions/upload-artifact@v2
+        with:
+          name: ${{ matrix.bld_set }}.artifact.tar.bz2
+          path: tests/ci/artifact.tar.bz2
+
+  utest:
+    name: Unit test (${{ needs.setup.outputs.tn }}, ${{ matrix.test_set }})
+    needs: [setup,build]
+    runs-on: ubuntu-latest
+    #runs-on: self-hosted
+
+    strategy:
+      fail-fast: false
+      matrix: ${{ fromJson(needs.setup.outputs.test) }}
+
+    steps:
+      - name: Download artifacts
+        uses: actions/download-artifact@v2
+        with:
+          name: ${{ matrix.artifact }}.artifact.tar.bz2
+
+      - name: Prepare artifacts
+        run: |
+          tar xvjf artifact.tar.bz2 && rm -f artifact.tar.bz2
+          sudo docker load --input ${{ needs.setup.outputs.img }}.tar.gz && rm -f ${{ needs.setup.outputs.img }}.tar.gz
+
+      - name: Run utest
+        run: ./ci.sh -n ${{ needs.setup.outputs.tn }} -r ${{ matrix.test_set }}
+
+      - name: Upload memory usage file
+        if: ${{ always() }}
+        uses: actions/upload-artifact@v2
+        with:
+          name: memory_stat_${{ matrix.test_set }}
+          path: memory_stat
+
+      - name: Clean up
+        if: ${{ always() }}
+        run: |
+          rm -f ci.sh ci.test
+          sudo docker rm my-container && sudo docker rmi ${{ needs.setup.outputs.img }}:latest
+          sudo docker volume rm DataVolume
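
The setup job word-splits the stdout of tests/ci/parse.sh into four fields (test name, build matrix JSON, test matrix JSON, image name) and republishes them as job outputs, which the build and utest jobs then consume through fromJson() as their strategy matrices. A rough sketch of a parse.sh that satisfies that contract; every name and value below is a hypothetical placeholder, since the real script derives them from tests/ci/ci.test:

#!/bin/bash
# Hypothetical sketch only: emit the four whitespace-separated fields expected by
# the "Parse cases" step. The JSON must contain no spaces so the word-splitting
# into parsed_output[0..3] stays intact, and its keys must match the matrix
# references in main.yml (bld_set, test_set, artifact).
name="control"                                            # assumed test-case name
bld='{"bld_set":["gnu_debug","gnu_release"]}'             # build-job matrix
test='{"include":[{"test_set":"thr","artifact":"gnu_debug"},{"test_set":"mpi","artifact":"gnu_release"}]}'  # utest-job matrix, pairing each test set with a build artifact
img="ci-test-control"                                     # assumed image name
echo "${name} ${bld} ${test} ${img}"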

.github/workflows/manage.yml (new file, +54 lines)

@@ -0,0 +1,54 @@
+name: Manage workflows
+
+on:
+  workflow_run:
+    workflows: ["Pull Request Tests"]
+    types:
+      - requested
+
+jobs:
+  job1:
+    name: Job 1
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout codes
+        uses: actions/checkout@v2
+
+      - name: Check if skip-ci is requested
+        run: |
+          cd ${GITHUB_WORKSPACE}/tests/ci
+          repo="${GITHUB_API_URL}/repos/${GITHUB_REPOSITORY}/actions/runs"
+          tr_id=$(cat ${GITHUB_EVENT_PATH} | ./json_helper.py get_trigger_id)
+          tr_br=$(cat ${GITHUB_EVENT_PATH} | ./json_helper.py get_trigger_br)
+          check=$(cat ${GITHUB_EVENT_PATH} | ./json_helper.py check_skip)
+          echo "::set-env name=TRIGGER_ID::${tr_id}"
+          echo "::set-env name=TRIGGER_BR::${tr_br}"
+          echo "skip-ci: ${check}"
+          if [[ $check == yes ]]; then
+            echo "skip-ci is requested"
+            echo '::set-env name=CURR_JOB::cancelled'
+            curl -X POST -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -H "Accept: application/vnd.github.v3+json" ${repo}/$tr_id/cancel
+          else
+            echo '::set-env name=CURR_JOB::running'
+          fi
+
+      - name: Cancel redundant jobs
+        run: |
+          echo "CURR_JOB is $CURR_JOB"
+          echo "TRIGGER_ID is $TRIGGER_ID"
+          echo "TRIGGER_BR is $TRIGGER_BR"
+          export GITHUB_ACTOR
+          export GITHUB_RUN_ID
+          export TRIGGER_ID
+          export TRIGGER_BR
+          cd ${GITHUB_WORKSPACE}/tests/ci
+          repo="${GITHUB_API_URL}/repos/${GITHUB_REPOSITORY}/actions/runs"
+          cancel_ids=$(curl -H "Accept: application/vnd.github.v3+json" ${repo} | ./json_helper.py cancel_workflow)
+          echo "cancel ids: $cancel_ids"
+          if [[ $cancel_ids != '' ]]; then
+            for i in $cancel_ids; do
+              curl -X POST -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -H "Accept: application/vnd.github.v3+json" ${repo}/$i/cancel
+            done
+          fi
+        if: ${{ env.CURR_JOB == 'running' }}
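
json_helper.py itself is not part of this diff; the workflow only shows that it reads the workflow_run event payload on stdin and answers one query per invocation. As an illustration of the fields those queries presumably touch, here is a roughly equivalent set of jq lookups against the standard workflow_run payload (the skip keyword checked by check_skip is an assumption):

# Illustration only, not the project's implementation: approximate the
# json_helper.py queries with jq against the event payload at ${GITHUB_EVENT_PATH}.
tr_id=$(jq -r '.workflow_run.id' "${GITHUB_EVENT_PATH}")            # get_trigger_id: run id of the triggering workflow
tr_br=$(jq -r '.workflow_run.head_branch' "${GITHUB_EVENT_PATH}")   # get_trigger_br: branch that triggered it
msg=$(jq -r '.workflow_run.head_commit.message' "${GITHUB_EVENT_PATH}")
case "${msg}" in *"[skip ci]"*) check=yes ;; *) check=no ;; esac    # check_skip: assumed marker in the head commit message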

modulefiles/linux.gnu/fv3 (+29 lines)

@@ -15,6 +15,19 @@ export F77=${F77:-mpif77}
 export F90=${F90:-mpif90}
 export FC=${FC:-mpif90}
 
+##
+## set up variables for ../cmake/configure_linux.gnu.cmake
+##
+export CMAKE_Platform=linux.gnu
+export CMAKE_C_COMPILER=${CC}
+export CMAKE_CXX_COMPILER=${CXX}
+export CMAKE_Fortran_COMPILER=${FC}
+
+##
+## use own NetCDF library
+##
+export NETCDF=${NETCDF:-/usr/local}
+
 ##
 ## use SIONlib library if installed and environment variable is set
 ##
@@ -24,3 +37,19 @@ if [ ! "x$SIONLIB" == "x" ]; then
 export SIONLIB_INC="-I${SIONLIB}/include -I${SIONLIB}/include/mod_64"
 export SIONLIB_LIB="-L${SIONLIB}/lib -lsionmpi_f90_64 -lsionser_f90_64 -lsionmpi_64 -lsiongen_64 -lsionser_64 -lsioncom_64 -lsioncom_64_lock_none"
 fi
+
+##
+## use pre-compiled ESMF library for above compiler / MPI combination
+##
+export ESMFMKFILE=${ESMFMKFILE:-/usr/local/lib/esmf.mk}
+
+##
+## NCEP libraries (need to download and build manually, see doc/README_{UBUNTU,CENTOS,...}.txt and https://github.com/NCAR/NCEPlibs)
+##
+export NCEPLIBS_DIR=${NCEPLIBS_DIR:-/usr/local/NCEPlibs}
+export bacio_DIR=${NCEPLIBS_DIR}/bacio-2.4.0
+export nemsio_DIR=${NCEPLIBS_DIR}/nemsio-2.5.1
+export w3nco_DIR=${NCEPLIBS_DIR}/w3nco-2.4.0
+export sp_DIR=${NCEPLIBS_DIR}/sp-2.3.0
+export w3emc_DIR=${NCEPLIBS_DIR}/w3emc-2.7.0
+export sigio_DIR=${NCEPLIBS_DIR}/sigio-2.3.0
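
This module file is a plain shell fragment, so in the CI environment it is simply sourced before configuring CMake; every variable uses the VAR=${VAR:-default} pattern, so any value already set in the environment wins over the default. A hedged usage sketch (the override path and the idea of sourcing it by hand are assumptions, not taken from this diff):

# Assumed manual usage on a linux.gnu machine; the CI scripts may wire this up differently.
export NCEPLIBS_DIR=/opt/NCEPlibs       # hypothetical override; prevents the :- default from applying
source modulefiles/linux.gnu/fv3        # exports CMAKE_Platform, compilers, NETCDF, ESMFMKFILE, *_DIR
echo "Configuring ${CMAKE_Platform} with ${CMAKE_Fortran_COMPILER}, NCEPlibs in ${NCEPLIBS_DIR}"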
