Add alternate standard names entry to fieldlists and varlistEntry objects #1866
Workflow file for this run
# This file builds and runs a lightweight version of the MDTF test suite.
# Note that the tests assess functionality of the diagnostics,
# and do not evaluate data or scientific content.
name: MDTF_test
on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main
defaults:
  run:
    shell: bash -l {0}
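    # note: a login shell (-l) is used so that each step picks up the
    # conda/micromamba initialization written to the runner's shell profile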
jobs:
  build:
    runs-on: ${{matrix.os}}
    strategy:
      matrix:
        os: [ubuntu-latest, macos-13]
        json-file-1a: ["tests/github_actions_test_ubuntu_1a.jsonc", "tests/github_actions_test_macos_1a.jsonc"]
        json-file-1b: ["tests/github_actions_test_ubuntu_1b.jsonc", "tests/github_actions_test_macos_1b.jsonc"]
        json-file-2: ["tests/github_actions_test_ubuntu_2.jsonc", "tests/github_actions_test_macos_2.jsonc"]
        json-file-3: ["tests/github_actions_test_ubuntu_3.jsonc", "tests/github_actions_test_macos_3.jsonc"]
        # if experimental is true, the other matrix jobs continue to run when one job fails
        experimental: [false]
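        # each matrix key lists one config file per OS; the excludes below pair
        # each OS with its own jsonc files, so only matching OS/config
        # combinations actually run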
        exclude:
          - os: ubuntu-latest
            json-file-1a: "tests/github_actions_test_macos_1a.jsonc"
          - os: ubuntu-latest
            json-file-1b: "tests/github_actions_test_macos_1b.jsonc"
          - os: ubuntu-latest
            json-file-2: "tests/github_actions_test_macos_2.jsonc"
          - os: ubuntu-latest
            json-file-3: "tests/github_actions_test_macos_3.jsonc"
          - os: macos-13
            json-file-1a: "tests/github_actions_test_ubuntu_1a.jsonc"
          - os: macos-13
            json-file-1b: "tests/github_actions_test_ubuntu_1b.jsonc"
          - os: macos-13
            json-file-2: "tests/github_actions_test_ubuntu_2.jsonc"
          - os: macos-13
            json-file-3: "tests/github_actions_test_ubuntu_3.jsonc"
      max-parallel: 3
    steps:
      - uses: actions/checkout@v3
      #- name: Download Miniconda 3
      #  uses: conda-incubator/setup-miniconda@v2
      #  with:
      #    miniconda-version: "latest"
      #    python-version: 3.11.0
      #    channels: conda-forge
      #- name: Conda info
      #  shell: bash -el {0}
      #  run: conda info
      # Set up Micromamba
      - uses: mamba-org/setup-micromamba@v1
        with:
          init-shell: bash
          condarc: |
            channels:
              - conda-forge
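      # the hard-coded /Users/runner and /home/runner paths below are the
      # default micromamba install locations used by setup-micromamba@v1 on the
      # macOS and Ubuntu GitHub-hosted runners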
      - name: Set conda environment variables for macOS
        if: ${{ matrix.os == 'macos-13' }}
        run: |
          echo "CONDA_ROOT=/Users/runner/micromamba" >> $GITHUB_ENV
          echo "MICROMAMBA_EXE=/Users/runner/micromamba-bin/micromamba" >> $GITHUB_ENV
          echo "CONDA_ENV_DIR=/Users/runner/micromamba/envs" >> $GITHUB_ENV
      - name: Set conda environment variables for Ubuntu
        if: ${{ matrix.os == 'ubuntu-latest' }}
        run: |
          echo "MICROMAMBA_EXE=/home/runner/micromamba-bin/micromamba" >> $GITHUB_ENV
          echo "CONDA_ROOT=/home/runner/micromamba" >> $GITHUB_ENV
          echo "CONDA_ENV_DIR=/home/runner/micromamba/envs" >> $GITHUB_ENV
      - name: Install Conda Environments
        run: |
          echo "Installing Conda Environments"
          echo "conda root ${CONDA_ROOT}"
          echo "env dir ${CONDA_ENV_DIR}"
          # MDTF-specific setup: install all conda envs
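          # --all builds every environment defined under src/conda/; the root,
          # exe, and env_dir flags point the script at the micromamba
          # installation configured in the steps above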
          ./src/conda/micromamba_env_setup.sh --all --micromamba_root ${CONDA_ROOT} --micromamba_exe ${MICROMAMBA_EXE} --env_dir ${CONDA_ENV_DIR}
          echo "Creating the _MDTF_synthetic_data environment"
          micromamba create -y -f ./src/conda/_env_synthetic_data.yml
      - name: Generate Model Data
        run: |
          cd ../
          echo "${PWD}"
          micromamba activate _MDTF_synthetic_data
          pip install mdtf-test-data
          mkdir mdtf_test_data ; cd mdtf_test_data
          # generate the data and run unit tests
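          # -c selects the naming convention of the synthetic output
          # (GFDL, NCAR, or CMIP); --startyear/--nyears set the time span
          # of the generated data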
          mdtf_synthetic.py -c GFDL --startyear 1 --nyears 10 --unittest
          mdtf_synthetic.py -c NCAR --startyear 1975 --nyears 7
          mdtf_synthetic.py -c CMIP --startyear 1990 --nyears 20
          cd ../
          mkdir wkdir
          # make input data directories
          mkdir -p inputdata/obs_data
      - name: Get observational data for set 1a
        run: |
          echo "${PWD}"
          cd ../
          echo "Available Space"
          df -h
          # attempt FTP data fetch
          # allow 20 min per transfer before timeout; GitHub Actions allows 6 hours for
          # individual jobs, but we don't want to max out resources shared by the NOAA-GFDL repos.
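          # curl flags: --connect-timeout 8 fails fast on a dead connection,
          # --max-time 1200 caps each transfer at 20 min, --retry 128 retries
          # transient failures, and --ftp-ssl/--ftp-pasv request TLS and
          # passive-mode FTP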
          # curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/EOF_500hPa_obs_data.tar --output EOF_500hPa_obs_data.tar
          curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/Wheeler_Kiladis_obs_data.tar --output Wheeler_Kiladis_obs_data.tar
          curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/precip_diurnal_cycle_obs_data.tar --output precip_diurnal_cycle_obs_data.tar
          echo "Untarring set 1a NCAR/CESM standard test files"
          # tar -xvf EOF_500hPa_obs_data.tar
          tar -xvf precip_diurnal_cycle_obs_data.tar
          tar -xvf Wheeler_Kiladis_obs_data.tar
          # clean up tarballs
          rm -f *.tar
      - name: Run diagnostic tests set 1a
        run: |
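          # note: variables written to $GITHUB_ENV only take effect in *subsequent*
          # steps, so POD_OUTPUT is also set locally for use within this step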
echo "POD_OUTPUT=$(echo $PWD/../wkdir)" >> $GITHUB_ENV | |
echo "POD_OUTPUT is " | |
echo "${POD_OUTPUT}" | |
          micromamba activate _MDTF_base
          # trivial check that the install script worked
          ./mdtf_framework.py --help
          # run the test PODs
          ./mdtf -f ${{matrix.json-file-1a}}
          # Debug POD log(s)
          # cat ${POD_OUTPUT}/MDTF_NCAR.Synthetic_1975_1981/Wheeler_Kiladis/Wheeler_Kiladis.log
      - name: Get observational data for set 1b
        run: |
          # clean up data from previous runs
          echo "deleting data from set 1a"
          cd ../wkdir
          rm -rf *
          cd ../inputdata/obs_data
          rm -rf *
          cd ../../
          curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/convective_transition_diag_obs_data.tar --output convective_transition_diag_obs_data.tar
          curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/MJO_teleconnection_obs_data.tar --output MJO_teleconnection_obs_data.tar
          curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/MJO_suite_obs_data.tar --output MJO_suite_obs_data.tar
          tar -xvf MJO_teleconnection_obs_data.tar
          tar -xvf MJO_suite_obs_data.tar
          tar -xvf convective_transition_diag_obs_data.tar
          # clean up tarballs
          rm -f *.tar
      - name: Run diagnostic tests set 1b
        run: |
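          # no explicit 'micromamba activate' here: the ./mdtf wrapper script is
          # expected to handle environment activation itself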
          ./mdtf -f ${{matrix.json-file-1b}}
      - name: Get observational data for set 2
        run: |
          echo "${PWD}"
          # remove data from the previous run
          # Actions returns to the repo root directory in every step, so we need to cd again
          echo "deleting data from set 1b"
          cd ../wkdir
          rm -rf *
          cd ../inputdata/obs_data
          rm -rf *
          cd ../../
          echo "Available Space"
          df -h
          # attempt FTP data fetch
          # allow 20 min per transfer before timeout; GitHub Actions allows 6 hours for
          # individual jobs, but we don't want to max out resources shared by the NOAA-GFDL repos.
          curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/MJO_prop_amp_obs_data.tar --output MJO_prop_amp_obs_data.tar
          echo "Untarring set 2 GFDL standard test files"
          tar -xvf MJO_prop_amp_obs_data.tar
          # clean up tarballs
          rm -f *.tar
      - name: Run diagnostic tests set 2
        run: |
          ./mdtf -f ${{matrix.json-file-2}}
          # Uncomment the following line for debugging
          # cat ../wkdir/MDTF_GFDL.Synthetic_1_10/MJO_prop_amp/MJO_prop_amp.log
      - name: Get observational data for set 3
        run: |
          echo "${PWD}"
          # remove data from the previous run
          # Actions returns to the repo root directory in every step, so we need to cd again
          echo "deleting data from set 2"
          cd ../wkdir
          rm -rf *
          cd ../inputdata/obs_data
          rm -rf *
          cd ../../
          echo "Available Space"
          df -h
          # attempt FTP data fetch
          # allow 20 min per transfer before timeout; GitHub Actions allows 6 hours for
          # individual jobs, but we don't want to max out resources shared by the NOAA-GFDL repos.
          #curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/temp_extremes_distshape_obs_data.tar --output temp_extremes_distshape_obs_data.tar
          #curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/tropical_pacific_sea_level_obs_data.tar.gz --output tropical_pacific_sea_level_obs_data.tar.gz
          #curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/mixed_layer_depth_obs_data.tar --output mixed_layer_depth_obs_data.tar
          curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/ocn_surf_flux_diag_obs_data.tar --output ocn_surf_flux_diag_obs_data.tar
          #curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/albedofb_obs_data.tar --output albedofb_obs_data.tar
          #curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/seaice_suite_obs_data.tar --output seaice_suite_obs_data.tar
          #curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/stc_eddy_heat_fluxes_obs_data.tar --output stc_eddy_heat_fluxes_obs_data.tar
          echo "Untarring set 3 CMIP standard test files"
          #tar -xvf temp_extremes_distshape_obs_data.tar
          #tar -zxvf tropical_pacific_sea_level_obs_data.tar.gz
          #tar -xvf mixed_layer_depth_obs_data.tar
          tar -xvf ocn_surf_flux_diag_obs_data.tar
          #tar -xvf albedofb_obs_data.tar
          #tar -xvf seaice_suite_obs_data.tar
          #tar -xvf stc_eddy_heat_fluxes_obs_data.tar
          # clean up tarballs
          rm -f *.tar
          rm -f *.tar.gz
      - name: Run CMIP diagnostic tests set 3
        run: |
          micromamba activate _MDTF_base
          # run the test PODs
          ./mdtf -f ${{matrix.json-file-3}}
      #- name: Run unit tests
      #  run: |
      #    micromamba activate _MDTF_base
      #    python -m unittest discover