Skip to content

Commit

Permalink
add data workflow to retrieve WebbPSF cache and rename toxenvs (#223)
Browse files Browse the repository at this point in the history
* add data workflow to retrieve WebbPSF cache

* rename toxenvs to be explicit
  • Loading branch information
zacharyburnett authored Oct 25, 2023
1 parent 2251433 commit 4a809eb
Show file tree
Hide file tree
Showing 3 changed files with 94 additions and 51 deletions.
60 changes: 14 additions & 46 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -23,63 +23,31 @@ jobs:
uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@v1
with:
envs: |
- linux: check-style
- linux: check-security
- linux: check-build
test:
uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@v1
with:
envs: |
- linux: test-oldestdeps-cov-xdist
python-version: 3.9
- linux: test-xdist
python-version: '3.9'
- linux: test-xdist
python-version: '3.10'
- linux: test-xdist
python-version: '3.11'
- macos: test-xdist
python-version: '3.11'
- linux: test-cov-xdist
- linux: py39-oldestdeps-cov-xdist
- linux: py39-xdist
- linux: py310-xdist
- linux: py311-xdist
- macos: py311-xdist
- linux: py3-cov-xdist
coverage: 'codecov'
data:
name: retrieve data
runs-on: ubuntu-latest
outputs:
data_path: ${{ steps.data.outputs.path }}
data_hash: ${{ steps.data_hash.outputs.hash }}
steps:
# webbpsf:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- id: data
run: |
echo "path=/tmp/data" >> $GITHUB_OUTPUT
echo "webbpsf_url=https://stsci.box.com/shared/static/n1fealx9q0m6sdnass6wnyfikvxtc0zz.gz" >> $GITHUB_OUTPUT
- run: |
mkdir -p tmp/data/
mkdir -p ${{ steps.data.outputs.path }}
- run: wget ${{ steps.data.outputs.webbpsf_url }} -O tmp/minimal-webbpsf-data.tar.gz
- run: tar -xzvf tmp/minimal-webbpsf-data.tar.gz -C tmp/data/
- id: data_hash
run: echo "hash=${{ hashFiles( 'tmp/data' ) }}" >> $GITHUB_OUTPUT
- run: mv tmp/data/* ${{ steps.data.outputs.path }}
- uses: actions/cache@v3
with:
path: ${{ steps.data.outputs.path }}
key: data-${{ steps.data_hash.outputs.hash }}
uses: ./.github/workflows/data.yml
test_downstream:
uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@main
needs: [ data ]
with:
setenv: |
CRDS_PATH: /tmp/crds_cache
WEBBPSF_PATH: ${{ needs.data.outputs.webbpsf_path }}
CRDS_PATH: ${{ needs.data.outputs.path }}/crds_cache
CRDS_CLIENT_RETRY_COUNT: 3
CRDS_CLIENT_RETRY_DELAY_SECONDS: 20
WEBBPSF_PATH: ${{ needs.data.outputs.data_path }}/webbpsf-data
cache-path: ${{ needs.data.outputs.data_path }}
cache-key: data-${{ needs.data.outputs.data_hash }}
CRDS_CLIENT_RETRY_DELAY_SECONDS: 20
cache-path: ${{ needs.data.outputs.webbpsf_path }}
cache-key: webbpsf-${{ needs.data.outputs.webbpsf_hash }}
envs: |
- linux: test-jwst-cov-xdist
- linux: test-romancal-cov-xdist
- linux: py311-jwst-cov-xdist
- linux: py311-romancal-cov-xdist
8 changes: 3 additions & 5 deletions .github/workflows/ci_cron.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,6 @@ jobs:
uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@v1
with:
envs: |
- macos: test-xdist
python-version: 3.9
- macos: test-xdist
python-version: 3.10
- linux: test-devdeps-xdist
- macos: py39-xdist
- macos: py310-xdist
- linux: py3-devdeps-xdist
77 changes: 77 additions & 0 deletions .github/workflows/data.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
# Reusable workflow: download the WebbPSF data archive and store it in the
# GitHub Actions cache, exposing the data path and the cache-key hash to
# calling workflows (consumed by ci.yml's `data` job).
on:
  workflow_call:
    outputs:
      path:
        value: ${{ jobs.path.outputs.path }}
      webbpsf_path:
        value: ${{ jobs.webbpsf_path.outputs.path }}
      webbpsf_hash:
        # FIX: the hash is exposed by the `webbpsf_hash` job below; the
        # `webbpsf_data` job declares no job-level `outputs:`, so the
        # original reference (jobs.webbpsf_data.outputs.hash) always
        # resolved to an empty string.
        value: ${{ jobs.webbpsf_hash.outputs.hash }}
  workflow_dispatch:
  schedule:
    # weekly refresh: 04:42 UTC every Wednesday
    - cron: "42 4 * * 3"

env:
  DATA_PATH: /tmp/data

jobs:
  # Expose the base data directory as a job output (env vars are not
  # directly visible to callers of a reusable workflow, job outputs are).
  path:
    runs-on: ubuntu-latest
    outputs:
      path: ${{ steps.path.outputs.path }}
    steps:
      - id: path
        run: echo "path=${{ env.DATA_PATH }}" >> $GITHUB_OUTPUT

  # Expose the WebbPSF subdirectory of the data directory.
  webbpsf_path:
    needs: [ path ]
    runs-on: ubuntu-latest
    outputs:
      path: ${{ steps.path.outputs.path }}
    steps:
      - id: path
        run: echo "path=${{ env.DATA_PATH }}/webbpsf-data" >> $GITHUB_OUTPUT

  # Download the WebbPSF archive and populate the cache, keyed on the
  # archive checksum. Only runs on a schedule, a manual dispatch, or a PR
  # labeled `update webbpsf data`; otherwise the job is skipped and an
  # existing cache entry is used.
  webbpsf_data:
    if: (github.repository == 'spacetelescope/stcal' && (github.event_name == 'schedule' || github.event_name == 'workflow_dispatch' || contains(github.event.pull_request.labels.*.name, 'update webbpsf data')))
    needs: [ webbpsf_path ]
    name: download and cache WebbPSF data
    runs-on: ubuntu-latest
    env:
      WEBBPSF_DATA_URL: https://stsci.box.com/shared/static/qxpiaxsjwo15ml6m4pkhtk36c9jgj70k.gz
    steps:
      - run: mkdir -p tmp/data
      - run: wget ${{ env.WEBBPSF_DATA_URL }} -O tmp/webbpsf-data.tar.gz
      # key the cache on the checksum of the downloaded archive so a new
      # upstream archive produces a new cache entry
      - id: data_hash
        run: echo "hash=$( shasum tmp/webbpsf-data.tar.gz | cut -d ' ' -f 1 )" >> $GITHUB_OUTPUT
      - id: cache_check
        uses: actions/cache@v3
        with:
          path: ${{ needs.webbpsf_path.outputs.path }}
          key: webbpsf-${{ steps.data_hash.outputs.hash }}
      # extract only on a cache miss; on a hit the restored cache already
      # contains the extracted data
      - if: ${{ steps.cache_check.outputs.cache-hit != 'true' }}
        run: mkdir -p ${{ env.DATA_PATH }}
      - if: ${{ steps.cache_check.outputs.cache-hit != 'true' }}
        run: tar -xzvf tmp/webbpsf-data.tar.gz -C ${{ env.DATA_PATH }}

  # Look up the most recent `webbpsf-*` cache key via the GitHub CLI so
  # callers can restore the matching cache entry.
  webbpsf_hash:
    needs: [ webbpsf_path, webbpsf_data ]
    # run data job if webbpsf-data succeeds or is skipped. This allows
    # this data job to always fetch the crds context even if the webbpsf data fetching
    # was skipped (and an existing cache will be used for the webbpsf data).
    if: always() && (needs.webbpsf_data.result == 'success' || needs.webbpsf_data.result == 'skipped')
    name: retrieve latest data cache key
    runs-on: ubuntu-latest
    env:
      GH_TOKEN: ${{ github.token }}
    outputs:
      hash: ${{ steps.hash.outputs.hash }}
    steps:
      - id: hash
        run: |
          # use actions/gh-actions-cache to allow filtering by key
          gh extension install actions/gh-actions-cache
          RECENT=$(gh actions-cache list -R spacetelescope/stcal --key webbpsf- --sort created-at | cut -f 1 | head -n 1)
          echo "RECENT=$RECENT"
          HASH=$(echo $RECENT | cut -d '-' -f 2)
          echo "HASH=$HASH"
          echo "hash=$HASH" >> $GITHUB_OUTPUT
          # fail loudly if no cache entry exists yet rather than exporting
          # an empty hash to downstream jobs
          if [ "$HASH" == '' ]; then exit 1; fi

0 comments on commit 4a809eb

Please sign in to comment.