Skip to content

Commit

Permalink
use global env for data path
Browse files Browse the repository at this point in the history
  • Loading branch information
zacharyburnett committed Oct 4, 2023
1 parent 9e6f9ad commit 78cf37b
Showing 1 changed file with 14 additions and 17 deletions.
31 changes: 14 additions & 17 deletions .github/workflows/data.yml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ on:
workflow_call:
outputs:
path:
value: ${{ jobs.path.outputs.path }}
value: ${{ env.DATA_PATH }}
crds_path:
value: ${{ jobs.crds.outputs.path }}
crds_server:
Expand All @@ -19,21 +19,19 @@ on:
schedule:
- cron: "42 4 * * 3"

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

env:
DATA_PATH: /tmp/data

jobs:
path:
name: set data path
runs-on: ubuntu-latest
outputs:
path: ${{ steps.path.outputs.path }}
steps:
- id: path
run: echo "path=/tmp/data" >> $GITHUB_OUTPUT
crds:
needs: [ path ]
name: retrieve current CRDS context
runs-on: ubuntu-latest
env:
CRDS_PATH: ${{ needs.path.outputs.path }}/crds
CRDS_PATH: ${{ env.DATA_PATH }}/crds
CRDS_SERVER_URL: https://roman-crds.stsci.edu
OBSERVATORY: roman
outputs:
Expand All @@ -50,7 +48,6 @@ jobs:
# Get default CRDS_CONTEXT without installing crds client
# See https://hst-crds.stsci.edu/static/users_guide/web_services.html#generic-request
download_webbpsf_data:
needs: [ path ]
if: (github.repository == 'spacetelescope/romancal' && (github.event_name == 'schedule' || github.event_name == 'workflow_dispatch' || contains(github.event.pull_request.labels.*.name, 'update webbpsf data')))
name: download and cache WebbPSF data
runs-on: ubuntu-latest
Expand All @@ -66,7 +63,7 @@ jobs:
wget ${{ env.WEBBPSF_DATA_URL }} -O tmp/webbpsf-data.tar.gz
echo "hash=$( shasum tmp/webbpsf-data.tar.gz | cut -d ' ' -f 1 )" >> $GITHUB_OUTPUT
- id: cache_path
run: echo "path=${{ needs.path.outputs.path }}/webbpsf-data" >> $GITHUB_OUTPUT
run: echo "path=${{ env.DATA_PATH }}/webbpsf-data" >> $GITHUB_OUTPUT
- id: cache_download
name: check downloaded data against the existing cache
uses: actions/cache@v3
Expand All @@ -76,10 +73,10 @@ jobs:
- if: ${{ steps.cache_download.outputs.cache-hit != 'true' }}
name: extract data to cache directory
run: |
mkdir -p ${{ needs.path.outputs.path }}
tar -xzvf tmp/webbpsf-data.tar.gz -C ${{ needs.path.outputs.path }}
mkdir -p ${{ env.DATA_PATH }}
tar -xzvf tmp/webbpsf-data.tar.gz -C ${{ env.DATA_PATH }}
retrieve_webbpsf_data_hash:
needs: [ path, download_webbpsf_data ]
needs: [ download_webbpsf_data ]
# run regardless of whether `download_webbpsf_data` succeeds or is skipped
if: always() && (needs.download_webbpsf_data.result == 'success' || needs.download_webbpsf_data.result == 'skipped')
name: retrieve hash of cached WebbPSF data
Expand All @@ -91,7 +88,7 @@ jobs:
hash: ${{ steps.retrieve_hash.outputs.hash }}
steps:
- id: cache_path
run: echo "path=${{ needs.path.outputs.path }}/webbpsf-data" >> $GITHUB_OUTPUT
run: echo "path=${{ env.DATA_PATH }}/webbpsf-data" >> $GITHUB_OUTPUT
- id: retrieve_hash
name: retrieve data hash of latest cache key
run: |
Expand Down

0 comments on commit 78cf37b

Please sign in to comment.