OAI v2 examples #296
Workflow file for this run
# This code is autogenerated.
# Code is generated by running custom script: python3 readme.py
# Any manual changes to this file may cause incorrect behavior.
# Any manual changes will be overwritten if the code is regenerated.

name: sdk-jobs-spark-submit_spark_pipeline_jobs
# This file is created by sdk/python/readme.py.
# Please do not edit directly.
on:
  workflow_dispatch:
  schedule:
    - cron: "33 1/12 * * *"
  pull_request:
    branches:
      - main
    paths:
      - sdk/python/jobs/spark/**
      - .github/workflows/sdk-jobs-spark-submit_spark_pipeline_jobs.yml
      - sdk/python/dev-requirements.txt
      - infra/bootstrapping/**
      - sdk/python/setup.sh
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: check out repo
        uses: actions/checkout@v2
      - name: setup python
        uses: actions/setup-python@v2
        with:
          python-version: "3.8"
      - name: pip install notebook reqs
        run: pip install -r sdk/python/dev-requirements.txt
      - name: azure login
        uses: azure/login@v1
        with:
          creds: ${{secrets.AZUREML_CREDENTIALS}}
      - name: bootstrap resources
        run: |
          echo '${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}';
          bash bootstrap.sh
        working-directory: infra/bootstrapping
        continue-on-error: false
      - name: setup SDK
        run: |
          source "${{ github.workspace }}/infra/bootstrapping/sdk_helpers.sh";
          source "${{ github.workspace }}/infra/bootstrapping/init_environment.sh";
          bash setup.sh
        working-directory: sdk/python
        continue-on-error: true
      - name: setup-cli
        run: |
          source "${{ github.workspace }}/infra/bootstrapping/sdk_helpers.sh";
          source "${{ github.workspace }}/infra/bootstrapping/init_environment.sh";
          bash setup.sh
        working-directory: cli
        continue-on-error: true
      - name: setup spark resources
        run: |
          bash -x jobs/spark/setup_spark.sh jobs/spark/ jobs/spark/submit_spark_pipeline_jobs.ipynb
        working-directory: sdk/python
        continue-on-error: true
      - name: run jobs/spark/submit_spark_pipeline_jobs.ipynb
        run: |
          source "${{ github.workspace }}/infra/bootstrapping/sdk_helpers.sh";
          source "${{ github.workspace }}/infra/bootstrapping/init_environment.sh";
          bash "${{ github.workspace }}/infra/bootstrapping/sdk_helpers.sh" generate_workspace_config "../../.azureml/config.json";
          bash "${{ github.workspace }}/infra/bootstrapping/sdk_helpers.sh" replace_template_values "submit_spark_pipeline_jobs.ipynb";
          [ -f "../../.azureml/config" ] && cat "../../.azureml/config";
          papermill -k python submit_spark_pipeline_jobs.ipynb submit_spark_pipeline_jobs.output.ipynb
        working-directory: sdk/python/jobs/spark
      - name: upload notebook's working folder as an artifact
        if: ${{ always() }}
        uses: actions/upload-artifact@v2
        with:
          name: submit_spark_pipeline_jobs
          path: sdk/python/jobs/spark
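For local debugging, the notebook-execution step above can be reproduced outside CI with papermill's Python API instead of the CLI. This is a minimal sketch, assuming the AzureML workspace config has already been generated (as the generate_workspace_config step does) and that it is run from sdk/python/jobs/spark; paths and kernel name are taken from the workflow step.

    # Sketch only: mirrors `papermill -k python submit_spark_pipeline_jobs.ipynb ...`
    # from the workflow's "run jobs/spark/submit_spark_pipeline_jobs.ipynb" step.
    import papermill as pm

    pm.execute_notebook(
        "submit_spark_pipeline_jobs.ipynb",         # input notebook, same path as in the workflow
        "submit_spark_pipeline_jobs.output.ipynb",  # executed copy, same output name the workflow uploads
        kernel_name="python",                       # equivalent of the CLI's -k python
    )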