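# Caller workflow for the AMD mi210 scheduled CI: triggers when the
# "Self-hosted runner (AMD scheduled CI caller)" workflow completes on `main`
# (or on pushes to `run_amd_scheduled_ci_caller*` branches) and dispatches the
# reusable transformers_amd_ci_scheduled workflow from huggingface/hf-workflows.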
name: Self-hosted runner (AMD mi210 scheduled CI caller)

on:
  workflow_run:
    workflows: ["Self-hosted runner (AMD scheduled CI caller)"]
    branches: ["main"]
    types: [completed]
  push:
    branches:
      - run_amd_scheduled_ci_caller*

jobs:
  model-ci:
    name: Model CI
    uses: huggingface/hf-workflows/.github/workflows/transformers_amd_ci_scheduled.yaml@testing
    with:
      job: run_models_gpu
      slack_report_channel: "#transformers-ci-daily-amd"
      runner: mi210
      docker: huggingface/transformers-pytorch-amd-gpu
      ci_event: Scheduled CI (AMD) - mi210
    secrets: inherit

#  torch-pipeline:
#    name: Torch pipeline CI
#    uses: huggingface/hf-workflows/.github/workflows/transformers_amd_ci_scheduled.yaml@main
#    with:
#      job: run_pipelines_torch_gpu
#      slack_report_channel: "#transformers-ci-daily-amd"
#      runner: mi210
#      docker: huggingface/transformers-pytorch-amd-gpu
#      ci_event: Scheduled CI (AMD) - mi210
#    secrets: inherit

#  example-ci:
#    name: Example CI
#    uses: huggingface/hf-workflows/.github/workflows/transformers_amd_ci_scheduled.yaml@main
#    with:
#      job: run_examples_gpu
#      slack_report_channel: "#transformers-ci-daily-amd"
#      runner: mi210
#      docker: huggingface/transformers-pytorch-amd-gpu
#      ci_event: Scheduled CI (AMD) - mi210
#    secrets: inherit

#  deepspeed-ci:
#    name: DeepSpeed CI
#    uses: huggingface/hf-workflows/.github/workflows/transformers_amd_ci_scheduled.yaml@main
#    with:
#      job: run_torch_cuda_extensions_gpu
#      slack_report_channel: "#transformers-ci-daily-amd"
#      runner: mi210
#      docker: huggingface/transformers-pytorch-deepspeed-amd-gpu
#      ci_event: Scheduled CI (AMD) - mi210
#    secrets: inherit