
Commit

fix gptq tests
MekkCyber committed Nov 22, 2024
1 parent 4e90b99 commit bb1e2d3
Showing 2 changed files with 63 additions and 53 deletions.
104 changes: 52 additions & 52 deletions .github/workflows/self-scheduled-caller.yml
@@ -7,64 +7,64 @@ on:
     - cron: "17 2 * * *"
   push:
     branches:
-      - run_scheduled_ci*
+      - fix_gptq_test

 jobs:
-  model-ci:
-    name: Model CI
-    uses: ./.github/workflows/self-scheduled.yml
-    with:
-      job: run_models_gpu
-      slack_report_channel: "#transformers-ci-daily-models"
-      runner: daily-ci
-      docker: huggingface/transformers-all-latest-gpu
-      ci_event: Daily CI
-    secrets: inherit
+  # model-ci:
+  #   name: Model CI
+  #   uses: ./.github/workflows/self-scheduled.yml
+  #   with:
+  #     job: run_models_gpu
+  #     slack_report_channel: "#transformers-ci-daily-models"
+  #     runner: daily-ci
+  #     docker: huggingface/transformers-all-latest-gpu
+  #     ci_event: Daily CI
+  #   secrets: inherit

-  torch-pipeline:
-    name: Torch pipeline CI
-    uses: ./.github/workflows/self-scheduled.yml
-    with:
-      job: run_pipelines_torch_gpu
-      slack_report_channel: "#transformers-ci-daily-pipeline-torch"
-      runner: daily-ci
-      docker: huggingface/transformers-pytorch-gpu
-      ci_event: Daily CI
-    secrets: inherit
+  # torch-pipeline:
+  #   name: Torch pipeline CI
+  #   uses: ./.github/workflows/self-scheduled.yml
+  #   with:
+  #     job: run_pipelines_torch_gpu
+  #     slack_report_channel: "#transformers-ci-daily-pipeline-torch"
+  #     runner: daily-ci
+  #     docker: huggingface/transformers-pytorch-gpu
+  #     ci_event: Daily CI
+  #   secrets: inherit

-  tf-pipeline:
-    name: TF pipeline CI
-    uses: ./.github/workflows/self-scheduled.yml
-    with:
-      job: run_pipelines_tf_gpu
-      slack_report_channel: "#transformers-ci-daily-pipeline-tf"
-      runner: daily-ci
-      docker: huggingface/transformers-tensorflow-gpu
-      ci_event: Daily CI
-    secrets: inherit
+  # tf-pipeline:
+  #   name: TF pipeline CI
+  #   uses: ./.github/workflows/self-scheduled.yml
+  #   with:
+  #     job: run_pipelines_tf_gpu
+  #     slack_report_channel: "#transformers-ci-daily-pipeline-tf"
+  #     runner: daily-ci
+  #     docker: huggingface/transformers-tensorflow-gpu
+  #     ci_event: Daily CI
+  #   secrets: inherit

-  example-ci:
-    name: Example CI
-    uses: ./.github/workflows/self-scheduled.yml
-    with:
-      job: run_examples_gpu
-      slack_report_channel: "#transformers-ci-daily-examples"
-      runner: daily-ci
-      docker: huggingface/transformers-all-latest-gpu
-      ci_event: Daily CI
-    secrets: inherit
+  # example-ci:
+  #   name: Example CI
+  #   uses: ./.github/workflows/self-scheduled.yml
+  #   with:
+  #     job: run_examples_gpu
+  #     slack_report_channel: "#transformers-ci-daily-examples"
+  #     runner: daily-ci
+  #     docker: huggingface/transformers-all-latest-gpu
+  #     ci_event: Daily CI
+  #   secrets: inherit

-  deepspeed-ci:
-    name: DeepSpeed CI
-    uses: ./.github/workflows/self-scheduled.yml
-    with:
-      job: run_torch_cuda_extensions_gpu
-      slack_report_channel: "#transformers-ci-daily-deepspeed"
-      runner: daily-ci
-      docker: huggingface/transformers-pytorch-deepspeed-latest-gpu
-      ci_event: Daily CI
-      working-directory-prefix: /workspace
-    secrets: inherit
+  # deepspeed-ci:
+  #   name: DeepSpeed CI
+  #   uses: ./.github/workflows/self-scheduled.yml
+  #   with:
+  #     job: run_torch_cuda_extensions_gpu
+  #     slack_report_channel: "#transformers-ci-daily-deepspeed"
+  #     runner: daily-ci
+  #     docker: huggingface/transformers-pytorch-deepspeed-latest-gpu
+  #     ci_event: Daily CI
+  #     working-directory-prefix: /workspace
+  #   secrets: inherit

   quantization-ci:
     name: Quantization CI
12 changes: 11 additions & 1 deletion src/transformers/utils/import_utils.py
@@ -1002,8 +1002,18 @@ def is_compressed_tensors_available():


 def is_auto_gptq_available():
-    return _auto_gptq_available
+    if not _auto_gptq_available:
+        return _auto_gptq_available
+
+    try:
+        from auto_gptq import exllama_set_max_input_length
+    except ImportError as exc:
+        if "shard_checkpoint" in str(exc):
+            # auto_gptq requires eetq, and eetq is currently broken with newer transformers versions because it tries to import shard_checkpoint
+            # see https://github.com/NetEase-FuXi/EETQ/issues/34
+            # TODO: Remove once eetq releases a fix and that release is used in CI
+            return False
+    return _auto_gptq_available

 def is_eetq_available():
     return _eetq_available
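
The change above probes the optional dependency and inspects the ImportError message to tell "auto_gptq is not installed" apart from "auto_gptq is installed but broken by the eetq/shard_checkpoint incompatibility". Below is a minimal standalone sketch of that pattern; it is illustrative only, not part of this commit, and the helper name `probe_auto_gptq` is made up.

```python
import importlib.util


def probe_auto_gptq() -> bool:
    """Return True only if auto_gptq is installed and actually importable."""
    # Cheap check first: is the package installed at all?
    if importlib.util.find_spec("auto_gptq") is None:
        return False
    try:
        # The same import used by the check above; this is where the broken
        # eetq import surfaces on newer transformers versions.
        from auto_gptq import exllama_set_max_input_length  # noqa: F401
    except ImportError as exc:
        # eetq still imports shard_checkpoint from transformers.modeling_utils,
        # which recent transformers versions no longer expose.
        if "shard_checkpoint" in str(exc):
            return False
    return True
```

Test code can then gate GPTQ tests on such a check (for example with `unittest.skipUnless(...)`) so they are skipped cleanly instead of failing at import time.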