From 69873d529db9796eaf8dc52d2d93b0bea11d2001 Mon Sep 17 00:00:00 2001
From: Sourab Mangrulkar <13534540+pacman100@users.noreply.github.com>
Date: Wed, 11 Oct 2023 19:28:23 +0530
Subject: [PATCH] fix the model card issue as `use_cuda_amp` is no more
 available (#26731)

---
 src/transformers/modelcard.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/transformers/modelcard.py b/src/transformers/modelcard.py
index 74acb55397a9ea..f1b2f70bc2ea61 100644
--- a/src/transformers/modelcard.py
+++ b/src/transformers/modelcard.py
@@ -895,10 +895,10 @@ def extract_hyperparameters_from_trainer(trainer):
         hyperparameters["num_epochs"] = trainer.args.num_train_epochs
 
     if trainer.args.fp16:
-        if trainer.use_cuda_amp:
-            hyperparameters["mixed_precision_training"] = "Native AMP"
-        elif trainer.use_apex:
+        if trainer.use_apex:
             hyperparameters["mixed_precision_training"] = f"Apex, opt level {trainer.args.fp16_opt_level}"
+        else:
+            hyperparameters["mixed_precision_training"] = "Native AMP"
 
     if trainer.args.label_smoothing_factor != 0.0:
         hyperparameters["label_smoothing_factor"] = trainer.args.label_smoothing_factor
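
For context, a minimal runnable sketch of the patched branch in isolation. `FakeArgs`, `FakeTrainer`, and `mixed_precision_hyperparameters` are hypothetical stand-ins introduced here for illustration; the real logic lives in `extract_hyperparameters_from_trainer` in `src/transformers/modelcard.py`.

```python
# Sketch (not part of the patch) of the corrected branch, using hypothetical
# stand-ins for Trainer/TrainingArguments with only the attributes this
# code path reads.
from dataclasses import dataclass, field


@dataclass
class FakeArgs:
    fp16: bool = True
    fp16_opt_level: str = "O1"


@dataclass
class FakeTrainer:
    args: FakeArgs = field(default_factory=FakeArgs)
    use_apex: bool = False


def mixed_precision_hyperparameters(trainer):
    """Mirror the patched logic: report Apex when it is enabled; any other
    fp16 run is labeled as native AMP, since `use_cuda_amp` no longer exists."""
    hyperparameters = {}
    if trainer.args.fp16:
        if trainer.use_apex:
            hyperparameters["mixed_precision_training"] = (
                f"Apex, opt level {trainer.args.fp16_opt_level}"
            )
        else:
            hyperparameters["mixed_precision_training"] = "Native AMP"
    return hyperparameters


print(mixed_precision_hyperparameters(FakeTrainer()))
# {'mixed_precision_training': 'Native AMP'}
print(mixed_precision_hyperparameters(FakeTrainer(use_apex=True)))
# {'mixed_precision_training': 'Apex, opt level O1'}
```

The behavioral point of the patch: with `use_cuda_amp` removed from `Trainer`, the branch is inverted so that native AMP becomes the default label for any fp16 run that is not using Apex, rather than being gated on a now-missing attribute.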