diff --git a/src/transformers/trainer.py b/src/transformers/trainer.py
index a788a103621a5d..52beb6c1e56ff5 100755
--- a/src/transformers/trainer.py
+++ b/src/transformers/trainer.py
@@ -4368,6 +4368,10 @@ def create_accelerator_and_postprocess(self):
                         "`non_blocking` is only supported in accelerate v0.30.0 and above. Please upgrade accelerate to use this feature."
                     )
             else:
+                if non_blocking and not self.args.dataloader_pin_memory:
+                    logger.warning(
+                        "`non_blocking` is enabled but `dataloader_pin_memory` is not. For the best performance, it's recommended to enable both."
+                    )
                 dataloader_config.non_blocking = non_blocking
         # this would have been updated above, no need for it anymore
         accelerator_config.pop("gradient_accumulation_kwargs")
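
Context for reviewers: a minimal sketch of the user-facing configuration this warning targets. `dataloader_pin_memory` and `accelerator_config` are real `TrainingArguments` parameters; the version bounds in the comments are assumptions inferred from the `is_accelerate_available("0.30.0")` check in the surrounding code, not something this diff verifies.

```python
# Sketch, not part of this diff: how a user reaches the code path above.
# Assumes a transformers version whose `accelerator_config` accepts a dict
# and accelerate >= 0.30.0, per the version check in the surrounding code.
from transformers import TrainingArguments

# Triggers the new warning: `non_blocking` without pinned memory. Async
# host-to-device copies require page-locked (pinned) host buffers, so
# `non_blocking=True` silently degrades to synchronous copies here.
args_warned = TrainingArguments(
    output_dir="out",
    dataloader_pin_memory=False,
    accelerator_config={"non_blocking": True},
)

# Recommended pairing: both flags enabled, no warning emitted.
args_ok = TrainingArguments(
    output_dir="out",
    dataloader_pin_memory=True,  # the default
    accelerator_config={"non_blocking": True},
)
```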