diff --git a/src/transformers/training_args.py b/src/transformers/training_args.py
index 6950e8e66d3ac1..a1b5b511a95e35 100644
--- a/src/transformers/training_args.py
+++ b/src/transformers/training_args.py
@@ -2164,7 +2164,7 @@ def _setup_devices(self) -> "torch.device":
         if not is_accelerate_available():
             raise ImportError(
                 f"Using the `Trainer` with `PyTorch` requires `accelerate>={ACCELERATE_MIN_VERSION}`: "
-                "Please run `pip install transformers[torch]` or `pip install 'accelerate>={ACCELERATE_MIN_VERSION}'`"
+                f"Please run `pip install transformers[torch]` or `pip install 'accelerate>={ACCELERATE_MIN_VERSION}'`"
             )
         # We delay the init of `PartialState` to the end for clarity
         accelerator_state_kwargs = {"enabled": True, "use_configured_state": False}
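
The change adds the missing `f` prefix on the second string of the error message, so `{ACCELERATE_MIN_VERSION}` is interpolated instead of printed literally. A minimal standalone sketch of the difference (the version value below is hypothetical, chosen only for illustration):

# Sketch, not part of the patch: shows why the `f` prefix matters.
ACCELERATE_MIN_VERSION = "0.26.0"  # hypothetical value for illustration

without_prefix = "Please run `pip install 'accelerate>={ACCELERATE_MIN_VERSION}'`"
with_prefix = f"Please run `pip install 'accelerate>={ACCELERATE_MIN_VERSION}'`"

print(without_prefix)  # placeholder kept verbatim: ...'accelerate>={ACCELERATE_MIN_VERSION}'...
print(with_prefix)     # interpolated: ...'accelerate>=0.26.0'...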