enable cpu bnb distributed lora finetune (#3159)
* enable cpu bnb distributed lora finetune

* check bnb multi-backend
jiqing-feng authored Oct 15, 2024
1 parent 292954b commit a84327e
Showing 1 changed file with 4 additions and 1 deletion.
src/accelerate/accelerator.py: 4 additions and 1 deletion
@@ -1421,7 +1421,10 @@ def prepare_model(self, model: torch.nn.Module, device_placement: bool = None, e
                 current_device.index if isinstance(current_device, torch.device) else current_device
             )
 
-            if torch.device(current_device_index) != self.device:
+            if self.device.type == "cpu" and is_bitsandbytes_multi_backend_available():
+                # bnb with multi-backend supports CPU which don't need to check index.
+                pass
+            elif torch.device(current_device_index) != self.device:
                 # if on the first device (GPU 0) we don't care
                 if (self.device.index is not None) or (current_device_index != 0):
                     raise ValueError(
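For context, here is a minimal standalone sketch of the guard this commit adds, lifted out of Accelerator.prepare_model. The helper is_bitsandbytes_multi_backend_available is the one the diff calls (imported from accelerate.utils in the real file); the function name check_bnb_model_device and the error message below are illustrative placeholders, not the exact accelerate source.

# Illustrative sketch of the device check after this commit; function name
# and error text are placeholders, not the accelerate source.
import torch

from accelerate.utils import is_bitsandbytes_multi_backend_available


def check_bnb_model_device(model_device, accelerator_device: torch.device) -> None:
    # Normalize the model's device to a bare index (ints pass through as-is).
    current_device_index = (
        model_device.index if isinstance(model_device, torch.device) else model_device
    )
    if accelerator_device.type == "cpu" and is_bitsandbytes_multi_backend_available():
        # Multi-backend bitsandbytes can run quantized models on CPU, and
        # torch.device("cpu") carries no index, so the index comparison
        # below is meaningless there; skip it entirely.
        return
    if torch.device(current_device_index) != accelerator_device:
        # A model on GPU 0 paired with an index-less accelerator device is fine.
        if (accelerator_device.index is not None) or (current_device_index != 0):
            raise ValueError(
                "bnb-quantized model and accelerator are on different devices"
            )

The early return is the whole fix: before this change, a CPU model device would reach the torch.device(current_device_index) comparison with a None index, so the check could never succeed, which is presumably what blocked distributed LoRA finetuning of bnb-quantized models on CPU.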
