From 4dc485257aa10a88a34b7ebee75d385cfa07ae05 Mon Sep 17 00:00:00 2001
From: Xuanlei Zhao
Date: Mon, 11 Dec 2023 18:06:03 +0800
Subject: [PATCH] update

---
 colossalai/accelerator/cuda_accelerator.py | 2 +-
 colossalai/accelerator/npu_accelerator.py  | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/colossalai/accelerator/cuda_accelerator.py b/colossalai/accelerator/cuda_accelerator.py
index 1be042f59c57..788ffefc55bf 100644
--- a/colossalai/accelerator/cuda_accelerator.py
+++ b/colossalai/accelerator/cuda_accelerator.py
@@ -39,7 +39,7 @@ def set_device(self, device: Union[torch.device, int]) -> None:
         """
         if device is None:
             if not dist.is_initialized():
-                raise RuntimeError("Cannot get current device when distributed is not initialized")
+                raise RuntimeError("Cannot get current device when distributed is not initialized.")
             device = dist.get_rank() % self.device_count()
         torch.cuda.set_device(device)

diff --git a/colossalai/accelerator/npu_accelerator.py b/colossalai/accelerator/npu_accelerator.py
index 3a71a5f945bb..86cd6b455d44 100644
--- a/colossalai/accelerator/npu_accelerator.py
+++ b/colossalai/accelerator/npu_accelerator.py
@@ -48,7 +48,7 @@ def set_device(self, device: Union[torch.device, int]) -> None:
         """
         if device is None:
             if not dist.is_initialized():
-                raise RuntimeError("Cannot get current device when distributed is not initialized")
+                raise RuntimeError("Cannot get current device when distributed is not initialized.")
             device = dist.get_rank() % self.device_count()
         torch.npu.set_device(device)
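
For context, the set_device method touched in both hunks resolves a device from the
process rank when none is given: each rank binds to a distinct local device via
rank % device_count. Below is a minimal standalone sketch of that logic for the CUDA
backend, assuming torch.distributed has been initialized; the free-function form and
name are illustrative here, not the actual CudaAccelerator class API.

    import torch
    import torch.distributed as dist

    def set_device(device=None):
        # Sketch of the rank-to-device mapping used in the patched method:
        # with no explicit device, map each rank onto a local GPU, e.g.
        # rank 0 -> cuda:0, rank 1 -> cuda:1, wrapping at device_count().
        if device is None:
            if not dist.is_initialized():
                raise RuntimeError("Cannot get current device when distributed is not initialized.")
            device = dist.get_rank() % torch.cuda.device_count()
        torch.cuda.set_device(device)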