Skip to content

Commit

Permalink
fix: replace removed `move_and_save` with `save` from `colossalai.utils.safetensors` in hybrid_parallel_checkpoint_io
Browse files Browse the repository at this point in the history
  • Loading branch information
wangbluo committed Nov 25, 2024
1 parent fa0318d commit b83143e
Showing 1 changed file with 2 additions and 2 deletions.
4 changes: 2 additions & 2 deletions colossalai/checkpoint_io/hybrid_parallel_checkpoint_io.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@
to_unpadded_tensor,
)
from colossalai.utils import get_current_device, get_non_persistent_buffers_set
from colossalai.utils.safetensors import move_and_save
from colossalai.utils.safetensors import save

from .general_checkpoint_io import GeneralCheckpointIO
from .index_file import CheckpointIndexFile
Expand Down Expand Up @@ -708,7 +708,7 @@ def save_unsharded_model(
if id(model) not in self.pinned_state_dicts:
self.pinned_state_dicts[id(model)] = create_pinned_state_dict(complete_state_dict)
self.async_writers.append(writer)
move_and_save(writer, complete_state_dict, self.pinned_state_dicts[id(model)])
save(writer, complete_state_dict, self.pinned_state_dicts[id(model)])
else:
save_state_dict(complete_state_dict, checkpoint, use_safetensors)

Expand Down

0 comments on commit b83143e

Please sign in to comment.