[legacy] fix engine clip grad norm
ver217 committed Sep 12, 2023
1 parent 9d0c60c commit 9a6012f
Showing 2 changed files with 2 additions and 2 deletions.
2 changes: 1 addition & 1 deletion colossalai/legacy/engine/_base_engine.py
@@ -157,7 +157,7 @@ def step(self):
         """Execute parameter update
         """
         self._all_reduce_gradients()
-        self.optimizer.clip_grad_norm(self.model, self._clip_grad_norm)
+        self.optimizer.clip_grad_by_norm(self._clip_grad_norm)
         return self.optimizer.step()
 
     def backward(self, loss: Tensor):
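For context, this hunk moves the engine off the old two-argument `clip_grad_norm(model, max_norm)` API onto `clip_grad_by_norm(max_norm)`, which clips the parameters the optimizer already manages and therefore no longer needs a model reference. A minimal sketch of what such a wrapper method could look like, delegating to PyTorch's `torch.nn.utils.clip_grad_norm_`; the `OptimizerWrapper` class and the method body are illustrative assumptions, not ColossalAI's actual implementation:

```python
import torch


class OptimizerWrapper:
    """Illustrative sketch of an optimizer wrapper exposing clip_grad_by_norm.

    Only the method name comes from the diff above; the body is an assumption.
    """

    def __init__(self, optim: torch.optim.Optimizer):
        self.optim = optim

    def clip_grad_by_norm(self, max_norm: float) -> None:
        # Clip the gradients of every parameter the optimizer manages so
        # their global L2 norm does not exceed max_norm. No model reference
        # is needed, which is what makes the one-argument call possible.
        params = [p for group in self.optim.param_groups for p in group["params"]]
        torch.nn.utils.clip_grad_norm_(params, max_norm)

    def step(self):
        return self.optim.step()
```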
2 changes: 1 addition & 1 deletion
@@ -74,7 +74,7 @@ def clip_grad_norm(self, model: nn.Module, max_norm: float) -> None:
         if self.accumulate_step < self.accumulate_size:
             pass
         else:
-            self.optim.clip_grad_norm(model, max_norm)
+            self.optim.clip_grad_by_norm(max_norm)
 
     def backward(self, loss: Tensor) -> None:
         """Execute backward pass.
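The second hunk patches a gradient-accumulation wrapper: clipping is deliberately a no-op until the final micro-step, when the accumulated gradient is about to be applied, and the inner call is updated to the same one-argument `clip_grad_by_norm` API as above. A sketch of that skip-until-last-step pattern, assuming a hypothetical `GradAccumOptimizer` class around the `accumulate_step`/`accumulate_size` counters shown in the diff:

```python
from torch import Tensor, nn


class GradAccumOptimizer:
    """Illustrative sketch of the accumulation wrapper implied by the diff.

    Only clip_grad_norm's skip-until-last-step branch is taken from the hunk
    above; the rest of the class is an assumption for illustration.
    """

    def __init__(self, optim, accumulate_size: int):
        self.optim = optim
        self.accumulate_size = accumulate_size
        self.accumulate_step = 0

    def backward(self, loss: Tensor) -> None:
        # Scale the loss so the accumulated gradient matches a full-batch mean.
        self.accumulate_step += 1
        (loss / self.accumulate_size).backward()

    def clip_grad_norm(self, model: nn.Module, max_norm: float) -> None:
        # Skip clipping while gradients are still being accumulated; the
        # model argument is kept for signature compatibility but is unused
        # by the corrected call.
        if self.accumulate_step < self.accumulate_size:
            pass
        else:
            self.optim.clip_grad_by_norm(max_norm)

    def step(self):
        # Apply the update only on the final micro-step, then reset.
        if self.accumulate_step < self.accumulate_size:
            return None
        self.accumulate_step = 0
        return self.optim.step()
```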
