add error message
ShashankMosaicML committed Apr 1, 2024
1 parent 039caec · commit 16b5b0d
Showing 1 changed file with 2 additions and 7 deletions.
llmfoundry/models/mpt/modeling_mpt.py: 2 additions & 7 deletions
@@ -1022,14 +1022,9 @@ def loss(self, outputs: CausalLMOutputWithPast,
                               targets)
 
         if torch.all(targets == -100):
-            if torch.any(losses != 0.0):
-                raise ValueError(
-                    f'Losses are non-zero even though all the targets are -100.'
-                )
-            warnings.warn(
-                'All targets are -100. This is likely due to padding tokens. Better sequence packing may reduce the number of padding tokens.'
+            raise ValueError(
+                'All targets are -100. This might be due to all tokens in a sequence being padding tokens.'
             )
-            loss = losses.sum()
         else:
             loss = losses.sum() / (targets != self.loss_fn.ignore_index).sum()
 
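For context, here is a minimal sketch (not part of the commit) of the failure mode the new ValueError guards against. When every target equals the ignore index -100, the per-token losses are all zero and the else-branch normalizer (targets != ignore_index).sum() is also zero, so the mean loss would come out NaN. The tensor shapes and values below are illustrative only.

    # Sketch: all-padding batch produces a NaN mean loss without the guard.
    import torch
    import torch.nn.functional as F

    logits = torch.randn(4, 10)          # 4 token positions, vocab size 10
    targets = torch.full((4,), -100)     # every position carries the ignore index

    # With reduction='none', ignored positions contribute a loss of exactly 0.
    losses = F.cross_entropy(logits, targets, ignore_index=-100, reduction='none')
    print(losses)                        # tensor([0., 0., 0., 0.])

    # The else-branch normalizer counts non-ignored targets; here it is 0,
    # so the division yields NaN instead of a usable loss.
    denom = (targets != -100).sum()
    print(losses.sum() / denom)          # tensor(nan)

Raising instead of warning makes an all-padding batch fail fast, rather than letting a NaN loss propagate silently into training.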
