From 16b5b0ddd96387af755ef09f12ee839f0b42acb3 Mon Sep 17 00:00:00 2001
From: Shashank Rajput
Date: Mon, 1 Apr 2024 22:17:45 +0000
Subject: [PATCH] add error message

---
 llmfoundry/models/mpt/modeling_mpt.py | 9 ++-------
 1 file changed, 2 insertions(+), 7 deletions(-)

diff --git a/llmfoundry/models/mpt/modeling_mpt.py b/llmfoundry/models/mpt/modeling_mpt.py
index 1be0ce40e7..0639a0087a 100644
--- a/llmfoundry/models/mpt/modeling_mpt.py
+++ b/llmfoundry/models/mpt/modeling_mpt.py
@@ -1022,14 +1022,9 @@ def loss(self, outputs: CausalLMOutputWithPast,
                               targets)
         if torch.all(targets == -100):
-            if torch.any(losses != 0.0):
-                raise ValueError(
-                    f'Losses are non-zero even though all the targets are -100.'
-                )
-            warnings.warn(
-                'All targets are -100. This is likely due to padding tokens. Better sequence packing may reduce the number of padding tokens.'
+            raise ValueError(
+                'All targets are -100. This might be due to all tokens in a sequence being padding tokens.'
             )
-            loss = losses.sum()
         else:
             loss = losses.sum() / (targets != self.loss_fn.ignore_index).sum()
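
For illustration only, here is a minimal, self-contained sketch (not the actual llmfoundry module) of the loss behavior after this patch. The standalone loss() helper, the nn.CrossEntropyLoss configuration (reduction='none', ignore_index=-100), and the toy logits/targets shapes are assumptions; only the control flow and the error message mirror the diff above.

    # Sketch, assuming a token-level cross-entropy loss with ignore_index=-100,
    # as in the patched modeling_mpt.py loss path.
    import torch
    import torch.nn as nn

    loss_fn = nn.CrossEntropyLoss(reduction='none', ignore_index=-100)

    def loss(logits: torch.Tensor, targets: torch.Tensor) -> torch.Tensor:
        # Per-token losses; positions with target == -100 contribute 0.
        losses = loss_fn(logits.view(-1, logits.size(-1)), targets.view(-1))
        if torch.all(targets == -100):
            # After this patch: fail loudly instead of warning and returning a
            # zero loss when every position in the batch is masked out.
            raise ValueError(
                'All targets are -100. This might be due to all tokens in a '
                'sequence being padding tokens.')
        # Average over the non-ignored tokens only.
        return losses.sum() / (targets != loss_fn.ignore_index).sum()

    # A batch whose targets are entirely padding now raises a ValueError.
    logits = torch.randn(1, 4, 8)       # (batch, seq, vocab)
    targets = torch.full((1, 4), -100)  # every position masked
    try:
        loss(logits, targets)
    except ValueError as e:
        print(e)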