Skip to content

Commit

Permalink
add value error
Browse files Browse the repository at this point in the history
  • Loading branch information
vchiley committed Jan 13, 2024
1 parent 22ec061 commit a121133
Showing 1 changed file with 2 additions and 0 deletions.
2 changes: 2 additions & 0 deletions llmfoundry/models/layers/attention.py
Original file line number Diff line number Diff line change
Expand Up @@ -547,6 +547,8 @@ def __init__(
raise ValueError(
'Each Q head should get the same number of KV heads, so n_heads must be divisible by kv_n_heads.'
)
if qk_ln and qk_gn:
raise ValueError('Only one of qk_ln and qk_gn can be set to True.')

self.softmax_scale = softmax_scale
if self.softmax_scale is None:
Expand Down

0 comments on commit a121133

Please sign in to comment.