Skip to content

Commit

Permalink
fix
Browse files — browse the repository at this point in the history
  • Loading branch information
dakinggg committed Apr 12, 2024
1 parent 61f203d commit dec23af
Showing 1 changed file with 1 addition and 1 deletion.
2 changes: 1 addition & 1 deletion llmfoundry/models/layers/blocks.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ def __init__(
super().__init__()

ffn_type = ffn_config['ffn_type']
ffn_has_norm = not ffn_type in ffns_with_norm
ffn_has_norm = ffn_type in ffns_with_norm

if self.fuse_norm_attn_norm:
self.norm_attn_norm = FusedNormAttentionNorm(
Expand Down

0 comments on commit dec23af

Please sign in to comment.