
Commit

fix
dakinggg committed Apr 12, 2024
1 parent 73db66c commit 6ebd28c
Showing 1 changed file with 8 additions and 7 deletions.
15 changes: 8 additions & 7 deletions llmfoundry/models/layers/blocks.py
@@ -72,6 +72,14 @@ def __init__(
         del kwargs # unused, just to capture any extra args from the config
         super().__init__()
 
+        self.ffn = build_ffn(
+            d_model=d_model,
+            expansion_ratio=expansion_ratio,
+            device=device,
+            bias=not no_bias,
+            **ffn_config,
+        )
+
         if self.fuse_norm_attn_norm:
             self.norm_attn_norm = FusedNormAttentionNorm(
                 d_model=d_model,
@@ -122,13 +130,6 @@ def __init__(
                 device=device,
             )
 
-        self.ffn = build_ffn(
-            d_model=d_model,
-            expansion_ratio=expansion_ratio,
-            device=device,
-            bias=not no_bias,
-            **ffn_config,
-        )
         self.resid_attn_dropout = nn.Dropout(resid_pdrop)
         self.resid_ffn_dropout = nn.Dropout(resid_pdrop)
         self.use_pad_tok_in_ffn = use_pad_tok_in_ffn
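The net effect of the diff is easier to see outside the hunk view: in MPTBlock.__init__, the feed-forward module is now constructed before the fused/unfused attention setup rather than after it. Below is a minimal sketch of the resulting ordering; the signature, the attention/norm construction, and the import path are abbreviated assumptions based only on the context lines shown above, not the full blocks.py.

from typing import Optional

import torch.nn as nn

from llmfoundry.models.layers.ffn import build_ffn  # assumed import path


class MPTBlock(nn.Module):
    """Sketch: only the constructor ordering relevant to this commit."""

    def __init__(
        self,
        d_model: int,
        expansion_ratio: int,
        ffn_config: dict,
        no_bias: bool = False,
        fuse_norm_attn_norm: bool = False,
        resid_pdrop: float = 0.0,
        device: Optional[str] = None,
        **kwargs,
    ):
        del kwargs  # unused, just to capture any extra args from the config
        super().__init__()
        self.fuse_norm_attn_norm = fuse_norm_attn_norm

        # Moved up by this commit: the FFN is built before the attention branch,
        # so both the fused and unfused paths below see an initialized self.ffn.
        self.ffn = build_ffn(
            d_model=d_model,
            expansion_ratio=expansion_ratio,
            device=device,
            bias=not no_bias,
            **ffn_config,
        )

        if self.fuse_norm_attn_norm:
            ...  # build FusedNormAttentionNorm, as in the first hunk above
        else:
            ...  # build the separate norm and attention modules (unchanged)

        self.resid_attn_dropout = nn.Dropout(resid_pdrop)
        self.resid_ffn_dropout = nn.Dropout(resid_pdrop)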
