Commit: debug
dakinggg committed Apr 12, 2024
1 parent b61d761 commit 21ea5f0
Showing 1 changed file with 2 additions and 0 deletions.

llmfoundry/models/mpt/modeling_mpt.py (2 additions, 0 deletions)
@@ -325,13 +325,15 @@ def __init__(self, config: MPTConfig):
         self.emb_drop = nn.Dropout(config.emb_pdrop)
         self.mb_args = None
         block_args = config.to_dict()
+        print(block_args['ffn_config'])
         if block_args['ffn_config']['ffn_type'] in ffns_with_megablocks:
             block_args['ffn_config'] = config_moe_args(
                 block_args['ffn_config'],
                 config.d_model,
                 config.expansion_ratio,
                 config.n_layers,
             )
+            print(block_args['ffn_config'])
             self.mb_args = block_args['ffn_config'].get('args')
         self.blocks = nn.ModuleList([
             MPTBlock(
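
The two added lines print block_args['ffn_config'] once before the MoE branch and once after config_moe_args has rewritten it, so the second print only fires when the FFN type is a MegaBlocks MoE type. Below is a minimal, runnable sketch of that before/after debug-print pattern in isolation; the ffns_with_megablocks values, the example config, and the config_moe_args stub are hypothetical stand-ins, not llm-foundry's actual implementation.

# Sketch of the debug pattern in this commit, with stand-in names and values.
from typing import Any, Dict

ffns_with_megablocks = {'mb_moe', 'mb_dmoe'}  # assumed set of MoE FFN type names


def config_moe_args(ffn_config: Dict[str, Any], d_model: int,
                    expansion_ratio: int, n_layers: int) -> Dict[str, Any]:
    """Stand-in for the real helper: return an enriched copy of ffn_config."""
    out = dict(ffn_config)
    out['ffn_hidden_size'] = d_model * expansion_ratio
    out['args'] = {'num_layers': n_layers}  # placeholder for the constructed MoE args
    return out


block_args = {'ffn_config': {'ffn_type': 'mb_dmoe', 'moe_num_experts': 8}}

print(block_args['ffn_config'])  # state before any MoE-specific rewriting
if block_args['ffn_config']['ffn_type'] in ffns_with_megablocks:
    block_args['ffn_config'] = config_moe_args(
        block_args['ffn_config'],
        d_model=768,
        expansion_ratio=4,
        n_layers=12,
    )
    print(block_args['ffn_config'])  # state after config_moe_args ran
    mb_args = block_args['ffn_config'].get('args')

Comparing the two printed dicts shows exactly which keys the MoE configuration step adds or rewrites, which matches the commit's "debug" intent.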
