From ef6c8c25cf81d27741c58c17584f060887c0ca7b Mon Sep 17 00:00:00 2001
From: Daniel King <43149077+dakinggg@users.noreply.github.com>
Date: Wed, 24 Jul 2024 04:44:31 -0400
Subject: [PATCH] Update llmfoundry/models/mpt/modeling_mpt.py

---
 llmfoundry/models/mpt/modeling_mpt.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/llmfoundry/models/mpt/modeling_mpt.py b/llmfoundry/models/mpt/modeling_mpt.py
index 1115ddc14b..b3f0b361d8 100644
--- a/llmfoundry/models/mpt/modeling_mpt.py
+++ b/llmfoundry/models/mpt/modeling_mpt.py
@@ -183,7 +183,7 @@ def gen_rotary_embedding(
         )
     elif rope_hf_config['type'] == 'llama3':
         llama_rope_config = {**rope_hf_config}
-        llama_rope_config['rope_type'] = rope_hf_config.pop('type')
+        llama_rope_config['rope_type'] = rope_hf_config.get('type')
         return LlamaRotaryEmbedding(
             config=PartialLlamaConfig(
                 rope_scaling=llama_rope_config,
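
The one-line change swaps dict.pop for dict.get when copying the RoPE scaling type into the llama3 config. The motivation is inferred from the change rather than stated in the commit message: pop removes 'type' from rope_hf_config in place, so any later read of rope_hf_config['type'] on the same dict (for example, a repeated call to gen_rotary_embedding with a shared config) would raise KeyError, while get reads the value and leaves the dict untouched. A minimal sketch of that difference follows; build_llama_rope_config is a hypothetical helper used only to isolate the pop/get behavior, not a function in llm-foundry.

# Hypothetical helper mirroring the patched lines; it is not part of
# llm-foundry and only isolates the pop-vs-get difference.
def build_llama_rope_config(rope_hf_config: dict, use_pop: bool) -> dict:
    llama_rope_config = {**rope_hf_config}
    if use_pop:
        # Old line: removes 'type' from the caller's rope_hf_config.
        llama_rope_config['rope_type'] = rope_hf_config.pop('type')
    else:
        # New line: reads 'type' without mutating rope_hf_config.
        llama_rope_config['rope_type'] = rope_hf_config.get('type')
    return llama_rope_config


rope_hf_config = {'type': 'llama3', 'factor': 8.0}
build_llama_rope_config(rope_hf_config, use_pop=True)
print('type' in rope_hf_config)  # False: the shared config lost its 'type' key

rope_hf_config = {'type': 'llama3', 'factor': 8.0}
build_llama_rope_config(rope_hf_config, use_pop=False)
print('type' in rope_hf_config)  # True: .get leaves the config intact

With get, a second call that receives the same rope_hf_config still sees 'type' == 'llama3' and takes the same branch; the resulting llama_rope_config passed to LlamaRotaryEmbedding is identical in both cases.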