Commit

convert
dakinggg committed Jan 5, 2024
1 parent 5f4ebd9 commit 8f21297
Showing 1 changed file with 2 additions and 1 deletion.
3 changes: 2 additions & 1 deletion llmfoundry/models/hf/hf_causal_lm.py
@@ -27,6 +27,7 @@
 from llmfoundry.models.hf.model_wrapper import HuggingFaceModelWithZLoss
 from llmfoundry.models.layers.attention import is_flash_v2_installed
 from llmfoundry.models.utils import init_empty_weights
+from llmfoundry.utils.config_utils import pop_config

 try:
     from peft.peft_model import PeftModel

@@ -260,7 +261,7 @@ def _autoset_attn_implementation_monkeypatch(
         )

         from peft import LoraConfig
-        peft_config = om_model_config.get('peft_config')
+        peft_config = pop_config(om_model_config, 'peft_config', must_exist=False, convert=True)
         peft_type = peft_config.pop('peft_type', None)
         peft_config = LoraConfig(**peft_config)
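The substantive change swaps om_model_config.get('peft_config') for pop_config(..., convert=True). om_model_config is an omegaconf DictConfig, so .get returns another DictConfig; splatting that into LoraConfig(**peft_config) would pass DictConfig/ListConfig values where peft expects plain Python types. Below is a minimal sketch of what a pop_config helper with convert=True plausibly does — the real helper lives in llmfoundry/utils/config_utils.py, the keyword names must_exist and convert come from the call site in this diff, and the default_value parameter, the NameError, and the body are illustrative assumptions, not the library's code:

from typing import Any

from omegaconf import DictConfig, OmegaConf


def pop_config(config: DictConfig,
               key: str,
               must_exist: bool = True,
               default_value: Any = None,
               convert: bool = False) -> Any:
    # Illustrative approximation: remove `key` from `config` and return it,
    # optionally converting OmegaConf containers to native Python objects.
    if key not in config:
        if must_exist:
            raise NameError(f'config must specify `{key}`')
        return default_value
    value = config.pop(key)
    if convert and OmegaConf.is_config(value):
        # Resolve interpolations and convert nested DictConfig/ListConfig
        # values to plain dicts/lists so the result can be splatted into
        # constructors such as LoraConfig(**peft_config).
        return OmegaConf.to_container(value, resolve=True)
    return value

With convert=True the returned peft_config is a plain dict, so the subsequent peft_config.pop('peft_type', None) and LoraConfig(**peft_config) calls operate on native Python types rather than OmegaConf containers — presumably the "convert" the commit message refers to.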
