
fix load_in_8bit check
winglian committed Sep 18, 2023
1 parent 3124670 commit 690bd4d
Showing 1 changed file with 2 additions and 2 deletions.
src/axolotl/utils/models.py: 4 changes (2 additions & 2 deletions)
@@ -368,9 +368,9 @@ def load_model(

     needs_fa2_dtype = cfg.adapter or cfg.fsdp
     if (
-        (cfg.adapter == "lora" and load_in_8bit)
+        (cfg.adapter == "lora" and cfg.load_in_8bit)
         or (cfg.adapter == "qlora" and cfg.load_in_4bit)
-        or (cfg.adapter == "ia3" and load_in_8bit)
+        or (cfg.adapter == "ia3" and cfg.load_in_8bit)
     ):
         LOG.info("converting PEFT model w/ prepare_model_for_kbit_training")
         if cfg.gradient_checkpointing:
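For readers skimming the diff: the snippet below restates the corrected condition as a standalone sketch. The SimpleNamespace config, the example values, and the print statement are hypothetical stand-ins, not axolotl code; only the boolean expression mirrors the change above, which reads cfg.load_in_8bit from the config object rather than the bare load_in_8bit name used before.

from types import SimpleNamespace

# Hypothetical config values for illustration only; axolotl builds its real
# cfg object from the user's YAML config.
cfg = SimpleNamespace(
    adapter="lora",
    load_in_8bit=True,
    load_in_4bit=False,
    gradient_checkpointing=True,
)

# The corrected check: every quantization flag is read from cfg, so the
# decision cannot drift from the user's config the way a separately tracked
# load_in_8bit variable could.
needs_kbit_prep = (
    (cfg.adapter == "lora" and cfg.load_in_8bit)
    or (cfg.adapter == "qlora" and cfg.load_in_4bit)
    or (cfg.adapter == "ia3" and cfg.load_in_8bit)
)

if needs_kbit_prep:
    # At this point load_model() logs the message shown in the diff and then
    # runs peft's prepare_model_for_kbit_training on the loaded model.
    print("converting PEFT model w/ prepare_model_for_kbit_training")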
