From 690bd4d1ceb366d20e12215a46bd5d33d83a1ec9 Mon Sep 17 00:00:00 2001
From: Wing Lian
Date: Mon, 18 Sep 2023 18:51:56 -0400
Subject: [PATCH] fix load_in_8bit check

---
 src/axolotl/utils/models.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/axolotl/utils/models.py b/src/axolotl/utils/models.py
index 76a6fb097f..a55e06ab22 100644
--- a/src/axolotl/utils/models.py
+++ b/src/axolotl/utils/models.py
@@ -368,9 +368,9 @@ def load_model(
     needs_fa2_dtype = cfg.adapter or cfg.fsdp
 
     if (
-        (cfg.adapter == "lora" and load_in_8bit)
+        (cfg.adapter == "lora" and cfg.load_in_8bit)
         or (cfg.adapter == "qlora" and cfg.load_in_4bit)
-        or (cfg.adapter == "ia3" and load_in_8bit)
+        or (cfg.adapter == "ia3" and cfg.load_in_8bit)
     ):
         LOG.info("converting PEFT model w/ prepare_model_for_kbit_training")
         if cfg.gradient_checkpointing:
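Note on the change: inside load_model, the "lora" and "ia3" branches tested a bare load_in_8bit name rather than the config attribute cfg.load_in_8bit, so the check could read the wrong value and the prepare_model_for_kbit_training path could be skipped for 8-bit adapters. The following is a minimal sketch of the corrected gate, assuming cfg behaves like axolotl's config object (a SimpleNamespace stands in for it here); maybe_prepare_for_kbit is a hypothetical helper name, not a function in the repo, and only prepare_model_for_kbit_training is the real peft API referenced in the log message.

# Sketch only: the gate from load_model() pulled into a standalone helper.
# `maybe_prepare_for_kbit` is a hypothetical name; axolotl inlines this logic.
from types import SimpleNamespace

from peft import prepare_model_for_kbit_training


def maybe_prepare_for_kbit(model, cfg):
    # The fix: every branch reads the attribute on cfg, never a bare name.
    if (
        (cfg.adapter == "lora" and cfg.load_in_8bit)
        or (cfg.adapter == "qlora" and cfg.load_in_4bit)
        or (cfg.adapter == "ia3" and cfg.load_in_8bit)
    ):
        # peft freezes the base weights, upcasts norm/embedding layers, and
        # optionally enables gradient checkpointing for k-bit training.
        model = prepare_model_for_kbit_training(
            model, use_gradient_checkpointing=bool(cfg.gradient_checkpointing)
        )
    return model


# Hypothetical usage: an 8-bit LoRA config now takes the k-bit path.
cfg = SimpleNamespace(
    adapter="lora",
    load_in_8bit=True,
    load_in_4bit=False,
    gradient_checkpointing=True,
)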