mixtral doesn't support basic lora 🤦
winglian committed Jan 12, 2024
1 parent 0e73c29 commit b27500c
Showing 1 changed file, tests/e2e/test_mixtral.py, with 3 additions and 3 deletions.
@@ -71,16 +71,16 @@ def test_qlora_w_fa2(self, temp_dir):
         assert (Path(temp_dir) / "adapter_model.bin").exists()
 
     @with_temp_dir
-    def test_lora_wo_fa2(self, temp_dir):
+    def test_qlora_wo_fa2(self, temp_dir):
         # pylint: disable=duplicate-code
         cfg = DictDefault(
             {
                 "base_model": "hf-internal-testing/Mixtral-tiny",
                 "tokenizer_config": "mistralai/Mixtral-8x7B-v0.1",
                 "flash_attention": False,
                 "sequence_len": 1024,
-                "load_in_8bit": True,
-                "adapter": "lora",
+                "load_in_4bit": True,
+                "adapter": "qlora",
                 "lora_r": 4,
                 "lora_alpha": 8,
                 "lora_dropout": 0.1,
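For context on the switch the diff makes: "lora" with load_in_8bit trains adapters on an 8-bit quantized base, while "qlora" with load_in_4bit trains them on a 4-bit NF4-quantized base. A minimal sketch of what the new settings roughly correspond to when done directly with transformers and peft follows; this is an assumption about an equivalent setup, not axolotl's actual code path, and the target_modules list is likewise an assumption.

import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig
from peft import LoraConfig, get_peft_model, prepare_model_for_kbit_training

# load_in_4bit: quantize the base model to 4-bit NF4 (QLoRA-style)
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_compute_dtype=torch.float16,
)
model = AutoModelForCausalLM.from_pretrained(
    "hf-internal-testing/Mixtral-tiny",
    quantization_config=bnb_config,
)
model = prepare_model_for_kbit_training(model)

# adapter: "qlora" -> LoRA adapters on top of the quantized weights,
# reusing the lora_r / lora_alpha / lora_dropout values from the test cfg
lora_config = LoraConfig(
    r=4,
    lora_alpha=8,
    lora_dropout=0.1,
    target_modules=["q_proj", "k_proj", "v_proj", "o_proj"],  # assumed; axolotl picks its own set
    task_type="CAUSAL_LM",
)
model = get_peft_model(model, lora_config)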
