upgrade bnb 0.45.0 and peft 0.14.0 (#2126)
* upgrade bnb to latest release

* update peft to a commit with working support

* bump to latest release of peft==0.14.0
winglian authored Dec 6, 2024
1 parent 5726141 commit 6b3058b
Showing 3 changed files with 2 additions and 18 deletions.
4 changes: 2 additions & 2 deletions requirements.txt
@@ -1,9 +1,9 @@
 --extra-index-url https://huggingface.github.io/autogptq-index/whl/cu118/
 packaging==23.2
-peft==0.13.2
+peft==0.14.0
 transformers==4.46.3
 tokenizers>=0.20.1
-bitsandbytes==0.44.1
+bitsandbytes==0.45.0
 accelerate==1.1.0
 datasets==3.1.0
 deepspeed==0.15.4
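To confirm that an environment actually picked up the new pins, a minimal sketch using importlib.metadata (the version strings come from the diff above; the check itself is illustrative and not part of this commit):

from importlib.metadata import version

# Versions pinned in requirements.txt by this commit.
expected = {"peft": "0.14.0", "bitsandbytes": "0.45.0"}

for pkg, want in expected.items():
    have = version(pkg)  # raises PackageNotFoundError if the package is not installed
    status = "OK" if have == want else f"mismatch (requirements.txt pins {want})"
    print(f"{pkg}=={have}: {status}")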
13 changes: 0 additions & 13 deletions src/axolotl/utils/config/models/input/v0_4_1/__init__.py
@@ -1521,19 +1521,6 @@ def check_sample_packing_w_sdpa_bf16(cls, data):
 
         return data
 
-    @model_validator(mode="before")
-    @classmethod
-    def check_hopper_8bit_lora(cls, data):
-        is_sm_90: bool = (
-            data["capabilities"]
-            and data["capabilities"].get("compute_capability") == "sm_90"
-        )
-        if data.get("adapter") and data.get("load_in_8bit") and is_sm_90:
-            # see https://github.com/bitsandbytes-foundation/bitsandbytes/issues/538#issuecomment-2262945464
-            raise ValueError("8-bit LoRA is not supported on Hopper GPUs")
-
-        return data
-
     @model_validator(mode="before")
     @classmethod
     def check_fsdp_deepspeed(cls, data):
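The deleted validator read a precomputed capabilities mapping and compared its compute_capability entry against "sm_90". As context only, a minimal sketch of how such a capability string could be derived at runtime (assumes torch is installed and a CUDA device is visible; the helper name and formatting are illustrative, not axolotl's API):

import torch

def current_compute_capability() -> str | None:
    # Illustrative helper: format the active device's compute capability
    # the way the deleted validator expected, e.g. "sm_90" on Hopper (H100).
    if not torch.cuda.is_available():
        return None
    major, minor = torch.cuda.get_device_capability()
    return f"sm_{major}{minor}"

print(current_compute_capability())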
3 changes: 0 additions & 3 deletions tests/e2e/multigpu/test_llama.py
@@ -14,8 +14,6 @@
 
 from axolotl.utils.dict import DictDefault
 
-from ..utils import is_hopper
-
 LOG = logging.getLogger("axolotl.tests.e2e.multigpu")
 os.environ["WANDB_DISABLED"] = "true"
 
@@ -144,7 +142,6 @@ def test_lora_ddp_packed(self, temp_dir, gradient_accumulation_steps):
         ]
     )
 
-    @pytest.mark.skipif(is_hopper(), reason="h100 doesn't support 8-bit lora")
     def test_dpo_lora_ddp(self, temp_dir):
         # pylint: disable=duplicate-code
         cfg = DictDefault(
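The dropped skip marker depended on an is_hopper() helper imported from the tests' utils module, which this diff no longer needs. For illustration only, a hypothetical stand-in showing how such a pytest guard is typically wired up (the helper body here is an assumption, not the repository's actual implementation):

import pytest
import torch

def is_hopper() -> bool:
    # Hypothetical stand-in: treat any visible H100 as a Hopper GPU.
    if not torch.cuda.is_available():
        return False
    return "H100" in torch.cuda.get_device_name(0)

@pytest.mark.skipif(is_hopper(), reason="h100 doesn't support 8-bit lora")
def test_example():
    # Placeholder body; the real tests live in tests/e2e/multigpu/test_llama.py.
    assert True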
