From 0de145718909fd2b8ff759ed5c721a02abcb0764 Mon Sep 17 00:00:00 2001
From: Wing Lian
Date: Thu, 16 Nov 2023 10:42:36 -0500
Subject: [PATCH] try #2: pin hf transformers and accelerate to latest
 release, don't reinstall pytorch (#867)

* isolate torch from the requirements.txt

* fix typo for removed line ending

* pin transformers and accelerate to latest releases

* try w auto-gptq==0.5.1

* update README to remove manual peft install

* pin xformers to 0.0.22

* bump flash-attn to 2.3.3

* pin flash attn to exact version
---
 .github/workflows/tests.yml |  1 +
 README.md                   |  1 -
 requirements.txt            | 12 +++++-------
 3 files changed, 6 insertions(+), 8 deletions(-)

diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 04fe53c00a..9103126ce1 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -71,6 +71,7 @@ jobs:
 
     - name: Install dependencies
       run: |
+        pip3 install --extra-index-url https://download.pytorch.org/whl/cu118 -U torch==2.0.1
         pip3 uninstall -y transformers accelerate
         pip3 install -U -e .[flash-attn]
         pip3 install -r requirements-tests.txt
diff --git a/README.md b/README.md
index ca972d68ac..c859426af7 100644
--- a/README.md
+++ b/README.md
@@ -91,7 +91,6 @@ cd axolotl
 
 pip3 install packaging
 pip3 install -e '.[flash-attn,deepspeed]'
-pip3 install -U git+https://github.com/huggingface/peft.git
 
 # finetune lora
 accelerate launch -m axolotl.cli.train examples/openllama-3b/lora.yml
diff --git a/requirements.txt b/requirements.txt
index dec9398327..0e182de51d 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,22 +1,20 @@
---extra-index-url https://download.pytorch.org/whl/cu118
 --extra-index-url https://huggingface.github.io/autogptq-index/whl/cu118/
-torch==2.0.1
-auto-gptq==0.4.2
+auto-gptq==0.5.1
 packaging
 peft==0.6.0
-transformers @ git+https://github.com/huggingface/transformers.git@acc394c4f5e1283c19783581790b3dc3105a3697
+transformers==4.35.1
 bitsandbytes>=0.41.1
-accelerate @ git+https://github.com/huggingface/accelerate@80da9cfb09bb3cc9f1b385cb55d6b90d025a5fd9
+accelerate==0.24.1
 deepspeed
 addict
 fire
 PyYAML>=6.0
 datasets>=2.14.0
-flash-attn>=2.3.0
+flash-attn==2.3.3
 sentencepiece
 wandb
 einops
-xformers>=0.0.22
+xformers==0.0.22
 optimum==1.13.2
 hf_transfer
 colorama
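For reference, a minimal sketch of the install order this patch implies, since torch is no longer pulled in via requirements.txt: install torch against the desired CUDA wheel index first, then install axolotl. The cu118 index and the extras shown are taken from the CI workflow and README hunks above, not a prescribed command.

# install torch up front (cu118 wheel index, matching the CI step above)
pip3 install --extra-index-url https://download.pytorch.org/whl/cu118 -U torch==2.0.1
# then install axolotl; transformers==4.35.1 and accelerate==0.24.1 come from the pinned requirements.txt
pip3 install packaging
pip3 install -e '.[flash-attn,deepspeed]'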