From 2d97897a2567df339caaaf74c5f3e2363fcf3e72 Mon Sep 17 00:00:00 2001
From: oobabooga <112222186+oobabooga@users.noreply.github.com>
Date: Wed, 25 Oct 2023 11:21:18 -0700
Subject: [PATCH] Don't install flash-attention on windows + cuda 11

---
 one_click.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/one_click.py b/one_click.py
index 8fc5cfcef1..169d328b1d 100644
--- a/one_click.py
+++ b/one_click.py
@@ -289,6 +289,9 @@ def update_requirements(initial_installation=False):
         textgen_requirements = [req.replace('+cu121', '+cu117').replace('+cu122', '+cu117').replace('torch2.1', 'torch2.0') for req in textgen_requirements]
     elif is_cuda118:
         textgen_requirements = [req.replace('+cu121', '+cu118').replace('+cu122', '+cu118') for req in textgen_requirements]
+    if is_windows() and (is_cuda117 or is_cuda118):  # No flash-attention on Windows for CUDA 11
+        textgen_requirements = [req for req in textgen_requirements if 'bdashore3/flash-attention' not in req]
+
     with open('temp_requirements.txt', 'w') as file:
         file.write('\n'.join(textgen_requirements))
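
The sketch below is a minimal, standalone illustration of the filtering step the patch adds: on Windows with a CUDA 11.x toolchain, any requirement line pointing at the bdashore3/flash-attention wheel is dropped before the requirements file is written. The helper names, the cuda_version parameter, and the example requirement strings here are hypothetical stand-ins for the installer's own is_windows()/is_cuda117/is_cuda118 logic, not the actual one_click.py code.

    # Standalone sketch of the flash-attention filtering, assuming
    # simplified stand-ins for the installer's platform/CUDA checks.
    import platform


    def is_windows() -> bool:
        # Conceptually the same check the installer performs.
        return platform.system() == "Windows"


    def filter_requirements(requirements, cuda_version):
        """Drop the prebuilt flash-attention wheel on Windows + CUDA 11.x.

        cuda_version is a hypothetical parameter standing in for the
        installer's is_cuda117/is_cuda118 flags.
        """
        if is_windows() and cuda_version.startswith("11."):
            # No flash-attention wheel is available for this combination,
            # so remove any requirement that references it.
            requirements = [req for req in requirements
                            if 'bdashore3/flash-attention' not in req]
        return requirements


    if __name__ == "__main__":
        # Example requirement lines (placeholder URL, for illustration only).
        reqs = [
            "torch==2.1.0+cu118",
            "https://github.com/bdashore3/flash-attention/releases/download/example/flash_attn-example.whl",
            "transformers",
        ]
        print(filter_requirements(reqs, cuda_version="11.8"))

On a Windows machine the example would print only the torch and transformers entries; on Linux, or with a CUDA 12.x version string, the flash-attention line would be kept.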