Commit

Don't install flash-attention on windows + cuda 11
oobabooga committed Oct 25, 2023
1 parent 0ced78f · commit 2d97897
Showing 1 changed file with 3 additions and 0 deletions.
3 changes: 3 additions & 0 deletions one_click.py
@@ -289,6 +289,9 @@ def update_requirements(initial_installation=False):
         textgen_requirements = [req.replace('+cu121', '+cu117').replace('+cu122', '+cu117').replace('torch2.1', 'torch2.0') for req in textgen_requirements]
     elif is_cuda118:
         textgen_requirements = [req.replace('+cu121', '+cu118').replace('+cu122', '+cu118') for req in textgen_requirements]
 
+    if is_windows() and (is_cuda117 or is_cuda118): # No flash-attention on Windows for CUDA 11
+        textgen_requirements = [req for req in textgen_requirements if 'bdashore3/flash-attention' not in req]
+
     with open('temp_requirements.txt', 'w') as file:
         file.write('\n'.join(textgen_requirements))
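
For illustration, here is a minimal standalone sketch of what the added filter does. The requirement strings and the on_windows / cuda_11 flags are hypothetical stand-ins for the real requirements entries and for the script's is_windows() / is_cuda117 / is_cuda118 checks; this is not code from the repository.

# Minimal sketch of the added filter (hypothetical values, not repository code).
textgen_requirements = [
    "torch==2.1.0+cu118",
    "https://github.com/bdashore3/flash-attention/releases/placeholder.whl",  # hypothetical wheel entry
    "transformers==4.34.0",
]

on_windows = True  # stands in for is_windows()
cuda_11 = True     # stands in for is_cuda117 or is_cuda118

if on_windows and cuda_11:
    # Drop the flash-attention wheel entry; per the commit, flash-attention
    # is not installed on Windows with CUDA 11.
    textgen_requirements = [req for req in textgen_requirements if 'bdashore3/flash-attention' not in req]

print(textgen_requirements)
# ['torch==2.1.0+cu118', 'transformers==4.34.0']

Matching on the 'bdashore3/flash-attention' substring removes only the flash-attention wheel entry while leaving every other pinned requirement untouched.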
