Tonic committed
Commit f1d7f71
1 Parent(s): 4bfcc47

add flash attention

Files changed (1):
  1. requirements.txt +1 -2
requirements.txt CHANGED
@@ -1,7 +1,7 @@
 huggingface_hub
 einops
 sentence-transformers
-torch==2.2.0
+torch
 transformers
 openai
 python-dotenv
@@ -11,7 +11,6 @@ langchain-chroma
 unstructured[all-docs]
 libmagic
 gradio
-torch==2.2.0+cu121 # Ensure you're using the right PyTorch version with CUDA support
 flash-attn==2.6.3 # Flash attention module
 numpy<2 # Downgrade to avoid NumPy 2.0.1 conflicts
 pybind11>=2.12 # Ensure compatibility for modules needing pybind11
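
Since the commit message is "add flash attention", here is a minimal sketch of how flash-attn is typically enabled through transformers. The model id is a hypothetical placeholder, not a model this repo is known to use; the sketch assumes a CUDA GPU and the pinned flash-attn==2.6.3 build.

# Minimal sketch: routing attention through flash-attn via transformers.
# Assumes flash-attn is installed and a CUDA GPU is available; the
# model id below is a placeholder, not taken from this repo.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "your-org/your-causal-lm"  # hypothetical placeholder

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.float16,                # flash-attn kernels require fp16/bf16
    attn_implementation="flash_attention_2",  # use the flash-attn backend
    device_map="cuda",                        # flash-attn is CUDA-only
)

One plausible reason the hard torch pins were dropped in this commit: flash-attn compiles against whatever torch is already installed, so pinning torch==2.2.0+cu121 can conflict with the wheel pip selects. When the flash-attn build cannot see torch, installing it with "pip install flash-attn --no-build-isolation" after torch is the workaround its documentation recommends.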