wagnercosta committed
Commit: fe9095d
Parent(s): 98ccbed

Add flash attention to requirements file

Files changed (2)
  1. app.py +2 -2
  2. requirements.txt +1 -0
app.py CHANGED
@@ -166,8 +166,8 @@ def process_and_visualize(text, model):
 
 
 
-with gr.Blocks(title="Phi-3 Mini 4k Instruct Graph (by Emergent Methods") as demo:
-    gr.Markdown("# Phi-3 Mini 4k Instruct Graph (by Emergent Methods)")
+with gr.Blocks(title="Phi-3 Instruct Graph (by Emergent Methods") as demo:
+    gr.Markdown("# Phi-3 Instruct Graph (by Emergent Methods)")
     gr.Markdown("Extract a JSON graph from a text input and visualize it.")
     with gr.Row():
         with gr.Column(scale=1):
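For context, a minimal runnable sketch of the Gradio layout this hunk touches, using the renamed title. Only the title and header strings, the second Markdown line, and the Row/Column scaffolding come from the diff; the input/output components, button wiring, and the stubbed process_and_visualize body are illustrative assumptions, not code from this commit.

import gradio as gr

def process_and_visualize(text, model):
    # Stub for illustration only; the real function (graph extraction and
    # visualization) is defined earlier in app.py and is not part of this hunk.
    return f"model={model}, chars={len(text)}"

with gr.Blocks(title="Phi-3 Instruct Graph (by Emergent Methods") as demo:
    gr.Markdown("# Phi-3 Instruct Graph (by Emergent Methods)")
    gr.Markdown("Extract a JSON graph from a text input and visualize it.")
    with gr.Row():
        with gr.Column(scale=1):
            # Hypothetical components; the actual inputs sit below this hunk.
            text_input = gr.Textbox(label="Text", lines=8)
            model_choice = gr.Textbox(label="Model")
            run_button = gr.Button("Extract graph")
        with gr.Column(scale=1):
            output_box = gr.Textbox(label="Result")
    run_button.click(process_and_visualize,
                     inputs=[text_input, model_choice],
                     outputs=output_box)

if __name__ == "__main__":
    demo.launch()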
requirements.txt CHANGED
@@ -8,3 +8,4 @@ spaces
 pyvis
 networkx
 spacy
+https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.9.post1/flash_attn-2.5.9.post1+cu118torch1.12cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
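The added line installs a prebuilt flash-attn 2.5.9.post1 wheel (CUDA 11.8, CPython 3.10, linux x86_64) rather than compiling it on the Space. Below is a hedged sketch of how flash attention is typically enabled when loading a model with transformers; the model id and dtype are assumptions for illustration and are not taken from this commit.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Illustrative checkpoint; the exact model id loaded by app.py is not shown in this commit.
model_id = "microsoft/Phi-3-mini-4k-instruct"

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,               # flash-attn kernels require fp16/bf16
    attn_implementation="flash_attention_2",  # uses the flash-attn wheel added above
)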