0xCha0s committed on
Commit
9fc2714
1 Parent(s): 70542c9

Update app.py

Files changed (1)
  1. app.py +10 -11
app.py CHANGED
@@ -1,18 +1,17 @@
-%cd /content
-!apt-get -y install -qq aria2
+apt-get -y install -qq aria2
 
-!git clone -b V20231127 https://github.com/Troyanovsky/text-generation-webui
+git clone -b V20231127 https://github.com/Troyanovsky/text-generation-webui
 %cd /content/text-generation-webui
-!pip install -r requirements.txt
-!pip install -U gradio==3.50.2
+pip install -r requirements.txt
+pip install -U gradio==3.50.2
 
-!pip uninstall -y llama-cpp-python
-!CMAKE_ARGS="-DLLAMA_CUBLAS=on" FORCE_CMAKE=1 pip install llama-cpp-python --no-cache-dir
+pip uninstall -y llama-cpp-python
+CMAKE_ARGS="-DLLAMA_CUBLAS=on" FORCE_CMAKE=1 pip install llama-cpp-python --no-cache-dir
 
-!pip uninstall flash-attn
-!pip install --no-build-isolation flash-attn==2.3.0
+pip uninstall flash-attn
+pip install --no-build-isolation flash-attn==2.3.0
 
-!aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://huggingface.co/TheBloke/dolphin-2.2.1-mistral-7B-GGUF/resolve/main/dolphin-2.2.1-mistral-7b.Q5_K_M.gguf?download=true -d /content/text-generation-webui/models/ -o dolphin-2.2.1-mistral-7b.Q5_K_M.gguf
+aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://huggingface.co/TheBloke/dolphin-2.2.1-mistral-7B-GGUF/resolve/main/dolphin-2.2.1-mistral-7b.Q5_K_M.gguf?download=true -d /content/text-generation-webui/models/ -o dolphin-2.2.1-mistral-7b.Q5_K_M.gguf
 
 %cd /content/text-generation-webui
-!python server.py --share --n-gpu-layers 1000000000 --model dolphin-2.2.1-mistral-7b.Q5_K_M.gguf
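
Net effect of the commit: the IPython "!" shell escapes are dropped (along with the leading "%cd /content"), so most lines now read as plain shell commands, while the two surviving "%cd" magics still assume an IPython/Colab kernel. As a rough sketch only, not part of the commit, the same steps could be driven from an ordinary Python script along the lines below; the run() helper, the explicit clone target, and the added -y on the flash-attn uninstall are our assumptions, and a CUDA-capable Linux host with a writable /content (mirroring Colab) is presumed.

# Hypothetical sketch (not from the commit): the setup steps as a plain
# Python script, replacing the notebook "!" and "%cd" magics.
import os
import subprocess

def run(cmd, **env):
    # Run a shell command; fail fast on a nonzero exit, passing through
    # the current environment plus any per-call overrides.
    subprocess.run(cmd, shell=True, check=True, env={**os.environ, **env})

run("apt-get -y install -qq aria2")
run("git clone -b V20231127 https://github.com/Troyanovsky/text-generation-webui "
    "/content/text-generation-webui")  # explicit clone target (assumption)
os.chdir("/content/text-generation-webui")
run("pip install -r requirements.txt")
run("pip install -U gradio==3.50.2")

# Rebuild llama-cpp-python with cuBLAS so layers can be offloaded to the GPU.
run("pip uninstall -y llama-cpp-python")
run("pip install llama-cpp-python --no-cache-dir",
    CMAKE_ARGS="-DLLAMA_CUBLAS=on", FORCE_CMAKE="1")

run("pip uninstall -y flash-attn")  # "-y" added here; the commit omits it
run("pip install --no-build-isolation flash-attn==2.3.0")

# Download the quantized model into the webui's models directory
# (the URL is quoted so the shell does not treat "?" as a glob).
run("aria2c --console-log-level=error -c -x 16 -s 16 -k 1M "
    "'https://huggingface.co/TheBloke/dolphin-2.2.1-mistral-7B-GGUF/resolve/main/"
    "dolphin-2.2.1-mistral-7b.Q5_K_M.gguf?download=true' "
    "-d /content/text-generation-webui/models/ -o dolphin-2.2.1-mistral-7b.Q5_K_M.gguf")

# --n-gpu-layers 1000000000 is the usual "offload every layer" idiom.
run("python server.py --share --n-gpu-layers 1000000000 "
    "--model dolphin-2.2.1-mistral-7b.Q5_K_M.gguf")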