Update main.py
Browse files
main.py
CHANGED
@@ -1,27 +1,42 @@
|
|
1 |
-
from ctransformers import AutoModelForCausalLM
|
2 |
from fastapi import FastAPI, Form
|
|
|
3 |
from pydantic import BaseModel
|
|
|
4 |
|
5 |
-
#Model loading
|
6 |
-
llm = AutoModelForCausalLM.from_pretrained(
|
7 |
-
|
8 |
-
|
9 |
-
|
|
|
10 |
)
|
11 |
-
|
12 |
|
13 |
-
#Pydantic object
|
14 |
-
class
|
15 |
prompt: str
|
16 |
-
|
17 |
-
#Fast API
|
18 |
app = FastAPI()
|
19 |
|
20 |
-
#
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
21 |
@app.post("/llm_on_cpu")
|
22 |
-
async def stream(item:
|
23 |
system_prompt = 'Below is an instruction that describes a task. Write a response that appropriately completes the request.'
|
24 |
E_INST = "</s>"
|
25 |
-
user, assistant = "
|
26 |
prompt = f"{system_prompt}{E_INST}\n{user}\n{item.prompt.strip()}{E_INST}\n{assistant}\n"
|
27 |
return llm(prompt)
|
|
|
|
|
1 |
from fastapi import FastAPI, Form
|
2 |
+
from fastapi.middleware.cors import CORSMiddleware
|
3 |
from pydantic import BaseModel
|
4 |
+
# NOTE(review): this must be ctransformers, not HF transformers. Every
# keyword below (model_type=, max_new_tokens=, threads=) and the bare local
# GGUF file path are the ctransformers API, and the endpoint later calls the
# model as ``llm(prompt)`` — the ctransformers callable interface. HF
# transformers' ``from_pretrained`` would reject these kwargs outright.
from ctransformers import AutoModelForCausalLM

# Model loading: quantized Zephyr-7B (GGUF) for CPU-only inference.
llm = AutoModelForCausalLM.from_pretrained(
    "zephyr-7b-beta.Q4_K_S.gguf",  # local 4-bit quantized weights file
    model_type="mistral",          # Zephyr-7B is a Mistral fine-tune
    max_new_tokens=1096,           # generation cap — presumably meant 1024; TODO confirm
    threads=3,                     # CPU threads used for generation
)
|
|
|
13 |
|
14 |
+
class Validation(BaseModel):
    """Request body schema for the completion endpoint.

    Pydantic validates the incoming JSON and rejects requests
    that do not carry a string ``prompt`` field.
    """

    # Raw user prompt, forwarded verbatim (stripped) to the model.
    prompt: str
17 |
+
|
18 |
+
# Browser origins allowed to call this API (the Flutter web front-end).
origins = [
    "http://localhost",  # Replace with the address of your Flutter web app
    "http://localhost:55345",  # Add the port used by your Flutter web app
]

# FastAPI application instance.
app = FastAPI()

# CORS middleware so the browser-hosted front-end can reach this server
# across origins; credentials plus any method/header are permitted for
# the whitelisted origins above.
app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
|
34 |
+
|
35 |
+
# Zephyr completion endpoint (CPU inference).
@app.post("/llm_on_cpu")
async def stream(item: Validation):
    """Run the loaded model over the submitted prompt and return its output.

    Builds an instruction-style prompt around ``item.prompt`` and passes it
    to the module-level ``llm`` callable.
    """
    # NOTE(review): this hand-rolled template (Alpaca-style instruction with
    # </s> separators) is not the official Zephyr chat template
    # (<|system|>/<|user|>/<|assistant|>) — confirm it is intentional.
    instruction = 'Below is an instruction that describes a task. Write a response that appropriately completes the request.'
    eos = "</s>"
    user_turn = ""
    assistant_turn = ""
    prompt = f"{instruction}{eos}\n{user_turn}\n{item.prompt.strip()}{eos}\n{assistant_turn}\n"
    return llm(prompt)
|