# Vodalus / llm_handler.py
# (Hugging Face page metadata removed: commit 6e72edd, 2.61 kB — not Python source.)
import requests
import json
from openai import OpenAI
from params import OPENAI_MODEL, OPENAI_KEY
import llamanet
# Module-level configuration for the local backends; override at runtime
# via set_local_model_base_url() / set_anything_llm_workspace().
local_model_base_url = "http://localhost:11434/v1"
# Placeholder — must be replaced with a real AnythingLLM workspace slug
# before send_to_anything_llm() is used.
anything_llm_workspace = "<input-workspace-name-here>"
# Shared OpenAI client used by send_to_chatgpt(). The key is a dummy value.
# NOTE(review): no base_url is passed, so this client targets api.openai.com
# unless `import llamanet` above patches the OpenAI client to route requests
# locally — confirm; otherwise local_model_base_url is never used.
client = OpenAI(api_key="dummy_key")
def set_local_model_base_url(url):
    """Override the module-level base URL used for the local model server."""
    global local_model_base_url
    local_model_base_url = url
def set_anything_llm_workspace(workspace):
    """Override the module-level AnythingLLM workspace slug."""
    global anything_llm_workspace
    anything_llm_workspace = workspace
def send_to_chatgpt(msg_list, model=None):
    """Stream a chat completion from the llamanet-served local model.

    Args:
        msg_list: OpenAI-style chat messages ({"role": ..., "content": ...}).
        model: optional model identifier; defaults to the Arcee-Spark GGUF
            checkpoint URL (resolved by llamanet). Existing callers are
            unaffected by this new keyword parameter.

    Returns:
        (response_text, usage) — usage is always None because usage stats
        are not available on streamed llamanet completions.
        On any exception, returns (f"Error: {e}", None) rather than raising,
        matching send_to_anything_llm's error contract.
    """
    if model is None:
        # Previous hard-coded default, kept for backward compatibility.
        model = 'https://huggingface.co/arcee-ai/Arcee-Spark-GGUF/blob/main/Arcee-Spark-IQ4_XS.gguf'
    try:
        completion = client.chat.completions.create(
            model=model,
            messages=msg_list,
            temperature=0.6,
            stream=True
        )
        # Collect streamed deltas and join once — avoids quadratic `+=`.
        parts = []
        for chunk in completion:
            delta = chunk.choices[0].delta.content
            if delta is not None:
                parts.append(delta)
        chatgpt_response = "".join(parts)
        # Usage information is not reported when streaming via llamanet.
        chatgpt_usage = None
        return chatgpt_response, chatgpt_usage
    except Exception as e:
        print(f"Error in send_to_chatgpt: {str(e)}")
        return f"Error: {str(e)}", None
def send_to_anything_llm(msg_list, timeout=60):
    """Send the concatenated message contents to an AnythingLLM workspace.

    Args:
        msg_list: chat messages; every "content" value is joined with spaces
            into a single prompt string (AnythingLLM takes one message).
        timeout: seconds before the HTTP request is aborted. `requests` has
            NO default timeout, so without this a dead server hangs forever.

    Returns:
        (textResponse, usage_dict) from the workspace chat endpoint, or
        (f"Error: {e}", None) on any request failure — matching
        send_to_chatgpt's error contract.
    """
    url = f'http://localhost:3001/api/v1/workspace/{anything_llm_workspace}/chat'
    headers = {
        'accept': 'application/json',
        # SECURITY(review): hard-coded API key checked into source — rotate it
        # and load from an environment variable or config instead.
        'Authorization': 'Bearer 0MACR41-7804XQB-MGC1GS0-FGSKB44',
        'Content-Type': 'application/json'
    }
    message_content = " ".join(msg["content"] for msg in msg_list if "content" in msg)
    data = {
        "message": message_content,
        "mode": "chat"
    }
    try:
        # `json=` serializes and sets Content-Type in one step.
        response = requests.post(url, headers=headers, json=data, timeout=timeout)
        response.raise_for_status()  # Raise for 4xx/5xx status codes
        response_data = response.json()
        chatgpt_response = response_data.get("textResponse")
        chatgpt_usage = response_data.get("usage", {})
        return chatgpt_response, chatgpt_usage
    except requests.RequestException as e:
        print(f"Error in send_to_anything_llm: {str(e)}")
        return f"Error: {str(e)}", None
def send_to_llm(provider, msg_list):
    """Dispatch msg_list to the backend named by provider.

    Supported providers: "local-model" (llamanet/OpenAI client) and
    "anything-llm" (AnythingLLM workspace). Raises ValueError otherwise.
    """
    if provider == "local-model":
        return send_to_chatgpt(msg_list)
    if provider == "anything-llm":
        return send_to_anything_llm(msg_list)
    raise ValueError(f"Unknown provider: {provider}")