Vodalus / llm_handler.py
Severian's picture
Update llm_handler.py
0055e2e verified
raw
history blame
1.02 kB
from openai import OpenAI
import llamanet
# Initialize LlamaNet
# NOTE(review): presumably llamanet.run() starts/attaches a local inference
# server and redirects the OpenAI client at it — confirm against llamanet docs.
llamanet.run()
# Create an instance of the OpenAI class (used by LlamaNet)
# Module-level side effect: constructed once at import time and shared by
# send_to_llamanet below.
client = OpenAI()
def send_to_llamanet(msg_list):
    """Stream a chat completion from LlamaNet and return the full text.

    Parameters
    ----------
    msg_list : list
        Chat messages in OpenAI format ({"role": ..., "content": ...} dicts).

    Returns
    -------
    tuple
        (response_text, None) on success, ("Error: ...", None) on failure.
        The second element is always None because LlamaNet does not report
        token-usage information.
    """
    try:
        stream = client.chat.completions.create(
            model="https://huggingface.co/arcee-ai/Arcee-Spark-GGUF/blob/main/Arcee-Spark-IQ4_XS.gguf",
            messages=msg_list,
            temperature=0.6,
            stream=True
        )
        # Collect streamed deltas and join once at the end.
        parts = []
        for chunk in stream:
            delta = chunk.choices[0].delta.content
            if delta is not None:
                parts.append(delta)
        return "".join(parts), None  # LlamaNet doesn't provide usage info
    except Exception as e:
        # Boundary handler: report the failure to the caller as text
        # rather than letting the exception propagate.
        print(f"Error in send_to_llamanet: {str(e)}")
        return f"Error: {str(e)}", None
def send_to_llm(provider, msg_list):
    """Dispatch a chat request to the named backend provider.

    Parameters
    ----------
    provider : str
        Backend identifier; only "llamanet" is supported.
    msg_list : list
        Chat messages forwarded unchanged to the provider.

    Raises
    ------
    ValueError
        If *provider* is anything other than "llamanet".
    """
    if provider != "llamanet":
        raise ValueError(f"Unknown provider: {provider}")
    return send_to_llamanet(msg_list)