|
from openai import OpenAI |
|
from params import OPENAI_MODEL, OPENAI_API_KEY |
|
|
|
|
|
# Module-level OpenAI client shared by every call in this file.
# NOTE(review): assumes OPENAI_API_KEY in params.py holds a valid key — confirm.
client = OpenAI(api_key=OPENAI_API_KEY)
|
|
|
def send_to_chatgpt(msg_list):
    """Stream a chat completion from OpenAI and return the assembled text.

    Args:
        msg_list: list of chat message dicts ({"role": ..., "content": ...})
            passed straight through to the Chat Completions API.

    Returns:
        tuple: (response_text, usage). ``usage`` is always ``None`` because
        token usage is not collected on this streaming path. On failure the
        function does NOT raise; it returns ("Error: <message>", None) so
        callers always receive a 2-tuple.
    """
    try:
        completion = client.chat.completions.create(
            model=OPENAI_MODEL,
            messages=msg_list,
            temperature=0.6,
            stream=True,
        )

        # Accumulate streamed deltas in a list and join once at the end
        # (avoids quadratic string concatenation on long responses).
        parts = []
        for chunk in completion:
            # Some stream chunks arrive with an empty `choices` list
            # (e.g. trailing usage / content-filter chunks) — guard
            # before indexing to avoid an IndexError mid-stream.
            if chunk.choices and chunk.choices[0].delta.content is not None:
                parts.append(chunk.choices[0].delta.content)

        chatgpt_response = "".join(parts)
        # Usage is unavailable without stream_options={"include_usage": True};
        # keep the (text, usage) shape for callers.
        chatgpt_usage = None
        return chatgpt_response, chatgpt_usage
    except Exception as e:
        # Deliberate best-effort contract: report the error as the response
        # text rather than raising, so callers always get a 2-tuple.
        print(f"Error in send_to_chatgpt: {str(e)}")
        return f"Error: {str(e)}", None
|
|
|
def send_to_llm(provider, msg_list): |
|
if provider == "llamanet": |
|
return send_to_chatgpt(msg_list) |
|
else: |
|
raise ValueError(f"Unknown provider: {provider}") |
|
|