File size: 2,054 Bytes
d6c416b
48e31b6
3e312b7
dd34b85
6910501
3a423b8
d6c416b
3e312b7
 
 
d6c416b
 
 
46dd2a1
6910501
d6c416b
6910501
d6c416b
6910501
 
 
 
 
 
3a423b8
6910501
d6c416b
 
 
 
 
3e312b7
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
d6c416b
3a423b8
3e312b7
 
d6c416b
 
3a423b8
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
from openai import OpenAI
from params import OPENAI_MODEL, OPENAI_API_KEY
import llamanet

# Create an instance of the OpenAI class
# Module-level singleton used by send_to_chatgpt(); the key comes from params.py.
client = OpenAI(api_key=OPENAI_API_KEY)

# Initialize LlamaNet client
# Module-level singleton used by send_to_llamanet().
# NOTE(review): assumes llamanet.Client() needs no credentials/endpoint — confirm.
llamanet_client = llamanet.Client()

def send_to_chatgpt(msg_list):
    """Send a chat completion request to OpenAI and collect the streamed reply.

    Args:
        msg_list: List of message dicts with "role" and "content" keys, in the
            format expected by the Chat Completions API.

    Returns:
        tuple: (response_text, usage). ``usage`` is always ``None`` because
        streaming responses do not include token usage unless explicitly
        requested. On any failure, returns ("Error: <message>", None)
        instead of raising, so callers never see an exception.
    """
    try:
        completion = client.chat.completions.create(
            model=OPENAI_MODEL,
            messages=msg_list,
            temperature=0.6,
            stream=True
        )

        # Accumulate streamed deltas in a list and join once, avoiding
        # quadratic string concatenation.
        parts = []
        for chunk in completion:
            # Some stream chunks carry an empty ``choices`` list (e.g.
            # trailing metadata chunks) — guard before indexing to avoid
            # an IndexError mid-stream.
            if chunk.choices and chunk.choices[0].delta.content is not None:
                parts.append(chunk.choices[0].delta.content)
        chatgpt_response = "".join(parts)

        # Token usage is not included in streamed responses by default.
        chatgpt_usage = None
        return chatgpt_response, chatgpt_usage
    except Exception as e:
        print(f"Error in send_to_chatgpt: {str(e)}")
        return f"Error: {str(e)}", None

def send_to_llamanet(msg_list):
    """Send a chat completion request to LlamaNet and collect the streamed reply.

    Args:
        msg_list: List of message dicts with "role" and "content" keys.

    Returns:
        tuple: (response_text, usage). ``usage`` is always ``None`` —
        LlamaNet does not provide usage information. On any failure,
        returns ("Error: <message>", None) instead of raising, matching
        the contract of send_to_chatgpt().
    """
    try:
        # Convert msg_list to the format expected by LlamaNet
        llamanet_messages = [{"role": msg["role"], "content": msg["content"]} for msg in msg_list]

        # Send request to LlamaNet
        response = llamanet_client.chat.completions.create(
            model="llamanet",
            messages=llamanet_messages,
            stream=True
        )

        # Accumulate streamed deltas in a list and join once, avoiding
        # quadratic string concatenation.
        parts = []
        for chunk in response:
            # Guard against chunks with an empty ``choices`` list before
            # indexing — mirrors the fix in send_to_chatgpt().
            if chunk.choices and chunk.choices[0].delta.content is not None:
                parts.append(chunk.choices[0].delta.content)
        llamanet_response = "".join(parts)

        # LlamaNet doesn't provide usage information
        llamanet_usage = None
        return llamanet_response, llamanet_usage
    except Exception as e:
        print(f"Error in send_to_llamanet: {str(e)}")
        return f"Error: {str(e)}", None

def send_to_llm(provider, msg_list):
    """Route a chat request to the backend named by ``provider``.

    Args:
        provider: Either "llamanet" or "openai".
        msg_list: List of message dicts forwarded unchanged to the backend.

    Returns:
        tuple: (response_text, usage) as produced by the selected backend.

    Raises:
        ValueError: If ``provider`` is not a recognized backend name.
    """
    # Guard clause: reject unknown providers up front.
    if provider not in ("llamanet", "openai"):
        raise ValueError(f"Unknown provider: {provider}")
    if provider == "llamanet":
        return send_to_llamanet(msg_list)
    return send_to_chatgpt(msg_list)