Severian committed on
Commit
6e78bd0
1 Parent(s): a04654d

Update llm_handler.py

Browse files
Files changed (1) hide show
  1. llm_handler.py +25 -28
llm_handler.py CHANGED
@@ -1,34 +1,31 @@
1
- from openai import OpenAI
2
- import llamanet
 
 
3
 
4
- # Initialize LlamaNet
5
- llamanet.run()
6
 
7
- # Create an instance of the OpenAI class (used by LlamaNet)
8
- client = OpenAI()
9
 
10
- def send_to_llamanet(msg_list):
 
 
 
 
 
 
 
 
 
 
 
 
 
11
  try:
12
- completion = client.chat.completions.create(
13
- model="https://huggingface.co/arcee-ai/Arcee-Spark-GGUF/blob/main/Arcee-Spark-IQ4_XS.gguf",
14
- messages=msg_list,
15
- temperature=0.6,
16
- stream=True
17
- )
18
-
19
- response = ""
20
- for chunk in completion:
21
- if chunk.choices[0].delta.content is not None:
22
- response += chunk.choices[0].delta.content
23
-
24
- return response, None # LlamaNet doesn't provide usage info
25
-
26
  except Exception as e:
27
- print(f"Error in send_to_llamanet: {str(e)}")
28
  return f"Error: {str(e)}", None
29
-
30
- def send_to_llm(provider, msg_list):
31
- if provider == "llamanet":
32
- return send_to_llamanet(msg_list)
33
- else:
34
- raise ValueError(f"Unknown provider: {provider}")
 
1
# Third-party: llama-cpp-python and llama-cpp-agent.
from llama_cpp import Llama
from llama_cpp_agent import LlamaCppAgent, MessagesFormatterType
from llama_cpp_agent.providers import LlamaCppPythonProvider

# Load the local GGUF model (Arcee Spark, Q4_K_M quantization).
# NOTE(review): path, thread count, and GPU layer count are hard-coded —
# confirm they match the deployment environment.
llama_model = Llama(
    "Arcee-Spark-GGUF/Arcee-Spark-Q4_K_M.gguf",
    n_batch=1024,
    n_threads=10,
    n_gpu_layers=33,
    n_ctx=2048,
    verbose=False,
)

# Wrap the model in a provider and build a chat agent on top of it.
provider = LlamaCppPythonProvider(llama_model)

agent = LlamaCppAgent(
    provider,
    system_prompt="You are a helpful assistant.",
    predefined_messages_formatter_type=MessagesFormatterType.CHATML,
    debug_output=True,
)

# Generation settings shared by every call to the agent.
settings = provider.get_provider_default_settings()
settings.max_tokens = 2000
settings.stream = True
25
def send_to_llm(provider, msg_list):
    """Forward a chat message list to the module-level agent.

    Args:
        provider: Kept for backward compatibility with existing callers;
            the value is not used here — the module-level ``agent`` and
            ``settings`` are always consulted. NOTE(review): this parameter
            also shadows the module-level ``provider``; confirm callers
            before removing it.
        msg_list: Messages passed straight through to ``agent.chat``.

    Returns:
        A ``(text, usage)`` tuple where ``usage`` is always ``None``
        (no token-usage information is available from the agent). On any
        exception the error is printed and returned as the text instead
        of being raised, so callers never see an exception.
    """
    try:
        reply = agent.chat(msg_list, settings=settings)
        return reply.content, None  # agent provides no usage info
    except Exception as e:
        # Best-effort: surface the failure as the reply text.
        print(f"Error in send_to_llm: {str(e)}")
        return f"Error: {str(e)}", None