Severian committed
Commit dcda0a3
1 Parent(s): 55f36da

Update main.py

Files changed (1)
  1. main.py +3 -3
main.py CHANGED
@@ -3,7 +3,7 @@ import json
 import numpy as np
 from wiki import search as search_wikipedia
 from concurrent.futures import ThreadPoolExecutor
-from llm_handler import send_to_llm
+from llm_handler import send_to_llm, agent, settings
 from params import OUTPUT_FILE_PATH, NUM_WORKERS
 from system_messages import SYSTEM_MESSAGES_VODALUS
 from topics import TOPICS
@@ -50,7 +50,7 @@ async def generate_data(
     msg_list = [msg_context, {"role": "user", "content": f"Generate a question based on the SUBJECT_AREA: {topic_selected}"}]
 
     # Send to LLM for question generation
-    question, _ = send_to_llm("llamanet", msg_list)
+    question, _ = send_to_llm(agent, msg_list)
 
     # Prepare message list for LLM to generate the answer
     msg_list_answer = [
@@ -59,7 +59,7 @@ async def generate_data(
     ]
 
     # Send to LLM for answer generation
-    answer, _ = send_to_llm("llamanet", msg_list_answer)
+    answer, _ = send_to_llm(agent, msg_list_answer)
 
     # Prepare data for output (excluding usage information)
     data = {
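
For reference, a minimal sketch of the llm_handler interface this diff implies. Only the names send_to_llm, agent, and settings come from the changed import line; the class shapes, fields, and stub bodies below are illustrative assumptions, not the repository's actual implementation. The sketch only shows the (text, usage) return shape that main.py unpacks and whose second element it discards.

# Hypothetical sketch of the llm_handler interface implied by this commit.
# send_to_llm, agent, and settings are the imported names from the diff;
# everything else is an assumption for illustration only.
from dataclasses import dataclass, field


@dataclass
class Settings:
    model: str = "local-model"        # assumed default, not taken from the repo
    temperature: float = 0.7
    max_tokens: int = 1024


@dataclass
class Agent:
    settings: Settings = field(default_factory=Settings)

    def chat(self, messages: list[dict]) -> tuple[str, dict]:
        # A real agent would call a local or hosted model here; this stub only
        # demonstrates the (completion_text, usage_info) return shape.
        return "stub completion", {"prompt_tokens": 0, "completion_tokens": 0}


settings = Settings()
agent = Agent(settings)


def send_to_llm(agent: Agent, msg_list: list[dict]) -> tuple[str, dict]:
    # main.py calls this as `question, _ = send_to_llm(agent, msg_list)` and
    # ignores the usage info, so only the first element is consumed downstream.
    return agent.chat(msg_list)

Under these assumptions, the old call send_to_llm("llamanet", msg_list) passed what was presumably a backend or model identifier string, while the new code passes the shared agent object instead, which is why the import line grows to include agent and settings.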