ppsingh committed
Commit 2703396
1 Parent(s): 958c322

Update app.py

Files changed (1): app.py (+3 -4)
app.py CHANGED
@@ -8,7 +8,7 @@ import re
 import json
 from auditqa.sample_questions import QUESTIONS
 from auditqa.reports import POSSIBLE_REPORTS
-from auditqa.engine.prompts import audience_prompts, answer_prompt_template, llama3_prompt_template, system_propmt, user_propmt
+from auditqa.engine.prompts import audience_prompts, answer_prompt_template, llama3_prompt
 from auditqa.doc_process import process_pdf
 from langchain_core.prompts import ChatPromptTemplate
 from langchain_core.output_parsers import StrOutputParser
@@ -72,7 +72,7 @@ async def chat(query,history,sources,reports):
 
     # get prompt
 
-    prompt = ChatPromptTemplate.from_template(llama3_prompt_template.format(system_prompt=system_propmt,user_prompt=user_propmt))
+    prompt = ChatPromptTemplate.from_template(llama3_prompt)
 
     # get llm
     # llm_qa = HuggingFaceEndpoint(
@@ -89,8 +89,7 @@ async def chat(query,history,sources,reports):
     llm_qa = HuggingFaceEndpoint(
         endpoint_url= "https://mnczdhmrf7lkfd9d.eu-west-1.aws.endpoints.huggingface.cloud",
         task="text-generation",
-        huggingfacehub_api_token=HF_token,
-        model_kwargs = {'stop':["<|eot_id|>"]})
+        huggingfacehub_api_token=HF_token)
 
 
     # create rag chain
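In the old code, the Llama 3 prompt was assembled at call time by formatting llama3_prompt_template with separate system and user strings (whose names were misspelled as system_propmt and user_propmt). After this commit, auditqa.engine.prompts is instead expected to export a single pre-assembled llama3_prompt string. A minimal sketch of what that constant could look like, assuming the usual Llama 3 chat-format tokens and {context}/{question} placeholders (only the variable name comes from the diff):

    # Hypothetical auditqa/engine/prompts.py after this commit.
    # Only the name `llama3_prompt` appears in the diff; the placeholder names
    # and the instruction text below are illustrative assumptions.
    llama3_prompt = (
        "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n"
        "You answer questions about audit reports using only the provided context.<|eot_id|>"
        "<|start_header_id|>user<|end_header_id|>\n\n"
        "Context:\n{context}\n\nQuestion: {question}<|eot_id|>"
        "<|start_header_id|>assistant<|end_header_id|>\n\n"
    )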
 
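The endpoint call also drops model_kwargs={'stop': ['<|eot_id|>']}, leaving stop handling to the dedicated Inference Endpoint. Putting the post-commit pieces together, here is a rough sketch of the wiring around the "create rag chain" step, assuming a standard LCEL prompt | llm | parser composition; the import path for HuggingFaceEndpoint, the rag_chain name, and the context_retrieved variable are assumptions, while the prompt, endpoint, and parser lines mirror the diff:

    # Sketch only: the code after "# create rag chain" is not shown in this diff.
    # HF_token is assumed to be read from the Space secrets elsewhere in app.py.
    from auditqa.engine.prompts import llama3_prompt
    from langchain_core.prompts import ChatPromptTemplate
    from langchain_core.output_parsers import StrOutputParser
    from langchain_community.llms import HuggingFaceEndpoint  # import path assumed

    prompt = ChatPromptTemplate.from_template(llama3_prompt)

    llm_qa = HuggingFaceEndpoint(
        endpoint_url="https://mnczdhmrf7lkfd9d.eu-west-1.aws.endpoints.huggingface.cloud",
        task="text-generation",
        huggingfacehub_api_token=HF_token,
    )

    # Inside the async chat() handler: fill the prompt with the retrieved
    # context and the user question, call the endpoint, parse to plain text.
    rag_chain = prompt | llm_qa | StrOutputParser()
    answer = await rag_chain.ainvoke({"context": context_retrieved, "question": query})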