ppsingh committed
Commit a5d5a26
1 parent: 1598ceb

Update app.py

Files changed (1):
  app.py +25 -20
app.py CHANGED
@@ -188,7 +188,7 @@ async def chat(query,history,sources,reports,subtype,year):
     callbacks = [StreamingStdOutCallbackHandler()]
     llm_qa = HuggingFaceEndpoint(
         endpoint_url="https://howaqfw0lpap12sg.us-east-1.aws.endpoints.huggingface.cloud",
-        max_new_tokens=512*3,
+        max_new_tokens=1024,
         top_k=10,
         top_p=0.95,
         typical_p=0.95,
@@ -200,26 +200,31 @@ async def chat(query,history,sources,reports,subtype,year):
     # create rag chain
     chat_model = ChatHuggingFace(llm=llm_qa)
     chain = chat_model | StrOutputParser()
-
-    ###-------------------------- get answers ---------------------------------------
-    answer_lst = []
-    for question, context in zip(question_lst , context_retrieved_lst):
-        answer = chain.invoke(messages)
-        answer_lst.append(answer)
-    docs_html = []
-    for i, d in enumerate(context_retrieved, 1):
-        docs_html.append(make_html_source(d, i))
-    docs_html = "".join(docs_html)
-
-    previous_answer = history[-1][1]
-    previous_answer = previous_answer if previous_answer is not None else ""
-    answer_yet = previous_answer + answer_lst[0]
-    answer_yet = parse_output_llm_with_sources(answer_yet)
-    history[-1] = (query,answer_yet)
-
-    history = [tuple(x) for x in history]
+
+    try:
+        async for op in result:
+
+            ###-------------------------- get answers ---------------------------------------
+            answer_lst = []
+            for question, context in zip(question_lst , context_retrieved_lst):
+                answer = chain.invoke(messages)
+                answer_lst.append(answer)
+            docs_html = []
+            for i, d in enumerate(context_retrieved, 1):
+                docs_html.append(make_html_source(d, i))
+            docs_html = "".join(docs_html)
 
-    yield history,docs_html
+            previous_answer = history[-1][1]
+            previous_answer = previous_answer if previous_answer is not None else ""
+            answer_yet = previous_answer + answer_lst[0]
+            answer_yet = parse_output_llm_with_sources(answer_yet)
+            history[-1] = (query,answer_yet)
+
+            history = [tuple(x) for x in history]
+
+            yield history,docs_html
+    except Exception as e:
+        raise gr.Error(f"{e}")
     #process_pdf()
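For context, here is a minimal, hedged sketch of the pattern the added code moves toward: stream partial answers into the Gradio chatbot history from inside a try/except so that any failure in the stream surfaces in the UI as gr.Error. Only gr.Error and the yield-per-chunk history update reflect the commit itself; fake_stream, the Blocks wiring, and the handler signature are hypothetical stand-ins for illustration, not the app's real code.

import asyncio
import gradio as gr

async def fake_stream(query: str):
    # Hypothetical stand-in for the real async result stream (e.g. a chain .astream call).
    for chunk in ["Partial ", "answer ", f"for: {query}"]:
        await asyncio.sleep(0.1)
        yield chunk

async def chat(query, history):
    # Append a new (user, bot) pair and grow the bot side as chunks arrive,
    # mirroring the history[-1] = (query, answer_yet) update in the commit.
    history = (history or []) + [(query, "")]
    try:
        async for chunk in fake_stream(query):
            previous = history[-1][1] or ""
            history[-1] = (query, previous + chunk)
            yield history
    except Exception as e:
        # Same convention as the commit: surface stream failures in the UI.
        raise gr.Error(f"{e}")

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    box = gr.Textbox()
    box.submit(chat, [box, chatbot], [chatbot])

if __name__ == "__main__":
    demo.launch()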