ppsingh committed on
Commit 5be75f1
1 Parent(s): f059070

Update app.py

Files changed (1)
  1. app.py +6 -6
app.py CHANGED
@@ -74,7 +74,7 @@ def start_chat(query,history):
 def finish_chat():
     return (gr.update(interactive = True,value = ""))
 
-async def chat(query,history,sources,reports):
+async def chat(query,history,sources,reports,subtype,year):
     """taking a query and a message history, use a pipeline (reformulation, retriever, answering) to yield a tuple of:
     (messages in gradio format, messages in langchain format, source documents)"""
 
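Note: for orientation, here is a minimal, hypothetical sketch (not code from this commit) of an async generator with the new signature. Only the parameter list and the two-element yield, matching the [chatbot, sources_textbox] outputs wired up in the last hunk, come from the diff; the real pipeline (reformulation, retriever, answering) is replaced by a stub loop, and the argument values in the demo are placeholders.

import asyncio

# Hypothetical stand-in for the Space's chat() generator; everything except
# the signature and the (history, docs_html) yield shape is a stub.
async def chat(query, history, sources, reports, subtype, year):
    history = history + [(query, "")]        # Gradio-style (user, bot) pairs assumed
    docs_html = ""                           # would hold the rendered source passages
    answer = ""
    for token in f"(stub) sources={sources}, subtype={subtype}, year={year}".split():
        answer += token + " "
        history[-1] = (query, answer.strip())
        yield history, docs_html             # Chatbot update + sources panel update
        await asyncio.sleep(0)               # hand control back between streamed tokens

async def _demo():
    async for hist, html in chat("What did the audit find?", [], "Ministry", [], "Consolidated", "2021"):
        pass
    print(hist[-1][1])

if __name__ == "__main__":
    asyncio.run(_demo())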
@@ -83,17 +83,17 @@ async def chat(query,history,sources,reports):
     #print(f"audience:{audience}")
     print(f"sources:{sources}")
     print(f"reports:{reports}")
+    print(f"reports:{subtype}")
+    print(f"reports:{year}")
     docs_html = ""
     output_query = ""
 
 
-    if reports is not None:
-        print(reports)
+    if len(reports) == 0:
         print(sources)
         #vectorstore = vectorstores[sources]
     else:
         print(reports)
-        print(sources)
         #vectorstore = vectorstores["allreports"]
 
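Note: this hunk swaps the old `if reports is not None:` check for `if len(reports) == 0:`, so the fallback branch now triggers on an empty selection rather than on None. Below is a hedged sketch of one way that branch could translate into a retrieval filter; the helper name, metadata keys, and sample values are assumptions, not taken from the commit (the vectorstore lines are still commented out there).

# Hypothetical helper: choose a metadata filter from the dropdown values.
def build_filter(sources, reports, subtype, year):
    if len(reports) == 0:
        # nothing specific selected: filter by source plus the new
        # category (subtype) and year dropdown values
        return {"source": sources, "subtype": subtype, "year": year}
    # explicit report selections override the coarser filters
    return {"filename": reports}

print(build_filter("Ministry", [], "Consolidated", "2021"))
# -> {'source': 'Ministry', 'subtype': 'Consolidated', 'year': '2021'}
print(build_filter("Ministry", ["audit_report_2021.pdf"], "Consolidated", "2021"))
# -> {'filename': ['audit_report_2021.pdf']}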
@@ -304,12 +304,12 @@ with gr.Blocks(title="Audit Q&A", css= "style.css", theme=theme,elem_id = "main-
     # https://www.gradio.app/docs/gradio/textbox#event-listeners-arguments
     (textbox
         .submit(start_chat, [textbox,chatbot], [textbox,tabs,chatbot],queue = False,api_name = "start_chat_textbox")
-        .then(chat, [textbox,chatbot, dropdown_sources,dropdown_reports], [chatbot,sources_textbox],concurrency_limit = 8,api_name = "chat_textbox")
+        .then(chat, [textbox,chatbot, dropdown_sources,dropdown_reports,dropdown_category,dropdown_year], [chatbot,sources_textbox],concurrency_limit = 8,api_name = "chat_textbox")
         .then(finish_chat, None, [textbox],api_name = "finish_chat_textbox"))
 
     (examples_hidden
         .change(start_chat, [examples_hidden,chatbot], [textbox,tabs,chatbot],queue = False,api_name = "start_chat_examples")
-        .then(chat, [examples_hidden,chatbot, dropdown_sources,dropdown_reports], [chatbot,sources_textbox],concurrency_limit = 8,api_name = "chat_examples")
+        .then(chat, [examples_hidden,chatbot, dropdown_sources,dropdown_reports,dropdown_category,dropdown_year], [chatbot,sources_textbox],concurrency_limit = 8,api_name = "chat_examples")
         .then(finish_chat, None, [textbox],api_name = "finish_chat_examples")
         )
 
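Note: this last hunk only lengthens the input lists of the two `.then(chat, ...)` calls so the values of two extra dropdowns reach chat() as the new subtype and year arguments. A self-contained sketch of that wiring pattern follows; the component labels, choices, and stub handlers are placeholders, not the Space's real ones.

import gradio as gr

def start_chat(query, history):
    # disable the textbox and append the pending user turn
    return gr.update(interactive=False), history + [(query, None)]

def finish_chat():
    # re-enable and clear the textbox once the answer is done
    return gr.update(interactive=True, value="")

async def chat(query, history, sources, reports, subtype, year):
    history[-1] = (query, f"(stub) sources={sources}, reports={reports}, subtype={subtype}, year={year}")
    yield history, ""  # outputs map to (chatbot, sources_textbox)

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    sources_textbox = gr.HTML()
    dropdown_sources = gr.Dropdown(["All"], value="All", label="Source")
    dropdown_reports = gr.Dropdown([], multiselect=True, label="Reports")
    dropdown_category = gr.Dropdown(["Consolidated"], label="Category")  # feeds the new `subtype` argument
    dropdown_year = gr.Dropdown(["2021", "2022"], label="Year")          # feeds the new `year` argument
    textbox = gr.Textbox(label="Ask a question")

    (textbox
        .submit(start_chat, [textbox, chatbot], [textbox, chatbot], queue=False)
        .then(chat,
              [textbox, chatbot, dropdown_sources, dropdown_reports,
               dropdown_category, dropdown_year],
              [chatbot, sources_textbox], concurrency_limit=8)
        .then(finish_chat, None, [textbox]))

if __name__ == "__main__":
    demo.launch()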
 