Shreyas094 committed
Commit 58569df
1 Parent(s): 658f52f

Update app.py

Files changed (1):
  app.py +1 -4
app.py CHANGED
@@ -23,9 +23,6 @@ from langchain_community.llms import HuggingFaceHub
 from langchain_core.documents import Document
 from sentence_transformers import SentenceTransformer
 from llama_parse import LlamaParse
-import asyncio
-import nest_asyncio
-from your_module import EnhancedContextDrivenChatbot, ask_question, update_vectors, clear_cache
 
 huggingface_token = os.environ.get("HUGGINGFACE_TOKEN")
 llama_cloud_api_key = os.environ.get("LLAMA_CLOUD_API_KEY")
@@ -462,7 +459,7 @@ with gr.Blocks() as demo:
         return "", history
 
     submit_button.click(chat, inputs=[question_input, chatbot, temperature_slider, top_p_slider, repetition_penalty_slider, web_search_checkbox], outputs=[question_input, chatbot])
-
+
     clear_button = gr.Button("Clear Cache")
     clear_output = gr.Textbox(label="Cache Status")
     clear_button.click(clear_cache, inputs=[], outputs=clear_output)
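For context, below is a minimal, self-contained sketch of the Gradio wiring the second hunk touches: a "Clear Cache" button whose click handler writes its return value into a status textbox. The clear_cache body here is an assumption for illustration only; the real implementation lives elsewhere in app.py and is not part of this commit.

import gradio as gr

# Hypothetical stand-in for app.py's clear_cache; the real function is
# defined elsewhere in the app and is not shown in this diff.
def clear_cache():
    # e.g. drop cached vector stores / parsed documents here (assumption)
    return "Cache cleared."

with gr.Blocks() as demo:
    clear_button = gr.Button("Clear Cache")
    clear_output = gr.Textbox(label="Cache Status")
    # Clicking the button calls clear_cache() with no inputs and routes its
    # return value into the status textbox, mirroring the wiring in the diff.
    clear_button.click(clear_cache, inputs=[], outputs=clear_output)

if __name__ == "__main__":
    demo.launch()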