Update app.py
Browse files
app.py
CHANGED
@@ -65,35 +65,43 @@ vectorstore = get_vectorstore(docs)
|
|
65 |
|
66 |
|
67 |
|
68 |
-
def main(
|
69 |
-
|
70 |
-
|
71 |
-
st.set_page_config(page_title="Conversational RAG Chatbot", page_icon=":robot:")
|
72 |
-
st.title("Conversational RAG Chatbot")
|
73 |
|
74 |
-
|
|
|
|
|
75 |
|
76 |
-
|
77 |
-
|
|
|
|
|
78 |
|
79 |
-
|
80 |
-
|
81 |
-
|
|
|
82 |
|
83 |
-
conversation_chain = create_conversational_rag_chain(vectorstore)
|
84 |
-
input_dict = {"input": prompt, "chat_history": msgs.messages}
|
85 |
-
config = {"configurable": {"session_id": "any"}}
|
86 |
-
response = conversation_chain.invoke(input_dict, config)
|
87 |
-
st.chat_message("ai").write(response["answer"])
|
88 |
|
89 |
-
|
90 |
-
|
91 |
-
|
92 |
-
|
|
|
|
|
|
|
|
|
|
|
93 |
else:
|
94 |
-
st.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
95 |
|
96 |
-
st.session_state["chat_history"] = msgs
|
97 |
|
98 |
def create_conversational_rag_chain(vectorstore):
|
99 |
|
@@ -135,14 +143,7 @@ def create_conversational_rag_chain(vectorstore):
|
|
135 |
rag_chain = create_retrieval_chain(ha_retriever, question_answer_chain)
|
136 |
msgs = StreamlitChatMessageHistory(key="special_app_key")
|
137 |
|
138 |
-
|
139 |
-
rag_chain,
|
140 |
-
lambda session_id: msgs,
|
141 |
-
input_messages_key="input",
|
142 |
-
history_messages_key="chat_history",
|
143 |
-
output_messages_key="answer",
|
144 |
-
)
|
145 |
-
return conversation_chain
|
146 |
|
147 |
|
148 |
|
|
|
65 |
|
66 |
|
67 |
|
68 |
+
def main():
    """Streamlit entry point: render the multi-PDF chat UI and dispatch questions.

    Side effects: configures the page, injects CSS, initialises session-state
    keys, and hands any submitted question to `handle_userinput`.
    """
    st.set_page_config(page_title="Chat with multiple PDFs",
                       page_icon=":books:")
    st.write(css, unsafe_allow_html=True)

    # Initialise session keys up front so later reads never KeyError on the
    # first run of the script.
    if "conversation" not in st.session_state:
        st.session_state.conversation = None
    if "chat_history" not in st.session_state:
        st.session_state.chat_history = None

    st.header("Chat with multiple PDFs :books:")
    user_question = st.text_input("Ask a question about your documents:")
    if user_question:
        # BUG FIX: handle_userinput is defined with a required second
        # parameter (vectorstore); calling it with one argument raised a
        # TypeError. The module-level `vectorstore` (built near the top of
        # app.py via get_vectorstore(docs)) is passed through here.
        handle_userinput(user_question, vectorstore)
83 |
|
|
|
|
|
|
|
|
|
|
|
84 |
|
85 |
+
def handle_userinput(user_question, vectorstore):
    """Run the conversational RAG chain on *user_question* and render the chat.

    Stores the returned chat history in session state, renders alternating
    user/bot message bubbles, and — when the response carries source
    documents — lists each document's origin and content.
    """
    # BUG FIX: the original line read
    #     Rag_chain create_conversational_rag_chain()
    # which is a syntax error (missing `=`) and also omitted the required
    # vectorstore argument.
    rag_chain = create_conversational_rag_chain(vectorstore)
    response = rag_chain({'question': user_question})
    st.session_state.chat_history = response['chat_history']

    # Even indices are user turns, odd indices are bot turns.
    for i, message in enumerate(st.session_state.chat_history):
        if i % 2 == 0:
            st.write(user_template.replace(
                "{{MSG}}", message.content), unsafe_allow_html=True)
        else:
            st.write(bot_template.replace(
                "{{MSG}}", message.content), unsafe_allow_html=True)

    # BUG FIX: the guard checked 'retrieved_documents' while the loop read
    # response['source_documents'], so this section could never render (or
    # would KeyError if 'retrieved_documents' ever existed without
    # 'source_documents'). Use the same key for guard and access.
    if 'source_documents' in response:
        st.subheader("Retrieved Documents")
        for doc in response['source_documents']:
            # NOTE(review): assumes each doc carries metadata['source'] —
            # true for the standard PDF loaders, confirm against the loader used.
            st.write(f"Document: {doc.metadata['source']}")
            st.write(doc.page_content)
103 |
+
|
104 |
|
|
|
105 |
|
106 |
def create_conversational_rag_chain(vectorstore):
|
107 |
|
|
|
143 |
rag_chain = create_retrieval_chain(ha_retriever, question_answer_chain)
|
144 |
msgs = StreamlitChatMessageHistory(key="special_app_key")
|
145 |
|
146 |
+
return rag_chain
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
147 |
|
148 |
|
149 |
|