Update app.py
app.py CHANGED
@@ -135,14 +135,13 @@ def create_conversational_rag_chain(vectorstore):
 which can be understood without the chat history. Do NOT answer the question,
 just reformulate it if needed and otherwise return it as is."""
 
-
     contextualize_q_prompt = ChatPromptTemplate.from_messages(
-
-
-
-
-
-    )
+        [
+            ("system", contextualize_q_system_prompt),
+            MessagesPlaceholder("chat_history"),
+            ("human", "{input}"),
+        ]
+    )
 
     ha_retriever = create_history_aware_retriever(llm, vectorstore.as_retriever(), contextualize_q_prompt)
 
@@ -156,6 +155,7 @@ def create_conversational_rag_chain(vectorstore):
         ]
     )
 
+
     question_answer_chain = create_stuff_documents_chain(llm, qa_prompt)
 
     rag_chain = create_retrieval_chain(ha_retriever, question_answer_chain)
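For context, here is a minimal self-contained sketch of what this commit wires up, following LangChain's documented create_history_aware_retriever / create_stuff_documents_chain / create_retrieval_chain API. The llm parameter, the qa_prompt wording, and the opening sentence of the contextualize prompt (only its tail is visible in this diff) are assumptions for illustration; in app.py, llm is presumably defined outside the function, since the signature takes only vectorstore.

# Sketch only: `llm` is passed explicitly here to stay self-contained,
# and the QA system prompt text is illustrative, not taken from app.py.
from langchain.chains import create_history_aware_retriever, create_retrieval_chain
from langchain.chains.combine_documents import create_stuff_documents_chain
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder

def create_conversational_rag_chain(vectorstore, llm):
    # Opening sentence follows LangChain's tutorial wording; only the tail
    # of this string is visible in the diff above.
    contextualize_q_system_prompt = """Given a chat history and the latest user question \
which might reference context in the chat history, formulate a standalone question \
which can be understood without the chat history. Do NOT answer the question, \
just reformulate it if needed and otherwise return it as is."""

    # Step 1: rewrite a follow-up question into a standalone query,
    # using the prior conversation as context.
    contextualize_q_prompt = ChatPromptTemplate.from_messages(
        [
            ("system", contextualize_q_system_prompt),
            MessagesPlaceholder("chat_history"),
            ("human", "{input}"),
        ]
    )
    ha_retriever = create_history_aware_retriever(
        llm, vectorstore.as_retriever(), contextualize_q_prompt
    )

    # Step 2: answer from the retrieved documents, which the stuff-documents
    # chain inserts into the {context} variable of the prompt.
    qa_prompt = ChatPromptTemplate.from_messages(
        [
            ("system", "Use the following retrieved context to answer:\n\n{context}"),
            MessagesPlaceholder("chat_history"),
            ("human", "{input}"),
        ]
    )
    question_answer_chain = create_stuff_documents_chain(llm, qa_prompt)

    # Step 3: retrieve first, then answer; the resulting chain's .invoke()
    # returns a dict that includes an "answer" key.
    return create_retrieval_chain(ha_retriever, question_answer_chain)

Invocation would then look like rag_chain.invoke({"input": question, "chat_history": history}), where history is a list of prior messages. The standalone-question rewrite is what lets a follow-up such as "what about the second point?" retrieve the right documents even though the query on its own is ambiguous.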