Shreyas094 committed on
Commit 8f325c3
1 Parent(s): 8b01918

Update app.py

Files changed (1)
  1. app.py +43 -51
app.py CHANGED
@@ -227,66 +227,58 @@ def ask_question(question, temperature, top_p, repetition_penalty, web_search):
     if not question:
         return "Please enter a question."
 
-    if question in memory_database and not web_search:
-        answer = memory_database[question]
-    else:
-        model = get_model(temperature, top_p, repetition_penalty)
-        embed = get_embeddings()
-
-        if web_search:
-            search_results = google_search(question)
-            context_str = "\n".join([result["text"] for result in search_results if result["text"]])
-
-            # Convert web search results to Document format
-            web_docs = [Document(page_content=result["text"], metadata={"source": result["link"]}) for result in search_results if result["text"]]
-
-            # Create a temporary FAISS database for web search results
-            temp_database = FAISS.from_documents(web_docs, embed)
-
-            retriever = temp_database.as_retriever()
+    model = get_model(temperature, top_p, repetition_penalty)
+    embed = get_embeddings()
+
+    # Check if the FAISS database exists, if not create an empty one
+    if os.path.exists("faiss_database"):
+        database = FAISS.load_local("faiss_database", embed, allow_dangerous_deserialization=True)
+    else:
+        database = FAISS.from_documents([], embed)
+        database.save_local("faiss_database")
+
+    if web_search:
+        search_results = google_search(question)
+        web_docs = [Document(page_content=result["text"], metadata={"source": result["link"]}) for result in search_results if result["text"]]
+
+        # Add web search results to the existing database
+        database.add_documents(web_docs)
+        database.save_local("faiss_database")
+
+        context_str = "\n".join([doc.page_content for doc in web_docs])
+
+        prompt_template = """
+        Answer the question based on the following web search results:
+        Web Search Results:
+        {context}
+        Current Question: {question}
+        If the web search results don't contain relevant information, state that the information is not available in the search results.
+        Provide a concise and direct answer to the question without mentioning the web search or these instructions:
+        """
+        prompt_val = ChatPromptTemplate.from_template(prompt_template)
+        formatted_prompt = prompt_val.format(context=context_str, question=question)
+    else:
+        history_str = "\n".join([f"Q: {item['question']}\nA: {item['answer']}" for item in conversation_history])
+
+        if is_related_to_history(question, conversation_history):
+            context_str = "No additional context needed. Please refer to the conversation history."
+        else:
+            retriever = database.as_retriever()
             relevant_docs = retriever.get_relevant_documents(question)
             context_str = "\n".join([doc.page_content for doc in relevant_docs])
 
-            prompt_template = """
-            Answer the question based on the following web search results:
-            Web Search Results:
-            {context}
-            Current Question: {question}
-            If the web search results don't contain relevant information, state that the information is not available in the search results.
-            Provide a concise and direct answer to the question without mentioning the web search or these instructions:
-            """
-            prompt_val = ChatPromptTemplate.from_template(prompt_template)
-            formatted_prompt = prompt_val.format(context=context_str, question=question)
-        else:
-            # Check if the FAISS database exists
-            if os.path.exists("faiss_database"):
-                database = FAISS.load_local("faiss_database", embed, allow_dangerous_deserialization=True)
-            else:
-                return "No FAISS database found. Please upload documents to create the vector store."
-
-            history_str = "\n".join([f"Q: {item['question']}\nA: {item['answer']}" for item in conversation_history])
-
-            if is_related_to_history(question, conversation_history):
-                context_str = "No additional context needed. Please refer to the conversation history."
-            else:
-                retriever = database.as_retriever()
-                relevant_docs = retriever.get_relevant_documents(question)
-                context_str = "\n".join([doc.page_content for doc in relevant_docs])
-
-            prompt_val = ChatPromptTemplate.from_template(prompt)
-            formatted_prompt = prompt_val.format(history=history_str, context=context_str, question=question)
-
-        answer = generate_chunked_response(model, formatted_prompt)
-        answer = re.split(r'Question:|Current Question:', answer)[-1].strip()
+        prompt_val = ChatPromptTemplate.from_template(prompt)
+        formatted_prompt = prompt_val.format(history=history_str, context=context_str, question=question)
+
+    answer = generate_chunked_response(model, formatted_prompt)
+    answer = re.split(r'Question:|Current Question:', answer)[-1].strip()
 
-        # Remove any remaining prompt instructions from the answer
-        answer_lines = answer.split('\n')
-        answer = '\n'.join(line for line in answer_lines if not line.startswith('If') and not line.startswith('Provide'))
+    # Remove any remaining prompt instructions from the answer
+    answer_lines = answer.split('\n')
+    answer = '\n'.join(line for line in answer_lines if not line.startswith('If') and not line.startswith('Provide'))
 
-        if not web_search:
-            memory_database[question] = answer
-
     if not web_search:
+        memory_database[question] = answer
         conversation_history = manage_conversation_history(question, answer, conversation_history)
 
     return answer
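
For reference, below is a minimal, self-contained sketch of the retrieval pattern the updated code follows: a single FAISS index persisted at "faiss_database" that is loaded when present, extended with freshly fetched web documents, saved back to disk, and then queried through a retriever. The helper fetch_web_documents(), the embedding model name, and the langchain_community import paths are assumptions for illustration and are not taken from app.py; the sketch also seeds a brand-new index from its first batch of documents rather than from an empty list.

import os

from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS
from langchain_core.documents import Document

INDEX_PATH = "faiss_database"  # same on-disk path the updated app.py uses


def fetch_web_documents(query: str) -> list[Document]:
    # Hypothetical stand-in for the app's google_search(question) scraping step.
    return [
        Document(
            page_content=f"Example snippet about {query}",
            metadata={"source": "https://example.com"},
        )
    ]


def load_or_extend_index(docs: list[Document], embeddings) -> FAISS:
    # Load the persistent index if it exists; otherwise create it from the
    # first batch of documents rather than from an empty list.
    if os.path.exists(INDEX_PATH):
        index = FAISS.load_local(
            INDEX_PATH, embeddings, allow_dangerous_deserialization=True
        )
        index.add_documents(docs)  # append the new web results
    else:
        index = FAISS.from_documents(docs, embeddings)
    index.save_local(INDEX_PATH)  # persist for the next question
    return index


if __name__ == "__main__":
    embeddings = HuggingFaceEmbeddings(
        model_name="sentence-transformers/all-MiniLM-L6-v2"  # assumed model
    )
    question = "What is FAISS?"
    index = load_or_extend_index(fetch_web_documents(question), embeddings)
    retriever = index.as_retriever()
    context_str = "\n".join(
        doc.page_content for doc in retriever.get_relevant_documents(question)
    )
    print(context_str)

Compared with the earlier version, which built a throwaway index per web search and served repeat questions from an in-memory answer cache, routing everything through one saved index keeps previously fetched web results searchable for later questions.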