import streamlit as st

import model as demo_chat
import request as req  # aliased to avoid shadowing the stdlib `re` module
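# NOTE: `model` and `request` are local modules in this Space (not shown here).
# Assumed interface, inferred from the calls below: demo_chat.chat_template_prompt()
# returns the instruction/system prompt string, and req.generate_response(text)
# returns the model's reply for the given prompt.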
st.title("Hi, I am Chatbot Philio :woman:")
st.write("I am your hotel booking assistant. Feel free to start chatting with me.")
scrollable_div_style = """
<style>
.scrollable-div {
    height: 200px;           /* adjust the height as needed */
    overflow-y: auto;        /* enable vertical scrolling */
    padding: 5px;
    border: 1px solid #ccc;  /* optional: adds a border around the div */
    border-radius: 5px;      /* optional: rounds the corners of the border */
}
</style>
"""
#llm_chain = demo_chat.chain()
def render_chat_history(chat_history):
    """Render every non-system message in the chat history."""
    for message in chat_history:
        if message["role"] != "system":
            with st.chat_message(message["role"]):
                st.markdown(message["content"])
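# Streamlit reruns this script from top to bottom on every user interaction,
# so the history kept in st.session_state below is re-rendered on each run.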
# Initialize the chat history for this session if it does not already exist
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []

# Inject the CSS so the .scrollable-div class takes effect, then open the container.
# Note: raw HTML from one st.markdown call does not wrap Streamlit elements
# rendered afterwards, so the div serves only as a styling hook.
st.markdown(scrollable_div_style, unsafe_allow_html=True)
st.markdown('<div class="scrollable-div">', unsafe_allow_html=True)
render_chat_history(st.session_state.chat_history)
# Input field for the chat interface
if input_text := st.chat_input(placeholder="Here you can chat with our hotel booking model."):
    with st.chat_message("user"):
        st.markdown(input_text)
    st.session_state.chat_history.append({"role": "user", "content": input_text})
    with st.spinner("Generating response..."):
        # first_answer = llm_chain.predict(input=input_text)
        # answer = first_answer.strip()
        prompt = demo_chat.chat_template_prompt()
        # Newline keeps the role tag separate from the user text; `full_prompt`
        # avoids shadowing the built-in `input`
        full_prompt = prompt + input_text + "\nAssistant:"
        answer = req.generate_response(full_prompt)
    with st.chat_message("assistant"):
        st.markdown(answer)
    st.session_state.chat_history.append({"role": "assistant", "content": answer})

st.markdown('</div>', unsafe_allow_html=True)  # close the scrollable container
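# ---------------------------------------------------------------------------
# For reference, a minimal sketch of what the local `request` module might
# contain (an assumption; the actual module ships separately in this Space):
#
#   # request.py
#   from transformers import pipeline
#
#   _generator = pipeline("text-generation", model="...")  # model name omitted
#
#   def generate_response(prompt: str) -> str:
#       out = _generator(prompt, max_new_tokens=128)[0]["generated_text"]
#       return out[len(prompt):].strip()  # return only the newly generated text
# ---------------------------------------------------------------------------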