import streamlit as st
from langchain.chat_models import ChatOpenAI
from langchain.schema import SystemMessage, HumanMessage, AIMessage
import instruct  # local module that exposes the system prompt string

# From here down is all the Streamlit UI.
st.set_page_config(page_title="Entz's LLM LangChain-OpenAI", page_icon=":robot_face:")
st.markdown("<h1>My Kidbot</h1>", unsafe_allow_html=True)
st.markdown("<h3>Chat with my 5-Year-Old droid</h3>", unsafe_allow_html=True)
st.markdown("<p>By Lorentz Yeung</p>", unsafe_allow_html=True)

# Put the presumptions (system prompt) for the AI into the Streamlit session state.
# st.session_state provides a way to store and persist data between reruns,
# effectively allowing the app to remember information such as user inputs,
# selections, and other variables.
if "presumptions" not in st.session_state:
    st.session_state.presumptions = [
        SystemMessage(content=instruct.instruct)
    ]


def load_answer(question):
    # Append the user's question, send the full history to the model,
    # then store the new answer in the presumptions list.
    st.session_state.presumptions.append(HumanMessage(content=question))
    assistant_answer = chat(st.session_state.presumptions)
    st.session_state.presumptions.append(AIMessage(content=assistant_answer.content))
    return assistant_answer.content


def get_text():
    input_text = st.text_input("Ask me a question please~ ", "How old are you, little one?", key="input")
    return input_text


chat = ChatOpenAI(temperature=0)

user_input = get_text()
submit = st.button("Little girl answers: ")

if submit:
    response = load_answer(user_input)
    st.subheader("Answer:")
    st.write(response)