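# Streamlit chat app: sends user prompts to OpenAI's gpt-3.5-turbo model, saves each
# prompt/response pair as an .htm file, and lists saved files in the sidebar with
# download and delete links.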
import streamlit as st
import openai
import os
import base64
import glob
from datetime import datetime
from dotenv import load_dotenv
load_dotenv()

# Read the API key from the OPENAI_KEY environment variable (typically set in a .env file).
openai.api_key = os.getenv('OPENAI_KEY')
def chat_with_model(prompts):
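    """Send the prompt list to OpenAI's chat completion API and return the assistant's reply text."""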
    model = "gpt-3.5-turbo"
    conversation = [{'role': 'system', 'content': 'You are a helpful assistant.'}]
    conversation.extend([{'role': 'user', 'content': prompt} for prompt in prompts])
    response = openai.ChatCompletion.create(model=model, messages=conversation)
    return response['choices'][0]['message']['content']
def generate_filename(prompt):
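    """Build a timestamped, filesystem-safe .htm filename from the prompt text."""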
    safe_date_time = datetime.now().strftime("%m_%d_%H_%M")
    safe_prompt = "".join(x for x in prompt if x.isalnum())[:50]
    return f"{safe_date_time}_{safe_prompt}.htm"
def create_file(filename, prompt, response):
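    """Write the prompt and response to a simple HTML file."""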
    with open(filename, 'w') as file:
        file.write(f"<h1>Prompt:</h1> <p>{prompt}</p> <h1>Response:</h1> <p>{response}</p>")
def get_table_download_link(file_path):
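    """Return an HTML anchor tag that embeds the file contents as a base64 download link."""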
    with open(file_path, 'r') as file:
        data = file.read()
    b64 = base64.b64encode(data.encode()).decode()
    href = f'<a href="data:file/htm;base64,{b64}" target="_blank" download="{os.path.basename(file_path)}">{os.path.basename(file_path)}</a>'
    return href
def main():
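    """Render the Streamlit UI: collect a prompt, call the model, and manage saved .htm transcripts."""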
    st.title("Chat with AI")
    prompts = ['']
    user_prompt = st.text_area("Your question:", '', height=120)

    if user_prompt:
        prompts.append(user_prompt)

    if st.button('Chat'):
        st.write('Chatting with GPT-3.5-turbo...')
        response = chat_with_model(prompts)
        st.write('Response:')
        st.write(response)

        filename = generate_filename(user_prompt)
        create_file(filename, user_prompt, response)
        st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)

    # Offer download and delete controls for every saved transcript in the sidebar.
    htm_files = glob.glob("*.htm")
    for file in htm_files:
        st.sidebar.markdown(get_table_download_link(file), unsafe_allow_html=True)
        if st.sidebar.button(f"Delete {file}"):
            os.remove(file)
            st.experimental_rerun()
if __name__ == "__main__":
    main()