```python
import streamlit as st
import openai
import os
import base64
import glob
import json
from xml.etree import ElementTree as ET
from datetime import datetime
from dotenv import load_dotenv
from openai import ChatCompletion
from bs4 import BeautifulSoup  # needed by read_file_content for HTML uploads

load_dotenv()
openai.api_key = os.getenv('OPENAI_KEY')


def chat_with_model(prompts):
    model = "gpt-3.5-turbo"
    conversation = [{'role': 'system', 'content': 'You are a helpful assistant.'}]
    conversation.extend([{'role': 'user', 'content': prompt} for prompt in prompts])
    response = openai.ChatCompletion.create(model=model, messages=conversation)
    return response['choices'][0]['message']['content']


def generate_filename(prompt):
    safe_date_time = datetime.now().strftime("%m_%d_%H_%M")
    safe_prompt = "".join(x for x in prompt if x.isalnum())[:50]
    return f"{safe_date_time}_{safe_prompt}.htm"


def create_file(filename, prompt, response):
    # Save the exchange as a small HTML page so it can be re-downloaded later.
    with open(filename, 'w') as file:
        file.write(
            f"<h1>Prompt:</h1>"
            f"<p>{prompt}</p>"
            f"<h1>Response:</h1>"
            f"<p>{response}</p>"
") def get_table_download_link(file_path): with open(file_path, 'r') as file: data = file.read() b64 = base64.b64encode(data.encode()).decode() href = f'{os.path.basename(file_path)}' return href def CompressXML(xml_text): tree = ET.ElementTree(ET.fromstring(xml_text)) for elem in tree.iter(): if isinstance(elem.tag, ET.Comment): elem.getparent().remove(elem) return ET.tostring(tree.getroot(), encoding='unicode') def read_file_content(file): if file.type == "application/json": content = json.load(file) return str(content) elif file.type == "text/html": content = BeautifulSoup(file, "html.parser") return content.text elif file.type == "application/xml" or file.type == "text/xml": xml_text = file.getvalue().decode() compressed_text = CompressXML(xml_text) return compressed_text elif file.type == "text/plain": return file.getvalue().decode() else: return "" def main(): st.title("Chat with AI") prompts = [''] user_prompt = st.text_area("Your question:", '', height=120) uploaded_file = st.file_uploader("Choose a file", type=["xml", "json", "htm", "txt"]) if user_prompt: prompts.append(user_prompt) if uploaded_file is not None: file_content = read_file_content(uploaded_file) st.markdown(f"**Content Added to Prompt:**\n{file_content}") prompts.append(file_content) if st.button('Chat'): st.write('Chatting with GPT-3...') response = chat_with_model(prompts) st.write('Response:') st.write(response) filename = generate_filename(user_prompt) create_file(filename, user_prompt, response) st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True) htm_files = glob.glob("*.htm") for file in htm_files: st.sidebar.mark User show full code listing ChatGPT Here is the full Python script: python Copy code import streamlit as st import openai import os import base64 import glob import json from xml.etree import ElementTree as ET from datetime import datetime from dotenv import load_dotenv from openai import ChatCompletion load_dotenv() openai.api_key = os.getenv('OPENAI_KEY') def chat_with_model(prompts): model = "gpt-3.5-turbo" conversation = [{'role': 'system', 'content': 'You are a helpful assistant.'}] conversation.extend([{'role': 'user', 'content': prompt} for prompt in prompts]) response = openai.ChatCompletion.create(model=model, messages=conversation) return response['choices'][0]['message']['content'] def generate_filename(prompt): safe_date_time = datetime.now().strftime("%m_%d_%H_%M") safe_prompt = "".join(x for x in prompt if x.isalnum())[:50] return f"{safe_date_time}_{safe_prompt}.htm" def create_file(filename, prompt, response): with open(filename, 'w') as file: file.write(f"

            f"<h1>Prompt:</h1>"
            f"<p>{prompt}</p>"
            f"<h1>Response:</h1>"
            f"<p>{response}</p>"
") def get_table_download_link(file_path): with open(file_path, 'r') as file: data = file.read() b64 = base64.b64encode(data.encode()).decode() href = f'{os.path.basename(file_path)}' return href def CompressXML(xml_text): tree = ET.ElementTree(ET.fromstring(xml_text)) for elem in tree.iter(): if isinstance(elem.tag, ET.Comment): elem.getparent().remove(elem) return ET.tostring(tree.getroot(), encoding='unicode') def read_file_content(file): if file.type == "application/json": content = json.load(file) return str(content) elif file.type == "text/html": content = BeautifulSoup(file, "html.parser") return content.text elif file.type == "application/xml" or file.type == "text/xml": xml_text = file.getvalue().decode() compressed_text = CompressXML(xml_text) return compressed_text elif file.type == "text/plain": return file.getvalue().decode() else: return "" def main(): st.title("Chat with AI") prompts = [''] user_prompt = st.text_area("Your question:", '', height=120) uploaded_file = st.file_uploader("Choose a file", type=["xml", "json", "htm", "txt"]) if user_prompt: prompts.append(user_prompt) if uploaded_file is not None: file_content = read_file_content(uploaded_file) prompts.append(file_content) if st.button('Chat'): st.write('Chatting with GPT-3...') response = chat_with_model(prompts) st.write('Response:') st.write(response) filename = generate_filename(user_prompt) create_file(filename, user_prompt, response) st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True) htm_files = glob.glob("*.htm") for file in htm_files: st.sidebar.markdown(get_table_download_link(file), unsafe_allow_html=True) if st.sidebar.button(f"Delete {file}"): os.remove(file) st.experimental_rerun() if __name__ == "__main__": main()