# app.py — GPT Streamlit Document Reasoner
# NOTE(review): removed Hugging Face web-view artifacts ("raw / history /
# blame", file size, commit hash) that were captured with the file and are
# not valid Python.
import streamlit as st
import openai
import os
import base64
import glob
import json
import mistune
import pytz
from datetime import datetime
from openai import ChatCompletion
from xml.etree import ElementTree as ET
from bs4 import BeautifulSoup
# --- API key, page, and sidebar configuration ---------------------------
openai.api_key = os.getenv('OPENAI_KEY')

st.set_page_config(
    page_title="GPT Streamlit Document Reasoner",
    layout="wide")

# Output-format picker: the chosen extension drives the saved file's layout
# and the download link's MIME type.
menu = ["txt", "htm", "md", "py"]
choice = st.sidebar.selectbox("Choose output file type to save results", menu)
choicePrefix = "Output and download file set to "
_choice_labels = {
    "txt": "Text file.",
    "htm": "HTML5.",
    "md": "Markdown.",
    "py": "Python Code.",
}
if choice in _choice_labels:
    st.sidebar.write(choicePrefix + _choice_labels[choice])

# Upper bound (in characters) for document content fed to the model.
max_length = st.sidebar.slider("Max document length", min_value=1000, max_value=32000, value=2000, step=1000)
def truncate_document(document, length):
    """Return at most the first *length* characters of *document*."""
    clipped = document[:length]
    return clipped
def chat_with_model(prompts, model="gpt-3.5-turbo"):
    """Send *prompts* to the OpenAI chat API and return the reply text.

    Each entry of *prompts* becomes one user message, preceded by a fixed
    system message. *model* was previously hard-coded; it is now a defaulted
    parameter so callers can select a different chat model without any
    change in existing call sites.

    Returns the content string of the first choice in the API response.
    Raises whatever ``openai.ChatCompletion.create`` raises on API errors.
    """
    conversation = [{'role': 'system', 'content': 'You are a helpful assistant.'}]
    conversation.extend([{'role': 'user', 'content': prompt} for prompt in prompts])
    response = openai.ChatCompletion.create(model=model, messages=conversation)
    return response['choices'][0]['message']['content']
def generate_filename(prompt, file_type):
    """Build a timestamped, filesystem-safe filename for a saved exchange.

    The name is ``MMDD_HHMM_<slug>.<file_type>`` where the timestamp is
    US/Central local time and the slug is the first 28 alphanumeric
    characters of *prompt* (everything else stripped).
    """
    central = pytz.timezone('US/Central')
    stamp = datetime.now(central).strftime("%m%d_%I%M")
    slug = "".join(ch for ch in prompt if ch.isalnum())[:28]
    return f"{stamp}_{slug}.{file_type}"
def create_file(filename, prompt, response):
    """Write a prompt/response pair to *filename*, formatted per extension.

    ``.txt`` gets a plain-text layout, ``.htm`` an HTML fragment, ``.md``
    Markdown headings. Bug fix: the sidebar menu offers "py" but there was
    no branch for it, so no file was written and the later download link
    raised FileNotFoundError — any other extension now falls back to the
    plain-text layout.
    """
    if filename.endswith(".txt"):
        with open(filename, 'w') as file:
            file.write(f"Prompt:\n{prompt}\nResponse:\n{response}")
    elif filename.endswith(".htm"):
        with open(filename, 'w') as file:
            file.write(f"<h1>Prompt:</h1> <p>{prompt}</p> <h1>Response:</h1> <p>{response}</p>")
    elif filename.endswith(".md"):
        with open(filename, 'w') as file:
            file.write(f"# Prompt:\n{prompt}\n# Response:\n{response}")
    else:
        # Fallback for "py" (and any future extension): plain-text layout.
        with open(filename, 'w') as file:
            file.write(f"Prompt:\n{prompt}\nResponse:\n{response}")
def get_table_download_link(file_path):
    """Return an HTML anchor that downloads *file_path* as a base64 data URI."""
    mime_by_ext = {
        '.txt': 'text/plain',
        '.htm': 'text/html',
        '.md': 'text/markdown',
    }
    with open(file_path, 'r') as handle:
        payload = handle.read()
    encoded = base64.b64encode(payload.encode()).decode()
    file_name = os.path.basename(file_path)
    extension = os.path.splitext(file_name)[1]
    # Anything outside the known text formats downloads as generic binary.
    mime_type = mime_by_ext.get(extension, 'application/octet-stream')
    href = f'<a href="data:{mime_type};base64,{encoded}" target="_blank" download="{file_name}">{file_name}</a>'
    return href
def CompressXML(xml_text):
    """Parse *xml_text*, drop every element whose tag contains 'Comment',
    and return the re-serialized XML string.

    Bug fix: ElementTree elements have no ``.parent`` attribute, so the old
    ``elem.parent.remove(elem)`` raised AttributeError whenever a matching
    tag was found. Build an explicit child->parent map and remove through it.
    """
    root = ET.fromstring(xml_text)
    parent_of = {child: parent for parent in root.iter() for child in parent}
    for elem in list(root.iter()):
        if isinstance(elem.tag, str) and 'Comment' in elem.tag:
            parent = parent_of.get(elem)
            # The root itself has no parent; only remove nested matches.
            if parent is not None:
                parent.remove(elem)
    return ET.tostring(root, encoding='unicode', method="xml")
def read_file_content(file, max_length):
    """Extract text from an uploaded file, clipped to *max_length* chars.

    *file* is a Streamlit UploadedFile-like object exposing ``.type`` (a
    MIME string) plus ``.read()``/``.getvalue()``. Unsupported MIME types
    yield "".

    Bug fix: *max_length* was previously accepted but never applied (and the
    module's ``truncate_document`` helper was dead code), so the sidebar
    "Max document length" slider had no effect here; every branch now
    truncates its result.
    """
    if file.type == "application/json":
        content = json.load(file)
        return str(content)[:max_length]
    elif file.type == "text/html" or file.type == "text/htm":
        soup = BeautifulSoup(file, "html.parser")
        return soup.text[:max_length]
    elif file.type == "application/xml" or file.type == "text/xml":
        tree = ET.parse(file)
        root = tree.getroot()
        xml = CompressXML(ET.tostring(root, encoding='unicode'))
        return xml[:max_length]
    elif file.type == "text/markdown" or file.type == "text/md":
        md = mistune.create_markdown()
        return md(file.read().decode())[:max_length]
    elif file.type == "text/plain":
        return file.getvalue().decode()[:max_length]
    else:
        return ""
def main():
    """Render the chat UI: collect a question and/or uploaded document,
    query the model, persist the exchange, and list saved files in the
    sidebar with download/delete controls."""
    # Seed with one empty prompt so "".join(prompts) is always valid even
    # when the user supplies nothing.
    prompts = ['']
    file_content = ""
    user_prompt = st.text_area("Your question:", '', height=100)
    uploaded_file = st.file_uploader("Choose a file", type=["xml", "json", "html", "htm", "md", "txt"])
    if user_prompt:
        prompts.append(user_prompt)
    if uploaded_file is not None:
        # max_length comes from the module-level sidebar slider.
        file_content = read_file_content(uploaded_file, max_length)
        prompts.append(file_content)
    if st.button('πŸ’¬ Chat'):
        st.write('Thinking and Reasoning with your inputs...')
        total_input = "".join(prompts)
        if len(total_input) > 4000:
            # Oversized input: split into max_length-sized chunks, query the
            # model once per chunk, and stitch the replies together.
            chunks = [total_input[i:i + max_length] for i in range(0, len(total_input), max_length)]
            responses = []
            for chunk in chunks:
                responses.append(chat_with_model([chunk]))
            response = "\n".join(responses)
        else:
            response = chat_with_model(prompts)
        st.write('Response:')
        st.write(response)
        # Save the exchange under a timestamped name ('choice' is the
        # module-level output-format selection) and offer it for download.
        filename = generate_filename(user_prompt, choice)
        create_file(filename, user_prompt, response)
        st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)
    if len(file_content) > 0:
        st.markdown(f"**File Content Added:**\n{file_content}")
    # Sidebar file manager: one download link plus a delete button per
    # previously saved output file in the working directory.
    all_files = glob.glob("*.txt") + glob.glob("*.htm") + glob.glob("*.md")
    for file in all_files:
        col1, col2 = st.sidebar.columns([4,1]) # adjust the ratio as needed
        with col1:
            st.markdown(get_table_download_link(file), unsafe_allow_html=True)
        with col2:
            # Deleting triggers a rerun so the sidebar list refreshes.
            if st.button("πŸ—‘", key=file):
                os.remove(file)
                st.experimental_rerun()

if __name__ == "__main__":
    main()