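"""Streamlit chat app: send a question, optionally augmented with the contents of an
uploaded XML/JSON/HTML/Markdown/text file, to the OpenAI chat API, display the reply,
and save each exchange as a downloadable file."""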
import streamlit as st
import openai
import os
import base64
import glob
import json
import mistune
import pytz

from datetime import datetime
from xml.etree import ElementTree as ET
from bs4 import BeautifulSoup

openai.api_key = os.getenv('OPENAI_KEY')

def chat_with_model(prompts):
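    """Send the accumulated prompts to the OpenAI chat endpoint (legacy pre-1.0
    `openai.ChatCompletion` interface) and return the assistant's reply text."""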
    model = "gpt-3.5-turbo"
    #model = "gpt-4-32k" # 32k tokens between prompt and inference tokens
    conversation = [{'role': 'system', 'content': 'You are a helpful assistant.'}]
    conversation.extend([{'role': 'user', 'content': prompt} for prompt in prompts])
    response = openai.ChatCompletion.create(model=model, messages=conversation)
    return response['choices'][0]['message']['content']

def generate_filename(prompt):
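    """Build a timestamped filename (US/Central time) from the first 30
    alphanumeric characters of the prompt."""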
    central = pytz.timezone('US/Central')
    safe_date_time = datetime.now(central).strftime("%m%d_%I_%M_%p")  
    safe_prompt = "".join(x for x in prompt if x.isalnum())[:30]
    return f"{safe_date_time}_{safe_prompt}.txt"
    
def create_file(filename, prompt, response):
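    """Write the prompt and response to `filename` as a small HTML snippet."""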
    with open(filename, 'w') as file:
        file.write(f"<h1>Prompt:</h1> <p>{prompt}</p> <h1>Response:</h1> <p>{response}</p>")

def get_table_download_link_old(file_path):
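    """Legacy download-link builder; superseded by get_table_download_link below."""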
    with open(file_path, 'r') as file:
        data = file.read()
    b64 = base64.b64encode(data.encode()).decode()  
    href = f'<a href="data:file/htm;base64,{b64}" target="_blank" download="{os.path.basename(file_path)}">{os.path.basename(file_path)}</a>'
    return href

def get_table_download_link(file_path):
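    """Return an HTML anchor that downloads `file_path` via a base64 data URI,
    choosing the MIME type from the file extension."""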
    with open(file_path, 'r') as file:
        data = file.read()
    b64 = base64.b64encode(data.encode()).decode()  
    file_name = os.path.basename(file_path)
    ext = os.path.splitext(file_name)[1]  # get the file extension

    if ext == '.txt':
        mime_type = 'text/plain'
    elif ext == '.htm':
        mime_type = 'text/html'
    elif ext == '.md':
        mime_type = 'text/markdown'
    else:
        mime_type = 'application/octet-stream'  # general binary data type

    href = f'<a href="data:{mime_type};base64,{b64}" target="_blank" download="{file_name}">{file_name}</a>'
    return href

def CompressXML(xml_text):
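    """Strip comment-like elements from the XML and truncate the result so the
    context document stays under the model's token limit."""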
    root = ET.fromstring(xml_text)
    # ElementTree elements carry no parent pointer, so walk each parent and
    # drop matching children from it directly instead of calling elem.parent.
    for parent in list(root.iter()):
        for child in list(parent):
            if isinstance(child.tag, str) and 'Comment' in child.tag:
                parent.remove(child)
    # Hack: keep only the first 4,000 characters so the context document stays under the token max.
    return ET.tostring(root, encoding='unicode', method="xml")[:4000]


def read_file_content(file):
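    """Return the text content of an uploaded file, dispatching on its MIME type
    (JSON, HTML, XML, Markdown, or plain text); unknown types yield an empty string."""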
    if file.type == "application/json":
        content = json.load(file)
        return str(content)
    elif file.type == "text/html" or file.type == "text/htm":
        content = BeautifulSoup(file, "html.parser")
        return content.text
    elif file.type == "application/xml" or file.type == "text/xml":
        tree = ET.parse(file)
        root = tree.getroot()
        #return ET.tostring(root, encoding='unicode')
        return CompressXML(ET.tostring(root, encoding='unicode'))
    elif file.type == "text/markdown" or file.type == "text/md":
        md = mistune.create_markdown()
        content = md(file.read().decode())
        return content
    elif file.type == "text/plain":
        return file.getvalue().decode()
    else:
        return ""

def main():
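    """Streamlit UI: collect a question and optional file, query the model on demand,
    show the response, and offer saved exchanges as downloads in the sidebar."""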
    st.title("Chat with AI")

    prompts = ['']
    file_content = ""

    user_prompt = st.text_area("Your question:", '', height=120)
    uploaded_file = st.file_uploader("Choose a file", type=["xml", "json", "html", "htm", "md", "txt"])

    if user_prompt:
        prompts.append(user_prompt)

    if uploaded_file is not None:
        file_content = read_file_content(uploaded_file)
        prompts.append(file_content)

    if st.button('💬 Chat'):
        st.write('Chatting with GPT-3.5-turbo...')
        response = chat_with_model(prompts)
        st.write('Response:')
        st.write(response)
        
        filename = generate_filename(user_prompt)
        create_file(filename, user_prompt, response)
        st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)
    
    if len(file_content) > 0:
        st.markdown(f"**Content Added to Prompt:**\n{file_content}")
    
    saved_files = glob.glob("*.txt")
    for file in saved_files:
        st.sidebar.markdown(get_table_download_link(file), unsafe_allow_html=True)
        if st.sidebar.button(f"🗑Delete {file}"):
        #if st.sidebar.button("🗑 Delete"):
            os.remove(file)
            st.experimental_rerun()

if __name__ == "__main__":
    main()