awacke1 committed on
Commit
9f0de14
β€’
1 Parent(s): b635eaa

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +41 -28
app.py CHANGED
@@ -6,11 +6,12 @@ import glob
6
  import json
7
  import mistune
8
  import pytz
9
-
10
  from datetime import datetime
11
  from openai import ChatCompletion
12
  from xml.etree import ElementTree as ET
13
  from bs4 import BeautifulSoup
 
14
 
15
  openai.api_key = os.getenv('OPENAI_KEY')
16
  st.set_page_config(
@@ -31,16 +32,6 @@ elif choice == "py":
31
 
32
  max_length = st.sidebar.slider("Max document length", min_value=1000, max_value=32000, value=2000, step=1000)
33
 
34
- def truncate_document(document, length):
35
- return document[:length]
36
-
37
- def chat_with_model(prompts):
38
- model = "gpt-3.5-turbo"
39
- conversation = [{'role': 'system', 'content': 'You are a helpful assistant.'}]
40
- conversation.extend([{'role': 'user', 'content': prompt} for prompt in prompts])
41
- response = openai.ChatCompletion.create(model=model, messages=conversation)
42
- return response['choices'][0]['message']['content']
43
-
44
  def generate_filename(prompt, file_type):
45
  central = pytz.timezone('US/Central')
46
  safe_date_time = datetime.now(central).strftime("%m%d_%I%M")
@@ -58,12 +49,20 @@ def create_file(filename, prompt, response):
58
  with open(filename, 'w') as file:
59
  file.write(f"# Prompt:\n{prompt}\n# Response:\n{response}")
60
 
61
- def get_table_download_link_old(file_path):
62
- with open(file_path, 'r') as file:
63
- data = file.read()
64
- b64 = base64.b64encode(data.encode()).decode()
65
- href = f'<a href="data:file/htm;base64,{b64}" target="_blank" download="{os.path.basename(file_path)}">{os.path.basename(file_path)}</a>'
66
- return href
 
 
 
 
 
 
 
 
67
 
68
  def get_table_download_link(file_path):
69
  with open(file_path, 'r') as file:
@@ -82,6 +81,7 @@ def get_table_download_link(file_path):
82
  href = f'<a href="data:{mime_type};base64,{b64}" target="_blank" download="{file_name}">{file_name}</a>'
83
  return href
84
 
 
85
  def CompressXML(xml_text):
86
  root = ET.fromstring(xml_text)
87
  for elem in list(root.iter()):
@@ -111,31 +111,44 @@ def read_file_content(file,max_length):
111
  return ""
112
 
113
  def main():
114
- prompts = ['']
115
- file_content = ""
116
  user_prompt = st.text_area("Your question:", '', height=120)
117
  uploaded_file = st.file_uploader("Choose a file", type=["xml", "json", "html", "htm", "md", "txt"])
 
118
 
119
- if user_prompt:
120
- prompts.append(user_prompt)
121
 
122
  if uploaded_file is not None:
123
  file_content = read_file_content(uploaded_file, max_length)
124
- prompts.append(file_content)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
125
 
126
  if st.button('πŸ’¬ Chat'):
127
  st.write('Thinking and Reasoning with your inputs...')
128
- response = chat_with_model(prompts)
129
  st.write('Response:')
130
  st.write(response)
131
 
132
  filename = generate_filename(user_prompt, choice)
133
  create_file(filename, user_prompt, response)
134
  st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)
135
-
136
- if len(file_content) > 0:
137
- st.markdown(f"**File Content Added:**\n{file_content}")
138
-
139
  all_files = glob.glob("*.txt") + glob.glob("*.htm") + glob.glob("*.md")
140
  for file in all_files:
141
  col1, col2 = st.sidebar.columns([4,1]) # adjust the ratio as needed
@@ -147,4 +160,4 @@ def main():
147
  st.experimental_rerun()
148
 
149
  if __name__ == "__main__":
150
- main()
 
6
  import json
7
  import mistune
8
  import pytz
9
+ import math
10
  from datetime import datetime
11
  from openai import ChatCompletion
12
  from xml.etree import ElementTree as ET
13
  from bs4 import BeautifulSoup
14
+ from collections import deque
15
 
16
  openai.api_key = os.getenv('OPENAI_KEY')
17
  st.set_page_config(
 
32
 
33
  max_length = st.sidebar.slider("Max document length", min_value=1000, max_value=32000, value=2000, step=1000)
34
 
 
 
 
 
 
 
 
 
 
 
35
  def generate_filename(prompt, file_type):
36
  central = pytz.timezone('US/Central')
37
  safe_date_time = datetime.now(central).strftime("%m%d_%I%M")
 
49
  with open(filename, 'w') as file:
50
  file.write(f"# Prompt:\n{prompt}\n# Response:\n{response}")
51
 
52
+ def truncate_document(document, length):
53
+ return document[:length]
54
+
55
+ def divide_document(document, max_length):
56
+ return [document[i:i+max_length] for i in range(0, len(document), max_length)]
57
+
58
+ def chat_with_model(prompt, document_section):
59
+ model = "gpt-3.5-turbo"
60
+ conversation = [{'role': 'system', 'content': 'You are a helpful assistant.'}]
61
+ conversation.append({'role': 'user', 'content': prompt})
62
+ conversation.append({'role': 'assistant', 'content': document_section})
63
+ response = openai.ChatCompletion.create(model=model, messages=conversation)
64
+ return response['choices'][0]['message']['content']
65
+
66
 
67
  def get_table_download_link(file_path):
68
  with open(file_path, 'r') as file:
 
81
  href = f'<a href="data:{mime_type};base64,{b64}" target="_blank" download="{file_name}">{file_name}</a>'
82
  return href
83
 
84
+
85
  def CompressXML(xml_text):
86
  root = ET.fromstring(xml_text)
87
  for elem in list(root.iter()):
 
111
  return ""
112
 
113
  def main():
 
 
114
  user_prompt = st.text_area("Your question:", '', height=120)
115
  uploaded_file = st.file_uploader("Choose a file", type=["xml", "json", "html", "htm", "md", "txt"])
116
+ max_length = 4000
117
 
118
+ document_sections = deque()
119
+ document_responses = {}
120
 
121
  if uploaded_file is not None:
122
  file_content = read_file_content(uploaded_file, max_length)
123
+ document_sections.extend(divide_document(file_content, max_length))
124
+
125
+ if len(document_sections) > 0:
126
+ st.markdown("**Sections of the uploaded file:**")
127
+ for i, section in enumerate(list(document_sections)):
128
+ st.markdown(f"**Section {i+1}**\n{section}")
129
+
130
+ st.markdown("**Chat with the model:**")
131
+ for i, section in enumerate(list(document_sections)):
132
+ if i in document_responses:
133
+ st.markdown(f"**Section {i+1}**\n{document_responses[i]}")
134
+ else:
135
+ if st.button(f"Chat about Section {i+1}"):
136
+ st.write('Thinking and Reasoning with your inputs...')
137
+ response = chat_with_model(user_prompt, section)
138
+ st.write('Response:')
139
+ st.write(response)
140
+ document_responses[i] = response
141
 
142
  if st.button('πŸ’¬ Chat'):
143
  st.write('Thinking and Reasoning with your inputs...')
144
+ response = chat_with_model(user_prompt, ''.join(list(document_sections)))
145
  st.write('Response:')
146
  st.write(response)
147
 
148
  filename = generate_filename(user_prompt, choice)
149
  create_file(filename, user_prompt, response)
150
  st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)
151
+
 
 
 
152
  all_files = glob.glob("*.txt") + glob.glob("*.htm") + glob.glob("*.md")
153
  for file in all_files:
154
  col1, col2 = st.sidebar.columns([4,1]) # adjust the ratio as needed
 
160
  st.experimental_rerun()
161
 
162
  if __name__ == "__main__":
163
+ main()