awacke1 committed on
Commit
6e969ba
•
1 Parent(s): 36251df

Create app.py

Files changed (1)
  1. app.py +67 -0
app.py ADDED
@@ -0,0 +1,67 @@
+ import streamlit as st
+ import openai
+ import os
+ import base64
+ import glob
+ from datetime import datetime
+ from dotenv import load_dotenv
+ from openai import ChatCompletion
+
+ load_dotenv()
+
+ openai.api_key = os.getenv('OPENAI_KEY')
+
+ def chat_with_model(prompts):
+     model = "gpt-3.5-turbo"
+
+     conversation = [{'role': 'system', 'content': 'You are a helpful assistant.'}]
+     conversation.extend([{'role': 'user', 'content': prompt} for prompt in prompts])
+
+     response = openai.ChatCompletion.create(model=model, messages=conversation)
+     return response['choices'][0]['message']['content']
+
+ def generate_filename(prompt):
+     safe_date_time = datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
+     safe_prompt = "".join(x for x in prompt if x.isalnum())[:50]
+     return f"{safe_date_time}_{safe_prompt}.htm"
+
+ def create_file(filename, prompt, response):
+     with open(filename, 'w') as file:
+         file.write(f"<h1>Prompt:</h1> <p>{prompt}</p> <h1>Response:</h1> <p>{response}</p>")
+
+ def get_table_download_link(file_path):
+     with open(file_path, 'r') as file:
+         data = file.read()
+     b64 = base64.b64encode(data.encode()).decode()
+     href = f'<a href="data:file/htm;base64,{b64}" download="{os.path.basename(file_path)}">{os.path.basename(file_path)}</a>'
+     return href
+
+ def main():
+     st.title("Chat with AI")
+
+     # Pre-defined prompts
+     prompts = ['Hows the weather?', 'Tell me a joke.', 'What is the meaning of life?']
+
+     # User prompt input
+     user_prompt = st.text_input("Your question:", '')
+
+     if user_prompt:
+         prompts.append(user_prompt)
+
+     if st.button('Chat'):
+         st.write('Chatting with GPT-3...')
+         response = chat_with_model(prompts)
+         st.write('Response:')
+         st.write(response)
+
+         filename = generate_filename(user_prompt)
+         create_file(filename, user_prompt, response)
+
+         st.markdown(get_table_download_link(filename), unsafe_allow_html=True)
+
+     htm_files = glob.glob("*.htm")
+     for file in htm_files:
+         st.markdown(get_table_download_link(file), unsafe_allow_html=True)
+
+ if __name__ == "__main__":
+     main()
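
app.py targets the pre-1.0 openai Python SDK, which exposes openai.ChatCompletion; on openai>=1.0 that call path was removed. For reference, a minimal sketch of the same chat request against the 1.x client follows; the client variable and the explicit api_key argument are illustrative and not part of this commit.

import os
from openai import OpenAI

# Sketch only: reuses the same OPENAI_KEY environment variable that app.py reads.
client = OpenAI(api_key=os.getenv("OPENAI_KEY"))

response = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Hows the weather?"},
    ],
)
print(response.choices[0].message.content)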