prithivMLmods committed
Commit edb2d41 • 1 Parent(s): f41cfa8

Update app.py

Files changed (1)
  1. app.py +121 -74
app.py CHANGED
@@ -1,10 +1,12 @@
import gradio as gr
from openai import OpenAI
import os
-from fpdf import FPDF # For PDF conversion
-from docx import Document # For DOCX conversion
-import tempfile
+from io import BytesIO
+from reportlab.lib.pagesizes import letter
+from reportlab.pdfgen import canvas
+from docx import Document

+# Custom CSS
css = '''
.gradio-container{max-width: 1000px !important}
h1{text-align:center}
@@ -13,6 +15,7 @@ footer {
}
'''

+# Set up OpenAI client
ACCESS_TOKEN = os.getenv("HF_TOKEN")

client = OpenAI(
@@ -20,12 +23,17 @@ client = OpenAI(
    api_key=ACCESS_TOKEN,
)

-def respond(message, history, system_message, max_tokens, temperature, top_p):
-    if history is None:
-        history = []
-
+# Function to handle chat responses
+def respond(
+    message,
+    history: list[tuple[str, str]],
+    system_message,
+    max_tokens,
+    temperature,
+    top_p,
+):
    messages = [{"role": "system", "content": system_message}]
-
+
    for val in history:
        if val[0]:
            messages.append({"role": "user", "content": val[0]})
@@ -45,81 +53,120 @@ def respond(message, history, system_message, max_tokens, temperature, top_p):
        messages=messages,
    ):
        token = message.choices[0].delta.content
-
        response += token
        yield response
-
-    history.append((message, response))
-    return history

-def save_as_file(history, conversion_type):
-    input_text = "\n".join([f"User: {h[0]}" for h in history if h[0]])
-    output_text = "\n".join([f"Assistant: {h[1]}" for h in history if h[1]])
-    file_name = None
-
-    if conversion_type == "PDF":
-        pdf = FPDF()
-        pdf.add_page()
-        pdf.set_font("Arial", size=12)
-        pdf.multi_cell(0, 10, f"User Query: {input_text}\n\nResponse: {output_text}")
-        file_name = tempfile.NamedTemporaryFile(delete=False, suffix=".pdf").name
-        pdf.output(file_name)
-    elif conversion_type == "DOCX":
-        doc = Document()
-        doc.add_heading('Conversation', 0)
-        doc.add_paragraph(f"User Query: {input_text}\n\nResponse: {output_text}")
-        file_name = tempfile.NamedTemporaryFile(delete=False, suffix=".docx").name
-        doc.save(file_name)
-    elif conversion_type == "TXT":
-        file_name = tempfile.NamedTemporaryFile(delete=False, suffix=".txt").name
-        with open(file_name, "w") as f:
-            f.write(f"User Query: {input_text}\n\nResponse: {output_text}")
-
-    return file_name
-
-def convert_and_download(history, conversion_type):
-    if not history:
-        return None
-
-    file_path = save_as_file(history, conversion_type)
-    return file_path
-
-demo = gr.Blocks(css=css)
-
-with demo:
-    history_state = gr.State([]) # Initialize an empty list to store the conversation history
-
-    with gr.Row():
-        system_message = gr.Textbox(value="", label="System message")
-        max_tokens = gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens")
-        temperature = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
-        top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-P")
-
-    with gr.Row():
-        conversation_input = gr.Textbox(label="Message")
-        chat_output = gr.Textbox(label="Response", interactive=False)
-
-    chat = gr.Button("Send")
-    conversion_type = gr.Dropdown(choices=["PDF", "DOCX", "TXT"], value="PDF", label="Conversion Type")
-    download_button = gr.Button("Convert and Download")
-
-    file_output = gr.File()
-
-    def update_history(message, history, system_message, max_tokens, temperature, top_p):
-        history = respond(message, history, system_message, max_tokens, temperature, top_p)
-        return history
-
-    chat.click(
-        fn=update_history,
-        inputs=[conversation_input, history_state, system_message, max_tokens, temperature, top_p],
-        outputs=[chat_output, history_state]
-    )
-
-    download_button.click(
-        fn=convert_and_download,
-        inputs=[history_state, conversion_type],
-        outputs=file_output
-    )
+# Function to save chat history to a text file
+def save_as_txt(history):
+    with open("chat_history.txt", "w") as f:
+        for user_message, assistant_message in history:
+            f.write(f"User: {user_message}\n")
+            f.write(f"Assistant: {assistant_message}\n")
+    return "chat_history.txt"
+
+# Function to save chat history to a DOCX file
+def save_as_docx(history):
+    doc = Document()
+    doc.add_heading('Chat History', 0)
+
+    for user_message, assistant_message in history:
+        doc.add_paragraph(f"User: {user_message}")
+        doc.add_paragraph(f"Assistant: {assistant_message}")
+
+    doc.save("chat_history.docx")
+    return "chat_history.docx"
+
+# Function to save chat history to a PDF file
+def save_as_pdf(history):
+    buffer = BytesIO()
+    c = canvas.Canvas(buffer, pagesize=letter)
+    width, height = letter
+    y = height - 40
+
+    c.drawString(30, y, "Chat History")
+    y -= 30
+
+    for user_message, assistant_message in history:
+        c.drawString(30, y, f"User: {user_message}")
+        y -= 20
+        c.drawString(30, y, f"Assistant: {assistant_message}")
+        y -= 30
+
+        if y < 40:
+            c.showPage()
+            y = height - 40
+
+    c.save()
+    buffer.seek(0)
+
+    with open("chat_history.pdf", "wb") as f:
+        f.write(buffer.read())
+
+    return "chat_history.pdf"
+
+# Gradio interface
+def handle_file_save(history, file_format):
+    if file_format == "txt":
+        return save_as_txt(history)
+    elif file_format == "docx":
+        return save_as_docx(history)
+    elif file_format == "pdf":
+        return save_as_pdf(history)
+
+demo = gr.ChatInterface(
+    respond,
+    additional_inputs=[
+        gr.Textbox(value="", label="System message"),
+        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
+        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
+        gr.Slider(
+            minimum=0.1,
+            maximum=1.0,
+            value=0.95,
+            step=0.05,
+            label="Top-P",
+        ),
+        gr.Dropdown(
+            choices=["txt", "docx", "pdf"],
+            label="Save as",
+        ),
+    ],
+    outputs=[
+        gr.File(label="Download Chat History"),
+    ],
+    css=css,
+    theme="allenai/gradio-theme",
+)
+
+def save_handler(message, history, system_message, max_tokens, temperature, top_p, file_format):
+    response = respond(message, history, system_message, max_tokens, temperature, top_p)
+    saved_file = handle_file_save(history, file_format)
+    return saved_file
+
+demo = gr.Interface(
+    fn=save_handler,
+    inputs=[
+        gr.Textbox(value="", label="Message"),
+        gr.State(),
+        gr.Textbox(value="", label="System message"),
+        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
+        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
+        gr.Slider(
+            minimum=0.1,
+            maximum=1.0,
+            value=0.95,
+            step=0.05,
+            label="Top-P",
+        ),
+        gr.Dropdown(
+            choices=["txt", "docx", "pdf"],
+            label="Save as",
+        ),
+    ],
+    outputs=gr.File(label="Download Chat History"),
+    css=css,
+    theme="allenai/gradio-theme",
+)

if __name__ == "__main__":
-    demo.launch()
+    demo.launch()
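
For reference, the reportlab export path this commit introduces can be exercised outside Gradio. The following is a minimal standalone sketch of the same approach, assuming reportlab is installed; the two-turn history and the output filename are made up for illustration and are not part of the commit.

from io import BytesIO

from reportlab.lib.pagesizes import letter
from reportlab.pdfgen import canvas

# Hypothetical chat history in the (user_message, assistant_message) tuple format used by app.py.
history = [
    ("Hello", "Hi there! How can I help you today?"),
    ("What does this app do?", "It chats with you and can export the conversation."),
]

buffer = BytesIO()
c = canvas.Canvas(buffer, pagesize=letter)
height = letter[1]  # page height in points
y = height - 40

c.drawString(30, y, "Chat History")
y -= 30

for user_message, assistant_message in history:
    c.drawString(30, y, f"User: {user_message}")
    y -= 20
    c.drawString(30, y, f"Assistant: {assistant_message}")
    y -= 30
    if y < 40:  # start a new page before drawing below the bottom margin
        c.showPage()
        y = height - 40

c.save()  # finalize the PDF into the in-memory buffer

with open("example_chat_history.pdf", "wb") as f:
    f.write(buffer.getvalue())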