prithivMLmods committed
Commit 1141e03 • 1 Parent(s): 6909be2

Update app.py

Files changed (1):
  1. app.py (+21 -20)
app.py CHANGED
@@ -20,14 +20,7 @@ client = OpenAI(
     api_key=ACCESS_TOKEN,
 )
 
-def respond(
-    message,
-    history,
-    system_message,
-    max_tokens,
-    temperature,
-    top_p,
-):
+def respond(message, history, system_message, max_tokens, temperature, top_p):
     if history is None:
         history = []
 
@@ -43,7 +36,7 @@ def respond(
 
     response = ""
 
-    for message in client.chat.completions.create(
+    for message in client.chat.completions.create(
         model="meta-llama/Meta-Llama-3.1-8B-Instruct",
         max_tokens=max_tokens,
         stream=True,
@@ -102,23 +95,31 @@ with demo:
         temperature = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
         top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-P")
 
-    chatbot = gr.ChatInterface(
-        fn=respond,
-        additional_inputs=[system_message, max_tokens, temperature, top_p],
-        state=history_state,  # Pass the history state to the ChatInterface
-    )
-
     with gr.Row():
-        conversion_type = gr.Dropdown(choices=["PDF", "DOCX", "TXT"], value="PDF", label="Conversion Type")
-        download_button = gr.Button("Convert and Download")
+        conversation_input = gr.Textbox(label="Message")
+        chat_output = gr.Textbox(label="Response", interactive=False)
+
+        chat = gr.Button("Send")
+        conversion_type = gr.Dropdown(choices=["PDF", "DOCX", "TXT"], value="PDF", label="Conversion Type")
+        download_button = gr.Button("Convert and Download")
 
     file_output = gr.File()
+
+    def update_history(message, history, system_message, max_tokens, temperature, top_p):
+        history = respond(message, history, system_message, max_tokens, temperature, top_p)
+        return history
+
+    chat.click(
+        fn=update_history,
+        inputs=[conversation_input, history_state, system_message, max_tokens, temperature, top_p],
+        outputs=[chat_output, history_state]
+    )
 
     download_button.click(
-        convert_and_download,
-        inputs=[history_state, conversion_type],  # Pass the history state to the conversion function
+        fn=convert_and_download,
+        inputs=[history_state, conversion_type],
        outputs=file_output
    )
 
if __name__ == "__main__":
-    demo.launch()
+    demo.launch()
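
For context, the sketch below (not part of the commit) shows how the new manual wiring could behave end to end. It assumes, as the committed update_history does, that respond() returns the updated chat history as a list of (user, assistant) pairs; the stub respond_stub() and the two-value return are illustrative assumptions, since chat.click declares two outputs ([chat_output, history_state]) while the committed update_history returns only the history.

def respond_stub(message, history, system_message, max_tokens, temperature, top_p):
    # Illustrative stand-in for the app's respond(): assumed to return the
    # updated history as a list of (user_message, assistant_reply) pairs.
    history = history or []
    history.append((message, f"(model reply to) {message}"))
    return history

def update_history(message, history, system_message, max_tokens, temperature, top_p):
    # chat.click declares outputs=[chat_output, history_state], so returning the
    # latest reply and the full history keeps the Textbox and the state in sync.
    history = respond_stub(message, history, system_message, max_tokens, temperature, top_p)
    latest_reply = history[-1][1] if history else ""
    return latest_reply, history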