seawolf2357 committed
Commit 29e6733 • 1 Parent(s): 67a039e

Update app.py

Files changed (1)
  1. app.py +13 -23
app.py CHANGED
@@ -68,30 +68,20 @@ def respond(
          except json.JSONDecodeError:
              continue  # Ignore invalid JSON and skip to the next chunk
 
-# Use the Gradio Blocks API
-with gr.Blocks() as demo:
-    with gr.Row():
-        chatbot = gr.Chatbot()
-        with gr.Column():
-            message = gr.Textbox(label="Your message:")
-            system_message = gr.Textbox(value="AI Assistant Role", label="System message")
-            max_tokens = gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens")
-            temperature = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
-            top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)")
-            send_button = gr.Button("Send")
-
-    def handle_response(message, history, system_message, max_tokens, temperature, top_p):
-        bot_response = respond(message, history, system_message, max_tokens, temperature, top_p)
-        for response in bot_response:
-            history.append((message, response))
-            yield history, history
-
-    send_button.click(
-        handle_response,
-        inputs=[message, chatbot, system_message, max_tokens, temperature, top_p],
-        outputs=[chatbot, chatbot],
-        queue=True
-    )
+theme = "Nymbo/Nymbo_Theme"
+
+# Configure the Gradio ChatInterface
+demo = gr.ChatInterface(
+    fn=respond,
+    theme=theme,
+    additional_inputs=[
+        gr.Textbox(value="AI Assistant Role", label="System message"),
+        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
+        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
+        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
+    ],
+    streaming=True  # Enable streaming mode
+)
 
 if __name__ == "__main__":
     demo.queue().launch(max_threads=20)
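
For reference, gr.ChatInterface streams a reply whenever the chat function is a generator that yields progressively longer strings, which is what this commit relies on. The sketch below is a minimal, self-contained illustration of that wiring, not a copy of app.py: the endpoint URL, the payload fields, the "token" key, and the use of requests are placeholder assumptions standing in for whatever respond actually does, and the streaming flag is omitted because yielding from the function is what drives streaming here.

```python
import json

import gradio as gr
import requests  # assumption: the real app may use a different HTTP client

API_URL = "https://example.com/v1/chat"  # placeholder endpoint, not the one in app.py

def respond(message, history, system_message, max_tokens, temperature, top_p):
    """Yield partial replies so gr.ChatInterface can stream them into the chat bubble."""
    payload = {
        "system": system_message,
        "messages": [{"role": "user", "content": message}],
        "max_tokens": max_tokens,
        "temperature": temperature,
        "top_p": top_p,
        "stream": True,
    }
    partial = ""
    with requests.post(API_URL, json=payload, stream=True) as r:
        for chunk in r.iter_lines():
            if not chunk:
                continue
            try:
                data = json.loads(chunk)
            except json.JSONDecodeError:
                continue  # ignore invalid JSON and move on to the next chunk
            partial += data.get("token", "")  # "token" is a placeholder field name
            yield partial  # each yield replaces the previously shown partial reply

theme = "Nymbo/Nymbo_Theme"

demo = gr.ChatInterface(
    fn=respond,  # generator function -> responses are streamed
    theme=theme,
    additional_inputs=[
        gr.Textbox(value="AI Assistant Role", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
    ],
)

if __name__ == "__main__":
    demo.queue().launch(max_threads=20)
```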