JustinLin610 committed
Commit
ef65407
1 Parent(s): 766788e

Create app.py

Files changed (1)
  1. app.py +263 -0
app.py ADDED
@@ -0,0 +1,263 @@
# Copyright (c) Alibaba Cloud.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
import os
import numpy as np
from urllib3.exceptions import HTTPError
os.system('pip install dashscope modelscope -U')
os.system('pip install gradio==3.*')

# os.environ['CUDA_VISIBLE_DEVICES'] = '0,1'
from argparse import ArgumentParser
from pathlib import Path

import copy
import gradio as gr
import re
import secrets
import tempfile
import requests
from http import HTTPStatus
from dashscope import MultiModalConversation
import dashscope
API_KEY = os.environ['API_KEY']
dashscope.api_key = API_KEY

DEFAULT_CKPT_PATH = 'qwen/Qwen-VL-Chat'
REVISION = 'v1.0.4'
BOX_TAG_PATTERN = r"<box>([\s\S]*?)</box>"
PUNCTUATION = "!?。"#$%&'()*+,-/:;<=>@[\]^_`{|}~⦅⦆「」、、〃》「」『』【】〔〕〖〗〘〙〚〛〜〝〞〟〰〾〿–—‘’‛“”„‟…‧﹏."


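# Usage: export the DashScope key before starting the demo, e.g.
#   API_KEY=your-dashscope-key python app.py --share
# (the key name matches the os.environ lookup above; --share is optional).
# Note: --checkpoint-path, --revision and --cpu-only are carried over from the
# local Qwen-VL-Chat demo and are not read by the DashScope-backed predict()
# below, so only --share (and the server options, currently commented out in
# launch()) have any effect here.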
def _get_args():
    parser = ArgumentParser()
    parser.add_argument("-c", "--checkpoint-path", type=str, default=DEFAULT_CKPT_PATH,
                        help="Checkpoint name or path, default to %(default)r")
    parser.add_argument("--revision", type=str, default=REVISION)
    parser.add_argument("--cpu-only", action="store_true", help="Run demo with CPU only")

    parser.add_argument("--share", action="store_true", default=False,
                        help="Create a publicly shareable link for the interface.")
    parser.add_argument("--inbrowser", action="store_true", default=False,
                        help="Automatically launch the interface in a new tab on the default browser.")
    parser.add_argument("--server-port", type=int, default=7860,
                        help="Demo server port.")
    parser.add_argument("--server-name", type=str, default="127.0.0.1",
                        help="Demo server name.")

    args = parser.parse_args()
    return args

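# _parse_text turns a raw query/response string into the HTML fragment that
# gr.Chatbot renders: ``` fences become <pre><code class="language-..."> blocks,
# characters inside those blocks are escaped so code displays literally, and
# line breaks become <br>.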
def _parse_text(text):
    lines = text.split("\n")
    lines = [line for line in lines if line != ""]
    count = 0
    for i, line in enumerate(lines):
        if "```" in line:
            count += 1
            items = line.split("`")
            if count % 2 == 1:
                lines[i] = f'<pre><code class="language-{items[-1]}">'
            else:
                lines[i] = f"<br></code></pre>"
        else:
            if i > 0:
                if count % 2 == 1:
                    line = line.replace("`", r"\`")
                    line = line.replace("<", "&lt;")
                    line = line.replace(">", "&gt;")
                    line = line.replace(" ", "&nbsp;")
                    line = line.replace("*", "&ast;")
                    line = line.replace("_", "&lowbar;")
                    line = line.replace("-", "&#45;")
                    line = line.replace(".", "&#46;")
                    line = line.replace("!", "&#33;")
                    line = line.replace("(", "&#40;")
                    line = line.replace(")", "&#41;")
                    line = line.replace("$", "&#36;")
                lines[i] = "<br>" + line
    text = "".join(lines)
    return text


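# The triple-quoted block below appears to be a pasted console dump showing the
# chatbot/task history format from a debugging session; it is a bare string
# literal and has no effect at runtime.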
"""
('/tmp/gradio/1837abb0176495ff182050801ebff1fa9b18fc4a/aiyinsitan.jpg',),
None],
['这是谁?',
'图中是爱因斯坦,阿尔伯特·爱因斯坦(Albert '
'Einstein),是出生于德国、拥有瑞士和美国国籍的犹太裔理论物理学家,他创立了现代物理学的两大支柱的相对论及量子力学。'],
['框处里面的人', '图中框内是爱因斯坦的半身照,照片中爱因斯坦穿着一件西装,留着标志性的胡子和蜷曲的头发。'],
['框出里面的人',
('/tmp/gradio/71cf5c2551009fd9a00e0d80bc7ab7fb8de211b5/tmp115aba5d70.jpg',)],
[None, '里面的人'],
('介绍一下',
'阿尔伯特·爱因斯坦(Albert '
'Einstein),是出生于德国、拥有瑞士和美国国籍的犹太裔理论物理学家,他创立了现代物理学的两大支柱的相对论及量子力学。他的贡献包括他提出的相对论(尤其是狭义相对论和广义相对论)、量子力学的开创性贡献以及他对于 '
'gravity 的贡献。爱因斯坦也是诺贝尔奖得主以及美国公民。')]
"""

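# Strip Qwen-VL grounding markup (<ref>...</ref> labels and <box>...</box>
# coordinates, including an unterminated <box> at the end of a partially
# streamed answer) before the text is shown in the chat window.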
def _remove_image_special(text):
    text = text.replace('<ref>', '').replace('</ref>', '')
    return re.sub(r'<box>.*?(</box>|$)', '', text)

def _launch_demo(args):
    uploaded_file_dir = os.environ.get("GRADIO_TEMP_DIR") or str(
        Path(tempfile.gettempdir()) / "gradio"
    )

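    # predict() is a generator: it rebuilds the DashScope message list from
    # task_history, streams partial answers into the chatbot, and, when the last
    # content element carries a 'result_image' (a box-annotated picture),
    # downloads it into uploaded_file_dir and appends it as an extra chat bubble.
    # The message list sent to the API looks roughly like this (paths and texts
    # are illustrative):
    #   [{'role': 'user', 'content': [{'image': 'file:///tmp/gradio/cat.jpg'},
    #                                 {'text': 'What is in the box?'}]},
    #    {'role': 'assistant', 'content': [{'text': 'previous answer'}]},
    #    {'role': 'user', 'content': [{'text': 'current query'}]}]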
    def predict(_chatbot, task_history):
        chat_query = _chatbot[-1][0]
        query = task_history[-1][0]
        if len(chat_query) == 0:
            _chatbot.pop()
            task_history.pop()
            return _chatbot
        print("User: " + _parse_text(query))
        history_cp = copy.deepcopy(task_history)
        full_response = ""
        messages = []
        content = []
        for q, a in history_cp:
            if isinstance(q, (tuple, list)):
                content.append({'image': f'file://{q[0]}'})
            else:
                content.append({'text': q})
                messages.append({'role': 'user', 'content': content})
                messages.append({'role': 'assistant', 'content': [{'text': a}]})
                content = []
        messages.pop()
        responses = MultiModalConversation.call(
            model='qwen-vl-plus', messages=messages,
            seed=np.random.randint(0, np.iinfo(np.int64).max),
            top_p=0.001,
            stream=True,
        )
        for response in responses:
            if not response.status_code == HTTPStatus.OK:
                raise HTTPError(f'response.code: {response.code}\nresponse.message: {response.message}')
            response = response.output.choices[0].message.content
            response_text = []
            for ele in response:
                if 'text' in ele:
                    response_text.append(ele['text'])
                elif 'box' in ele:
                    response_text.append(ele['box'])
            response_text = ''.join(response_text)
            _chatbot[-1] = (_parse_text(chat_query), _remove_image_special(response_text))
            yield _chatbot

        if len(response) > 1:
            result_image = response[-1]['result_image']
            resp = requests.get(result_image)
            os.makedirs(uploaded_file_dir, exist_ok=True)
            name = f"tmp{secrets.token_hex(20)}.jpg"
            filename = os.path.join(uploaded_file_dir, name)
            with open(filename, 'wb') as f:
                f.write(resp.content)
            response = ''.join(r['box'] if 'box' in r else r['text'] for r in response[:-1])
            _chatbot.append((None, (filename,)))
        else:
            response = response[0]['text']
            _chatbot[-1] = (_parse_text(chat_query), response)
        full_response = _parse_text(response)

        task_history[-1] = (query, full_response)
        print("Qwen-VL-Chat: " + _parse_text(full_response))
        # task_history = task_history[-10:]
        yield _chatbot


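    # regenerate() clears the last answer from both histories and replays the
    # query through predict() to produce a fresh response.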
    def regenerate(_chatbot, task_history):
        if not task_history:
            return _chatbot
        item = task_history[-1]
        if item[1] is None:
            return _chatbot
        task_history[-1] = (item[0], None)
        chatbot_item = _chatbot.pop(-1)
        if chatbot_item[0] is None:
            _chatbot[-1] = (_chatbot[-1][0], None)
        else:
            _chatbot.append((chatbot_item[0], None))
        return predict(_chatbot, task_history)

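    # The demo keeps two parallel histories: the chatbot value holds the
    # HTML-parsed text (and image tuples) that gr.Chatbot displays, while
    # task_history holds the raw text and file paths that predict() sends to the
    # API. add_text also trims a single trailing punctuation mark from the query
    # before storing it in task_history.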
    def add_text(history, task_history, text):
        task_text = text
        if len(text) >= 2 and text[-1] in PUNCTUATION and text[-2] not in PUNCTUATION:
            task_text = text[:-1]
        history = history + [(_parse_text(text), None)]
        task_history = task_history + [(task_text, None)]
        return history, task_history, ""

    def add_file(history, task_history, file):
        history = history + [((file.name,), None)]
        task_history = task_history + [((file.name,), None)]
        return history, task_history

    def reset_user_input():
        return gr.update(value="")

    def reset_state(task_history):
        task_history.clear()
        return []

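    # Layout and wiring: a chatbot pane, a textbox and four buttons. Submit first
    # appends the query via add_text, then streams predict() into the chatbot; a
    # separate click handler clears the textbox.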
    with gr.Blocks() as demo:
        gr.Markdown("""\
<p align="center"><img src="https://modelscope.cn/api/v1/models/qwen/Qwen-VL-Chat/repo?Revision=master&FilePath=assets/logo.jpg&View=true" style="height: 80px"/><p>""")
        gr.Markdown("""<center><font size=8>Qwen-VL-Plus</center>""")
        gr.Markdown(
            """\
<center><font size=3>This WebUI is based on Qwen-VL-Plus, the upgraded version of Qwen-VL, developed by Alibaba Cloud.</center>""")
        gr.Markdown("""<center><font size=3>本WebUI基于Qwen-VL-Plus打造,这是Qwen-VL的升级版。</center>""")
        gr.Markdown("""\
<center><font size=4> \
<a href="https://github.com/QwenLM/Qwen-VL#qwen-vl-plus">Github</a>&nbsp | &nbsp
Qwen-VL <a href="https://modelscope.cn/models/qwen/Qwen-VL/summary">🤖 </a>
| <a href="https://huggingface.co/Qwen/Qwen-VL">🤗</a>&nbsp |
Qwen-VL-Chat <a href="https://modelscope.cn/models/qwen/Qwen-VL-Chat/summary">🤖 </a> |
<a href="https://huggingface.co/Qwen/Qwen-VL-Chat">🤗</a>&nbsp |
&nbsp Qwen-VL-Plus &nbsp <a href="https://qianwen.aliyun.com">Web</a> |
<a href="https://help.aliyun.com/zh/dashscope/developer-reference/vl-plus-quick-start/">API</a></center>""")

        chatbot = gr.Chatbot(label='Qwen-VL-Plus', elem_classes="control-height", height=500)
        query = gr.Textbox(lines=2, label='Input')
        task_history = gr.State([])

        with gr.Row():
            addfile_btn = gr.UploadButton("📁 Upload (上传文件)", file_types=["image"])
            submit_btn = gr.Button("🚀 Submit (发送)")
            regen_btn = gr.Button("🤔️ Regenerate (重试)")
            empty_bin = gr.Button("🧹 Clear History (清除历史)")

        submit_btn.click(add_text, [chatbot, task_history, query], [chatbot, task_history]).then(
            predict, [chatbot, task_history], [chatbot], show_progress=True
        )
        submit_btn.click(reset_user_input, [], [query])
        empty_bin.click(reset_state, [task_history], [chatbot], show_progress=True)
        regen_btn.click(regenerate, [chatbot, task_history], [chatbot], show_progress=True)
        addfile_btn.upload(add_file, [chatbot, task_history, addfile_btn], [chatbot, task_history], show_progress=True)

        gr.Markdown("""\
<font size=2>Note: This demo is governed by the original license of Qwen-VL. \
We strongly advise users not to knowingly generate or allow others to knowingly generate harmful content, \
including hate speech, violence, pornography, deception, etc. \
(注:本演示受Qwen-VL的许可协议限制。我们强烈建议,用户不应传播及不应允许他人传播以下内容,\
包括但不限于仇恨言论、暴力、色情、欺诈相关的有害信息。)""")

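    # demo.queue() enables Gradio's request queue, which the streaming
    # (generator) handlers above rely on; the inbrowser/port/host arguments are
    # parsed but left commented out below, so the hosting platform's defaults
    # apply.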
    demo.queue().launch(
        share=args.share,
        # inbrowser=args.inbrowser,
        # server_port=args.server_port,
        # server_name=args.server_name,
    )


def main():
    args = _get_args()
    _launch_demo(args)


if __name__ == '__main__':
    main()