import gradio as gr
import paddlehub as hub
# Load the pre-trained ERNIE-Zeus text-generation module once at import time;
# all UI callbacks dispatch onto this shared module instance.
ernie_zeus = hub.Module(name='ernie_zeus')
def inference(task: str,
              text: str,
              min_dec_len: int = 2,
              seq_len: int = 512,
              topp: float = 0.9,
              penalty_score: float = 1.0) -> str:
    """Run the selected ERNIE-Zeus generation task on the given text.

    Args:
        task: Name of the ``ernie_zeus`` method to invoke
            (e.g. ``'text_summarization'``); dispatched via ``getattr``.
        text: Input Chinese text.
        min_dec_len: Minimum length of the decoded output.
        seq_len: Maximum sequence length.
        topp: Nucleus-sampling probability threshold.
        penalty_score: Repetition penalty applied during decoding.

    Returns:
        The generated text on success, otherwise the error message as a
        string so the UI displays it instead of crashing.
    """
    try:
        # Resolve the task inside the try block: an unknown task name now
        # surfaces as an error string in the output box rather than an
        # unhandled AttributeError (the original looked it up outside the try).
        func = getattr(ernie_zeus, task)
        return func(text, min_dec_len, seq_len, topp, penalty_score)
    except Exception as error:  # broad by design: report any model failure to the UI
        return str(error)
# Demo metadata. NOTE(review): `title` and `description` are defined but not
# referenced by the layout code below — presumably kept for a hosted demo
# page; confirm before removing.
title = "ERNIE-Zeus"
description = "ERNIE-Zeus model, which supports Chinese text generates task."

# Root container the UI is assembled into (populated in the `with block:`
# section below).
block = gr.Blocks()

# Pre-filled demo rows, one per supported task. Column order matches the
# inputs wired to `inference`:
#   [task, text, min_dec_len, seq_len, topp, penalty_score]
examples = [
    ['text_summarization',
     '在芬兰、瑞典提交“入约”申请近一个月来,北约成员国内部尚未对此达成一致意见。与此同时,俄罗斯方面也多次对北约“第六轮扩张”发出警告。据北约官网显示,北约秘书长斯托尔滕贝格将于本月12日至13日出访瑞典和芬兰,并将分别与两国领导人进行会晤。',
     4, 512, 0.0, 1.0],
    ['copywriting_generation', '芍药香氛的沐浴乳', 32, 512, 0.9, 1.2],
    ['novel_continuation',
     '昆仑山可以说是天下龙脉的根源,所有的山脉都可以看作是昆仑的分支。这些分出来的枝枝杈杈,都可以看作是一条条独立的龙脉。',
     2, 512, 0.9, 1.2],
    ['answer_generation', '杜鹃花怎么养?', 2, 512, 0.9, 1.2],
    ['couplet_continuation', '天增岁月人增寿', 2, 512, 0.9, 1.0],
    ['composition_generation', '诚以养德,信以修身', 128, 512, 0.9, 1.2],
    ['text_cloze', '她有着一双[MASK]的眼眸。', 1, 512, 0.9, 1.0],
]
# Assemble and launch the Gradio UI.
# NOTE(review): indentation was lost in this copy of the file, so the exact
# nesting below (e.g. which widgets sit inside gr.Group()) must be confirmed
# against the original source before any restructuring.
with block:
# Static header shown at the top of the demo page.
gr.HTML(
"""
ERNIE-Zeus Demo
ERNIE-Zeus is a state-of-the-art Chinese text generates model.
"""
)
# Input area: free-text box plus the generate button.
with gr.Group():
text = gr.Textbox(
label="input_text",
placeholder="Please enter Chinese text.",
)
btn = gr.Button(value="Generate text")
# Task selector — each choice is the name of an ernie_zeus method,
# dispatched via getattr() inside inference().
task = gr.Dropdown(label="task",
choices=[
'text_summarization',
'copywriting_generation',
'novel_continuation',
'answer_generation',
'couplet_continuation',
'composition_generation',
'text_cloze'
],
value='text_summarization')
# Decoding hyper-parameters exposed as sliders; ranges bound the values
# accepted by the model (seq_len must exceed min_dec_len by construction).
min_dec_len = gr.Slider(minimum=1, maximum=511, value=1, label="min_dec_len", step=1, interactive=True)
seq_len = gr.Slider(minimum=2, maximum=512, value=128, label="seq_len", step=1, interactive=True)
topp = gr.Slider(minimum=0.0, maximum=1.0, value=1.0, label="topp", step=0.01, interactive=True)
penalty_score = gr.Slider(minimum=1.0, maximum=2.0, value=1.0, label="penalty_score", step=0.01, interactive=True)
# Read-only output box for the generated text.
text_gen = gr.Textbox(label="generated_text")
# Clickable example rows; cache_examples=False so each click re-runs the model.
ex = gr.Examples(examples=examples, fn=inference, inputs=[task, text, min_dec_len, seq_len, topp, penalty_score], outputs=text_gen, cache_examples=False)
# Both pressing Enter in the textbox and clicking the button run inference.
text.submit(inference, inputs=[task, text, min_dec_len, seq_len, topp, penalty_score], outputs=text_gen)
btn.click(inference, inputs=[task, text, min_dec_len, seq_len, topp, penalty_score], outputs=text_gen)
# Empty footer placeholder.
gr.HTML(
"""
"""
)
# Enable request queuing and start the server; the very large limits
# effectively disable queue-full rejections.
block.queue(max_size=100000, concurrency_count=100000).launch()