Tahsin-Mayeesha committed
Commit ff0dcf7
1 Parent(s): eae27ac

Update for Bangla

Files changed (1)
  app.py  +5 -4
app.py CHANGED
@@ -1,12 +1,13 @@
 import torch
 from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
-model = AutoModelForSeq2SeqLM.from_pretrained("Mihakram/AraT5-base-question-generation")
-tokenizer = AutoTokenizer.from_pretrained("Mihakram/AraT5-base-question-generation")
+model_name = "Tahsin-Mayeesha/squad-bn-mt5-base2"
+model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
+tokenizer = AutoTokenizer.from_pretrained(model_name)
 
 
 import gradio as gr
 def generate__questions(context,answer):
-    text="context: " +context + " " + "answer: " + answer + " </s>"
+    text='answer: '+answer + ' context: ' + context
     text_encoding = tokenizer.encode_plus(
         text,return_tensors="pt"
     )
@@ -24,6 +25,6 @@ def generate__questions(context,answer):
 demo = gr.Interface(fn=generate__questions, inputs=[gr.Textbox(label='Context'),
                                                     gr.Textbox(label='Answer')] ,
                     outputs=gr.Textbox(label='Question'),
-                    title="Arabic Question Generation",
+                    title="Bangla Question Generation",
                     description="Get the Question from given Context and an Answer")
 demo.launch()
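
For reference, below is a minimal sketch of what the full app.py could look like after this commit. The diff only shows the two changed hunks; the generation and decoding step between them (roughly lines 14-24 of the file) is not visible, so the model.generate(...) call, its parameters (max_length, num_beams), and the decoding logic here are assumptions rather than the Space's actual code.

# Hypothetical reconstruction of the updated app.py; the generation/decoding
# block is not shown in the diff and is an assumption.
import torch
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

model_name = "Tahsin-Mayeesha/squad-bn-mt5-base2"
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)

import gradio as gr

def generate__questions(context, answer):
    # Prompt format introduced by this commit: answer first, then context.
    text = "answer: " + answer + " context: " + context
    text_encoding = tokenizer.encode_plus(text, return_tensors="pt")
    # Assumed generation settings; the real values sit in the hidden lines
    # between the two hunks.
    generated_ids = model.generate(
        input_ids=text_encoding["input_ids"],
        attention_mask=text_encoding["attention_mask"],
        max_length=64,
        num_beams=4,
    )
    question = tokenizer.decode(
        generated_ids[0],
        skip_special_tokens=True,
        clean_up_tokenization_spaces=True,
    )
    return question

demo = gr.Interface(
    fn=generate__questions,
    inputs=[gr.Textbox(label="Context"), gr.Textbox(label="Answer")],
    outputs=gr.Textbox(label="Question"),
    title="Bangla Question Generation",
    description="Get the Question from given Context and an Answer",
)
demo.launch()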