cuneytkaya committed
Commit 936ecd6
1 parent: 7055800

Update app.py

Files changed (1)
app.py: +16 -2
app.py CHANGED
@@ -6,14 +6,26 @@ model_name = "cuneytkaya/fine-tuned-t5-small-turkish-mmlu"
 tokenizer = T5Tokenizer.from_pretrained(model_name)
 model = T5ForConditionalGeneration.from_pretrained(model_name)
 
+
 def generate_answer(question):
     input_text = f"Soru: {question}"
+
+
     inputs = tokenizer(input_text, return_tensors="pt")
-    outputs = model.generate(**inputs)
+
+
+    outputs = model.generate(**inputs, max_length=50, num_beams=4, early_stopping=True)
+
+
     answer = tokenizer.decode(outputs[0], skip_special_tokens=True)
+
+
+    if "Cevap:" in answer:
+        return answer.split("Cevap:")[1].strip()
+
     return answer
 
-# Gradio arayüzü
+
 interface = gr.Interface(
     fn=generate_answer,
     inputs="text",
@@ -22,4 +34,6 @@ interface = gr.Interface(
     description="This model answers questions from Turkish academic exams like KPSS, TUS, etc.",
 )
 
+
 interface.launch(share=True)
+
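For reference, below is a minimal standalone sketch of the updated generate_answer logic without the Gradio UI. It assumes the untouched top of app.py imports T5Tokenizer and T5ForConditionalGeneration from transformers; the example question at the bottom is hypothetical.

from transformers import T5Tokenizer, T5ForConditionalGeneration

model_name = "cuneytkaya/fine-tuned-t5-small-turkish-mmlu"
tokenizer = T5Tokenizer.from_pretrained(model_name)
model = T5ForConditionalGeneration.from_pretrained(model_name)

def generate_answer(question):
    # Prompt prefix used by app.py: "Soru: <question>" ("Question: ...").
    input_text = f"Soru: {question}"
    inputs = tokenizer(input_text, return_tensors="pt")
    # Beam search (num_beams=4) with early stopping replaces the previous
    # greedy default; max_length=50 caps the length of the generated answer.
    outputs = model.generate(**inputs, max_length=50, num_beams=4, early_stopping=True)
    answer = tokenizer.decode(outputs[0], skip_special_tokens=True)
    # If the model echoes a "Cevap:" ("Answer:") prefix, return only the text after it.
    if "Cevap:" in answer:
        return answer.split("Cevap:")[1].strip()
    return answer

if __name__ == "__main__":
    # Hypothetical KPSS-style question: "What is the capital of Turkey?"
    print(generate_answer("Türkiye'nin başkenti neresidir?"))

Switching from the default greedy decoding to beam search trades a little extra latency for more globally coherent answers, and early_stopping=True ends the search as soon as num_beams complete candidates have been found.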