|
import torch

from transformers import T5ForConditionalGeneration, T5Tokenizer
|
|
|
|
|
# Load the fine-tuned T5 question-generation checkpoint and its matching
# tokenizer from the Hugging Face Hub (downloaded on first run, then cached).
model = T5ForConditionalGeneration.from_pretrained("miiiciiii/I-Comprehend_qg")

tokenizer = T5Tokenizer.from_pretrained("miiiciiii/I-Comprehend_qg")
|
|
|
def get_question(context, answer, model, tokenizer, max_length=50):
    """Generate a question whose answer is *answer* within *context*.

    The first occurrence of the answer inside the context is wrapped in
    ``<hl>`` highlight markers (presumably the format the model was
    fine-tuned on — confirm against the checkpoint's training recipe),
    the result is encoded, and the model generates a question.

    Args:
        context: Passage of text that contains the answer.
        answer: Substring of *context* to build a question for. If it does
            not occur in *context*, the text is passed through without
            highlighting (best-effort, matching the original behavior).
        model: Seq2seq model exposing ``generate`` (e.g. T5ForConditionalGeneration).
        tokenizer: Tokenizer matching *model* (e.g. T5Tokenizer).
        max_length: Maximum length of the generated question, in tokens.
            Defaults to 50, the original hard-coded value.

    Returns:
        The generated question as a plain string (special tokens stripped).
    """
    # Highlight only the first occurrence; the trailing "</s>" is kept
    # byte-for-byte to preserve the input format the model expects.
    answer_span = context.replace(answer, f"<hl>{answer}<hl>", 1) + "</s>"

    inputs = tokenizer(answer_span, return_tensors="pt")

    # Inference only: skip autograd bookkeeping, and pass the attention
    # mask explicitly so generate() does not have to guess it from pad ids.
    with torch.no_grad():
        output_ids = model.generate(
            input_ids=inputs.input_ids,
            attention_mask=inputs.attention_mask,
            max_length=max_length,
        )[0]

    return tokenizer.decode(output_ids, skip_special_tokens=True)
|
|
|
|
|
# Demo: generate a question for a sample passage / answer pair.
sample_context = (
    "The Eiffel Tower is located in Paris and is one of the most "
    "famous landmarks in the world."
)
sample_answer = "Eiffel Tower"

generated = get_question(sample_context, sample_answer, model, tokenizer)
print("Generated Question:", generated)