import random

import nltk
import PyPDF2
import requests
import streamlit as st
from nltk.tokenize import sent_tokenize

# Download NLTK data (if not already downloaded)
nltk.download('punkt')
nltk.download('averaged_perceptron_tagger')

# ChatGPT API endpoint
CHATGPT_API_ENDPOINT = "https://api.openai.com/v1/chat/completions"
OPENAI_API_KEY = "YOUR_OPENAI_API_KEY"  # Replace with your OpenAI API key; do not hard-code real keys in source

# Endpoint of the secondary LLM service used for nuanced question generation.
# Placeholder value (assumption) -- replace with the URL of the service you actually call.
LLM_API_ENDPOINT = "https://your-llm-service.example.com/generate"


def extract_text_from_pdf(pdf_file):
    # Extract the plain text of every page in the uploaded PDF
    pdf_reader = PyPDF2.PdfReader(pdf_file)
    text = ""
    for page in pdf_reader.pages:
        text += page.extract_text()
    return text


def generate_mcqs_on_topic(text, topic, num_mcqs=5):
    # Tokenize the text into sentences
    sentences = sent_tokenize(text)

    # Randomly select sentences to create questions from
    selected_sentences = random.sample(sentences, min(num_mcqs, len(sentences)))

    mcqs = []
    for sentence in selected_sentences:
        # Use ChatGPT for interactive question generation
        chatgpt_question = generate_question_with_chatgpt(sentence)
        mcqs.append(chatgpt_question)

        # Use the secondary LLM for nuanced language modeling
        llm_question = generate_question_with_llm(sentence, topic)
        mcqs.append(llm_question)

    return mcqs


def generate_question_with_chatgpt(context):
    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {OPENAI_API_KEY}",
    }
    data = {
        "model": "gpt-3.5-turbo",
        "messages": [
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": f"What is the question for the following? {context}"},
        ],
    }
    response = requests.post(CHATGPT_API_ENDPOINT, json=data, headers=headers)
    result = response.json()

    # Extract the generated question from the response
    generated_question = result["choices"][0]["message"]["content"]
    return generated_question


def generate_question_with_llm(context, topic):
    # The original source was missing the definition line and request setup
    # for this function; the shape of `headers` and `data` below is an
    # assumption about what the custom LLM service expects.
    headers = {"Content-Type": "application/json"}
    data = {"context": context, "topic": topic}

    response = requests.post(LLM_API_ENDPOINT, json=data, headers=headers)
    result = response.json()

    # Extract the generated question from the response
    generated_question = result["generated_question"]
    return generated_question


def main():
    # Title of the application
    st.title("🤖CB Quiz Generator🧠")
    st.subheader("☕CoffeeBeans☕")

    # User input
    pdf_file = st.file_uploader("Upload PDF Document:", type=["pdf"])
    num_mcqs = st.number_input("Enter Number of MCQs to Generate:", min_value=1, step=1, value=5)
    topic = st.text_input("Enter the topic on which the quiz has to be generated")

    # Button to trigger quiz generation
    if st.button("Generate MCQs"):
        if pdf_file:
            text = extract_text_from_pdf(pdf_file)
            mcqs = generate_mcqs_on_topic(text, topic, num_mcqs)

            # Display the generated questions
            st.success(f"Generated {num_mcqs} Questions:")
            for i, question in enumerate(mcqs, start=1):
                st.write(f"\nQuestion {i}: {question}")
        else:
            st.error("Please upload a PDF document.")


if __name__ == "__main__":
    main()
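
# ---------------------------------------------------------------------------
# Usage sketch (assumptions: the file is saved as quiz_generator.py and the
# dependencies below are not already installed):
#
#   pip install streamlit PyPDF2 nltk requests
#   streamlit run quiz_generator.py
#
# Streamlit serves the app locally (by default at http://localhost:8501),
# where you can upload a PDF, set the topic and question count, and click
# "Generate MCQs" to produce the quiz.
# ---------------------------------------------------------------------------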