Spaces:
Sleeping
Sleeping
Update QnA.py
Browse files
QnA.py
CHANGED
@@ -9,19 +9,19 @@ def get_pdf_content(pdf_path):
|
|
9 |
reader = PyPDF2.PdfReader(file)
|
10 |
text = ""
|
11 |
for page in reader.pages:
|
12 |
-
text += page.extract_text()
|
13 |
return text
|
14 |
|
15 |
def get_answer(question, pdf_path):
    """Answer *question* using the text of the PDF at *pdf_path* as context.

    The PDF text is appended to the question and the combined prompt is sent
    as a single user message to the chat-completion endpoint.

    Args:
        question: Natural-language question to ask.
        pdf_path: Filesystem path of the PDF providing the context.

    Returns:
        The model's reply text (str).
    """
    pdf_content = get_pdf_content(pdf_path)
    # BUG FIX: the original left the messages list unterminated and passed an
    # invalid `context=` keyword -- chat.completions.create has no such
    # parameter, so the document text must travel inside the prompt itself.
    full_question = f"{question}\n\n{pdf_content}"
    response = client.chat.completions.create(
        model="meta-llama/Llama-3-8b-chat-hf",
        messages=[{"role": "user", "content": full_question}],
    )
    return response.choices[0].message.content
|
23 |
|
24 |
if __name__ == "__main__":
    # Demo entry point: ask a fixed question about a local sample document.
    sample_pdf = "sample.pdf"  # Adjust path as necessary
    demo_question = "What are some fun things to do in New York?"
    answer = get_answer(demo_question, sample_pdf)
    print(answer)
|
|
|
9 |
reader = PyPDF2.PdfReader(file)
|
10 |
text = ""
|
11 |
for page in reader.pages:
|
12 |
+
text += page.extract_text() if page.extract_text() else ""
|
13 |
return text
|
14 |
|
15 |
def get_answer(question, pdf_path):
    """Ask the chat model *question*, grounded in the text of *pdf_path*.

    Returns:
        The model's reply text (str).
    """
    # The chat API has no dedicated context field, so the document text
    # rides along inside the single user message.
    document_text = get_pdf_content(pdf_path)
    prompt = f"{question}\n\n{document_text}"  # Append PDF content to the question
    chat_response = client.chat.completions.create(
        model="meta-llama/Llama-3-8b-chat-hf",
        messages=[{"role": "user", "content": prompt}],
    )
    return chat_response.choices[0].message.content
|
23 |
|
24 |
if __name__ == "__main__":
    # Script entry point: answer a fixed question about a local PDF.
    print(
        get_answer(
            "What are some fun things to do in New York?",
            "sample.pdf",  # Adjust path as necessary
        )
    )
|