Update app.py
app.py CHANGED
@@ -2,7 +2,8 @@ import os
 import re
 import pdfminer
 from pdfminer.high_level import extract_pages
-from transformers import pipeline,
+from transformers import pipeline, TFBertForQuestionAnswering, AutoTokenizer
+
 
 import streamlit as st
 
@@ -41,9 +42,9 @@ def answer_question(text, question):
     Returns:
         The answer extracted from the text using the model.
     """
-    qa_model_name = "
+    qa_model_name = "bert-base-uncased"  # Replace with your chosen model
 
-    qa_model =
+    qa_model = TFBertForQuestionAnswering.from_pretrained(qa_model_name)
     tokenizer = AutoTokenizer.from_pretrained(qa_model_name)
 
     inputs = tokenizer(question, text, return_tensors="pt")  # Tokenize inputs
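Note that, as committed, the change mixes a TensorFlow model class (TFBertForQuestionAnswering) with PyTorch tensors (return_tensors="pt"), and bert-base-uncased ships without a trained question-answering head. A minimal, self-contained sketch of the same extractive-QA step, assuming a PyTorch backend and an illustrative SQuAD-fine-tuned checkpoint (an assumption, not necessarily the model this Space uses):

import torch
from transformers import AutoModelForQuestionAnswering, AutoTokenizer

def answer_question(text, question):
    # Assumed checkpoint: any extractive-QA model fine-tuned on SQuAD works here.
    qa_model_name = "distilbert-base-cased-distilled-squad"
    tokenizer = AutoTokenizer.from_pretrained(qa_model_name)
    qa_model = AutoModelForQuestionAnswering.from_pretrained(qa_model_name)

    # Tokenize question + context together; truncate long contexts to the model's limit.
    inputs = tokenizer(question, text, return_tensors="pt", truncation=True)

    with torch.no_grad():
        outputs = qa_model(**inputs)

    # Take the most likely start/end token positions and decode that span as the answer.
    start = int(torch.argmax(outputs.start_logits))
    end = int(torch.argmax(outputs.end_logits)) + 1
    answer_ids = inputs["input_ids"][0][start:end]
    return tokenizer.decode(answer_ids, skip_special_tokens=True)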