Update app.py
app.py CHANGED
@@ -8,6 +8,7 @@ import numpy as np
 import tensorflow_hub as hub
 import openai
 import gradio as gr
+from sklearn.neighbors import NearestNeighbors
 
 def download_pdf(url, output_path):
     urllib.request.urlretrieve(url, output_path)
@@ -239,25 +240,19 @@ title = 'PDF GPT Turbo'
 description = """ PDF GPT Turbo allows you to chat with your PDF files. It uses Google's Universal Sentence Encoder with Deep averaging network (DAN) to give hallucination free response by improving the embedding quality of OpenAI. It cites the page number in square brackets([Page No.]) and shows where the information is located, adding credibility to the responses."""
 
 with gr.Blocks(css="""#chatbot { font-size: 14px; min-height: 1200; }""") as demo:
-
     gr.Markdown(f'<center><h3>{title}</h3></center>')
     gr.Markdown(description)
 
     with gr.Row():
-
-
-        gr.
-
-
-        url = gr.Textbox(label='Enter PDF URL here (Example: https://arxiv.org/pdf/1706.03762.pdf )')
+        with gr.Column():
+            # API Key and File Inputs
+            with gr.Accordion("API Key and PDF"):
+                openAI_key = gr.Textbox(label='Enter your OpenAI API key here', type='password')
+                url = gr.Textbox(label='Enter PDF URL here (Example: https://arxiv.org/pdf/1706.03762.pdf)')
                 gr.Markdown("<center><h4>OR<h4></center>")
-        file = gr.File(label='Upload your PDF/
-
-
-        [[q] for q in questions],
-        inputs=[question],
-        label="PRE-DEFINED QUESTIONS: Click on a question to auto-fill the input box, then press Enter!",
-        )
+                file = gr.File(label='Upload your PDF/Research Paper/Book here', file_types=['.pdf'])
+
+            # Model Selection
             model = gr.Radio(
                 choices=[
                     'gpt-3.5-turbo',
@@ -271,18 +266,71 @@ with gr.Blocks(css="""#chatbot { font-size: 14px; min-height: 1200; }""") as demo:
                 label='Select Model',
                 value='gpt-3.5-turbo'
             )
-    btn = gr.Button(value='Submit')
 
-
-    chatbot = gr.Chatbot(
-
+        # Chat Interface
+        chatbot = gr.Chatbot(label="Chat History", type="messages")
+        msg = gr.Textbox(label="Enter your question here", lines=2)
+        submit_btn = gr.Button("Submit")
+        clear = gr.ClearButton([msg, chatbot])
 
-
-
-
-
-        inputs=[chatbot, url, file, question, openAI_key, model],
-        outputs=[chatbot],
+        # Example Questions
+        gr.Examples(
+            [[q] for q in questions],
+            inputs=[msg],
+            label="PRE-DEFINED QUESTIONS: Click on a question to auto-fill the input box",
+        )
+
+    def respond(message, chat_history, url_value, file_value, key_value, model_value):
+        if message.strip() == "":
+            return "", chat_history  # Return empty message if no input
+
+        try:
+            # Ensure chat_history is initialized properly
+            if chat_history is None:
+                chat_history = []
+
+            if key_value.strip() == '':
+                chat_history.append({"role": "user", "content": message})
+                chat_history.append({"role": "assistant", "content": '[ERROR]: Please enter your OpenAI API key'})
+                return "", chat_history
+
+            if url_value.strip() == '' and file_value is None:
+                chat_history.append({"role": "user", "content": message})
+                chat_history.append({"role": "assistant", "content": '[ERROR]: Both URL and PDF are empty. Provide at least one'})
+                return "", chat_history
+
+            # Process PDF and generate answer
+            if url_value.strip() != '':
+                download_pdf(url_value, 'corpus.pdf')
+                load_recommender('corpus.pdf')
+            else:
+                old_file_name = file_value.name
+                file_name = old_file_name[:-12] + old_file_name[-4:]
+                os.rename(old_file_name, file_name)
+                load_recommender(file_name)
+
+            answer = generate_answer(message, key_value, model_value)
+
+            chat_history.append({"role": "user", "content": message})
+            chat_history.append({"role": "assistant", "content": answer})
+
+            return "", chat_history
+
+        except Exception as e:
+            chat_history.append({"role": "user", "content": message})
+            chat_history.append({"role": "assistant", "content": f'[ERROR]: {str(e)}'})
+            return "", chat_history
+
+    submit_btn.click(
+        respond,
+        [msg, chatbot, url, file, openAI_key, model],
+        [msg, chatbot]
+    )
+
+    msg.submit(
+        respond,
+        [msg, chatbot, url, file, openAI_key, model],
+        [msg, chatbot]
     )
 
 demo.launch()
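The only change to the import block is the new "from sklearn.neighbors import NearestNeighbors" line. The diff does not show load_recommender, but the description's mention of Universal Sentence Encoder embeddings suggests the index it builds over the PDF chunks is a nearest-neighbour search of that kind. A minimal sketch of the pattern, with hypothetical names (embed, chunks, build_index, top_chunks) standing in for whatever app.py actually does:

# Sketch only: assumes "embed" is the Universal Sentence Encoder loaded via
# tensorflow_hub and "chunks" is the list of text passages extracted from the
# PDF. Both names and both helpers are hypothetical stand-ins for the logic
# inside load_recommender, not code from the repository.
import numpy as np
from sklearn.neighbors import NearestNeighbors

def build_index(chunks, embed, n_neighbors=5):
    embeddings = np.asarray(embed(chunks))   # one embedding vector per chunk
    nn = NearestNeighbors(n_neighbors=n_neighbors)
    nn.fit(embeddings)
    return nn

def top_chunks(query, chunks, embed, nn):
    query_emb = np.asarray(embed([query]))
    _, idx = nn.kneighbors(query_emb)        # indices of the closest chunks
    return [chunks[i] for i in idx[0]]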
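Note also that the chat component is now created with gr.Chatbot(label="Chat History", type="messages"), so respond appends OpenAI-style role/content dictionaries rather than the (user, bot) tuples older Gradio chat histories used. With type="messages", the history Gradio passes in and expects back is a flat list of dicts of this shape (values below are only illustrative):

# Illustrative history for a Chatbot created with type="messages":
chat_history = [
    {"role": "user", "content": "What problem does the paper address?"},
    {"role": "assistant", "content": "It proposes ... [Page 2]"},
]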