taesiri's picture
update
2adf285
raw
history blame
8.05 kB
import io
import os
import re
import tarfile
from anthropic import AI_PROMPT, HUMAN_PROMPT, Anthropic
import gradio as gr
import requests
import arxiv
def replace_texttt(text):
    """Convert LaTeX ``\\texttt{...}`` spans into Markdown emphasis (``*...*``)."""
    texttt_pattern = r"\\texttt\{(.*?)\}"
    return re.sub(texttt_pattern, r"*\1*", text)
def get_paper_info(paper_id):
    """Look up an arXiv paper by ID and return ``(title, summary)``.

    Returns ``(None, None)`` when no paper matches the given identifier.
    """
    # NOTE(review): Search.results() is deprecated in newer releases of the
    # `arxiv` package in favor of Client().results(search) — kept as-is here
    # to preserve the exact behavior with the pinned dependency.
    search = arxiv.Search(id_list=[paper_id])
    paper = next(search.results(), None)
    if paper is None:
        return None, None
    return paper.title, paper.summary
def download_arxiv_source(paper_id):
    """Download an arXiv paper's LaTeX source and flatten it into one string.

    Fetches the e-print tarball, reads every ``.tex`` member into memory, and
    recursively expands ``\\input{...}`` / ``\\include{...}`` directives.

    Parameters:
        paper_id: arXiv identifier, e.g. ``"2108.07258"``.

    Returns:
        The flattened LaTeX source: ``main.tex`` (with includes expanded) when
        present, otherwise all ``.tex`` files concatenated.

    Raises:
        requests.HTTPError: if the download fails.
        tarfile.ReadError: if the response is not a readable tar archive.
    """
    url = f"https://arxiv.org/e-print/{paper_id}"
    response = requests.get(url)
    response.raise_for_status()

    # Use a context manager so the tar file is always closed.
    # (The original code built this dict twice — the first pass without the
    # isfile() guard — doing all the extraction work redundantly. Build it
    # once; isfile() skips directories, for which extractfile() returns None.)
    with tarfile.open(fileobj=io.BytesIO(response.content), mode="r") as tar:
        tex_files = {
            member.name: tar.extractfile(member).read().decode("utf-8")
            for member in tar.getmembers()
            if member.isfile() and member.name.endswith(".tex")
        }

    # Pattern to match \input{...} and \include{...}
    pattern = re.compile(r"\\(input|include){(.*?)}")

    def replace_includes(text):
        # Recursively splice referenced .tex files into the text, line by line.
        output = []
        for line in text.split("\n"):
            match = re.search(pattern, line)
            if match:
                command, filename = match.groups()
                # LaTeX adds the .tex extension implicitly for \input/\include.
                if not filename.endswith(".tex"):
                    filename += ".tex"
                if filename in tex_files:
                    output.append(replace_includes(tex_files[filename]))
                else:
                    # Keep a commented-out marker so missing files are visible.
                    output.append(f"% {line} % FILE NOT FOUND")
            else:
                output.append(line)
        return "\n".join(output)

    if "main.tex" in tex_files:
        # Start with the contents of main.tex
        main_tex = replace_includes(tex_files["main.tex"])
    else:
        # No main.tex, concatenate all .tex files
        main_tex = "\n".join(replace_includes(text) for text in tex_files.values())
    return main_tex
class ContextualQA:
    """Question answering over a single paper's LaTeX source.

    Keeps the loaded paper text as context plus a running history of the
    questions asked and the answers received from the Anthropic API.
    """

    def __init__(self, client, model="claude-2.0"):
        self.client = client
        self.model = model
        self.context = ""
        self.questions = []
        self.responses = []

    def load_text(self, text):
        """Store the paper text used as context for subsequent questions."""
        self.context = text

    def ask_question(self, question):
        """Send one question (with the full paper as context) to the model.

        Records the question and the model's answer in the history and
        returns the answer text.
        """
        leading_prompt = "Give the following paper:"
        trailing_prompt = "Now, answer the following question based on the content of the paper above. You can optionally use Markdown to format your answer or LaTeX typesetting to improve the presentation of your answer."
        prompt = (
            f"{HUMAN_PROMPT} {leading_prompt} {self.context} {trailing_prompt}"
            f" {HUMAN_PROMPT} {question} {AI_PROMPT}"
        )
        response = self.client.completions.create(
            prompt=prompt,
            stop_sequences=[HUMAN_PROMPT],
            max_tokens_to_sample=6000,
            model=self.model,
            stream=False,
        )
        answer = response.completion
        self.questions.append(question)
        self.responses.append(answer)
        return answer

    def clear_context(self):
        """Drop the loaded paper and the accumulated Q&A history."""
        self.context = ""
        self.questions = []
        self.responses = []

    def __getstate__(self):
        # The API client is not picklable; strip it before serialization.
        state = dict(self.__dict__)
        state.pop("client")
        return state

    def __setstate__(self, state):
        # Restore attributes; a live client must be re-attached by the caller.
        self.__dict__.update(state)
        self.client = None
def load_context(paper_id):
    """Download a paper's LaTeX source and build a ContextualQA model for it.

    Parameters:
        paper_id: arXiv identifier entered by the user.

    Returns:
        ``(qa_model, chat_history)`` on success, or ``(None, chat_history)``
        with an error entry when the source cannot be downloaded. The history
        entries are ``(user_message, bot_message)`` pairs for gr.Chatbot.
    """
    try:
        latex_source = download_arxiv_source(paper_id)
    except Exception as e:
        return None, [(f"Error loading paper with id {paper_id}.", str(e))]

    client = Anthropic(api_key=os.environ["ANTHROPIC_API_KEY"])
    qa_model = ContextualQA(client, model="claude-2.0")
    qa_model.load_text(latex_source)

    title, abstract = get_paper_info(paper_id)
    # get_paper_info returns (None, None) when the metadata lookup fails;
    # guard before replace_texttt, whose re.sub would raise TypeError on None.
    title = replace_texttt(title) if title is not None else "Unknown"
    abstract = replace_texttt(abstract) if abstract is not None else "Unavailable"

    return (
        qa_model,
        [
            (
                f"Load the paper with id {paper_id}.",
                f"\n**Title**: {title}\n\n**Abstract**: {abstract}\n\nPaper loaded, You can now ask questions.",
            )
        ],
    )
def answer_fn(qa_model, question, chat_history):
    """Answer one user question against the loaded paper.

    Appends the (question, answer) pair — or an error entry — to the chat
    history and returns ``(qa_model, chat_history, "")``; the trailing empty
    string clears the question textbox in the UI.
    """
    # Nothing to answer: prompt the user instead of calling the API.
    if question == "":
        chat_history.append(("No Question Asked", "Please ask a question."))
        return qa_model, chat_history, ""

    # Attach a fresh client (presumably the old one was stripped when the
    # state was serialized — see ContextualQA.__getstate__; verify against
    # how gr.State round-trips the object).
    qa_model.client = Anthropic(api_key=os.environ["ANTHROPIC_API_KEY"])

    try:
        answer = qa_model.ask_question(question)
    except Exception as e:
        chat_history.append(("Error Asking Question", str(e)))
    else:
        chat_history.append((question, answer))
    return qa_model, chat_history, ""
def clear_context():
    """Reset the chatbot display to an empty conversation."""
    empty_history = []
    return empty_history
# --- Gradio UI -------------------------------------------------------------
# Layout: header HTML, a loader row (paper ID + button), the chatbot pane
# with question controls, footer notes, then the event wiring.
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    # Page title.
    gr.HTML(
        """
    <h1 style='text-align: center; font-size: 24px;'>
        Explore ArXiv Papers in Depth with <code>claude-2.0</code> - Ask Questions and Get Answers Instantly
    </h1>
    """
    )
    # Short description of the app.
    gr.HTML(
        """
    <p style='text-align: justify; font-size: 18px; margin: 10px;'>
        Explore the depths of ArXiv papers with our interactive app, powered by the advanced <code>claude-2.0</code> model. Ask detailed questions and get immediate, context-rich answers from academic papers.
    </p>
    """
    )
    # "Duplicate this Space" badge and author link.
    gr.HTML(
        """
    <center>
    <a href="https://huggingface.co/spaces/taesiri/ClaudeReadsArxiv?duplicate=true">
        <img src="https://bit.ly/3gLdBN6" alt="Duplicate Space" style="vertical-align: middle; max-width: 100px; margin-right: 10px;">
    </a>
    <span style="font-size: 14px; vertical-align: middle;">
        Duplicate the Space with your Anthropic API Key &nbsp;|&nbsp;
        Follow me on Twitter for more updates: <a href="https://twitter.com/taesiri" target="_blank">@taesiri</a>
    </span>
    </center>
    """
    )
    with gr.Column():
        with gr.Row():
            paper_id_input = gr.Textbox(label="Enter Paper ID", value="2108.07258")
            btn_load = gr.Button("Load Paper")
        # Server-side state: holds the ContextualQA instance between events.
        qa_model = gr.State()
    with gr.Column():
        chatbot = gr.Chatbot().style(color_map=("blue", "yellow"))
        question_txt = gr.Textbox(
            label="Question", lines=1, placeholder="Type your question here..."
        )
        btn_answer = gr.Button("Answer Question")
        btn_clear = gr.Button("Clear Chat")
    # Privacy notice.
    gr.HTML(
        """<center>All the inputs are being sent to Anthropic's Claude endpoints. Please refer to <a href="https://legal.anthropic.com/#privacy">this link</a> for privacy policy.</center>"""
    )
    gr.Markdown(
        "## Acknowledgements\n"
        "This project is made possible through the generous support of "
        "[Anthropic](https://www.anthropic.com/), who provided free access to the `Claude-2.0` API."
    )
    # Event wiring: loading a paper, answering (button or Enter), clearing chat.
    btn_load.click(load_context, inputs=[paper_id_input], outputs=[qa_model, chatbot])
    btn_answer.click(
        answer_fn,
        inputs=[qa_model, question_txt, chatbot],
        outputs=[qa_model, chatbot, question_txt],
    )
    question_txt.submit(
        answer_fn,
        inputs=[qa_model, question_txt, chatbot],
        outputs=[qa_model, chatbot, question_txt],
    )
    btn_clear.click(clear_context, outputs=[chatbot])

demo.launch()