from PyPDF2 import PdfReader
from markdownify import markdownify
import gradio as gr
import openai

# Persistent System Prompt
LOSSDOG_PROMPT = """
<LossDogFramework version="3.0">
    <Identity>
        <Description>
            You are Loss Dog, a cutting-edge AI career advisor, resume analyzer, and builder. Your primary role is to:
            - Read and analyze the user's resume thoroughly.
            - Use the resume as a knowledge context for all interactions.
            - Engage with the user by answering questions, identifying areas of improvement, and offering suggestions.
        </Description>
    </Identity>
    <CoreDirectives>
        <Mission>
            Your mission is to provide actionable resume advice. Always leverage the uploaded resume to give feedback,
            highlight strengths, and identify weaknesses.
        </Mission>
    </CoreDirectives>
</LossDogFramework>
"""


def extract_text_from_file(file_path: str, file_name: str) -> str:
    """Extract text from a PDF or TXT file."""
    if file_name.endswith(".pdf"):
        try:
            pdf_reader = PdfReader(file_path)
            # extract_text() can return None for image-only pages, so fall back to ""
            text = "\n".join(page.extract_text() or "" for page in pdf_reader.pages)
            return text
        except Exception as e:
            return f"Error reading PDF file: {str(e)}"
    elif file_name.endswith(".txt"):
        try:
            with open(file_path, "r", encoding="utf-8") as f:
                return f.read()
        except Exception as e:
            return f"Error reading text file: {str(e)}"
    else:
        return "Unsupported file format. Please upload a PDF or TXT file."


def convert_to_markdown(text: str) -> str:
    """Convert extracted file text to Markdown for neat display."""
    # markdownify expects HTML; plain extracted text passes through largely unchanged.
    return markdownify(text, heading_style="ATX")


def interact_with_lossdog(
    user_message: str,
    markdown_text: str,
    api_key: str,
    history: list
) -> list:
    """
    Generates the assistant's response, always including the resume content as context
    alongside the conversation history.
    """
    try:
        openai.api_key = api_key
        # Validate existing history entries
        validated_history = []
        for msg in history:
            if isinstance(msg, dict) and "role" in msg and "content" in msg:
                validated_history.append({"role": msg["role"], "content": msg["content"]})
        # Build the messages for OpenAI Chat
        messages = [
            {"role": "system", "content": LOSSDOG_PROMPT},
            {"role": "system", "content": f"Resume Content:\n{markdown_text}"}
        ] + validated_history
        # Add the new user message at the end
        messages.append({"role": "user", "content": user_message})
        # Create ChatCompletion
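        # NOTE: openai.ChatCompletion is the legacy (openai < 1.0) interface; with
        # openai >= 1.0 the equivalent call is openai.OpenAI(api_key=...).chat.completions.create(...),
        # so pin the SDK version accordingly (assumption about the Space's requirements).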
        response = openai.ChatCompletion.create(
            model="gpt-4o-mini",
            messages=messages,
            max_tokens=4000  # You can adjust this as needed
        )
        assistant_response = response.choices[0].message.content
        # Update local (Gradio) history
        validated_history.append({"role": "user", "content": user_message})
        validated_history.append({"role": "assistant", "content": assistant_response})
        return validated_history
    except Exception as e:
        # Append the error as an assistant message (for visibility)
        history.append({"role": "assistant", "content": f"Error: {str(e)}"})
        return history
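
# Example (hypothetical values) of calling the function outside Gradio, e.g. for a quick test:
#   new_history = interact_with_lossdog(
#       "How can I improve my summary section?",  # user message
#       resume_markdown,                          # output of convert_to_markdown()
#       "sk-...",                                 # OpenAI API key
#       []                                        # empty history starts a fresh chat
#   )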


def create_demo():
    """Build the Gradio app."""
    with gr.Blocks(css="#resume-preview {height:300px; overflow-y:auto; border:1px solid #ccc; padding:10px;}") as demo:
        gr.Markdown("""
        # 🐶 LOSS Dog: AI-Powered Resume Advisor

        **Steps**:
        1. Upload your resume (PDF/TXT). It will appear in a scrollable box on the right.
        2. Ask any questions or request feedback. LOSS Dog always references the uploaded resume.
        3. Enjoy a back-and-forth conversation to refine your resume!
        """)
        # API Key
        api_key = gr.Textbox(
            label="OpenAI API Key",
            placeholder="Enter your OpenAI API key...",
            type="password"
        )
        # Layout
        with gr.Row():
            with gr.Column(scale=3):
                chatbot = gr.Chatbot(label="Chat with LOSS Dog", type="messages")
            with gr.Column(scale=1):
                markdown_preview = gr.Markdown(label="Resume Preview", elem_id="resume-preview")
        # User Input
        with gr.Row():
            user_input = gr.Textbox(label="Your Message", lines=1)
            send_button = gr.Button("Send 🐾")
        # File Upload
        with gr.Row():
            upload = gr.File(label="Upload Your Resume (PDF or TXT)")
        # States
        history_state = gr.State([])   # Chat History
        markdown_state = gr.State("")  # Stored resume text in Markdown
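
        # Data flow: upload.change fills the resume preview, the markdown_preview.change
        # chain below copies it into markdown_state, and every send passes markdown_state
        # plus history_state into interact_with_lossdog.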

        # 1) File Upload Handler
        def handle_upload(file, api_key):
            """
            Extract text -> convert to Markdown -> display it in the right pane.
            The chat history is NOT modified here, so the user can start fresh or
            keep the existing conversation going.
            """
            if not file:
                return "No file uploaded."
            text = extract_text_from_file(file.name, file.name)
            if text.startswith("Error"):
                # Show the error in the preview pane
                return text
            return convert_to_markdown(text)

        # 2) Chat Message Handler
        def handle_message(user_message, api_key, markdown_text, history):
            """
            Called when the user sends a new message. We pass the stored resume + history.
            """
            updated_history = interact_with_lossdog(user_message, markdown_text, api_key, history)
            return updated_history, updated_history

        # Link File Upload -> handle_upload (updates the resume preview only)
        upload.change(
            handle_upload,
            inputs=[upload, api_key],
            outputs=[markdown_preview]
        )
        # Link Send Button -> handle_message
        send_button.click(
            handle_message,
            inputs=[user_input, api_key, markdown_state, history_state],
            outputs=[chatbot, history_state]
        )

        # Any time the user uploads a file (and the preview updates), also store the
        # resume text in markdown_state so subsequent messages can see it.
        def store_resume_in_state(markdown_content):
            return markdown_content

        # Chain markdown_preview -> markdown_state
        markdown_preview.change(
            store_resume_in_state,
            inputs=[markdown_preview],
            outputs=[markdown_state]
        )
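
        # This chain relies on gr.Markdown firing its change event when its value is
        # updated by handle_upload; Gradio's change listeners fire for function-driven
        # updates as well as user edits.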

    return demo


if __name__ == "__main__":
    demo = create_demo()
    demo.launch()
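
# Expected dependencies (assumption; pin exact versions in the Space's requirements.txt):
# gradio, PyPDF2, markdownify, and an openai release that still provides
# openai.ChatCompletion (i.e. openai < 1.0).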