import gradio as gr
import PyPDF2
import openai
import pandas as pd
import json
import re
import os

# Prefer the API key from the environment; fall back to an optional config.py.
try:
    from config import OPENAI_API_KEY
except ImportError:
    OPENAI_API_KEY = None

openai.api_key = os.getenv("OPENAI_API_KEY") or OPENAI_API_KEY
if not openai.api_key:
    print("API key is not set in the environment or in config.py.")
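
# PDFChat stores the extracted datasheet text and the running chat history,
# and wraps the OpenAI calls used to answer questions against that context.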
class PDFChat:
    def __init__(self):
        self.pdf_text = ""
        self.chat_history = []
        self.system_prompt = """You are a knowledgeable assistant specializing in microcontrollers from various manufacturers including but not limited to Renesas, Texas Instruments (TI), and STMicroelectronics (STM).
When comparing microcontrollers, always provide structured data in a JSON format that can be converted to a table.
Focus on key specifications like CPU frequency, memory, peripherals, ADC Resolution, Flash Memory, temperature range, and special features.
Consider all manufacturers' products when making recommendations based on application requirements."""
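
    # Read every page of the uploaded PDF and cache its text for later prompts.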
    def extract_text_from_pdf(self, pdf_file):
        if not pdf_file:
            return "Please upload a PDF file first."
        try:
            self.pdf_text = ""
            # gr.File may yield a tempfile wrapper or a plain path string
            # depending on the Gradio version, so accept both.
            pdf_path = pdf_file.name if hasattr(pdf_file, "name") else pdf_file
            with open(pdf_path, "rb") as file:
                reader = PyPDF2.PdfReader(file)
                for page in reader.pages:
                    # extract_text() can return None for image-only pages.
                    self.pdf_text += (page.extract_text() or "") + "\n"
            return "PDF loaded successfully! You can now ask questions."
        except Exception as e:
            return f"Error loading PDF: {str(e)}"
    def clear_pdf(self):
        self.pdf_text = ""
        return "PDF content cleared."

    def clear_chat_history(self):
        self.chat_history = []
        return "", None
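
    # Look first for a fenced ```json block, then fall back to the first
    # bare {...} object in the reply.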
    def extract_json_from_text(self, text):
        """Extract JSON data from the response text."""
        json_match = re.search(r'```json\s*(.*?)\s*```', text, re.DOTALL)
        if json_match:
            json_str = json_match.group(1)
        else:
            json_match = re.search(r'({[\s\S]*})', text)
            if json_match:
                json_str = json_match.group(1)
            else:
                return None
        try:
            return json.loads(json_str)
        except json.JSONDecodeError:
            return None
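
    # Build the prompt (system prompt, JSON-format instructions, optional PDF
    # text, prior chat turns, new question), query the chat model, and try to
    # parse a JSON comparison table out of the reply for display as a DataFrame.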
    def answer_question(self, question):
        if not question:
            return "", None
        structured_prompt = """
Based on the application requirements, recommend suitable microcontrollers and
provide your response in the following JSON format wrapped in ```json ```:
{
    "explanation": "Your textual explanation here",
    "comparison_table": [
        {
            "Feature": "feature name",
            "Option1": "value",
            "Option2": "value",
            ...
        },
        ...
    ]
}
"""
        messages = [
            {"role": "system", "content": self.system_prompt},
            {"role": "system", "content": structured_prompt}
        ]
        if self.pdf_text:
            messages.append({"role": "system", "content": f"PDF Content: {self.pdf_text}"})
        for human, assistant in self.chat_history:
            messages.append({"role": "user", "content": human})
            messages.append({"role": "assistant", "content": assistant})
        messages.append({"role": "user", "content": question})
        try:
            # Uses the legacy (pre-1.0) openai SDK interface.
            response = openai.ChatCompletion.create(
                # model="gpt-4-turbo",
                model="gpt-4o-mini",
                messages=messages
            )
            response_text = response.choices[0].message['content']
            json_data = self.extract_json_from_text(response_text)
            if json_data and "comparison_table" in json_data:
                df = pd.DataFrame(json_data["comparison_table"])
                explanation = json_data.get('explanation', response_text)
                self.chat_history.append((question, explanation))
                return explanation, df
            else:
                self.chat_history.append((question, response_text))
                return response_text, None
        except Exception as e:
            error_message = f"Error generating response: {str(e)}"
            return error_message, None
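
# Single shared PDFChat instance plus the Gradio UI: the left column handles
# PDF upload and controls, the right column handles questions, the explanation
# text, and the comparison table.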
pdf_chat = PDFChat()

with gr.Blocks() as demo:
    gr.Markdown("# Renesas Chatbot")
    with gr.Row():
        with gr.Column(scale=1):
            gr.Markdown("### PDF Controls")
            pdf_input = gr.File(
                label="Upload PDF",
                file_types=[".pdf"]
            )
            with gr.Row():
                load_button = gr.Button("Load PDF")
                clear_pdf_button = gr.Button("Clear PDF")
            status_text = gr.Textbox(
                label="Status",
                interactive=False
            )
            # PDF example right under PDF controls
            gr.Examples(
                [[os.path.join(os.path.dirname(__file__), "renesas-ra6m1-group-datasheet.pdf")]],
                inputs=[pdf_input],
                label="Example PDF"
            )
        with gr.Column(scale=2):
            gr.Markdown("### Microcontroller Selection Interface")
            question_input = gr.Textbox(
                label="Briefly describe your target application for controller recommendation",
                placeholder="Example: Industrial motor control system with precise temperature monitoring...",
                lines=3,
                value=""
            )
            explanation_text = gr.Textbox(
                label="Explanation",
                interactive=False,
                lines=4
            )
            table_output = gr.DataFrame(
                label="Comparison Table",
                interactive=False,
                wrap=True
            )
            with gr.Row():
                submit_button = gr.Button("Send")
                clear_history_button = gr.Button("Clear Chat History")
            # Example applications section
            gr.Markdown("### Example Applications")
            gr.Examples(
                [
                    "Industrial automation system requiring precise motion control and multiple sensor inputs",
                    "Battery-powered IoT device with wireless connectivity and low power requirements",
                    "High-performance motor control application with real-time processing needs",
                    "Smart building management system with multiple environmental sensors"
                ],
                inputs=question_input,
                label="Example Queries"
            )
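
    # Wire the buttons and the textbox's Enter key to the PDFChat helpers above.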
    def handle_question(question):
        explanation, df = pdf_chat.answer_question(question)
        return explanation, df, question

    load_button.click(
        pdf_chat.extract_text_from_pdf,
        inputs=[pdf_input],
        outputs=[status_text]
    )
    clear_pdf_button.click(
        pdf_chat.clear_pdf,
        outputs=[status_text]
    )
    clear_history_button.click(
        pdf_chat.clear_chat_history,
        outputs=[explanation_text, table_output]
    )
    question_input.submit(
        handle_question,
        inputs=[question_input],
        outputs=[explanation_text, table_output, question_input]
    )
    submit_button.click(
        handle_question,
        inputs=[question_input],
        outputs=[explanation_text, table_output, question_input]
    )

if __name__ == "__main__":
    demo.launch(debug=True)