import gradio as gr
from dotenv import load_dotenv
from openai import OpenAI

# Load environment variables (e.g. OPENAI_API_KEY) from a local .env file
load_dotenv()

# The OpenAI client reads OPENAI_API_KEY from the environment by default
client = OpenAI()

# Backend: Python
def respond(message, history):
    # Convert Gradio history format to OpenAI messages format
    messages = [
        {"role": "system", "content": "You are a helpful LLM teacher."}
    ]
    
    # Add prior turns; with the tuple-style history format (ChatInterface's default here),
    # each entry is a (user_message, assistant_message) pair
    for user_msg, bot_msg in history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": bot_msg})
    
    # Add current message
    messages.append({"role": "user", "content": message})
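    # Example of what `messages` might look like at this point (illustrative values only):
    # [{"role": "system", "content": "You are a helpful LLM teacher."},
    #  {"role": "user", "content": "What is NLP?"},
    #  {"role": "assistant", "content": "NLP stands for natural language processing..."},
    #  {"role": "user", "content": "What is RAG?"}]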
    
    # Get response from OpenAI
    completion = client.chat.completions.create(
        model="gpt-4o-mini",
        messages=messages
    )
    
    return completion.choices[0].message.content
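
# Optional sketch (an addition, not part of the original app): gr.ChatInterface
# also accepts generator functions, and the OpenAI client supports stream=True,
# so responses can be streamed as they arrive. To try it, pass fn=respond_stream
# below instead of fn=respond.
def respond_stream(message, history):
    messages = [
        {"role": "system", "content": "You are a helpful LLM teacher."}
    ]
    for user_msg, bot_msg in history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": bot_msg})
    messages.append({"role": "user", "content": message})

    # Stream the completion and yield the accumulated text so Gradio updates the chat live
    stream = client.chat.completions.create(
        model="gpt-4o-mini",
        messages=messages,
        stream=True
    )
    partial = ""
    for chunk in stream:
        if chunk.choices and chunk.choices[0].delta.content:
            partial += chunk.choices[0].delta.content
            yield partial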

# Frontend: Gradio
demo = gr.ChatInterface(
    fn=respond,
    examples=["I want to learn about LLMs", "What is NLP?", "What is RAG?"],
    title="LLM Mentor"
)
demo.launch()
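
# Usage sketch (assuming this file is saved as app.py): put OPENAI_API_KEY=<your key>
# in a .env file next to the script, run `python app.py`, and open the local URL
# Gradio prints. Passing share=True to demo.launch() also creates a temporary public link.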