# app.py — halimbahae's Space, "Update app.py" (commit 75a7bc2, verified)
import gradio as gr
from huggingface_hub import InferenceClient
import PyPDF2
import os
# Hugging Face inference client for the Zephyr-7B chat model.
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")

# Per-language system prompts that steer the model's behavior.
# Keys are language codes selected in the UI dropdown ("fr" / "en").
SYSTEM_PROMPT = {
    "fr": "Tu es un assistant pédagogique qui aide les professeurs à créer des cours et analyser des documents PDF.",
    "en": "You are an educational assistant helping teachers create courses and analyze PDF documents."
}
# 📄 Fonction pour extraire le texte d'un PDF
# 📄 Extract the text of a PDF
def extract_text_from_pdf(pdf_path):
    """Extract all text from a PDF file.

    Args:
        pdf_path: Filesystem path to the PDF file.

    Returns:
        The concatenated text of every page (a "\n" after each page),
        or a French fallback message when no text could be extracted,
        or a French error message when the file could not be read.
    """
    text = ""
    try:
        with open(pdf_path, "rb") as f:
            reader = PyPDF2.PdfReader(f)
            for page in reader.pages:
                # Call extract_text() once per page — the original called it
                # twice (once for the emptiness test, once for the append),
                # doubling the extraction work.
                page_text = page.extract_text()
                if page_text:
                    text += page_text + "\n"
        return text if text else "Impossible d'extraire du texte de ce PDF."
    except Exception as e:
        # Best-effort by design: the result is fed straight into the model
        # prompt, so return a readable message instead of raising.
        return f"Erreur lors de la lecture du PDF : {str(e)}"
# 🧠 Fonction du chatbot + PDF RAG
# 🧠 Chatbot + PDF RAG function
def generate_response(subject, history, lang, pdf_path, max_tokens, temperature, top_p):
    """Stream a course-generation answer from the model.

    Args:
        subject: Course topic typed by the user.
        history: Prior chat turns; only dicts with "role" and "content"
            keys are replayed into the prompt.
        lang: Language code for the system prompt ("fr" or "en").
        pdf_path: Optional path to an uploaded PDF used as context.
        max_tokens / temperature / top_p: Sampling parameters forwarded
            to the inference client.

    Yields:
        The accumulated response text after each streamed token.
    """
    # Fall back to the English prompt for unknown language codes.
    system_message = SYSTEM_PROMPT.get(lang, SYSTEM_PROMPT["en"])
    messages = [{"role": "system", "content": system_message}]

    # Replay prior turns that are already in {"role", "content"} format.
    for message in history:
        if isinstance(message, dict) and "role" in message and "content" in message:
            messages.append(message)

    # Inject truncated PDF context when a file was uploaded.
    if pdf_path:
        pdf_text = extract_text_from_pdf(pdf_path)
        # Limit to the first 1000 characters to keep the prompt bounded.
        messages.append({"role": "user", "content": f"Voici un document PDF pertinent : {pdf_text[:1000]}..."})

    # Final user turn: the actual course-creation request.
    messages.append({"role": "user", "content": f"Crée un cours sur : {subject}"})

    # 🔥 Stream the response, yielding the growing text for live UI updates.
    response = ""
    for message in client.chat_completion(
        messages, max_tokens=max_tokens, stream=True, temperature=temperature, top_p=top_p
    ):
        token = message.choices[0].delta.content
        # Delta content can be None on some stream chunks (e.g. role-only or
        # final chunks); guard to avoid a "str + None" TypeError mid-stream.
        if token:
            response += token
        yield response
# 🎨 Interface utilisateur Gradio
# 🎨 Gradio user interface
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown("# 🎓 Teacher Assistant Chatbot avec PDF RAG")
    with gr.Row():
        subject_input = gr.Textbox(label="📌 Sujet du cours", placeholder="Ex: Apprentissage automatique")
        lang_select = gr.Dropdown(choices=["fr", "en"], value="fr", label="🌍 Langue")
    # type="filepath" makes Gradio pass the handler a path string,
    # which is what extract_text_from_pdf expects.
    pdf_upload = gr.File(label="📄 Télécharger un PDF (optionnel)", type="filepath")
    # "messages" format: history is a list of {"role", "content"} dicts.
    # NOTE(review): generate_response yields a plain string into this
    # component — confirm the messages-format Chatbot accepts that.
    chat = gr.Chatbot(type="messages")
    with gr.Row():
        max_tokens = gr.Slider(minimum=100, maximum=2048, value=512, step=1, label="📝 Max tokens")
        temperature = gr.Slider(minimum=0.1, maximum=2.0, value=0.7, step=0.1, label="🔥 Température")
        top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="🎯 Top-p")
    generate_button = gr.Button("🚀 Générer le cours")
    # Wire the button: stream generate_response output into the chatbot.
    generate_button.click(
        generate_response,
        inputs=[subject_input, chat, lang_select, pdf_upload, max_tokens, temperature, top_p],
        outputs=chat
    )

# 🔥 Launch the application
if __name__ == "__main__":
    demo.launch()