Spaces:
Sleeping
Sleeping
File size: 5,173 Bytes
15fa2e5 0885208 15fa2e5 c99229a b9e4045 15fa2e5 822f5ca 15fa2e5 822f5ca 6f535b2 822f5ca 15fa2e5 c99229a 15fa2e5 e9677b5 0885208 15fa2e5 0885208 15fa2e5 c99229a 15fa2e5 c99229a 15fa2e5 c99229a 15fa2e5 c99229a 0885208 c99229a a6c748c c99229a 15fa2e5 9608fda 15fa2e5 0885208 15fa2e5 0885208 15fa2e5 0885208 e9677b5 0885208 c99229a 0885208 15fa2e5 0885208 15fa2e5 0885208 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 |
# app.py — Updated version for Hugging Face token & CPU
import os
import tempfile
import textwrap
from datetime import datetime
from typing import List, Dict, Any, Optional

import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM

from src.conversation import ConversationMemory
from src.chatbot import LocalChatbot

# ----------------------
# HUGGING FACE SETTINGS
# ----------------------
HF_TOKEN = os.getenv("HF_TOKEN")  # Hugging Face access token (Space secret); None is fine for public models
MODEL_PATH = "RedHatAI/gemma-2-2b-it-quantized.w4a16"  # hub repo id (public or private model)

# ----------------------
# LOAD MODEL + TOKENIZER
# ----------------------
# Loaded once at import time; the Space runs on CPU only, so the model is
# pinned there explicitly.
tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH, use_fast=True, token=HF_TOKEN)
llm = AutoModelForCausalLM.from_pretrained(
    MODEL_PATH,
    device_map="cpu",  # force CPU placement (no GPU on this Space)
    # NOTE(review): the `dtype` kwarg replaced `torch_dtype` only in recent
    # transformers releases — confirm the pinned version supports it.
    dtype="auto",
    token=HF_TOKEN
)

# ----------------------
# MEMORY + CHATBOT
# ----------------------
memory = ConversationMemory(max_len=60)  # rolling conversation memory (project-local class)
bot = LocalChatbot(llm, memory, tokenizer=tokenizer)

# Keyword-triggered system prompts: a user message that starts with one of
# these keys gets the matching instruction prepended to the prompt.
INTENT_TEMPLATES = {
    "math": "You are a math solver. Solve step-by-step only.",
    "code": "You are a coding expert. Provide clean, working code.",
    "civics": "Explain clearly like a Class 10 SST teacher.",
    "exam": "Prepare concise exam-focused notes and important questions."
}
# ----------------------
# HELPER FUNCTIONS
# ----------------------
def now_ts():
    """Return the current local time formatted as 'YYYY-MM-DD HH:MM:SS'."""
    return f"{datetime.now():%Y-%m-%d %H:%M:%S}"
def generate_reply(user_msg: str, history: Optional[List[Dict[str, Any]]]):
    """Append a user turn and the model's reply to the chat history.

    If the message starts with a known intent keyword ("math", "code",
    "civics", "exam"), the keyword is stripped from the message and the
    matching system instruction is prepended to the prompt.

    Args:
        user_msg: Raw text from the input textbox.
        history: Gradio chat history as a list of {"role", "content"}
            dicts, or None on the first turn.

    Returns:
        The updated history list (returned unchanged for blank input).
    """
    if history is None:
        history = []
    # Ignore blank/whitespace-only submissions.
    if not user_msg.strip():
        return history

    # Detect an intent prefix, e.g. "math solve x^2 = 4".
    intent = None
    low = user_msg.lower()
    for key in INTENT_TEMPLATES:
        if low.startswith(key):
            intent = key
            user_msg = user_msg[len(key):].strip()
            break

    system_prefix = INTENT_TEMPLATES.get(intent)
    if system_prefix:
        prompt = f"{system_prefix}\nUser: {user_msg}"
    else:
        prompt = f"User: {user_msg}"

    # Generate the reply via the local chatbot wrapper, then timestamp it
    # for display.
    bot_reply = bot.ask(prompt)
    ts = now_ts()
    bot_reply_ts = f"{bot_reply}\n\nπ {ts}"

    history.append({"role": "user", "content": user_msg})
    history.append({"role": "assistant", "content": bot_reply_ts})

    # Best-effort memory update: a memory failure must never break the chat,
    # but a bare `except:` would also swallow KeyboardInterrupt/SystemExit.
    try:
        memory.add(user_msg, bot_reply)
    except Exception:
        pass
    return history
# ----------------------
# EXPORT TXT/PDF
# ----------------------
def export_chat_files(history: List[Dict[str, Any]]) -> Dict[str, Optional[str]]:
    """Export the chat history to a TXT file and, optionally, a PDF.

    Args:
        history: Chat messages as {"role", "content"} dicts.

    Returns:
        {"txt": txt_path, "pdf": pdf_path} where pdf_path is None when
        reportlab is not installed or PDF generation fails for any reason.
    """
    tmpdir = tempfile.gettempdir()
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")

    txt_path = os.path.join(tmpdir, f"chat_history_{timestamp}.txt")
    with open(txt_path, "w", encoding="utf-8") as f:
        for msg in history:
            content = msg.get("content", "")
            # Strip legacy role labels that may be embedded in message text.
            lines = [
                line.replace("USER:", "").replace("ASSISTANT:", "").strip()
                for line in content.splitlines()
            ]
            f.write("\n".join(lines).strip() + "\n")
            f.write("-" * 60 + "\n")  # visual separator between messages

    pdf_path = None
    try:
        # reportlab is optional; on ImportError (or any rendering failure)
        # fall back to TXT-only export. Narrowed from a bare `except:` so
        # KeyboardInterrupt/SystemExit still propagate.
        from reportlab.lib.pagesizes import A4
        from reportlab.pdfgen import canvas

        pdf_path = os.path.join(tmpdir, f"chat_history_{timestamp}.pdf")
        c = canvas.Canvas(pdf_path, pagesize=A4)
        _, height = A4
        margin = 40
        textobject = c.beginText(margin, height - margin)
        textobject.setFont("Helvetica", 10)
        # Re-read the TXT file and wrap long lines for the PDF.
        # NOTE(review): text is drawn onto a single page with no overflow
        # handling — very long chats will run off the bottom; confirm intent.
        with open(txt_path, "r", encoding="utf-8") as fh:
            for line in fh:
                for wrapped in textwrap.wrap(line.rstrip(), 100):
                    textobject.textLine(wrapped)
        c.drawText(textobject)
        c.showPage()
        c.save()
    except Exception:
        pdf_path = None
    return {"txt": txt_path, "pdf": pdf_path}
# ----------------------
# UI
# ----------------------
# Gradio layout: a narrow tools column on the left, the chat window on the
# right. Event listeners must be registered inside the Blocks context.
with gr.Blocks(title="Tayyab β Chatbot (API)") as demo:
    with gr.Row():
        # Left sidebar: new-chat and export controls.
        with gr.Column(scale=1, min_width=220):
            gr.Markdown("### β‘ Tools & Export")
            new_chat_btn = gr.Button("β New Chat")
            export_btn = gr.Button("π₯ Export TXT/PDF")
        # Main column: chat history, input box, send button.
        with gr.Column(scale=3):
            gr.Markdown("<h3>Smart Learning Assistant - Tayyab</h3>")
            chatbot = gr.Chatbot(height=480)
            msg = gr.Textbox(placeholder="Type a message", show_label=False, lines=3)
            send_btn = gr.Button("Send")
            # Hidden file widgets, revealed once an export exists.
            file_txt = gr.File(visible=False)
            file_pdf = gr.File(visible=False)

    # Chat actions: both the Send button and Enter in the textbox submit.
    # NOTE(review): the textbox is not cleared after sending — confirm
    # whether that is intended.
    send_btn.click(generate_reply, inputs=[msg, chatbot], outputs=[chatbot])
    msg.submit(generate_reply, inputs=[msg, chatbot], outputs=[chatbot])

    def new_chat():
        # Reset both the model-side memory and the visible chat history.
        memory.clear()
        return []

    new_chat_btn.click(new_chat, outputs=[chatbot])

    def export_handler(history):
        # Write the TXT (always) and PDF (when reportlab is available),
        # then unhide the matching file widgets.
        files = export_chat_files(history or [])
        return (
            gr.update(value=files.get("txt"), visible=True),
            gr.update(value=files.get("pdf"), visible=bool(files.get("pdf")))
        )

    export_btn.click(export_handler, inputs=[chatbot], outputs=[file_txt, file_pdf])

# Launch only when run as a script (Hugging Face Spaces also invokes this).
if __name__ == "__main__":
    demo.launch()
|