Update app.py
app.py CHANGED
@@ -6,6 +6,7 @@ from itertools import islice
 from datetime import datetime
 import re  # for parsing <think> blocks
 import gradio as gr
+from typing import Dict, Union
 import torch
 from transformers import pipeline, TextIteratorStreamer
 from transformers import AutoTokenizer
@@ -245,184 +246,180 @@ def update_default_prompt(enable_search):
     today = datetime.now().strftime('%Y-%m-%d')
     return f"You are a helpful assistant. Today is {today}."
 
-
-
-
-
-
-
-
-
-
-
-""
-
-
-
-
-
-
-def get_ui_text(lang):
-    """Get UI text based on language"""
-    texts = {
-        "zh": {
-            "title": "## Yee-R1 Demo",
-            "subtitle": "小熠(Yee)AI 数据安全专家",
-            "dark_mode": "🌙 暗黑模式",
-            "light_mode": "☀️ 明亮模式",
-            "lang_btn": "🌐 English",
-            "select_model": "选择模型",
-            "enable_search": "启用网络搜索",
-            "system_prompt": "系统提示词",
-            "gen_params": "### 生成参数",
-            "max_tokens": "最大令牌数",
-            "temperature": "温度",
-            "top_k": "Top-K",
-            "top_p": "Top-P",
-            "repeat_penalty": "重复惩罚",
-            "search_settings": "### 网络搜索设置",
-            "max_results": "最大结果数",
-            "max_chars": "每个结果最大字符数",
-            "search_timeout": "搜索超时时间 (秒)",
-            "clear_chat": "清空对话",
-            "cancel_gen": "取消生成",
-            "placeholder": "输入您的消息并按回车..."
-        },
-        "en": {
-            "title": "## Yee-R1 Demo",
-            "subtitle": "Yee AI Data Security Expert",
-            "dark_mode": "🌙 Dark Mode",
-            "light_mode": "☀️ Light Mode",
-            "lang_btn": "🌐 中文",
-            "select_model": "Select Model",
-            "enable_search": "Enable Web Search",
-            "system_prompt": "System Prompt",
-            "gen_params": "### Generation Parameters",
-            "max_tokens": "Max Tokens",
-            "temperature": "Temperature",
-            "top_k": "Top-K",
-            "top_p": "Top-P",
-            "repeat_penalty": "Repetition Penalty",
-            "search_settings": "### Web Search Settings",
-            "max_results": "Max Results",
-            "max_chars": "Max Chars/Result",
-            "search_timeout": "Search Timeout (s)",
-            "clear_chat": "Clear Chat",
-            "cancel_gen": "Cancel Generation",
-            "placeholder": "Type your message and press Enter..."
-        }
-    }
-    return texts[lang]
-
-
-# ------------------------------
-# Gradio UI
-# ------------------------------
-with gr.Blocks(title="Yee-R1-Demo", theme=gr.themes.Default()) as demo:
-    # States
-    theme_state = gr.State("light")
-    lang_state = gr.State("zh")
-
-    # Header with controls
-    with gr.Row():
-        title_md = gr.Markdown("## Yee-R1 Demo")
-        with gr.Row(scale=0):
-            lang_btn = gr.Button("🌐 English", size="sm")
-            theme_btn = gr.Button("🌙 暗黑模式", size="sm")
-
-    subtitle_md = gr.Markdown("小熠(Yee)AI 数据安全专家")
-
+# ------------------------------
+# Gradio UI
+# ------------------------------
+
+# UI strings translations
+UI_TEXTS: Dict[str, Dict[str, Union[str, Dict[str, str]]]] = {
+    "en": {
+        "title": "Yee-R1 Demo",
+        "description": "Yee AI Data Security Expert",
+        "select_model": "Select Model",
+        "enable_search": "Enable Web Search",
+        "system_prompt": "System Prompt",
+        "generation_parameters": "Generation Parameters",
+        "max_tokens": "Max Tokens",
+        "temperature": "Temperature",
+        "top_k": "Top-K",
+        "top_p": "Top-P",
+        "repeat_penalty": "Repetition Penalty",
+        "web_search_settings": "Web Search Settings",
+        "max_results": "Max Results",
+        "max_chars_result": "Max Chars/Result",
+        "search_timeout": "Search Timeout (s)",
+        "clear_chat": "Clear Chat",
+        "cancel_generation": "Cancel Generation",
+        "chat_placeholder": "Type your message and press Enter...",
+        "theme_label": "Select Theme",
+        "language_label": "Select Language",
+        "theme_light": "Light",
+        "theme_dark": "Dark",
+        "language_en": "English",
+        "language_zh": "Chinese",
+    },
+    "zh": {
+        "title": "小熠演示",
+        "description": "小熠AI数据安全专家",
+        "select_model": "选择模型",
+        "enable_search": "启用网络搜索",
+        "system_prompt": "系统提示词",
+        "generation_parameters": "生成参数",
+        "max_tokens": "最大生成长度",
+        "temperature": "温度",
+        "top_k": "Top-K",
+        "top_p": "Top-P",
+        "repeat_penalty": "重复惩罚",
+        "web_search_settings": "网络搜索设置",
+        "max_results": "最大结果数",
+        "max_chars_result": "每个结果最大字符数",
+        "search_timeout": "搜索超时 (秒)",
+        "clear_chat": "清空聊天",
+        "cancel_generation": "取消生成",
+        "chat_placeholder": "请输入消息,按回车发送...",
+        "theme_label": "选择主题",
+        "language_label": "选择语言",
+        "theme_light": "明亮模式",
+        "theme_dark": "暗黑模式",
+        "language_en": "English",
+        "language_zh": "中文",
+    }
+}
+
+def get_ui_text(language, key):
+    return UI_TEXTS.get(language, UI_TEXTS["en"]).get(key, "")
+
+with gr.Blocks(title=get_ui_text("en", "title")) as demo:
+    theme_dropdown = gr.Dropdown(label=get_ui_text("en", "theme_label"),
+                                 choices=[get_ui_text("en", "theme_light"), get_ui_text("en", "theme_dark")],
+                                 value=get_ui_text("en", "theme_light"))
+    language_dropdown = gr.Dropdown(label=get_ui_text("en", "language_label"),
+                                    choices=[get_ui_text("en", "language_en"), get_ui_text("en", "language_zh")],
+                                    value=get_ui_text("en", "language_en"))
+
+    title_md = gr.Markdown(value=get_ui_text("en", "title"))
+    description_md = gr.Markdown(value=get_ui_text("en", "description"))
+
     with gr.Row():
         with gr.Column(scale=3):
-            model_dd = gr.Dropdown(label="
-            search_chk = gr.Checkbox(label="
-            sys_prompt = gr.Textbox(label="
-
-            max_tok = gr.Slider(64, 16384, value=4096, step=32, label="
-            temp = gr.Slider(0.1, 2.0, value=0.6, step=0.1, label="
+            model_dd = gr.Dropdown(label=get_ui_text("en", "select_model"), choices=list(MODELS.keys()), value=list(MODELS.keys())[0])
+            search_chk = gr.Checkbox(label=get_ui_text("en", "enable_search"), value=True)
+            sys_prompt = gr.Textbox(label=get_ui_text("en", "system_prompt"), lines=3, value=update_default_prompt(search_chk.value))
+            gr.Markdown(f"### {get_ui_text('en', 'generation_parameters')}")
+            max_tok = gr.Slider(64, 16384, value=4096, step=32, label=get_ui_text("en", "max_tokens"))
+            temp = gr.Slider(0.1, 2.0, value=0.6, step=0.1, label=get_ui_text("en", "temperature"))
             k = gr.Slider(1, 100, value=40, step=1, label="Top-K")
             p = gr.Slider(0.1, 1.0, value=0.9, step=0.05, label="Top-P")
-            rp = gr.Slider(1.0, 2.0, value=1.2, step=0.1, label="
-
-            mr = gr.Number(value=6, precision=0, label="
-            mc = gr.Number(value=600, precision=0, label="
-            st = gr.Slider(minimum=0.0, maximum=30.0, step=0.5, value=5.0, label="
-            clr = gr.Button("
-            cnl = gr.Button("
+            rp = gr.Slider(1.0, 2.0, value=1.2, step=0.1, label=get_ui_text("en", "repeat_penalty"))
+            gr.Markdown(f"### {get_ui_text('en', 'web_search_settings')}")
+            mr = gr.Number(value=6, precision=0, label=get_ui_text("en", "max_results"))
+            mc = gr.Number(value=600, precision=0, label=get_ui_text("en", "max_chars_result"))
+            st = gr.Slider(minimum=0.0, maximum=30.0, step=0.5, value=5.0, label=get_ui_text("en", "search_timeout"))
+            clr = gr.Button(get_ui_text("en", "clear_chat"))
+            cnl = gr.Button(get_ui_text("en", "cancel_generation"))
         with gr.Column(scale=7):
-            chat = gr.Chatbot(type="messages", show_copy_all_button=True
-            txt = gr.Textbox(placeholder="
+            chat = gr.Chatbot(type="messages", show_copy_all_button=True)
+            txt = gr.Textbox(placeholder=get_ui_text("en", "chat_placeholder"))
             dbg = gr.Markdown()
 
-    #
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    # Function to update UI texts when language or theme changes
+    def update_ui(language, theme):
+        texts = UI_TEXTS.get("en") if language == UI_TEXTS["en"]["language_en"] else UI_TEXTS.get("zh")
+        # Map passed language (English/Chinese) to keys
+        lang_key = "en" if language == UI_TEXTS["en"]["language_en"] else "zh"
+        # Update labels
+        return (texts["title"], texts["description"],
+                texts["select_model"], texts["enable_search"], texts["system_prompt"],
+                texts["generation_parameters"], texts["max_tokens"], texts["temperature"], texts["repeat_penalty"],
+                texts["web_search_settings"], texts["max_results"], texts["max_chars_result"], texts["search_timeout"],
+                texts["clear_chat"], texts["cancel_generation"], texts["chat_placeholder"],
+                gr.themes.Dark() if theme == texts["theme_dark"] else gr.themes.Default())
+
+    def toggle_language_and_theme(language, theme):
+        # Return updated texts and theme to update all UI elements
+        lang_key = "en" if language == UI_TEXTS["en"]["language_en"] else "zh"
+        texts = UI_TEXTS[lang_key]
+        return {
+            "title_md": texts["title"],
+            "description_md": texts["description"],
+            "model_dd_label": texts["select_model"],
+            "search_chk_label": texts["enable_search"],
+            "sys_prompt_label": texts["system_prompt"],
+            "max_tok_label": texts["max_tokens"],
+            "temp_label": texts["temperature"],
+            "rp_label": texts["repeat_penalty"],
+            "mr_label": texts["max_results"],
+            "mc_label": texts["max_chars_result"],
+            "st_label": texts["search_timeout"],
+            "clr_label": texts["clear_chat"],
+            "cnl_label": texts["cancel_generation"],
+            "txt_placeholder": texts["chat_placeholder"],
+            "theme_obj": gr.themes.Dark() if theme == texts["theme_dark"] else gr.themes.Default()
+        }
+
+    # Update UI text labels on language or theme change
+    language_dropdown.change(
+        fn=toggle_language_and_theme,
+        inputs=[language_dropdown, theme_dropdown],
+        outputs=[
+            title_md, description_md, model_dd, search_chk, sys_prompt,
+            max_tok, temp, rp, mr, mc, st, clr, cnl, txt
+        ],
+        _js="""
+        function updateLabels(resp) {
+            title_md.textContent = resp.title_md;
+            description_md.textContent = resp.description_md;
+            model_dd.label = resp.model_dd_label;
+            search_chk.label = resp.search_chk_label;
+            sys_prompt.label = resp.sys_prompt_label;
+            max_tok.label = resp.max_tok_label;
+            temp.label = resp.temp_label;
+            rp.label = resp.rp_label;
+            mr.label = resp.mr_label;
+            mc.label = resp.mc_label;
+            st.label = resp.st_label;
+            clr.textContent = resp.clr_label;
+            cnl.textContent = resp.cnl_label;
+            txt.placeholder = resp.txt_placeholder;
+            return;
+        }
+        """
     )
-
-
-        fn=
-        inputs=[
+
+    theme_dropdown.change(
+        fn=toggle_language_and_theme,
+        inputs=[language_dropdown, theme_dropdown],
         outputs=[
-
-
-            max_tok, temp, k, p, rp, search_settings_md,
-            mr, mc, st, clr, cnl, txt
+            title_md, description_md, model_dd, search_chk, sys_prompt,
+            max_tok, temp, rp, mr, mc, st, clr, cnl, txt
         ]
     )
-
+
+    search_chk.change(fn=update_default_prompt, inputs=search_chk, outputs=sys_prompt)
+    clr.click(fn=lambda: ([], "", ""), outputs=[chat, txt, dbg])
+    cnl.click(fn=cancel_generation, outputs=dbg)
     txt.submit(fn=chat_response,
                inputs=[txt, chat, sys_prompt, search_chk, mr, mc,
                        model_dd, max_tok, temp, k, p, rp, st],
               outputs=[chat, dbg])
-
 demo.launch()
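
Note on wiring the language switch: toggle_language_and_theme returns a plain dict keyed by strings such as "title_md", while the listener's outputs is a positional list of components; Gradio matches return values to outputs either positionally or through a dict keyed by the component objects themselves, so relabeling is usually expressed with gr.update(). The following is a minimal sketch of that wiring under assumptions, not the Space's code: the two-entry UI_TEXTS subset and the dropdown choices are placeholders, and only the public gr.Blocks / gr.update API is assumed.

import gradio as gr

# Illustrative two-language subset of the UI string table (placeholder values).
UI_TEXTS = {
    "English": {"select_model": "Select Model", "clear_chat": "Clear Chat"},
    "中文": {"select_model": "选择模型", "clear_chat": "清空聊天"},
}

with gr.Blocks() as demo:
    language_dropdown = gr.Dropdown(label="Select Language",
                                    choices=list(UI_TEXTS), value="English")
    model_dd = gr.Dropdown(label="Select Model",
                           choices=["model-a", "model-b"], value="model-a")  # placeholder choices
    clr = gr.Button("Clear Chat")

    def apply_language(language):
        texts = UI_TEXTS[language]
        # Return one update per output component, in the same order as `outputs` below.
        return (gr.update(label=texts["select_model"]),  # relabel the model dropdown
                gr.update(value=texts["clear_chat"]))    # a Button's visible text is its value

    language_dropdown.change(fn=apply_language,
                             inputs=language_dropdown,
                             outputs=[model_dd, clr])

demo.launch()

The same pattern extends to the full outputs list in the commit (title_md, description_md, ..., txt), returning one gr.update(...) per listed component.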