import gradio as gr
from utils import *
from chains.openai_model import OpenAIModel
from config import SEVER, PORT, DEBUG, DEPLOYMENT_ID
from vector_db import delete_all, delete_file, handle_upload_file, load_files_blob
from theme_dropdown import create_theme_dropdown
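
# NOTE: the wildcard import above is relied on for helpers that are referenced
# below but never imported explicitly (predict, reset, transcribe, get_auth,
# get_history_names, save_chat_history, load_chat_history, delete_chat_history);
# they are assumed to live in utils.py.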

# Get and load a new model
def get_model(llm_model_name, temperature=0., top_p=1.0):
    model = OpenAIModel(llm_model_name=llm_model_name,
                        condense_model_name=llm_model_name, temperature=temperature, top_p=top_p)
    return model


def create_new_model():
    return get_model(llm_model_name=DEPLOYMENT_ID)


def update_database(files_src):
    message = handle_upload_file(files_src)
    available_files = load_files_blob()
    return gr.update(choices=available_files), message


def get_available_files():
    available_files = load_files_blob()
    return gr.update(choices=available_files), gr.update(visible=True)

def update_example(chatbot, set_save_file_name):
    from chains.related_question import RelatedQuestion
    from chains.create_topic import CreateTopic

    related_question = RelatedQuestion()
    # Keep only the bot reply up to the first "<div" (strips any appended HTML block)
    outputs = chatbot[-1][1].split("<div")[0]
    res = related_question.predict(inputs=chatbot[-1][0], outputs=outputs)
    out = list(map(lambda x: x.split('- ')[-1], res.split('\n')))
    samples = [[a] for a in out]
    if len(chatbot) == 1:
        # First exchange: ask the topic chain for a title to use as the save-file name
        topic_chain = CreateTopic()
        topic = topic_chain.predict(inputs=chatbot[-1][0], outputs=outputs)
        set_save_file_name = topic
    return chatbot, gr.Dataset.update(samples=samples), samples, set_save_file_name
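
# Illustrative walk-through of the parsing in update_example above, assuming the
# RelatedQuestion chain returns a dash-bulleted string (the exact format depends
# on chains/related_question.py):
#   res     = "- What is RAG?\n- How do I upload a file?"
#   out     = ["What is RAG?", "How do I upload a file?"]
#   samples = [["What is RAG?"], ["How do I upload a file?"]]
# The nested-list shape is what gr.Dataset.update(samples=...) expects.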

def load_example(example_id, samples):
    return samples[example_id][0]

# Gradio app
with open("custom.css", "r", encoding="utf-8") as f:
    customCSS = f.read()

dropdown, js = create_theme_dropdown()

head = """
<html lang="en">
  <head>
    <meta charset="utf-8">
    <meta name="viewport" content="width=device-width, initial-scale=1">
    <title>FPT Bot</title>
    <link href="https://cdn.jsdelivr.net/npm/[email protected]/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-4bw+/aepP/YC94hEpVNVgiZdgIC5+VKNBQNGCHeKRQN+PtmoHDEXuppvnDJzQIu9" crossorigin="anonymous">
  </head>
  <body>
    <script src="https://cdn.jsdelivr.net/npm/[email protected]/dist/js/bootstrap.bundle.min.js" integrity="sha384-HwwvtgBNo3bZJJLYd8oVXjrBZt8cqVSpeBNS5n7C8IVInixGAoxmnlMuBnhbgrkm" crossorigin="anonymous"></script>
  </body>
</html>
"""

checkbox_js = """
async () => {
    // Select all checkboxes with the class 'svelte-1ojmf70'
    const checkboxes = document.querySelectorAll('.svelte-1ojmf70[type="checkbox"]');

    // Add a click event listener to each checkbox
    checkboxes.forEach(checkbox => {
        checkbox.addEventListener('click', function() {
            // If this checkbox was checked, uncheck all others
            if (this.checked) {
                checkboxes.forEach(otherCheckbox => {
                    if (otherCheckbox !== this) {
                        otherCheckbox.checked = false;
                    }
                });
            }
        });
    });
}
"""

title = """<h1 align="left" style="min-width:200px; margin-top:6px; white-space: nowrap;">AI Assistant 🤖</h1>"""
logo = """
<div class="logo"></div>
"""

# Placeholder Textbox so the examples Dataset below has a component to bind to;
# the name is rebound to the visible input box created inside the layout.
user_input = gr.Textbox()

with gr.Blocks(css=customCSS, theme='minatosnow/qaigpt') as demo:
    samples = gr.State()
    user_name = gr.State("")
    history = gr.State([])
    current_model = gr.State(create_new_model)

    gr.HTML(head)
    with gr.Row(elem_classes="status-div"):
        with gr.Column():
            gr.HTML(logo)
            user_info = gr.Markdown(value="getting user info...", elem_id="user_info")
        with gr.Column():
            status_text = ""
            status_display = gr.Markdown(status_text, elem_id="status_display")

    with gr.Row().style(equal_height=True):
        with gr.Column(scale=1):
            with gr.Tab(label="Database"):
                with gr.Accordion("Upload file", open=True, visible=False) as acc:
                    with gr.Row():
                        index_files = gr.Files(label="Files", type="file")
                    all_files = gr.Dropdown(
                        label=None, show_label=False, multiselect=True, choices=load_files_blob(), interactive=True
                    )
                    with gr.Row():
                        with gr.Column(min_width=42, scale=1):
                            delete_btn = gr.Button("", elem_classes="btn btn-del tooltip-btn tooltip-del")
                        with gr.Column(min_width=42, scale=1):
                            delete_all_btn = gr.Button("", elem_classes="btn btn-del-all tooltip-btn tooltip-del-all")
                upload_files_btn = gr.Checkbox(label="Upload files", value=False, elem_classes="switch_checkbox")
                local_db = gr.Checkbox(label="Local knowledge DB", value=False, elem_classes="switch_checkbox")
                custom_websearch = gr.Checkbox(label="FPT web search", value=False, elem_classes="switch_checkbox")

                local_db.change(None, _js=checkbox_js)
                upload_files_btn.change(None, _js=checkbox_js)
                custom_websearch.change(None, _js=checkbox_js)

            with gr.Tab(label="History"):
                with gr.Accordion("Save/Load conversation history"):
                    with gr.Column():
                        with gr.Row():
                            with gr.Column(scale=6):
                                history_file_dropdown = gr.Dropdown(
                                    label="Load conversation from list",
                                    choices=get_history_names(plain=True),
                                    multiselect=False,
                                    container=False,
                                )
                            with gr.Row():
                                with gr.Column(min_width=42, scale=1):
                                    historyRefreshBtn = gr.Button("🔄 Refresh")
                                with gr.Column(min_width=42, scale=1):
                                    historyDeleteBtn = gr.Button("🗑️ Delete")
                        with gr.Row():
                            with gr.Column(scale=6):
                                set_save_file_name = gr.Textbox(
                                    show_label=True,
                                    placeholder=None,
                                    label="Topic (File name)",
                                )
                            with gr.Column(scale=1):
                                saveHistoryBtn = gr.Button("💾 Save History")

            with gr.Tab(label="Theme"):
                toggle_dark = gr.Button(value="Toggle Light/Dark")
                toggle_dark.click(
                    None,
                    _js="""
                    () => {
                        document.body.classList.toggle('dark');
                    }
                    """,
                )

        with gr.Column(scale=9):
            with gr.Row():
                chatbot = gr.Chatbot(show_label=False, elem_classes="chatbot", show_share_button=False, height=650)
            with gr.Row():
                examples_questions = gr.Dataset(samples=[], components=[user_input], type="index",
                                                elem_classes="examples")
            with gr.Row(elem_classes="chatrow"):
                with gr.Column(min_width=225, scale=10):
                    user_input = gr.Textbox(show_label=False, placeholder="Ask me anything...", container=False,
                                            elem_classes="user-input")
                with gr.Column(min_width=42, scale=1):
                    submitBtn = gr.Button("", elem_classes="btn btn-send tooltip-btn tooltip-content-send")
                with gr.Column(min_width=42, scale=1):
                    record_audio = gr.Audio(source="microphone",
                                            show_label=False,
                                            elem_classes="audio-btn btn",
                                            type="filepath")
                with gr.Column(min_width=42, scale=1):
                    emptyBtn = gr.Button(
                        "", elem_classes="btn btn-clear tooltip-btn tooltip-content-clear")

    with gr.Row(elem_classes="footer"):
        gr.HTML("""<footer>🤖 QGPT - Developed by FPT.QAI</footer>""")

    def create_greeting(request: gr.Request):
        # request.username is populated when the app is launched with auth enabled
        if hasattr(request, "username") and request.username:  # not None and not ""
            print(f"User Name: {request.username}")
            user_info, user_name = gr.Markdown.update(value=f"Hi {request.username}!"), request.username
        else:
            user_info, user_name = gr.Markdown.update(value="", visible=False), ""
        current_model = get_model(llm_model_name=DEPLOYMENT_ID)
        current_model.set_user_identifier(user_name)
        return user_info, user_name, current_model, get_history_names(False, user_name)

    demo.load(create_greeting, inputs=None, outputs=[user_info, user_name, current_model, history_file_dropdown])

    examples_questions.click(load_example, inputs=[examples_questions, samples], outputs=[user_input])

    index_files.change(update_database, [index_files], [all_files, status_display])
    upload_files_btn.change(get_available_files, None, [all_files, acc])
    delete_all_btn.click(delete_all, None, [all_files, status_display, index_files])
    delete_btn.click(delete_file, [all_files], [all_files, status_display, index_files])
    # update_btn.click(update_fb, None, [status_display])

    emptyBtn.click(
        reset,
        inputs=[current_model],
        outputs=[chatbot],
        show_progress=True,
    )
    # retryBtn.click(retry, [chatbot, current_model, use_websearch, custom_websearch], [chatbot, status_display])

    saveHistoryBtn.click(save_chat_history, [current_model, chatbot, set_save_file_name], [status_display])
    historyRefreshBtn.click(get_history_names, [gr.State(False), user_name], [history_file_dropdown])
    historyDeleteBtn.click(delete_chat_history, [current_model, history_file_dropdown],
                           [status_display, history_file_dropdown, chatbot])
    history_file_dropdown.change(load_chat_history, [current_model, history_file_dropdown],
                                 [set_save_file_name, chatbot])

    record_audio.start_recording(None, None, None,
                                 _js="""
                                 async () => {
                                     document.querySelectorAll('.sm.secondary').forEach(function(element) {
                                         element.classList.remove('secondary');
                                         element.classList.add('tertiary');
                                     });
                                 }
                                 """
                                 )
    record_audio.stop_recording(transcribe, [current_model, record_audio], [user_input, record_audio])
    # user_identifier.change(set_user_identifier, [current_model, user_identifier], None)

    user_input.submit(predict, [chatbot, current_model, user_input, upload_files_btn, custom_websearch, local_db],
                      [chatbot, status_display], show_progress=True).then(update_example,
                                                                          [chatbot, set_save_file_name],
                                                                          [chatbot, examples_questions, samples,
                                                                           set_save_file_name])
    user_input.submit(lambda: "", None, user_input)

    submitBtn.click(predict, [chatbot, current_model, user_input, upload_files_btn, custom_websearch, local_db],
                    [chatbot, status_display], show_progress=True).then(update_example,
                                                                        [chatbot, set_save_file_name],
                                                                        [chatbot, examples_questions, samples,
                                                                         set_save_file_name])
    submitBtn.click(lambda: "", None, user_input)

demo.queue(concurrency_count=10).launch(
    server_name=SEVER, server_port=PORT, auth=get_auth(), debug=DEBUG)
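
# For reference, a minimal sketch of the config module imported at the top
# (config.py). The names match the import; the values below are placeholders,
# not the project's actual settings:
#
#   SEVER = "0.0.0.0"               # bind address, passed as server_name
#   PORT = 7860                     # passed as server_port
#   DEBUG = False                   # passed to launch(debug=...)
#   DEPLOYMENT_ID = "gpt-35-turbo"  # deployment/model name handed to OpenAIModel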