# Hugging Face Spaces page residue captured with the file (status banner):
# Spaces: Runtime error / Runtime error
import os

import gradio as gr

from chains.openai_model import OpenAIModel
from config import SEVER, PORT, DEBUG, DEPLOYMENT_ID, SAVE_DIR
from utils import *
from vector_db import delete_all, delete_file, handle_upload_file, update_file
# Get and load new model
def get_model(llm_model_name, temperature=0., top_p=1.0):
    """Build an ``OpenAIModel`` for chatting over documents.

    The same deployment name is used both for answering and for condensing
    follow-up questions (``condense_model_name``).

    Args:
        llm_model_name: Azure OpenAI deployment / model name.
        temperature: Sampling temperature; 0.0 keeps answers deterministic.
        top_p: Nucleus-sampling cutoff.

    Returns:
        A configured ``OpenAIModel`` instance.
    """
    return OpenAIModel(
        llm_model_name=llm_model_name,
        condense_model_name=llm_model_name,
        temperature=temperature,
        top_p=top_p,
    )
def create_new_model():
    """Return a fresh ``OpenAIModel`` bound to the configured deployment.

    Used as the factory for the per-session ``gr.State`` holding the model,
    so every browser session gets its own instance.
    """
    return get_model(llm_model_name=DEPLOYMENT_ID)
def update_database(files_src):
    """Ingest uploaded files into the vector DB and refresh the file list.

    Args:
        files_src: Uploaded file objects coming from the ``gr.Files``
            component's ``change`` event.

    Returns:
        Tuple of (``gr.update`` refreshing the "all files" dropdown choices
        from ``SAVE_DIR``, human-readable status message for the UI).
    """
    message = handle_upload_file(files_src)
    saved_files = os.listdir(SAVE_DIR)
    return gr.update(choices=saved_files), message
# Gradio app
# Layout: left column = chat (chatbot, input box, examples, action buttons,
# feature blurb); right column = tabs for files/search, model parameters, and
# knowledge-DB maintenance. Indentation reconstructed from the flattened
# source — structure follows the with-block nesting of the original calls.
title = """<h1 align="left" style="min-width:200px; margin-top:6px; white-space: nowrap;">Docs FPT 🤖</h1>"""
with gr.Blocks() as demo:
    # Per-session state: user identifier, raw chat history, and the LLM wrapper.
    user_name = gr.State("")
    history = gr.State([])
    current_model = gr.State(create_new_model)
    with gr.Row():
        with gr.Column(scale=1):
            gr.HTML(title)
    status_text = ""
    status_display = gr.Markdown(status_text, elem_id="status_display")
    with gr.Row().style(equal_height=True):
        with gr.Column(scale=5):
            with gr.Row():
                chatbot = gr.Chatbot([], elem_id="chatbot").style(height="100%")
            with gr.Row():
                with gr.Column(min_width=225, scale=12):
                    user_input = gr.Textbox(
                        show_label=False, placeholder="Enter here"
                    ).style(container=False)
                    # ask_examples_hidden = gr.Textbox(elem_id="hidden-message")
                    examples_questions = gr.Examples(
                        [
                            "Bagaimana cara saya memohon sewa gerai?",
                            "Bagaimana cara saya pergi dari Komtar ke Pengkalan Weld?",
                            "Bagaimana cara saya boleh kemaskini Alamat Surat Menyurat Cukai Taksiran",
                            "What is event's permit at Penang?",
                            "How to apply car parking at Penang?",
                            "Where can I request for my event’s permit in Penang?"
                        ],
                        [user_input],
                        examples_per_page=6,
                    )
                with gr.Column(min_width=42, scale=1):
                    submitBtn = gr.Button("Send", variant="primary")
            with gr.Row():
                emptyBtn = gr.Button(
                    "🧹 New conversation", elem_id="empty_btn")
                retryBtn = gr.Button("🔄 Retry")
                rec = gr.Button("⏺️Record")
                # FIX: gr.inputs.* is the deprecated pre-3.0 namespace and is
                # removed in newer gradio releases; gr.Audio is the 3.x API.
                record_audio = gr.Audio(source="microphone", type="filepath")
            with gr.Row():
                gr.Markdown(
                    """
## 💻 Key Feature
- Chat with an AI chatbot powered by OpenAI's chat API, using the **content of your research document**.
- Get **semantic search** answers from your document using **vector databases**.
- Perform a **Google search** within the app
- **Verify sources** for all generated results.
- Support converting **speech to text** for easy input.
### Pine cone
Pinecone makes it easy to provide long-term memory for high-performance AI applications.
It's a managed, cloud-native vector database with a simple API and no infrastructure hassles. Pinecone serves fresh, filtered query results with low latency at the scale of billions of vectors.
https://www.pinecone.io/blog/azure/
### Azure OpenAI Service
https://learn.microsoft.com/en-us/legal/cognitive-services/openai/data-privacy
## 📧 Contact
This tool has been developed by the R&D lab at **QAI** (FPT Software, Ha Noi, Viet Nam)
If you have any questions or feature requests, please feel free to reach us out at <b>[email protected]</b>.
"""
                )
        # NOTE(review): a float scale is tolerated by gradio 3.x but newer
        # releases expect an int — confirm before upgrading gradio.
        with gr.Column(min_width=50, scale=1.5):
            with gr.Tab(label="ChatGPT"):
                # gr.Markdown(f'<p style="text-align:center">Azure OpenAI Service:<a '
                #             f'href="https://learn.microsoft.com/en-us/legal/cognitive-services/openai/data-privacy">here</a></p>')
                # FIX: gr.Files already accepts multiple files; ``multiple`` is
                # not a valid keyword for this component.
                index_files = gr.Files(label="Files", type="file")
                use_websearch = gr.Checkbox(label="Google search", value=False, elem_classes="switch_checkbox")
                custom_websearch = gr.Checkbox(label="Custom web search", value=False, elem_classes="switch_checkbox")
            with gr.Tab(label="Configuration"):
                gr.Markdown(
                    "⚠️Be careful to change ⚠️\n\nIf you can't use it, please restore the default settings")
                with gr.Accordion("Parameter", open=False):
                    temperature_slider = gr.Slider(
                        minimum=-0,
                        maximum=1.0,
                        value=0.0,
                        step=0.1,
                        interactive=True,
                        label="Temperature",
                    )
                    top_p_slider = gr.Slider(
                        minimum=-0,
                        maximum=1.0,
                        value=1.0,
                        step=0.1,
                        interactive=True,
                        label="Top_p",
                    )
                user_identifier = gr.Textbox(
                    show_label=True,
                    placeholder="Enter here",
                    label="User name",
                    value=user_name.value,
                    lines=1,
                )
                loadHistoryBtn = gr.Button("💾 Load History")
            with gr.Tab(label="Knowledge DB"):
                all_files = gr.Dropdown(
                    label="All available files:", multiselect=True, choices=os.listdir(SAVE_DIR), interactive=True
                )
                with gr.Column():
                    delete_btn = gr.Button("🗑️ Delete")
                with gr.Column():
                    delete_all_btn = gr.Button("🗑️ Delete all")
                    update_btn = gr.Button("🗑️ Update DB")

    # --- Event wiring (handlers come from utils / vector_db imports) --------
    index_files.change(update_database, [index_files], [all_files, status_display])
    delete_all_btn.click(delete_all, None, [all_files, status_display, index_files])
    delete_btn.click(delete_file, [all_files], [all_files, status_display, index_files])
    update_btn.click(update_file, None, [status_display])
    emptyBtn.click(
        reset,
        inputs=[current_model],
        outputs=[chatbot],
        show_progress=True,
    )
    retryBtn.click(retry, [chatbot, current_model, use_websearch, custom_websearch], [chatbot])
    loadHistoryBtn.click(load_chat_history, [current_model], [chatbot])
    rec.click(transcribe, [current_model, record_audio], [user_input])
    # NOTE(review): "indentifier" spelling matches the helper exported by
    # utils — keep the two in sync if either is ever renamed.
    user_identifier.change(set_user_indentifier, [current_model, user_identifier], None)
    user_input.submit(predict, [chatbot, current_model, user_input, use_websearch, custom_websearch], [chatbot, status_display], show_progress=True)
    user_input.submit(lambda: "", None, user_input)
    submitBtn.click(predict, [chatbot, current_model, user_input, use_websearch, custom_websearch], [chatbot, status_display], show_progress=True)
    submitBtn.click(lambda: "", None, user_input)

demo.queue(concurrency_count=10).launch(
    server_name=SEVER, server_port=PORT, debug=DEBUG)