import gradio as gr


def create_demo():
    """Build the Gradio UI for the LLAMA 3 RAG demo and return its components."""
    with gr.Blocks(title="LLAMA 3 RAG on Fly", theme="Monochrome") as demo:
        gr.Markdown(
            """
            ## LLAMA 3 RAG on Fly App

            This application allows you to experiment with the LLAMA 3 8B Instruct model for RAG.
            You can adjust various parameters to control the model's output.
            """
        )

        with gr.Row():
            # Left column: chat transcript alongside a preview of the uploaded PDF.
            with gr.Column(scale=0.95):
                with gr.Row():
                    chat_history = gr.Chatbot(value=[], elem_id='chatbot', height=480)
                    show_img = gr.Image(label='Uploaded PDF', height=480)

            # Right column: sliders controlling chunking and generation parameters.
            with gr.Column(scale=0.05):
                with gr.Row():
                    slider_chunk_size = gr.Slider(
                        minimum=256, maximum=1024, value=256, label="Chunk Size", elem_id='slider1'
                    )
                with gr.Row():
                    slider_overlap_percentage = gr.Slider(
                        minimum=0, maximum=99, value=50, label="Chunk Overlap Percentage", elem_id='slider2'
                    )
                with gr.Row():
                    slider_temp = gr.Slider(
                        minimum=0, maximum=1, value=0.5, label="Model Temperature", elem_id='slider3'
                    )
                with gr.Row():
                    slider_k = gr.Slider(
                        minimum=1, maximum=5, step=1, value=2, label="Max Chunks in Context", elem_id='slider4'
                    )
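                # How these values are typically consumed downstream (an assumption,
                # since the splitter is not defined in this file): text splitters
                # usually take an absolute overlap, so the percentage would be
                # converted, e.g. chunk_overlap = int(chunk_size * overlap_pct / 100)
                # (a chunk size of 256 at 50% overlap -> an overlap of 128).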

        # Bottom row: question box, send button, and PDF upload button.
        with gr.Row():
            with gr.Column(scale=0.60):
                text_input = gr.Textbox(
                    show_label=False,
                    placeholder="Type here to ask your PDF",
                    container=False
                )
            with gr.Column(scale=0.20):
                submit_button = gr.Button('Send')
            with gr.Column(scale=0.20):
                uploaded_pdf = gr.UploadButton("📁 Upload PDF", file_types=[".pdf"], elem_id='upload_pdf')

    return (demo, chat_history, show_img, text_input, submit_button, uploaded_pdf,
            slider_chunk_size, slider_overlap_percentage, slider_temp, slider_k)
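

# A minimal sketch of how the components returned by create_demo() might be wired
# to the app's RAG callbacks. The handler names used below (render_first_page,
# add_text, answer_question) are placeholders, not functions defined in this file:
#
#   demo, chat_history, show_img, text_input, submit_button, uploaded_pdf, \
#       chunk_size, overlap_pct, temperature, k = create_demo()
#   with demo:
#       uploaded_pdf.upload(render_first_page, inputs=[uploaded_pdf], outputs=[show_img])
#       submit_button.click(add_text, inputs=[chat_history, text_input], outputs=[chat_history]).then(
#           answer_question,
#           inputs=[chat_history, text_input, uploaded_pdf, chunk_size, overlap_pct, temperature, k],
#           outputs=[chat_history, text_input],
#       )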


if __name__ == '__main__':
    # Launch the layout on its own for a quick check; no event handlers are wired here.
    (demo, chatbot, show_img, text_input, submit_button, uploaded_pdf,
     slider_chunk_size, slider_overlap_percentage, slider_temp, slider_k) = create_demo()
    demo.queue()
    demo.launch()