import streamlit as st
import os

from components.sidebar import render_sidebar
from components.chat_ui import display_chat
from services.llm import initialize_qa_chain, initialize_chain
from utils.helpers import get_file_size

# Optional: start a local Ollama server from within the app.
# import subprocess
# process = subprocess.Popen("ollama serve", shell=True)
# print(process)

# App Title
st.title("DocChatAI | Chat Using Documents")

# Sidebar - Model Selection & File Upload
selected_model, temperature, top_p, max_tokens, uploaded_file = render_sidebar()

mode = False

# Check if a PDF file is uploaded
if uploaded_file is not None:
    # Persist the upload to disk so the document loader can read it by path.
    os.makedirs("docs", exist_ok=True)
    filepath = os.path.join("docs", uploaded_file.name)
    with open(filepath, "wb") as temp_file:
        temp_file.write(uploaded_file.read())

    # Build a retrieval QA chain over the uploaded document.
    with st.spinner('Please wait...'):
        qa_chain = initialize_qa_chain(filepath, selected_model, temperature, top_p, max_tokens)
    mode = True
else:
    # No document uploaded: fall back to a plain conversational chain.
    qa_chain = initialize_chain(selected_model, temperature, top_p, max_tokens)

# Initialize and Display Chat History
display_chat(qa_chain, mode)
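
The script above unpacks five values from render_sidebar(), which lives in a helper module that is not shown here. The following is a minimal sketch of what components/sidebar.py could look like under that assumption; the widget labels, model names, and parameter ranges are illustrative guesses rather than the project's actual values.

# components/sidebar.py -- hypothetical sketch, not the project's actual code.
import streamlit as st


def render_sidebar():
    """Render model/parameter controls and a PDF uploader in the sidebar.

    Returns the tuple unpacked in the main script:
    (selected_model, temperature, top_p, max_tokens, uploaded_file)
    """
    st.sidebar.header("Settings")

    # Hypothetical model list; the real app may expose different Ollama models.
    selected_model = st.sidebar.selectbox("Model", ["llama3", "mistral", "phi3"])

    temperature = st.sidebar.slider("Temperature", 0.0, 1.0, 0.7, 0.05)
    top_p = st.sidebar.slider("Top-p", 0.0, 1.0, 0.9, 0.05)
    max_tokens = st.sidebar.slider("Max tokens", 64, 4096, 1024, 64)

    uploaded_file = st.sidebar.file_uploader("Upload a PDF", type=["pdf"])

    return selected_model, temperature, top_p, max_tokens, uploaded_file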
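
services/llm.py has to provide the two chain builders: initialize_qa_chain for the document-backed path and initialize_chain for the plain fallback. Below is a plausible sketch assuming a LangChain + Ollama stack with FAISS retrieval; the loader, splitter, embedding, and vector-store choices are assumptions, not confirmed by the source.

# services/llm.py -- hypothetical sketch; assumes langchain, langchain-community,
# faiss-cpu, and pypdf are installed and an Ollama server is reachable.
from langchain_community.llms import Ollama
from langchain_community.embeddings import OllamaEmbeddings
from langchain_community.document_loaders import PyPDFLoader
from langchain_community.vectorstores import FAISS
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.chains import RetrievalQA


def _build_llm(model, temperature, top_p, max_tokens):
    # num_predict is Ollama's parameter for the maximum number of generated tokens.
    return Ollama(model=model, temperature=temperature, top_p=top_p, num_predict=max_tokens)


def initialize_qa_chain(filepath, model, temperature, top_p, max_tokens):
    """Index the uploaded PDF and return a retrieval-augmented QA chain."""
    docs = PyPDFLoader(filepath).load()
    chunks = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100).split_documents(docs)
    vectordb = FAISS.from_documents(chunks, OllamaEmbeddings(model=model))
    llm = _build_llm(model, temperature, top_p, max_tokens)
    return RetrievalQA.from_chain_type(llm=llm, retriever=vectordb.as_retriever())


def initialize_chain(model, temperature, top_p, max_tokens):
    """Return a bare LLM when no document has been uploaded."""
    return _build_llm(model, temperature, top_p, max_tokens)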
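
Finally, components/chat_ui.py must render the running conversation and route new questions through whichever chain it was handed; mode tells it whether a document-backed QA chain is active. A minimal sketch using Streamlit's chat primitives follows; the session-state key and the invocation style are assumptions tied to the sketches above.

# components/chat_ui.py -- hypothetical sketch, paired with the services/llm.py sketch above.
import streamlit as st


def display_chat(qa_chain, mode):
    """Show the chat history and answer new prompts with the given chain."""
    if "messages" not in st.session_state:
        st.session_state.messages = []

    # Replay previous turns so the transcript survives Streamlit's reruns.
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])

    prompt = st.chat_input("Ask a question about your document" if mode else "Ask me anything")
    if prompt:
        st.session_state.messages.append({"role": "user", "content": prompt})
        with st.chat_message("user"):
            st.markdown(prompt)

        with st.chat_message("assistant"):
            with st.spinner("Thinking..."):
                # RetrievalQA expects a dict with a "query" key; the bare LLM takes a string.
                response = qa_chain.invoke({"query": prompt}) if mode else qa_chain.invoke(prompt)
                answer = response["result"] if isinstance(response, dict) else str(response)
            st.markdown(answer)
        st.session_state.messages.append({"role": "assistant", "content": answer})

Because Streamlit reruns the whole script on every interaction, keeping the transcript in st.session_state is what lets the history persist between turns.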