"""Streamlit front-end for the MedMCQA chatbot.

Retrieves the top matching passages from a vector store, asks the
LangGraph pipeline to generate an answer from that context only, and
streams the answer character-by-character. The UI offers a light/dark
theme toggle, an expandable list of retrieved matches (with the query
highlighted), and contact links in the sidebar.
"""

import re
from time import sleep

import streamlit as st

from langgraph_graph import generate_answer
from retriever import load_vector_store

# Load vector DB once per script run.
# NOTE(review): Streamlit re-executes this script on every interaction;
# if loading is slow, wrap load_vector_store in st.cache_resource.
db = load_vector_store()

st.set_page_config("MedMCQA Chatbot", page_icon="🩺")

# 🌗 Theme toggle sidebar
with st.sidebar:
    st.title("🩺 MedMCQA Chatbot")
    theme_mode = st.radio("🌓 Theme", ["Light", "Dark"], horizontal=True)

# 🌓 Apply selected theme via injected CSS.
# NOTE(review): the <style> payloads were lost in this copy of the file —
# as written both branches inject an empty string (a no-op). Restore the
# original dark/light CSS blocks here.
if theme_mode == "Dark":
    st.markdown("""
    """, unsafe_allow_html=True)
else:
    st.markdown("""
    """, unsafe_allow_html=True)

# 🧠 App title
st.header("🩺 MedMCQA Chatbot")
st.caption(
    "Ask a medical question and get answers from the MedMCQA dataset only. "
    "If not found, it will respond gracefully."
)

# ✏️ Query box
query = st.text_input(
    "🔍 Enter your medical question:",
    placeholder="e.g., What is the mechanism of Aspirin?",
    label_visibility="visible",
)

# 🚀 Answer generation
if query:
    # Retrieve the three most similar passages and join them as context.
    results = db.similarity_search(query, k=3)
    context = "\n\n".join(doc.page_content for doc in results)

    with st.spinner("🧠 Generating answer..."):
        response = generate_answer(query, context)

    # NOTE(review): the styled HTML wrapper around the answer was lost in
    # this copy of the file; only the "🧠 Answer:" label survived. Restore
    # the original markup if the styled card is wanted.
    st.markdown("**🧠 Answer:**")

    # Stream the answer one character at a time for a typing effect.
    answer_placeholder = st.empty()
    streamed = ""
    for char in response:
        streamed += char
        answer_placeholder.markdown(streamed)
        sleep(0.01)

    with st.expander("🔎 Top Matches"):
        # Bold every occurrence of the query, ignoring case.
        # BUGFIX: the original tested membership case-insensitively but
        # replaced case-sensitively, so differently-cased matches were
        # detected yet never highlighted.
        pattern = re.compile(re.escape(query), re.IGNORECASE)
        for i, doc in enumerate(results, 1):
            content = pattern.sub(lambda m: f"**{m.group(0)}**", doc.page_content)
            st.markdown(f"**Result {i}:**\n\n{content}")

# 📬 Sidebar Contact
with st.sidebar:
    st.markdown("---")
    st.markdown("### 📬 Contact")
    st.markdown("[📧 Email](mailto:sankethhonavar25@gmail.com)")
    st.markdown("[🔗 LinkedIn](https://linkedin.com/in/sankethhonavar)")
    st.markdown("[💻 GitHub](https://github.com/sankethhonavar)")

# ✨ Floating Icons (Right side - Top aligned)
# NOTE(review): the anchor-tag HTML and positioning CSS were lost in this
# copy — only the bare link text survived. Restore the original markup.
st.markdown("""
Email LinkedIn GitHub
""", unsafe_allow_html=True)

# 📄 Footer
st.markdown("""
---

Made with ❤️ by Sanketh Honavar

""", unsafe_allow_html=True)