import os
import json
import sqlite3
from datetime import datetime
import streamlit as st
from langchain_huggingface import HuggingFaceEmbeddings
from langchain_chroma import Chroma
from langchain_groq import ChatGroq
from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationalRetrievalChain
from deep_translator import GoogleTranslator
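# config.json is assumed to live next to app.py and to contain the Groq key, e.g.
# {"GROQ_API_KEY": "<your Groq API key>"}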
# Directory paths and configurations
working_dir = os.path.dirname(os.path.abspath(__file__))
config_data = json.load(open(f"{working_dir}/config.json"))
GROQ_API_KEY = config_data["GROQ_API_KEY"]
os.environ["GROQ_API_KEY"] = GROQ_API_KEY
# Set up the database with check_same_thread=False
def setup_db():
    # check_same_thread=False lets the single connection be reused across Streamlit reruns/threads
    conn = sqlite3.connect("chat_history.db", check_same_thread=False)
    cursor = conn.cursor()
    cursor.execute("""
        CREATE TABLE IF NOT EXISTS chat_histories (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            username TEXT,
            timestamp TEXT,
            day TEXT,
            user_message TEXT,
            assistant_response TEXT
        )
    """)
    conn.commit()
    return conn  # Return the connection
# Function to save chat history to SQLite
def save_chat_history(conn, username, timestamp, day, user_message, assistant_response):
    cursor = conn.cursor()
    cursor.execute("""
        INSERT INTO chat_histories (username, timestamp, day, user_message, assistant_response)
        VALUES (?, ?, ?, ?, ?)
    """, (username, timestamp, day, user_message, assistant_response))
    conn.commit()
# Function to set up vectorstore for embeddings
def setup_vectorstore():
    embeddings = HuggingFaceEmbeddings()
    vectorstore = Chroma(persist_directory="vector_db_dir", embedding_function=embeddings)
    return vectorstore
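# Note: "vector_db_dir" is assumed to already hold a Chroma index built offline from the
# Bhagavad Gita / Yoga Sutras texts with the same HuggingFace embedding model; if the
# directory is missing or empty, the retriever below will have nothing to return.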
# Function to set up the chatbot chain
def chat_chain(vectorstore):
    # ChatGroq needs a model hosted on Groq (e.g. a Llama variant); OpenAI names such as
    # "gpt-3.5-turbo" are not served by Groq and will be rejected by the API.
    llm = ChatGroq(model="llama-3.1-8b-instant", temperature=0)
    retriever = vectorstore.as_retriever()
    memory = ConversationBufferMemory(
        output_key="answer",
        memory_key="chat_history",
        return_messages=True
    )
    chain = ConversationalRetrievalChain.from_llm(
        llm=llm,
        retriever=retriever,
        chain_type="stuff",
        memory=memory,
        verbose=True,
        return_source_documents=True
    )
    return chain
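# ConversationalRetrievalChain first rewrites the user's question using the buffered chat
# history, retrieves matching chunks from the vectorstore, and "stuffs" them into a single
# prompt for the LLM (chain_type="stuff").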
# Streamlit UI setup
st.set_page_config(page_title="Bhagavad Gita Query Assistant", page_icon="πŸ“š", layout="centered")
st.title("πŸ“š Bhagavad Gita & Yoga Sutras Query Assistant")
st.subheader("Ask questions and explore timeless wisdom!")
# Initialize session state
if "conn" not in st.session_state:
st.session_state.conn = setup_db()
if "username" not in st.session_state:
username = st.text_input("Enter your name to proceed:")
if username:
with st.spinner("Loading chatbot interface... Please wait."):
st.session_state.username = username
st.session_state.chat_history = [] # Initialize empty chat history in memory
st.session_state.vectorstore = setup_vectorstore()
st.session_state.conversational_chain = chat_chain(st.session_state.vectorstore)
st.success(f"Welcome, {username}! The chatbot interface is ready.")
else:
username = st.session_state.username
# Output language options (English plus Indian languages)
languages = [
    "English", "Hindi", "Bengali", "Telugu", "Marathi", "Tamil", "Urdu", "Gujarati", "Malayalam", "Kannada",
    "Punjabi", "Odia", "Maithili", "Sanskrit", "Santali", "Kashmiri", "Nepali", "Dogri", "Manipuri", "Bodo",
    "Sindhi", "Assamese", "Konkani", "Awadhi", "Rajasthani", "Haryanvi", "Bihari", "Chhattisgarhi", "Magahi"
]
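# Note: deep_translator's GoogleTranslator accepts lowercase language names, but it only
# covers languages Google Translate supports; selecting one it does not recognise (some of
# the regional languages above) raises an exception in the translation step further down.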
# Main interface
if "username" in st.session_state:
st.subheader(f"Hello {username}, start your query below!")
# Language selection for translation
selected_language = st.selectbox("Select the output language", languages, index=languages.index("English"))
# Input options for the user to type or use voice input
input_option = st.radio("Choose Input Method", ("Type your question",))
# Container to hold the chat interface (for scrolling)
chat_container = st.container()
with chat_container:
if "chat_history" in st.session_state:
for message in st.session_state.chat_history:
if message['role'] == 'user':
with st.chat_message("user"):
st.markdown(message["content"])
elif message['role'] == 'assistant':
with st.chat_message("assistant"):
st.markdown(message["content"])
# User input section for typing
user_query = None # Initialize user_query as None
if input_option == "Type your question":
user_query = st.chat_input("Ask AI about Bhagavad Gita or Yoga Sutras:") # Chat input for typing
    # If user input is provided, process the query
    if user_query:
        with st.spinner("Processing your query... Please wait."):
            # Save user input to chat history in memory
            st.session_state.chat_history.append({"role": "user", "content": user_query})
            # Display user's message in the chat UI
            with st.chat_message("user"):
                st.markdown(user_query)
            # Get assistant's response from the chain
            with st.chat_message("assistant"):
                response = st.session_state.conversational_chain({"question": user_query})
                assistant_response = response["answer"]
                # Save assistant's response to chat history in memory
                st.session_state.chat_history.append({"role": "assistant", "content": assistant_response})
                # Format output in JSON
                formatted_output = {
                    "book": "Bhagavad Gita",  # or "PYS" for Yoga Sutras
                    "chapter_number": "2",  # Example, replace with actual value from response
                    "verse_number": "47",  # Example, replace with actual value from response
                    "shloka": "Yoga karmasu kaushalam",  # Example, replace with actual shloka from response
                    "summary": assistant_response,
                    "commentary": "This is a commentary on the shloka.",  # Replace with actual commentary
                }
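                # The chapter/verse/shloka fields above are hard-coded placeholders; in a full
                # implementation they would be parsed from response["source_documents"]
                # (available because return_source_documents=True) or from the model's answer.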
                # Save the chat history to the database (SQLite)
                timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
                day = datetime.now().strftime("%A")  # Day of the week (e.g., Monday)
                save_chat_history(st.session_state.conn, username, timestamp, day, user_query, assistant_response)
                # Translate the assistant's response into the selected language
                translator = GoogleTranslator(source="en", target=selected_language.lower())
                translated_response = translator.translate(assistant_response)
                # Display translated response
                st.markdown(f"**Translated Answer ({selected_language}):** {translated_response}")
                # Display the formatted output
                st.json(formatted_output)
        # st.chat_input clears itself after each submission, so no manual reset of the input is needed.