# Veda Bot — a Streamlit RAG chatbot answering questions about the Vedas,
# backed by a CRAG (corrective-RAG) graph over the Svarupa knowledge base.
import json
import os
import uuid
from datetime import timedelta

import streamlit as st
import tiktoken
from langchain.memory import ChatMessageHistory
from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
from langchain_core.prompts import (
    ChatPromptTemplate,
    HumanMessagePromptTemplate,
    MessagesPlaceholder,
)
from langchain_openai import ChatOpenAI
from sqlalchemy import create_engine
from streamlit_feedback import streamlit_feedback

#from agent import app
from cache import current_time, write_to_db
from crag import crag_app

# Postgres engine for feedback logging (consumed by the currently
# commented-out write_to_db call at the bottom of veda_bot).
engine = create_engine("postgresql://postgres:sampath@localhost:5432/postgres")

# Expose the OpenAI key from Streamlit secrets to langchain_openai via the env.
os.environ['OPENAI_API_KEY'] = st.secrets["OPENAI_API_KEY"]

# Module-level chat history; veda_bot() appends a welcome SystemMessage on each rerun.
chat_history = ChatMessageHistory()

# System prompt constraining the assistant to Vedas/scripture questions.
system_message = '''You are an AI assistant for answering questions about vedas and scriptures.
\nYou are given the following extracted documents from Svarupa Knowledge Base (https://svarupa.org/) and other documents and a question.
Provide a conversational answer. If there are any unicode characters in the final answer, please encode and provide readable answer to the user.
\nIf you are not provided with any documents, say \"I did not get any relevant context for this but
I will reply to the best of my knowledge\" and then write your answer\nIf you don't know the answer, just say \"Hmm, I'm not sure. \" Don't try to make up an answer.
\nIf the question is not about vedas and scriptures, politely inform them that you are tuned to only answer questions about that.\n\n'''

# Prompt template pairing the system instructions with the retrieved context
# and the user's question. ("queation" typo in the human turn fixed.)
generate_prompt = ChatPromptTemplate.from_messages(
    [
        ("system", system_message),
        ("human", "Here is the given context {context}, question: {question} \n\n Formulate an answer."),
    ]
)
#@st.cache_resource(show_spinner=False) # Set allow_output_mutation to True for mutable objects like instances
def bot_response(user_input):
    """Run the CRAG graph on the user's question and return its final state.

    Args:
        user_input: Raw question text entered by the user.

    Returns:
        The graph's result dict; veda_bot reads the 'generation', 'web_search'
        and 'messages' keys from it.
    """
    return crag_app.invoke({"question": user_input})
##======
# Main chatbot function
def veda_bot(sidebar: bool = True) -> None:
    """Render the Veda Bot page: chat UI, CRAG-generated answers, feedback capture.

    Args:
        sidebar: Unused placeholder kept for interface compatibility.
    """
    # Hide Streamlit chrome and pull the page title upward.
    custom_css = """
    <style>
    /* Adjust the selector as needed */
    .stHeadingContainer {
        margin-top: -100px; /* Reduce the top margin */
    }
    #MainMenu {visibility: hidden;}
    footer {visibility: hidden;}
    header {visibility: hidden;}
    </style>
    """
    # Apply the custom CSS
    st.markdown(custom_css, unsafe_allow_html=True)

    # Page header
    st.title("Veda Bot")
    st.write("This bot is developed based on the content from the [Svarupa](https://svarupa.org/home) website.")
    chat_history.add_message(SystemMessage(content="Welcome! I am your Veda Bot. How can I assist you today?"))

    # Initialize session state variables (Streamlit reruns the script on every
    # interaction, so each key is created only once per browser session).
    if "messages" not in st.session_state:
        st.session_state.messages = [{"role": "assistant", "content": "Hi. I am an AI Assistant. Ask me a question about Vedas!"}]
    if "session_uuid" not in st.session_state:
        # Timestamp + UUID so session ids sort chronologically and stay unique.
        st.session_state["session_uuid"] = f"{current_time()}-{str(uuid.uuid4())}"
    if "feedback" not in st.session_state:
        st.session_state["feedback"] = None
    if "chat_engine" not in st.session_state:
        st.session_state.chat_engine = bot_response
    if "memory" not in st.session_state:
        # Seed the conversation memory with the prompt template and a canned greeting exchange.
        st.session_state["memory"] = ChatMessageHistory()
        st.session_state["memory"].add_message(generate_prompt)
        st.session_state["memory"].add_message({"role": "user", "content": "Hi/Hello or Any Greating"})
        st.session_state["memory"].add_message({"role": "assistant", "content": "Hi. Please ask the question about vedas!"})

    # Replay the chat history on every rerun.
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])

    # Get user input
    prompt = st.chat_input("Enter your question!")
    if prompt:
        # Echo the user message and record it in both logs.
        with st.chat_message("user"):
            st.markdown(prompt)
        st.session_state["messages"].append({"role": "user", "content": prompt})
        st.session_state["memory"].add_message({"role": "user", "content": prompt})

    # Generate a bot reply whenever the last logged message is from the user.
    if st.session_state.messages[-1]["role"] != "assistant":
        with st.spinner("Thinking..."):
            references = []
            message_placeholder = st.empty()
            full_response = ""

            # Invoke the CRAG graph via the session's chat engine.
            response_bot = st.session_state.chat_engine(prompt)
            generation = response_bot['generation']
            full_response += generation
            web_search = response_bot['web_search']

            # Collect source references from the retrieved documents, if any.
            if response_bot['messages']:
                try:
                    references.extend([doc.metadata['source'] for doc in response_bot['messages']])
                except (AttributeError, KeyError, TypeError) as e:
                    # Best-effort: a document without metadata must not break the answer.
                    print("Error:", e)
            #message_placeholder.markdown(full_response + "▌")

            # Append a de-duplicated reference list to the displayed answer.
            if references:
                unique_references = set(references)
                full_response += "\n\n**References:**\n\n"
                for reference in unique_references:
                    full_response += f"- {reference}\n"
                #message_placeholder.markdown(full_response + "▌")

            # Feedback widget; the result lands in st.session_state["feedback"].
            streamlit_feedback(
                feedback_type="faces",
                on_submit=None,
                optional_text_label="[Optional] Please provide an explanation",
                key="feedback",
            )

            message_placeholder.markdown(full_response)
            # Only the generation (without references) goes into the logs.
            st.session_state["messages"].append({"role": "assistant", "content": generation})
            st.session_state["memory"].add_message({"role": "assistant", "content": generation})
            print(f"Response added to memory: {full_response}")

    # Persist feedback together with the question/answer pair it refers to.
    if st.session_state['feedback']:
        user_feedback = {
            "user_message": st.session_state["messages"][-2],
            "assistant_message": st.session_state["messages"][-1],
            "feedback_score": st.session_state["feedback"]["score"],
            "feedback_text": st.session_state["feedback"]["text"],
        }
        #write_to_db(u_message=user_feedback["user_message"],
        #            a_message=user_feedback["assistant_message"],
        #            f_score=user_feedback["feedback_score"],
        #            f_text=user_feedback["feedback_text"])
# Script entry point: render the chatbot when run directly (streamlit run app.py).
if __name__ == "__main__":
    veda_bot()