import os
import json
import sqlite3
from datetime import datetime
import streamlit as st
from langchain_huggingface import HuggingFaceEmbeddings
from langchain_chroma import Chroma
from langchain_groq import ChatGroq
from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationalRetrievalChain
from vectorize_documents import embeddings
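
# NOTE (assumption): config.json is expected to live next to this script and to
# contain a single key, e.g. {"GROQ_API_KEY": "<your-groq-api-key>"} -- the value
# shown here is a placeholder, not a real credential.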
working_dir = os.path.dirname(os.path.abspath(__file__))

# Load the Groq API key from config.json and expose it via the environment
with open(os.path.join(working_dir, "config.json")) as config_file:
    config_data = json.load(config_file)
GROQ_API_KEY = config_data["GROQ_API_KEY"]
os.environ["GROQ_API_KEY"] = GROQ_API_KEY

# Set up the database with check_same_thread=False so the connection can be
# reused across Streamlit reruns
def setup_db():
    conn = sqlite3.connect("chat_history.db", check_same_thread=False)  # Thread-safe connection
    cursor = conn.cursor()
    cursor.execute("""
        CREATE TABLE IF NOT EXISTS chat_histories (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            username TEXT,
            timestamp TEXT,
            day TEXT,
            user_message TEXT,
            assistant_response TEXT
        )
    """)
    conn.commit()
    return conn  # Return the open connection
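
# Illustrative helper (not part of the original flow): a minimal sketch of how
# past exchanges could be read back from the chat_histories table created in
# setup_db(). The function name is an assumption; nothing below calls it.
def load_chat_history(conn, username):
    cursor = conn.cursor()
    cursor.execute("""
        SELECT timestamp, day, user_message, assistant_response
        FROM chat_histories
        WHERE username = ?
        ORDER BY id
    """, (username,))
    return cursor.fetchall()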

# Function to save a user/assistant exchange to SQLite
def save_chat_history(conn, username, timestamp, day, user_message, assistant_response):
    cursor = conn.cursor()
    cursor.execute("""
        INSERT INTO chat_histories (username, timestamp, day, user_message, assistant_response)
        VALUES (?, ?, ?, ?, ?)
    """, (username, timestamp, day, user_message, assistant_response))
    conn.commit()

# Function to set up the vector store used for retrieval
def setup_vectorstore():
    embeddings = HuggingFaceEmbeddings()
    vectorstore = Chroma(persist_directory="House_vectordb", embedding_function=embeddings)
    return vectorstore
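
# NOTE (assumptions): "House_vectordb" is expected to be a Chroma directory built
# beforehand (e.g. by vectorize_documents.py), and HuggingFaceEmbeddings() falls
# back to its default sentence-transformers model when no model name is given.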

# Function to set up the conversational retrieval chain
def chat_chain(vectorstore):
    llm = ChatGroq(model="llama-3.1-70b-versatile", temperature=0)
    retriever = vectorstore.as_retriever()
    memory = ConversationBufferMemory(
        llm=llm,
        output_key="answer",
        memory_key="chat_history",
        return_messages=True
    )
    chain = ConversationalRetrievalChain.from_llm(
        llm=llm,
        retriever=retriever,
        chain_type="stuff",
        memory=memory,
        verbose=True,
        return_source_documents=True
    )
    return chain
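
# NOTE: the chain is later called as conversational_chain({"question": ...}) and
# returns a dict whose "answer" key holds the reply (plus "source_documents");
# newer LangChain releases prefer chain.invoke({...}) over the callable form.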

# Streamlit UI setup
st.set_page_config(page_title="House.Ai", page_icon="🤖", layout="centered")
st.title("🤖 House.Ai")
st.subheader("You can ask your general questions and queries to our AI")

# Step 1: Initialize the connection and check if the user is already logged in
if "conn" not in st.session_state:
    st.session_state.conn = setup_db()

if "username" not in st.session_state:
    username = st.text_input("Enter your name to proceed:")
    if username:
        with st.spinner("Loading chatbot interface... Please wait."):
            st.session_state.username = username
            st.session_state.chat_history = []  # Initialize empty chat history in memory
            st.session_state.vectorstore = setup_vectorstore()
            st.session_state.conversational_chain = chat_chain(st.session_state.vectorstore)
            st.success(f"Welcome, {username}! The chatbot interface is ready.")
else:
    username = st.session_state.username

# Step 2: Initialize components if not already set
if "conversational_chain" not in st.session_state:
    st.session_state.vectorstore = setup_vectorstore()
    st.session_state.conversational_chain = chat_chain(st.session_state.vectorstore)
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []  # Guard against a missing history on reruns

# Step 3: Display the chat history in the UI and handle new queries
if "username" in st.session_state:
    st.subheader(f"Hello {username}, start your query below!")

    # Display chat history (messages exchanged between user and assistant)
    if st.session_state.chat_history:
        for message in st.session_state.chat_history:
            if message["role"] == "user":
                with st.chat_message("user"):
                    st.markdown(message["content"])
            elif message["role"] == "assistant":
                with st.chat_message("assistant"):
                    st.markdown(message["content"])

    # Input field for the user to type their message
    user_input = st.chat_input("Ask AI....")

    if user_input:
        with st.spinner("Processing your query... Please wait."):
            # Save user input to chat history in memory
            st.session_state.chat_history.append({"role": "user", "content": user_input})

            # Display the user's message in the chat UI
            with st.chat_message("user"):
                st.markdown(user_input)

            # Get the assistant's response from the chain
            with st.chat_message("assistant"):
                response = st.session_state.conversational_chain({"question": user_input})
                assistant_response = response["answer"]
                st.markdown(assistant_response)

            # Save the assistant's response to chat history in memory
            st.session_state.chat_history.append({"role": "assistant", "content": assistant_response})

            # Save the exchange to the SQLite database
            timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            day = datetime.now().strftime("%A")  # Day of the week (e.g., Monday)
            save_chat_history(st.session_state.conn, username, timestamp, day, user_input, assistant_response)
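
# To launch the app (assuming this file is saved as, e.g., main.py):
#   streamlit run main.py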