# Source: 8bitnand — "Added support for streamlit and rag model" (commit 871255a, 1.43 kB)
import sys
import streamlit as st
from google import SemanticSearch, GoogleSearch, Document
from model import RAGModel, load_configs
def run_on_start():
    """Initialize the RAG model and its configuration once per session.

    Loads ``rag.configs.yml`` and constructs the RAG model, publishing both
    as module-level globals (``configs``, ``r``) so the rest of the script
    and ``search()`` can reach them on subsequent Streamlit reruns.
    """
    global r, configs
    configs = load_configs(config_file="rag.configs.yml")
    r = RAGModel(configs)
def search(query):
    """Run a Google search for *query* and cache the parsed document.

    Side effect: stores the first parsed document (chunked by the
    configured minimum character length) in ``st.session_state.doc``
    for the semantic-search step that follows.
    """
    results = GoogleSearch(query)
    document = Document(
        results.all_page_data,
        min_char_len=configs["document"]["min_char_length"],
    )
    st.session_state.doc = document.doc()[0]
# --- Streamlit chat UI ------------------------------------------------------
# Fixed typos in the user-facing strings: "powred" -> "powered",
# "insetad" -> "instead".
st.title("LLM powered Google search")

# First rerun of a fresh session: load configs/model and start an empty
# chat history. Streamlit re-executes this whole script on every
# interaction, so session_state is the only durable store.
if "messages" not in st.session_state:
    run_on_start()
    st.session_state.messages = []
if "doc" not in st.session_state:
    st.session_state.doc = None

# Replay prior turns so the conversation survives Streamlit reruns.
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

if prompt := st.chat_input("Search Here instead of Google"):
    # Echo and record the user's turn.
    st.chat_message("user").markdown(prompt)
    st.session_state.messages.append({"role": "user", "content": prompt})

    # Scrape + parse Google results for this query (fills session_state.doc).
    search(prompt)
    s = SemanticSearch(
        prompt,
        st.session_state.doc,
        # NOTE(review): key spelling "embeding_model" must match
        # rag.configs.yml — do not "fix" here without updating the config.
        configs["model"]["embeding_model"],
        configs["model"]["device"],
    )
    # Retrieve the top-32 chunks and have the RAG model answer from them.
    topk = s.semantic_search(query=prompt, k=32)
    output = r.answer_query(query=prompt, topk_items=topk)
    response = output
    with st.chat_message("assistant"):
        st.markdown(response)
    st.session_state.messages.append({"role": "assistant", "content": response})