import streamlit as st
import time
from inference import main
import torch
import gc
import os
import json
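
# Note: inference.main is assumed to take (prompt, history) and return a
# (bot_response, updated_history) tuple, as used in response_generator below.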
# Function to clear GPU memory
def clear_gpu_memory():
    if torch.cuda.is_available():
        torch.cuda.empty_cache()

# Function to clear CPU memory and run garbage collection
def clear_cpu_memory():
    gc.collect()  # Run garbage collection to clean up unused objects

def response_generator(prompt):
    # Load the running conversation history if it exists
    history = []
    if os.path.exists('history.json'):
        with open('history.json', "r") as f:
            history = json.load(f)
    # Generate a response and persist the updated history
    bot_response, history = main(prompt, history)
    with open('history.json', "w") as f:
        json.dump(history, f, indent=4)
    # Free GPU and CPU memory between requests
    clear_gpu_memory()
    clear_cpu_memory()
    # Yield so st.write_stream can render the response as a stream
    yield bot_response

st.title("Clinical Trial Information Bot")

# Initialize chat history
if "messages" not in st.session_state:
    st.session_state.messages = []

# Display chat messages from history on app rerun
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])
# Accept user input
if prompt := st.chat_input("You can ask your questions here!"):
    # Add user message to chat history
    st.session_state.messages.append({"role": "user", "content": prompt})
    # Display user message in chat message container
    with st.chat_message("user"):
        st.markdown(prompt)
    # Stream the assistant's response and add it to chat history
    with st.chat_message("assistant"):
        response = st.write_stream(response_generator(prompt))
    st.session_state.messages.append({"role": "assistant", "content": response})