import streamlit as st
from langserve.client import RemoteRunnable

def get_response(user_input, url, username):
    """Stream a response from the LangServe endpoint at `url` and render it incrementally."""
    response_placeholder = st.empty()
    full_response = ""
    chain = RemoteRunnable(url)
    # Stream string chunks from the remote chain, updating the placeholder as they arrive.
    stream = chain.stream(input={'question': user_input, 'username': username})
    for chunk in stream:
        full_response += chunk
        response_placeholder.markdown(full_response)

    return full_response

def chat_interface(chat_title, page_hash, url):
    """Render a chat page with its own per-page history, backed by the chain served at `url`."""
    st.title(chat_title)

    # Add username input at the top of the page
    username = st.text_input("Enter your username:", key="username_input", value="Guest")

    # Initialize page-specific chat history
    if "chat_histories" not in st.session_state:
        st.session_state.chat_histories = {}
    
    if page_hash not in st.session_state.chat_histories:
        st.session_state.chat_histories[page_hash] = []

    # Display chat messages from history for the current page
    for message in st.session_state.chat_histories[page_hash]:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])

    # React to user input
    if prompt := st.chat_input("What is your message?"):
        # Display user message in chat message container
        st.chat_message("user").markdown(prompt)
        # Add user message to chat history
        st.session_state.chat_histories[page_hash].append({"role": "user", "content": prompt})

        # Get streaming response
        with st.chat_message("assistant"):
            full_response = get_response(prompt, url, username)

        # Add assistant response to chat history
        st.session_state.chat_histories[page_hash].append({"role": "assistant", "content": full_response})

if __name__ == "__main__":
    chat_interface()