import streamlit as st
from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
from langchain.prompts import PromptTemplate
from model import selector
from util import getYamlConfig
from st_copy_to_clipboard import st_copy_to_clipboard

def display_messages():
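    """Render the chat history: AI answers (model name, content and a copy-to-clipboard button) and user messages."""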

    for i, message in enumerate(st.session_state.chat_history):
        if isinstance(message, AIMessage):
            with st.chat_message("AI"):
                # Show which model produced this answer (falls back to "Unknown Model")
                model = message.additional_kwargs.get("model", "Unknown Model")
                st.write(f"**Model :** {model}")
                st.markdown(message.content)
                st_copy_to_clipboard(message.content, key=f"message_{i}")
                # show_retrieved_documents(st.session_state.chat_history[i-1].content)
        
        elif isinstance(message, HumanMessage):
            with st.chat_message("Moi"):
                st.write(message.content)

        # elif isinstance(message, SystemMessage):
        #     with st.chat_message("System"):
        #         st.write(message.content)

def show_retrieved_documents(query: str = ''):
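    """Show, in an expander, the sources of the documents retrieved for the given query."""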
    if query == '':
        return
    
    # Expander listing the documents retrieved for this query
    expander = st.expander("Documents trouvés")
    
    # Loop over the retrieved documents kept in the session state (stored under the "retrived_documents" key)
    for item in st.session_state.get("retrived_documents", []):
        if item.get("query") == query:
            for doc in item.get("documents", []):
                expander.write(doc["metadata"]["source"])


def launchQuery(query: str = None):
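    """Stream the assistant's answer to `query`, append it to the chat history and rerun the app to refresh the page."""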

    # Stream the assistant's answer to the UI and capture the full text once streaming ends
    full_response = st.write_stream(
        st.session_state["assistant"].ask(
            query,
            # prompt_system=st.session_state.prompt_system,
            messages=st.session_state.get("chat_history", []),
            variables=st.session_state["data_dict"]
        ))

    # Store the full answer in the chat history, tagged with the model that produced it
    st.session_state["chat_history"].append(
        AIMessage(
            content=full_response,
            additional_kwargs={"model": st.session_state["assistant"].getReadableModel()},
        )
    )
    st.rerun()


def show_prompts():
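    """List the predefined prompts from the YAML configuration; clicking one sends it as a query."""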
    yaml_data = getYamlConfig()["prompts"]
    
    expander = st.expander("Prompts pré-définis")
    
    for category in yaml_data:
        expander.write(category.capitalize())

        for item in yaml_data[category]:
            if expander.button(item, key=f"button_{item}"):
                launchQuery(item)

def remplir_texte(texte: str, variables: dict) -> str:
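    """Fill the dynamic {placeholders} of `texte` with the values from `variables`."""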
    # Convert every value to a string so format() does not fail (lists are joined, empty values become 'Non spécifié')
    variables_str = {key: (', '.join(value) if isinstance(value, list) else value if value else 'Non spécifié')
                     for key, value in variables.items()}
    
    # Substitute the dynamic variables into the text
    try:
        texte_rempli = texte.format(**variables_str)
    except KeyError as e:
        raise ValueError(f"Clé manquante dans le dictionnaire : {e}")
    
    return texte_rempli

def page():
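    """Main chat page: build the system prompt, show the predefined prompts and model selector, display the history and handle user input."""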
    st.subheader("Posez vos questions")

    if "assistant" not in st.session_state:
        st.text("Assistant non initialisé")
        return

    if "chat_history" not in st.session_state or len(st.session_state["chat_history"]) == 1:

        # Convert the list of {'key': ..., 'value': ...} items into a flat dict of variables
        variables = {}
        if st.session_state.get("data_dict") is not None:
            variables = {item['key']: item['value'] for item in st.session_state["data_dict"] if 'key' in item and 'value' in item}

        # Fill the system prompt template with the collected variables
        system_template = st.session_state.prompt_system
        full = remplir_texte(system_template, variables)

        st.session_state["chat_history"] = [
            SystemMessage(content=full),
        ]

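    # Fix the height of embedded component iframes (e.g. the copy-to-clipboard widgets) so they stay compact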
    st.markdown("<style>iframe{height:50px;}</style>", unsafe_allow_html=True)

    # Collapsible panel with the predefined prompts
    show_prompts()

    # Model selector
    selector.ModelSelector()

    # Displaying messages
    display_messages()


    user_query = st.chat_input("")
    if user_query is not None and user_query != "":

        st.session_state["chat_history"].append(HumanMessage(content=user_query))
        
        # Stream and display response
        launchQuery(user_query)


page()