File size: 5,244 Bytes
fe643f6
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
45d9b08
fe643f6
45d9b08
fe643f6
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
import marimo

# Marimo version this notebook was generated with (used for compatibility checks).
__generated_with = "0.11.9"
app = marimo.App()


@app.cell(hide_code=True)
def _():
    # Notebook setup: import marimo and synalinks, then reset the synalinks
    # backend so every run of this notebook starts from a clean session.
    import marimo as mo
    import synalinks

    synalinks.backend.clear_session()
    return mo, synalinks


@app.cell(hide_code=True)
def _(mo):
    # Intro markdown: explains how conversational apps map onto a `Program`.
    # Prose fixes only: "a conversational applications" -> "application",
    # "responsible of" -> "responsible for".
    mo.md(
        r"""
        # Conversational Applications

        Synalinks is designed to handle conversational applications as well as
        query-based systems. In the case of a conversational application, the
        input data model is a list of chat messages, and the output an individual
        chat message. The `Program` is in that case responsible for handling a
        **single conversation turn**.
        """
    )
    return


@app.cell(hide_code=True)
def _(mo):
    # Markdown cell: documents Generator defaults and the streaming constraint.
    # Prose fixes only: "like you would do" -> "as you would",
    # "`ChatMessage` like output" -> "`ChatMessage`-like output".
    mo.md(
        r"""
        Now we can program our application as you would with any `Program`. For this example,
        we are going to make a very simple chatbot.

        By default, if no data_model/schema is provided to the `Generator` it will output a `ChatMessage`-like output.
        If the data model is `None`, then you can enable streaming.

        **Note:** Streaming is disabled during training and should only be used in the **last** `Generator` of your pipeline.
        """
    )
    return


@app.cell
async def _(synalinks):
    # Build the chatbot pipeline: a ChatMessages input fed through a single
    # Generator (which defaults to a ChatMessage-like output when no
    # data_model/schema is given), wrapped in a Program handling one turn.
    from synalinks.backend import ChatMessage
    from synalinks.backend import ChatRole
    from synalinks.backend import ChatMessages

    language_model = synalinks.LanguageModel(
        model="openai/gpt-4o-mini",
    )

    _x0 = synalinks.Input(data_model=ChatMessages)
    _x1 = await synalinks.Generator(
        language_model=language_model,
        prompt_template=synalinks.chat_prompt_template(),
        streaming=False,  # Marimo chat doesn't handle streaming yet
    )(_x0)

    program = synalinks.Program(
        inputs=_x0,
        outputs=_x1,
    )

    # Let's plot this program to understand it

    synalinks.utils.plot_program(
        program,
        show_module_names=True,
        show_trainable=True,
        show_schemas=True,
    )
    return ChatMessage, ChatMessages, ChatRole, language_model, program


@app.cell(hide_code=True)
def _(mo):
    # Markdown cell: section header for running the chatbot inside the notebook.
    mo.md(
        r"""
        ## Running the chatbot inside the notebook

        In this example, we will show you how to run the conversational application inside this reactive notebook.
        """
    )
    return


@app.cell(hide_code=True)
def _(mo):
    # API-key input form. The variable MUST be returned: marimo only exposes a
    # cell's names to other cells through its return value, and downstream
    # cells take `openai_api_key` as a parameter. The original bare `return`
    # exported nothing, breaking those cells.
    openai_api_key = mo.ui.text_area(placeholder="Your OpenAI API key...").form()
    openai_api_key
    return (openai_api_key,)


@app.cell(hide_code=True)
def _(mo, openai_api_key):
    # Halt downstream cells until a key is submitted, then hand the key to
    # litellm (the client library backing the LanguageModel calls).
    import litellm
    mo.stop(not openai_api_key.value)
    litellm.openai_key = openai_api_key.value
    return


@app.cell(hide_code=True)
def _(ChatMessage, ChatMessages, ChatRole, mo, openai_api_key, program):
    # Fix: `openai_api_key` was referenced below but missing from the cell's
    # parameter list, raising a NameError in marimo's dataflow; declare it.
    mo.stop(not openai_api_key.value, mo.md("Provide your OpenAI API key"))

    def cleanup_assistant_message(msg):
        """Strip the HTML wrapper marimo adds around rendered assistant text.

        Keeps only the content between the first '<span class="paragraph">'
        and the following '</span>'. Mutates and returns *msg*.
        """
        start_tok = '<span class="paragraph">'
        end_tok = "</span>"
        # Fix: use `!= -1` rather than `> 0` — the opening tag may sit at
        # index 0 and must still be stripped.
        if msg.content.find(start_tok) != -1:
            msg.content = msg.content[msg.content.find(start_tok) + len(start_tok) :]
        # Search from index 1 onward; find() then returns -1 or >= 1, so this
        # is equivalent to the original `> 0` check.
        if msg.content.find(end_tok, 1) != -1:
            msg.content = msg.content[: msg.content.find(end_tok, 1)]
        return msg

    async def synalinks_program(messages, config):
        """Adapter between marimo's chat widget and the synalinks Program.

        Converts the marimo chat history into a ChatMessages payload, runs a
        single conversation turn, and returns the assistant reply text.
        """
        chat_history = ChatMessages()
        for msg in messages:
            if msg.role == "user":
                chat_history.messages.append(
                    ChatMessage(
                        role=ChatRole.USER,
                        content=msg.content,
                    )
                )
            else:
                # Assistant turns come back HTML-wrapped; clean before replay.
                msg = cleanup_assistant_message(msg)
                chat_history.messages.append(
                    ChatMessage(
                        role=ChatRole.ASSISTANT,
                        content=msg.content,
                    )
                )
        result = await program(chat_history)
        return result.get("content")

    chat = mo.ui.chat(synalinks_program)
    chat
    return chat, cleanup_assistant_message, synalinks_program


@app.cell(hide_code=True)
def _(mo):
    # Conclusion markdown cell. Dropped the needless `async` (no `await` in
    # the body; every other markdown cell is sync) and fixed prose grammar
    # ("Synalinks handle" -> "handles", "You have now" -> "You now have").
    mo.md(
        r"""
        ## Conclusion

        In this notebook, we explored how Synalinks handles conversational applications.
        You now have a solid understanding to create chatbots and conversational agents.

        ### Key Takeaways

        - **Conversational Flow Management**: Synalinks effectively manages conversational 
            applications by handling inputs as a list of chat messages and generating
            individual chat messages as outputs. This structure allows for efficient 
            processing of conversation turns.

        - **Streaming and Real-Time Interaction**: Synalinks supports streaming for 
            real-time interactions, enhancing user engagement. However, streaming is 
            disabled during training and should be used only in the final `Generator`.

        - **Customizable Prompt Templates**: The prompt templates can be tailored to fit
            conversational contexts, guiding the language model to produce coherent and 
            relevant responses.
        """
    )
    return


# Run the notebook as a standalone marimo app when executed directly.
if __name__ == "__main__":
    app.run()