Technologic101 committed on
Commit 9cee9a1 · Parent: 7704b24

task: sets primary llm in user session

Files changed (1): src/app.py (+14 -10)
src/app.py CHANGED
@@ -1,20 +1,11 @@
 import chainlit as cl
-from langchain_openai import AsyncChatOpenAI
+from langchain_openai import ChatOpenAI
 from langchain_core.messages import HumanMessage, SystemMessage
 from chains.design_rag import DesignRAG

 # Initialize components
 design_rag = DesignRAG()

-llm = AsyncChatOpenAI(
-    model="gpt-4o-mini",
-    temperature=0,
-    streaming=True,
-    callbacks=[cl.LangchainCallbackHandler()]
-)
-
-conversation_history = []
-
 # System message focused on design analysis
 SYSTEM_MESSAGE = """You are a helpful design assistant that finds and explains design examples.
 For every user message, analyze their design preferences and requirements, considering:
@@ -28,6 +19,16 @@ First briefly explain how you understand their requirements, then show the close

 @cl.on_chat_start
 async def init():
+    # Initialize LLM with callback handler inside the Chainlit context
+    llm = ChatOpenAI(
+        model="gpt-4o-mini",
+        temperature=0,
+        streaming=True,
+        callbacks=[cl.LangchainCallbackHandler()]
+    )
+
+    # Store the LLM in the user session
+    cl.user_session.set("llm", llm)

     # init conversation history for each user
     cl.user_session.set("conversation_history", [
@@ -39,6 +40,9 @@ async def init():

 @cl.on_message
 async def main(message: cl.Message):
+    # Get the LLM from the user session
+    llm = cl.user_session.get("llm")
+
     conversation_history = cl.user_session.get("conversation_history")
     # Add user message to history
     conversation_history.append(HumanMessage(content=message.content))
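
The diff shows where the LLM is created, stored, and read back from the user session, but main() is truncated after the history append, so the downstream call is not visible here. The following is a minimal sketch of how the session-scoped llm might be consumed after that point; the llm.astream streaming loop, the cl.Message token streaming, and the AIMessage bookkeeping are assumptions for illustration, not code from this commit (the real handler presumably also routes through design_rag).

import chainlit as cl
from langchain_core.messages import AIMessage, HumanMessage

@cl.on_message
async def main(message: cl.Message):
    # Retrieve the per-user LLM and history created in init()
    llm = cl.user_session.get("llm")
    conversation_history = cl.user_session.get("conversation_history")
    conversation_history.append(HumanMessage(content=message.content))

    # Stream the model's reply token-by-token into a Chainlit message
    reply = cl.Message(content="")
    async for chunk in llm.astream(conversation_history):
        await reply.stream_token(chunk.content)
    await reply.send()

    # Keep the assistant turn so later messages see the full conversation
    conversation_history.append(AIMessage(content=reply.content))
    cl.user_session.set("conversation_history", conversation_history)

Because the ChatOpenAI instance is now built inside @cl.on_chat_start, each user session gets its own model handle with a callback handler bound to that session's Chainlit context, which is presumably what the commit message means by setting the "primary llm" in the user session.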