Commit 9d89b34 · Streaming on/off switch
Farid Karimli committed · Parent(s): 4de6b1a
Files changed:
- code/main.py (+30 -14)
- code/modules/chat/helpers.py (+6 -5)
- code/modules/chat_processor/chat_processor.py (+4 -2)
- code/modules/config/config.yml (+1 -0)
code/main.py CHANGED

@@ -114,6 +114,9 @@ class Chatbot:
         cl.input_widget.Switch(
             id="view_sources", label="View Sources", initial=False
         ),
+        cl.input_widget.Switch(
+            id="stream_response", label="Stream response", initial=True
+        ),
         cl.input_widget.Select(
             id="llm_style",
             label="Type of Conversation (Default Normal)",
@@ -233,26 +236,18 @@ class Chatbot:
         """
         await cl.Message(content="Sorry, I have to go now. Goodbye!").send()
 
-    async def main(self, message):
+    async def stream_response(self, response):
         """
-        Process and Display the Conversation.
+        Stream the response from the LLM.
 
         Args:
-            message: The incoming chat message.
+            response: The response from the LLM.
         """
-        chain = cl.user_session.get("chain")
-        llm_settings = cl.user_session.get("llm_settings", {})
-        view_sources = llm_settings.get("view_sources", False)
-
-        processor = cl.user_session.get("chat_processor")
-        res = await processor.rag(message.content, chain)
-
-        # TODO: STREAM MESSAGE
         msg = cl.Message(content="")
         await msg.send()
 
         output = {}
-        for chunk in res:
+        for chunk in response:
             if 'answer' in chunk:
                 await msg.stream_token(chunk['answer'])
 
@@ -261,11 +256,32 @@ class Chatbot:
                 output[key] = chunk[key]
             else:
                 output[key] += chunk[key]
+        return output
+
+    async def main(self, message):
+        """
+        Process and Display the Conversation.
+
+        Args:
+            message: The incoming chat message.
+        """
+        chain = cl.user_session.get("chain")
+        llm_settings = cl.user_session.get("llm_settings", {})
+        view_sources = llm_settings.get("view_sources", False)
+        stream = (llm_settings.get("stream_response", True)) or (not self.config["llm_params"]["stream"])
+        print("Streaming", stream)
+
+        processor = cl.user_session.get("chat_processor")
+        res = await processor.rag(message.content, chain, stream)
 
-
+        if stream:
+            res = await self.stream_response(res)
+
+        print(res)
+        answer = res.get("answer", res.get("result"))
 
         answer_with_sources, source_elements, sources_dict = get_sources(
-            res, answer, view_sources=view_sources
+            res, answer, stream=stream, view_sources=view_sources
         )
         processor._process(message.content, answer, sources_dict)
 
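The heart of this change is the new stream_response helper: it forwards answer tokens to the UI as they arrive and, in parallel, folds every chunk into a single output dict so the rest of main can treat a streamed run exactly like a non-streamed one. Below is a minimal, runnable sketch of that chunk-merging pattern with chainlit stripped out; the chunk shapes are an assumption (LangChain-style partial dicts), not something this commit pins down.

# Minimal sketch of the chunk-merging half of stream_response, without
# chainlit. Assumption: the chain yields partial dicts in LangChain style,
# e.g. {"answer": "Hel"}, {"answer": "lo"}, {"context": [...]}.
def merge_chunks(response):
    output = {}
    for chunk in response:
        for key, value in chunk.items():
            if key not in output:
                output[key] = value
            else:
                output[key] += value  # string/list concatenation, as in the diff
    return output

chunks = [{"answer": "Hello, "}, {"answer": "world!"}, {"context": ["doc1"]}]
print(merge_chunks(chunks))
# {'answer': 'Hello, world!', 'context': ['doc1']}

In the commit itself the same loop also calls await msg.stream_token(chunk['answer']), so tokens appear in the chat window while the dict is being assembled.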
code/modules/chat/helpers.py CHANGED

@@ -2,7 +2,7 @@ from modules.config.prompts import prompts
 import chainlit as cl
 
 
-def get_sources(res, answer, view_sources=False):
+def get_sources(res, answer, stream=True, view_sources=False):
     source_elements = []
     source_dict = {}  # Dictionary to store URL elements
 
@@ -35,10 +35,11 @@ def get_sources(res, answer, view_sources=False):
     else:
         source_dict[url_name]["text"] += f"\n\n{source.page_content}"
 
-    #
-
-    #
-
+    full_answer = ""  # Not to include the answer again if streaming
+
+    if not stream:  # First, display the answer if not streaming
+        full_answer = "**Answer:**\n"
+        full_answer += answer
 
     if view_sources:
 
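The stream parameter exists purely to avoid double-printing: when the answer was already streamed into the chat window token by token, get_sources must not prepend it to the sources message again. The hypothetical standalone helper below isolates that logic; the name build_prefix is mine, not the repo's.

# Hypothetical helper isolating the new prefix logic from get_sources.
def build_prefix(answer: str, stream: bool = True) -> str:
    full_answer = ""  # not to include the answer again if streaming
    if not stream:  # display the answer only when it was not streamed
        full_answer = "**Answer:**\n"
        full_answer += answer
    return full_answer

print(repr(build_prefix("42", stream=False)))  # '**Answer:**\n42'
print(repr(build_prefix("42", stream=True)))   # '' (answer already on screen)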
code/modules/chat_processor/chat_processor.py CHANGED

@@ -33,7 +33,7 @@ class ChatProcessor:
         else:
             pass
 
-    async def rag(self, user_query: str, chain):
+    async def rag(self, user_query: str, chain, stream):
         user_query_dict = {"input": user_query}
         # Define the base configuration
         config = {
@@ -50,4 +50,6 @@ class ChatProcessor:
                 user_query=user_query_dict, config=config, chain=chain
             )
         else:
-            return chain.invoke(user_query=user_query_dict, config=config)
+            if stream:
+                return chain.stream(user_query=user_query_dict, config=config)
+            return chain.invoke(user_query=user_query_dict, config=config)
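rag now returns two different shapes depending on the flag: chain.stream(...) hands back an iterator of partial results, while chain.invoke(...) returns the finished dict, so callers must branch on the same flag they passed in — exactly what main does before calling stream_response. Here is a sketch of that contract, assuming a LangChain-style runnable; note that stock runnables take the input positionally (chain.stream(inputs, config=config)), so the keyword form in this diff suggests a project-specific wrapper.

# Sketch of the stream/invoke dispatch contract. Assumption: a LangChain-style
# runnable whose .invoke() returns a complete dict and whose .stream() returns
# an iterator of partial dicts.
from typing import Any, Dict, Iterator, Union

def run_chain(chain, query: str, stream: bool) -> Union[Dict[str, Any], Iterator[Dict[str, Any]]]:
    inputs = {"input": query}
    if stream:
        return chain.stream(inputs)  # lazy: caller iterates and merges chunks
    return chain.invoke(inputs)      # eager: already a complete result

# Caller side, mirroring main.py:
#   res = await processor.rag(message.content, chain, stream)
#   if stream:
#       res = await self.stream_response(res)  # merge chunks into one dict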
code/modules/config/config.yml CHANGED

@@ -35,6 +35,7 @@ llm_params:
   temperature: 0.7 # float
   local_llm_params:
     temperature: 0.7 # float
+  stream: True # bool
 
 chat_logging:
   log_chat: False # bool
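One wrinkle worth noting: main.py reads the flag as self.config["llm_params"]["stream"], so despite appearing right after local_llm_params.temperature in the diff, the new key must be indented as a direct child of llm_params. A quick sketch of how the flag would be consumed, assuming the app loads config.yml with PyYAML (the loader itself is outside this commit):

# Sketch, assuming config.yml is parsed with PyYAML into a plain dict.
import yaml

with open("code/modules/config/config.yml") as f:
    config = yaml.safe_load(f)

# main.py accesses self.config["llm_params"]["stream"], which only works if
# stream is a direct child of llm_params (not nested under local_llm_params).
print(config["llm_params"]["stream"])  # True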