Johnny Lee committed · commit 98f6c30 · parent b3d6d6f

updates

app.py CHANGED
@@ -103,13 +103,24 @@ class ChatSystemMessage(str, Enum):
     2. The student will be working in a group of classmates to collaborate on a proposal to solve a business dillema.
     3. Be as helpful as you can to the student while remaining factual.
     4. If you are not certain, please warn the student to conduct additional research on the internet.
-    5. Use tables and bullet points as useful way to compare insights
+    5. Use tables and bullet points as useful way to compare insights.
+    6. Start your conversation with this exact verbatim greeting, and nothing else:
+    "Hi!
+
+    I can help you (and anyone you are working with) on any basic research or coordination task to facilitate your work.
+
+    If you don’t know where to begin, you can give me a sense of your overall objective, your time and resource constraints, and a preferred output, and ask me to give you a plan for how to structure your work. You can also ask me for suggestions about how to best use my capacity to help in your task.
+
+    Because my knowledge is limited to the text on which I was trained, I do not have access to up-to-the-second news and research to validate the information I give you. P
+
+    lease remember double-check or find external sources to confirm any fact-related items that I provide to you."
     """


 class ChatbotMode(str, Enum):
     DEBATE_PARTNER = "Debate Partner"
     RESEARCH_ASSISTANT = "Research Assistant"
+    RESEARCH_ASSISTANT_CLAUDE = "Research Assistant - Claude 2"
     DEFAULT = DEBATE_PARTNER

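Side note on the enum change above: the equality checks later in this diff (for example chatbot_mode == ChatbotMode.DEBATE_PARTNER) work on the plain strings Gradio passes around because ChatbotMode subclasses both str and Enum, and DEFAULT = DEBATE_PARTNER is an alias rather than a fourth member. A minimal standalone sketch mirroring the enum above:

from enum import Enum

class ChatbotMode(str, Enum):
    DEBATE_PARTNER = "Debate Partner"
    RESEARCH_ASSISTANT = "Research Assistant"
    RESEARCH_ASSISTANT_CLAUDE = "Research Assistant - Claude 2"
    DEFAULT = DEBATE_PARTNER  # alias of DEBATE_PARTNER, not a fourth member

# Members compare equal to the raw strings coming back from the gr.Radio widget.
assert "Research Assistant - Claude 2" == ChatbotMode.RESEARCH_ASSISTANT_CLAUDE

# Iterating the enum skips the DEFAULT alias, so a choices list built from it
# has exactly three entries (see the ChatbotConfig hunk further down).
assert [mode.value for mode in ChatbotMode] == [
    "Debate Partner",
    "Research Assistant",
    "Research Assistant - Claude 2",
]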
@@ -193,7 +204,7 @@ class ChatSession(BaseModel):
         chatbot_mode: str,
         turns_completed: int,
         case: Optional[str] = None,
-    ) -> Dict[str, Union[str, int]]:
+    ) -> Dict[str, Union[str, int, None]]:
         metadata = dict(
             username=username,
             chatbot_mode=chatbot_mode,
@@ -204,9 +215,11 @@ class ChatSession(BaseModel):

     @staticmethod
     def _make_template(
-        system_msg: str,
+        system_msg: str,
+        poll_question_name: Optional[str] = None,
+        use_claude: Optional[bool] = False,
     ) -> ChatPromptTemplate:
-        knowledge_cutoff = "Sept 2021"
+        knowledge_cutoff = "Early 2023" if use_claude else "Sept 2021"
         current_date = datetime.datetime.now(
             pytz.timezone("America/New_York")
         ).strftime("%Y-%m-%d")
@@ -222,11 +235,12 @@ class ChatSession(BaseModel):
             """
         else:
             knowledge_cutoff = "Early 2023"
-            system_msg
-
-
-
-
+            system_msg = (
+                f"""Knowledge cutoff: {knowledge_cutoff}
+                Current date: {current_date}
+                """
+                + system_msg
+            )

         human_template = "{input}"
         return ChatPromptTemplate.from_messages(
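Taken together, the two _make_template hunks above thread use_claude through to pick the matching knowledge cutoff and then prepend a cutoff/date header to the selected system message. A simplified sketch of that prefixing logic; build_system_prefix is a hypothetical helper for illustration, the app does this inline inside _make_template:

import datetime

import pytz

def build_system_prefix(system_msg: str, use_claude: bool = False) -> str:
    # Claude and the non-Claude model advertise different training cutoffs.
    knowledge_cutoff = "Early 2023" if use_claude else "Sept 2021"
    current_date = datetime.datetime.now(
        pytz.timezone("America/New_York")
    ).strftime("%Y-%m-%d")
    return (
        f"Knowledge cutoff: {knowledge_cutoff}\n"
        f"Current date: {current_date}\n" + system_msg
    )

print(build_system_prefix("You are a research assistant.", use_claude=True))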
@@ -279,19 +293,26 @@ class ChatSession(BaseModel):
         self.history = []

     def set_chatbot_mode(
-        self,
+        self, chatbot_mode: str, poll_question_name: Optional[str] = None
     ) -> None:
-        if
+        if chatbot_mode == ChatbotMode.DEBATE_PARTNER and poll_question_name:
            self.change_llm(use_claude=False)
            self.update_system_prompt(
                system_msg=ChatSystemMessage.CASE_SYSTEM_MESSAGE,
                poll_question_name=poll_question_name,
            )
-
+        elif chatbot_mode == ChatbotMode.RESEARCH_ASSISTANT:
+            self.change_llm(use_claude=False)
+            self.update_system_prompt(
+                system_msg=ChatSystemMessage.RESEARCH_SYSTEM_MESSAGE
+            )
+        elif chatbot_mode == ChatbotMode.RESEARCH_ASSISTANT_CLAUDE:
            self.change_llm(use_claude=True)
            self.update_system_prompt(
                system_msg=ChatSystemMessage.RESEARCH_SYSTEM_MESSAGE
            )
+        else:
+            raise ValueError(f"Unhandled ChatbotMode {chatbot_mode}")

     @classmethod
     def new(
@@ -306,7 +327,9 @@ class ChatSession(BaseModel):
             llm=llm, max_token_limit=context_length, return_messages=True
         )
         template = cls._make_template(
-            system_msg=system_msg,
+            system_msg=system_msg,
+            poll_question_name=poll_question_name,
+            use_claude=use_claude,
         )
         chain = ConversationChain(
             memory=memory,
@@ -404,7 +427,7 @@ async def respond(
         username=request.username,
         chatbot_mode=chatbot_mode,
         turns_completed=len(state.history) + 1,
-        case=case_input,
+        case=case_input if chatbot_mode == ChatbotMode.DEBATE_PARTNER else None,
     )
     LOG.info(f"""[{request.username}] STARTING CHAIN""")
     LOG.debug(f"History: {state.history}")
@@ -480,12 +503,17 @@ async def respond(
         metadata_to_gsheet = flagged_data.get("metadata").values()
         gsheet_row = [[timestamp_string, *metadata_to_gsheet, langsmith_url]]
         LOG.info(f"Data to GSHEET: {gsheet_row}")
-
-
-
-
-
-
+        try:
+            append_gsheet_rows(
+                sheet_id=GSHEET_ID,
+                sheet_name=TURNS_GSHEET_NAME,
+                rows=gsheet_row,
+                creds=GS_CREDS,
+            )
+        except Exception as exc:
+            LOG.error(f"Failed to log entry to Google Sheet. Row {gsheet_row}")
+            LOG.error(exc)
+
     except Exception as e:
         LOG.error(e)
         raise e
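The inner try/except added above makes the Google Sheets append best-effort: a Sheets failure is logged but not re-raised, so the chat turn still completes and only the outer handler propagates errors. The same pattern in isolation, with a hypothetical append_fn standing in for append_gsheet_rows (which, together with GSHEET_ID, TURNS_GSHEET_NAME, and GS_CREDS, is defined elsewhere in app.py):

import logging

LOG = logging.getLogger(__name__)

def append_row_best_effort(append_fn, row: list) -> None:
    """Send a log row to an external sink without ever failing the request."""
    try:
        append_fn(rows=[row])
    except Exception as exc:  # deliberately broad: logging must not break the app
        LOG.error(f"Failed to log entry to Google Sheet. Row {row}")
        LOG.error(exc)

# Usage: even if the sink raises, the caller keeps going.
def flaky_sink(rows):
    raise RuntimeError("Sheets API quota exceeded")

append_row_best_effort(flaky_sink, ["2023-10-01", "user", "Debate Partner"])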
@@ -493,13 +521,16 @@ async def respond(

 class ChatbotConfig(BaseModel):
     app_title: str = "CBS Technology Strategy - Fall 2023"
-    chatbot_modes: List[
+    chatbot_modes: List[str] = [mode.value for mode in ChatbotMode]
     case_options: List[str] = poll_questions.get_case_names()
     default_case_option: str = "Netflix"


 def change_chatbot_mode(
-    state: ChatSession,
+    state: ChatSession,
+    chatbot_mode: str,
+    poll_question_name: str,
+    request: gr.Request,
 ) -> Tuple[Any, ChatSession]:
     """Returns a function that sets the visibility of the case input field and the state"""
     if state is None:
@@ -515,7 +546,18 @@ def change_chatbot_mode(
             ),
             poll_question_name=case_input,
         )
-
+    elif chatbot_mode == ChatbotMode.RESEARCH_ASSISTANT:
+        new_session = ChatSession.new(
+            use_claude=False,
+            system_msg=ChatSystemMessage.RESEARCH_SYSTEM_MESSAGE,
+            metadata=ChatSession.set_metadata(
+                username=request.username,
+                chatbot_mode=chatbot_mode,
+                turns_completed=0,
+            ),
+            poll_question_name=None,
+        )
+    elif chatbot_mode == ChatbotMode.RESEARCH_ASSISTANT_CLAUDE:
         new_session = ChatSession.new(
             use_claude=True,
             system_msg=ChatSystemMessage.RESEARCH_SYSTEM_MESSAGE,
@@ -526,17 +568,24 @@ def change_chatbot_mode(
             ),
             poll_question_name=None,
         )
+    else:
+        raise ValueError(f"Unhandled ChatbotMode {chatbot_mode}")
     state = new_session
     if chatbot_mode == ChatbotMode.DEBATE_PARTNER:
-        state.set_chatbot_mode(
+        state.set_chatbot_mode(
+            chatbot_mode=chatbot_mode, poll_question_name=poll_question_name
+        )
         state.clear_memory()
         return gr.update(visible=True), state
-    elif chatbot_mode
-
+    elif chatbot_mode in [
+        ChatbotMode.RESEARCH_ASSISTANT,
+        ChatbotMode.RESEARCH_ASSISTANT_CLAUDE,
+    ]:
+        state.set_chatbot_mode(chatbot_mode=chatbot_mode)
         state.clear_memory()
         return gr.update(visible=False), state
     else:
-        raise ValueError("
+        raise ValueError(f"Unhandled ChatbotMode {chatbot_mode}")


 config = ChatbotConfig()
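change_chatbot_mode now receives the selected mode, the case, and the Gradio request directly, and its first return value is a gr.update(...) that shows the case dropdown only in Debate Partner mode. The event wiring is not shown in this diff; a minimal hypothetical Gradio 3.x hookup with the same shape (a plain value standing in for the ChatSession state) could look like:

import gradio as gr

def change_mode(state, mode, case, request: gr.Request):
    # gr.Request is injected automatically because of the type annotation,
    # so it is not listed in `inputs` below.
    show_case = mode == "Debate Partner"
    return gr.update(visible=show_case), state

with gr.Blocks() as demo:
    state = gr.State(None)
    mode = gr.Radio(label="Mode", choices=["Debate Partner", "Research Assistant"], value="Debate Partner")
    case = gr.Dropdown(label="Case", choices=["Netflix"], value="Netflix")
    mode.change(fn=change_mode, inputs=[state, mode, case], outputs=[case, state])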
@@ -551,7 +600,7 @@ with gr.Blocks(
     with gr.Row():
         chatbot_mode = gr.Radio(
             label="Mode",
-            choices=
+            choices=config.chatbot_modes,
             value=ChatbotMode.DEFAULT,
         )
         case_input = gr.Dropdown(
@@ -593,4 +642,4 @@ with gr.Blocks(
     chat_submit_button.click(**clear_chatbot_messages_params)
     input_message.submit(**clear_chatbot_messages_params)

-demo.queue(max_size=99, concurrency_count=
+demo.queue(max_size=99, concurrency_count=25, api_open=False).launch(auth=auth)
|