youngtsai committed
Commit 930c56c · 1 Parent(s): fa91b0f

foxcat_chatbot_avatar_url

Files changed (2)
  1. app.py +100 -31
  2. chatbot.py +22 -8
app.py CHANGED
@@ -2051,17 +2051,42 @@ def chat_with_ai(ai_name, password, video_id, user_data, trascript_state, key_mo
          error_msg = "此次對話超過上限(對話一輪10次)"
          raise gr.Error(error_msg)

-     if not ai_name in ["jutor", "claude3", "groq"]:
-         ai_name = "jutor"
-
-     if ai_name == "jutor":
-         ai_client = ""
-     elif ai_name == "claude3":
-         ai_client = BEDROCK_CLIENT
-     elif ai_name == "groq":
-         ai_client = GROQ_CLIENT
-     else:
-         ai_client = ""
+     if not ai_name in ["foxcat", "lili", "maimai"]:
+         ai_name = "foxcat"
+
+     # if ai_name == "jutor":
+     #     ai_client = ""
+     # elif ai_name == "claude3":
+     #     ai_client = BEDROCK_CLIENT
+     # elif ai_name == "groq":
+     #     ai_client = GROQ_CLIENT
+     # else:
+     #     ai_client = ""
+
+     ai_name_clients_model = {
+         "foxcat": {
+             "ai_name": "foxcat",
+             "ai_client": GROQ_CLIENT,
+             "ai_model_name": "groq_llama3",
+         },
+         "lili": {
+             "ai_name": "lili",
+             "ai_client": BEDROCK_CLIENT,
+             "ai_model_name": "claude3",
+         },
+         # "maimai": {
+         #     "ai_name": "maimai",
+         #     "ai_client": OPEN_AI_CLIENT,
+         #     "ai_model_name": "openai",
+         # }
+         "maimai": {
+             "ai_name": "maimai",
+             "ai_client": GROQ_CLIENT,
+             "ai_model_name": "groq_mixtral",
+         }
+     }
+     ai_client = ai_name_clients_model.get(ai_name, "foxcat")["ai_client"]
+     ai_model_name = ai_name_clients_model.get(ai_name, "foxcat")["ai_model_name"]

      if isinstance(trascript_state, str):
          simple_transcript = json.loads(trascript_state)
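Note on the new lookup: `ai_name_clients_model.get(ai_name, "foxcat")` falls back to the string "foxcat" rather than to the "foxcat" entry, so the `["ai_client"]` subscript only works because the guard above has already normalised `ai_name`. A minimal self-contained sketch of the same pattern with a safer default (illustrative names; the client objects are stubbed with strings here, whereas app.py uses the real GROQ_CLIENT and BEDROCK_CLIENT instances):

    # Sketch only: string stand-ins for the real GROQ_CLIENT / BEDROCK_CLIENT objects.
    AI_PROFILES = {
        "foxcat": {"ai_client": "GROQ_CLIENT", "ai_model_name": "groq_llama3"},
        "lili": {"ai_client": "BEDROCK_CLIENT", "ai_model_name": "claude3"},
        "maimai": {"ai_client": "GROQ_CLIENT", "ai_model_name": "groq_mixtral"},
    }

    def resolve_profile(ai_name: str) -> dict:
        # Fall back to the foxcat *entry*, not the string "foxcat",
        # so the later ["ai_client"] lookup can never hit a plain str.
        return AI_PROFILES.get(ai_name, AI_PROFILES["foxcat"])

    profile = resolve_profile("unknown")
    print(profile["ai_client"], profile["ai_model_name"])  # GROQ_CLIENT groq_llama3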
@@ -2088,14 +2113,14 @@ def chat_with_ai(ai_name, password, video_id, user_data, trascript_state, key_mo
          "content_subject": content_subject,
          "content_grade": content_grade,
          "jutor_chat_key": JUTOR_CHAT_KEY,
-         "ai_name": ai_name,
+         "ai_model_name": ai_model_name,
          "ai_client": ai_client,
          "instructions": instructions
      }

      try:
          chatbot = Chatbot(chatbot_config)
-         response_completion = chatbot.chat(user_message, chat_history, socratic_mode, ai_name)
+         response_completion = chatbot.chat(user_message, chat_history, socratic_mode, ai_model_name)
      except Exception as e:
          print(f"Error: {e}")
          response_completion = "學習精靈有點累,請稍後再試!"
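With this hunk both the config dict and the chat() call pass the resolved model name instead of the persona name, so Chatbot only ever sees service identifiers such as "groq_llama3" or "claude3". A hedged sketch of the new calling convention using a stand-in class (the real fields are built inside chat_with_ai and the real Chatbot lives in chatbot.py):

    # Stand-in for chatbot.Chatbot, just to illustrate the call contract.
    class FakeChatbot:
        def __init__(self, config):
            self.config = config

        def chat(self, user_message, chat_history, socratic_mode, service_type):
            return f"[{service_type}] would answer: {user_message!r}"

    chatbot_config = {
        "ai_model_name": "groq_llama3",  # resolved via ai_name_clients_model
        "ai_client": None,               # GROQ_CLIENT / BEDROCK_CLIENT in app.py
        "instructions": "你是溫柔的家教",
    }
    bot = FakeChatbot(chatbot_config)
    print(bot.chat("什麼是光合作用?", [], True, chatbot_config["ai_model_name"]))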
@@ -2413,15 +2438,17 @@ def chatbot_select(chatbot_name):
      chatbot_open_ai_visible = gr.update(visible=False)
      chatbot_open_ai_streaming_visible = gr.update(visible=False)
      chatbot_jutor_visible = gr.update(visible=False)
+     ai_name_update = gr.update(value="jutor")

      if chatbot_name == "chatbot_open_ai":
          chatbot_open_ai_visible = gr.update(visible=True)
      elif chatbot_name == "chatbot_open_ai_streaming":
          chatbot_open_ai_streaming_visible = gr.update(visible=True)
-     elif chatbot_name == "chatbot_jutor":
+     else:
          chatbot_jutor_visible = gr.update(visible=True)
+         ai_name_update = gr.update(value=chatbot_name)

-     return chatbot_select_accordion_visible, chatbot_open_ai_visible, chatbot_open_ai_streaming_visible, chatbot_jutor_visible
+     return chatbot_select_accordion_visible, chatbot_open_ai_visible, chatbot_open_ai_streaming_visible, chatbot_jutor_visible, ai_name_update

  # --- Slide mode ---
  def update_slide(direction):
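chatbot_select now returns a fifth gr.update whose value drives the hidden ai_name dropdown, so clicking a persona button also selects the backing model. A small runnable Gradio sketch of the same pattern (component names here are illustrative, not the ones in app.py):

    import gradio as gr

    def pick_persona(persona_name):
        # One callback can update several components by returning gr.update objects.
        return gr.update(visible=False), gr.update(value=persona_name)

    with gr.Blocks() as demo:
        with gr.Row(visible=True) as selector_row:
            foxcat_btn = gr.Button("foxcat")
        persona_state = gr.State("foxcat")
        persona_dropdown = gr.Dropdown(choices=["foxcat", "lili", "maimai"],
                                       value="foxcat", visible=False)
        foxcat_btn.click(pick_persona,
                         inputs=[persona_state],
                         outputs=[selector_row, persona_dropdown])

    # demo.launch()  # uncomment to try it locally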
@@ -2635,18 +2662,46 @@ with gr.Blocks(theme=gr.themes.Base(primary_hue=gr.themes.colors.orange, seconda
              chatbot_open_ai_streaming_select_btn = gr.Button("👆選擇【飛特音速】", elem_id="streaming_chatbot_btn", visible=True, variant="primary")
              gr.Markdown(value=streaming_chatbot_description, visible=True)
          with gr.Column(scale=1, variant="panel"):
-             jutor_chatbot_avatar_url = "https://storage.googleapis.com/wpassets.junyiacademy.org/1/2019/11/%E5%9B%9B%E6%A0%BC%E6%95%85%E4%BA%8B-04.jpg"
-             jutor_chatbot_description = """Hi,我們是【梨梨、麥麥、狐狸貓】,\n
+             foxcat_chatbot_avatar_url = "https://storage.googleapis.com/wpassets.junyiacademy.org/1/2020/06/%E7%A7%91%E5%AD%B8%E5%BE%BD%E7%AB%A0-2-150x150.png"
+             foxcat_chatbot_description = """Hi,我是【狐狸貓】,\n
              也可以陪你一起學習本次的內容,有什麼問題都可以問我喔!\n
              🤔 如果你不知道怎麼發問,可以點擊左下方的問題一、問題二、問題三,我會幫你生成問題!\n
              🗣️ 也可以點擊右下方用語音輸入,我會幫你轉換成文字,厲害吧!\n
              🔠 或是直接鍵盤輸入你的問題,我會盡力回答你的問題喔!\n
              💤 精靈們體力都有限,每一次學習只能回答十個問題,請讓我休息一下再問問題喔!\n
              """
-             chatbot_jutor_name = gr.State("chatbot_jutor")
-             gr.Image(value=jutor_chatbot_avatar_url, height=100, width=100, show_label=False, show_download_button=False)
-             chatbot_jutor_select_btn = gr.Button("👆選擇【梨梨、麥麥、狐狸貓】", elem_id="jutor_chatbot_btn", visible=True, variant="primary")
-             gr.Markdown(value=jutor_chatbot_description, visible=True)
+             foxcat_chatbot_name = gr.State("foxcat")
+             gr.Image(value=foxcat_chatbot_avatar_url, height=100, width=100, show_label=False, show_download_button=False)
+             foxcat_chatbot_select_btn = gr.Button("👆選擇【狐狸貓】", visible=True, variant="primary")
+             gr.Markdown(value=foxcat_chatbot_description, visible=True)
+         # 梨梨
+         with gr.Column(scale=1, variant="panel"):
+             lili_chatbot_avatar_url = "https://junyitopicimg.s3.amazonaws.com/live/v1283-new-topic-44-icon.png?v=20230529071206714"
+             lili_chatbot_description = """你好,我是溫柔的【梨梨】, \n
+             很高興可以在這裡陪伴你學習。如果你有任何疑問,請隨時向我提出哦! \n
+             🤔 如果你在思考如何提問,可以嘗試點擊下方的「問題一」、「問題二」或「問題三」,我會為你生成一些問題來幫助你啟動思考。 \n
+             🗣️ 你也可以使用右下角的語音輸入功能,讓我幫你將語音轉化為文字,這樣可以更加方便快捷。\n
+             🔠 當然,你也可以直接通過鍵盤輸入你的問題,我將盡我所能為你提供答案。\n
+             💤 請理解,即使是我們這些精靈,也有疲憊的時候,每次學習後我能回答的問題有限。如果達到上限,讓我稍作休息之後再繼續回答你的問題吧!
+             """
+             lili_chatbot_name = gr.State("lili")
+             gr.Image(value=lili_chatbot_avatar_url, height=100, width=100, show_label=False, show_download_button=False)
+             lili_chatbot_select_btn = gr.Button("👆選擇【梨梨】", visible=True, variant="primary")
+             gr.Markdown(value=lili_chatbot_description, visible=True)
+         # 麥麥
+         with gr.Column(scale=1, variant="panel"):
+             maimai_chatbot_avatar_url = "https://storage.googleapis.com/wpassets.junyiacademy.org/1/2020/07/%E6%80%9D%E8%80%83%E5%8A%9B%E8%B6%85%E4%BA%BA%E5%BE%BD%E7%AB%A0_%E5%B7%A5%E4%BD%9C%E5%8D%80%E5%9F%9F-1-%E8%A4%87%E6%9C%AC-150x150.png"
+             maimai_chatbot_description = """Hi,我是迷人的【麥麥】,\n
+             我在這裡等著和你一起探索新知,任何疑問都可以向我提出!\n
+             🤔 如果你不知道從哪裡開始,試試左下方的「問題一」、「問題二」、「問題三」,我會為你提供一些啟發思考的問題。\n
+             🗣️ 你也可以利用右下角的語音輸入功能,讓我將你的語音轉成文字,是不是很酷?\n
+             🔠 當然,你也可以直接透過鍵盤向我發問,我會全力以赴來回答你的每一個問題。\n
+             💤 我們這些精靈也需要休息,每次學習我們只能回答十個問題,當達到上限時,請給我一點時間充電再繼續。\n
+             """
+             maimai_chatbot_name = gr.State("maimai")
+             gr.Image(value=maimai_chatbot_avatar_url, height=100, width=100, show_label=False, show_download_button=False)
+             maimai_chatbot_select_btn = gr.Button("👆選擇【麥麥】", visible=True, variant="primary")
+             gr.Markdown(value=maimai_chatbot_description, visible=True)

      with gr.Row("飛特精靈") as chatbot_open_ai:
          with gr.Column():
@@ -2710,11 +2765,15 @@ with gr.Blocks(theme=gr.themes.Base(primary_hue=gr.themes.colors.orange, seconda
              """,
          ]]
          ai_chatbot_bot_avatar = "https://storage.googleapis.com/wpassets.junyiacademy.org/1/2019/11/%E5%9B%9B%E6%A0%BC%E6%95%85%E4%BA%8B-04.jpg"
-         ai_name = gr.Dropdown(label="選擇 AI 助理", choices=[
-             # ("梨梨","jutor"),
-             ("麥麥","claude3"),
-             ("狐狸貓","groq")],
-             value="claude3"
+         ai_name = gr.Dropdown(
+             label="選擇 AI 助理",
+             choices=[
+                 ("梨梨","lili"),
+                 ("麥麥","maimai"),
+                 ("狐狸貓","foxcat")
+             ],
+             value="foxcat",
+             visible=False
          )
          ai_chatbot = gr.Chatbot(avatar_images=[user_avatar, ai_chatbot_bot_avatar], label="ai_chatbot", show_share_button=False, likeable=True, show_label=False, latex_delimiters=latex_delimiters, value=ai_chatbot_greeting)
          ai_chatbot_socratic_mode_btn = gr.Checkbox(label="蘇格拉底家教助理模式", value=True, visible=False)
@@ -2915,21 +2974,31 @@ with gr.Blocks(theme=gr.themes.Base(primary_hue=gr.themes.colors.orange, seconda
      chatbot_open_ai_select_btn.click(
          chatbot_select,
          inputs=[chatbot_open_ai_name],
-         outputs=[chatbot_select_accordion, chatbot_open_ai, chatbot_open_ai_streaming, chatbot_jutor]
+         outputs=[chatbot_select_accordion, chatbot_open_ai, chatbot_open_ai_streaming, chatbot_jutor, ai_name]
      )
      chatbot_open_ai_streaming_select_btn.click(
          chatbot_select,
          inputs=[chatbot_open_ai_streaming_name],
-         outputs=[chatbot_select_accordion, chatbot_open_ai, chatbot_open_ai_streaming, chatbot_jutor]
+         outputs=[chatbot_select_accordion, chatbot_open_ai, chatbot_open_ai_streaming, chatbot_jutor, ai_name]
      ).then(
          create_thread_id,
          inputs=[],
          outputs=[streaming_chat_thread_id_state]
      )
-     chatbot_jutor_select_btn.click(
+     foxcat_chatbot_select_btn.click(
+         chatbot_select,
+         inputs=[foxcat_chatbot_name],
+         outputs=[chatbot_select_accordion, chatbot_open_ai, chatbot_open_ai_streaming, chatbot_jutor, ai_name]
+     )
+     lili_chatbot_select_btn.click(
+         chatbot_select,
+         inputs=[lili_chatbot_name],
+         outputs=[chatbot_select_accordion, chatbot_open_ai, chatbot_open_ai_streaming, chatbot_jutor, ai_name]
+     )
+     maimai_chatbot_select_btn.click(
          chatbot_select,
-         inputs=[chatbot_jutor_name],
-         outputs=[chatbot_select_accordion, chatbot_open_ai, chatbot_open_ai_streaming, chatbot_jutor]
+         inputs=[maimai_chatbot_name],
+         outputs=[chatbot_select_accordion, chatbot_open_ai, chatbot_open_ai_streaming, chatbot_jutor, ai_name]
      )

      # OPENAI ASSISTANT CHATBOT 模式
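The three new click bindings differ only in the State passed as input, so the same wiring could also be generated in a loop. A sketch of that design alternative, assuming it sits inside the same gr.Blocks context and reuses the components named in the hunk above:

    # Design-choice sketch: wire every persona button identically in one loop.
    persona_controls = [
        (foxcat_chatbot_select_btn, foxcat_chatbot_name),
        (lili_chatbot_select_btn, lili_chatbot_name),
        (maimai_chatbot_select_btn, maimai_chatbot_name),
    ]
    shared_outputs = [chatbot_select_accordion, chatbot_open_ai,
                      chatbot_open_ai_streaming, chatbot_jutor, ai_name]
    for select_btn, name_state in persona_controls:
        select_btn.click(chatbot_select, inputs=[name_state], outputs=shared_outputs)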
 
chatbot.py CHANGED
@@ -39,10 +39,11 @@ class Chatbot:
          return key_moments_text


-     def chat(self, user_message, chat_history, socratic_mode=False, service_type='jutor'):
+     def chat(self, user_message, chat_history, socratic_mode=False, service_type='openai'):
          messages = self.prepare_messages(chat_history, user_message)
          system_prompt = self.instructions
-         if service_type in ['jutor', 'groq', 'claude3']:
+         service_type_list = ['openai', 'claude3', 'groq_llama3', 'groq_mixtral']
+         if service_type in service_type_list:
              response_text = self.chat_with_service(service_type, system_prompt, messages)
              return response_text
          else:
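Because app.py now forwards ai_model_name as service_type, this whitelist has to cover every value produced by ai_name_clients_model. A small standalone check of that invariant (both sets copied from this commit):

    # Values emitted by app.py's ai_name_clients_model ...
    app_side_model_names = {"groq_llama3", "claude3", "groq_mixtral"}
    # ... must all be accepted by Chatbot.chat()'s service_type_list.
    chatbot_side_whitelist = {"openai", "claude3", "groq_llama3", "groq_mixtral"}

    assert app_side_model_names <= chatbot_side_whitelist
    print("every ai_model_name sent by app.py is a valid service_type")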
@@ -66,10 +67,12 @@ class Chatbot:
          return messages

      def chat_with_service(self, service_type, system_prompt, messages):
-         if service_type == 'jutor':
+         if service_type == 'openai':
              return self.chat_with_jutor(system_prompt, messages)
-         elif service_type == 'groq':
-             return self.chat_with_groq(system_prompt, messages)
+         elif service_type == 'groq_llama3':
+             return self.chat_with_groq(service_type, system_prompt, messages)
+         elif service_type == 'groq_mixtral':
+             return self.chat_with_groq(service_type, system_prompt, messages)
          elif service_type == 'claude3':
              return self.chat_with_claude3(system_prompt, messages)
          else:
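The two groq branches differ only in the string they forward, so a table-driven dispatch is a natural alternative. A self-contained sketch (handlers are stubbed here so the example runs on its own; the real methods live on Chatbot):

    # Stubs standing in for Chatbot.chat_with_jutor / chat_with_groq / chat_with_claude3.
    def chat_with_jutor(system_prompt, messages):
        return "jutor reply"

    def chat_with_groq(service_type, system_prompt, messages):
        return f"groq reply via {service_type}"

    def chat_with_claude3(system_prompt, messages):
        return "claude3 reply"

    DISPATCH = {
        "openai": lambda sp, ms: chat_with_jutor(sp, ms),
        "groq_llama3": lambda sp, ms: chat_with_groq("groq_llama3", sp, ms),
        "groq_mixtral": lambda sp, ms: chat_with_groq("groq_mixtral", sp, ms),
        "claude3": lambda sp, ms: chat_with_claude3(sp, ms),
    }

    def chat_with_service(service_type, system_prompt, messages):
        handler = DISPATCH.get(service_type)
        if handler is None:
            raise ValueError(f"unknown service_type: {service_type}")
        return handler(system_prompt, messages)

    print(chat_with_service("groq_mixtral", "be a patient tutor", []))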
@@ -83,6 +86,8 @@ class Chatbot:
              "x-api-key": self.jutor_chat_key,
          }
          model = "gpt-4-turbo"
+         print("======model======")
+         print(model)
          # model = "gpt-3.5-turbo-0125"
          data = {
              "data": {
@@ -99,11 +104,19 @@ class Chatbot:
          response_completion = response_data['data']['choices'][0]['message']['content'].strip()
          return response_completion

-     def chat_with_groq(self, system_prompt, messages):
+     def chat_with_groq(self, model_name, system_prompt, messages):
          # system_prompt insert to messages 的最前面 {"role": "system", "content": system_prompt}
          messages.insert(0, {"role": "system", "content": system_prompt})
+         model_name_dict = {
+             "groq_llama3": "llama3-70b-8192",
+             "groq_mixtral": "mixtral-8x7b-32768"
+         }
+         model = model_name_dict.get(model_name)
+         print("======model======")
+         print(model)
+
          request_payload = {
-             "model": "mixtral-8x7b-32768",
+             "model": model,
              "messages": messages,
              "max_tokens": 500 # 設定一個較大的值,可根據需要調整
          }
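The hunk stops before the request is actually sent; assuming self.ai_client is a groq.Groq instance (which is what GROQ_CLIENT suggests), the payload built above would typically go through chat.completions.create. A hedged standalone sketch (requires the groq package and a GROQ_API_KEY; model names mirror model_name_dict):

    from groq import Groq

    client = Groq()  # reads GROQ_API_KEY from the environment
    messages = [
        {"role": "system", "content": "你是溫柔的家教"},
        {"role": "user", "content": "請用一句話解釋光合作用"},
    ]
    response = client.chat.completions.create(
        model="llama3-70b-8192",  # or "mixtral-8x7b-32768"
        messages=messages,
        max_tokens=500,
    )
    print(response.choices[0].message.content.strip())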
@@ -118,6 +131,8 @@ class Chatbot:

          model_id = "anthropic.claude-3-sonnet-20240229-v1:0"
          # model_id = "anthropic.claude-3-haiku-20240307-v1:0"
+         print("======model_id======")
+         print(model_id)
          kwargs = {
              "modelId": model_id,
              "contentType": "application/json",
@@ -129,7 +144,6 @@ class Chatbot:
                  "messages": messages
              })
          }
-         print(messages)
          # 建立 message API,讀取回應
          bedrock_client = self.ai_client
          response = bedrock_client.invoke_model(**kwargs)
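Parsing the invoke_model response falls outside this hunk; assuming the documented Bedrock/Anthropic Messages shape (a StreamingBody whose JSON carries a content list of text blocks), reading the reply looks roughly like this sketch:

    import json

    def extract_claude_text(response) -> str:
        # response is the dict returned by bedrock_client.invoke_model(**kwargs)
        body = json.loads(response["body"].read())
        return "".join(block["text"]
                       for block in body.get("content", [])
                       if block.get("type") == "text").strip()

    # Hypothetical usage inside chat_with_claude3:
    # response_completion = extract_claude_text(response)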
 