Asilbek14 committed
Commit 08ea239 · verified · 1 Parent(s): 0a2169b

Update app.py

Files changed (1)
  1. app.py +11 -8
app.py CHANGED
@@ -17,6 +17,7 @@ client = InferenceClient(MODEL_REPO)
 
 # ---------------- CHAT FUNCTION ----------------
 def stream_response(message, chat_history, system_message, max_tokens, temperature, top_p, response_style):
+    # adjust style
     if response_style == "Concise":
         system_message += " Keep answers short and direct."
     elif response_style == "Detailed":
@@ -24,10 +25,8 @@ def stream_response(message, chat_history, system_message, max_tokens, temperature, top_p, response_style):
     elif response_style == "Essay":
         system_message += " Write long, structured, essay-style responses."
 
-    messages = [{"role": "system", "content": system_message}]
-    for user, bot in chat_history:
-        messages.append({"role": "user", "content": user})
-        messages.append({"role": "assistant", "content": bot})
+    # build conversation
+    messages = [{"role": "system", "content": system_message}] + chat_history
     messages.append({"role": "user", "content": message})
 
     response = ""
@@ -40,7 +39,11 @@ def stream_response(message, chat_history, system_message, max_tokens, temperature, top_p, response_style):
     ):
         token = msg.choices[0].delta.content or ""
         response += token
-        yield "", chat_history + [(message, response)]
+        # yield new history in messages format
+        yield "", chat_history + [
+            {"role": "user", "content": message},
+            {"role": "assistant", "content": response}
+        ]
 
 
 # ---------------- UI ----------------
@@ -54,7 +57,7 @@ with gr.Blocks(theme=gr.themes.Soft(primary_hue="violet", secondary_hue="pink"))
     )
 
     chatbot = gr.Chatbot(
-        type="messages",  # ✅ new format
+        type="messages",  # ✅ use messages format
         height=500,
         show_copy_button=True,
         label="Chat"
@@ -95,7 +98,7 @@ with gr.Blocks(theme=gr.themes.Soft(primary_hue="violet", secondary_hue="pink"))
         [msg, chatbot, system_prompt, max_tokens, temperature, top_p, response_style],
         [msg, chatbot]
     )
-    clear_btn.click(lambda: None, None, chatbot, queue=False)
+    clear_btn.click(lambda: [], None, chatbot, queue=False)
 
 if __name__ == "__main__":
-    demo.launch()
+    demo.launch()
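
For context, here is a minimal sketch of how `stream_response` reads after this commit, assembled from the hunks above. The `MODEL_REPO` value, the wording of the "Detailed" branch, and the exact `client.chat_completion(...)` call are assumptions filled in around the changed lines; only the message handling and the yield are actually shown in the diff.

```python
# Sketch only: pieces not visible in the diff are marked as assumptions.
import gradio as gr
from huggingface_hub import InferenceClient

MODEL_REPO = "HuggingFaceH4/zephyr-7b-beta"  # assumption: the real repo id is defined earlier in app.py
client = InferenceClient(MODEL_REPO)


def stream_response(message, chat_history, system_message,
                    max_tokens, temperature, top_p, response_style):
    # adjust style
    if response_style == "Concise":
        system_message += " Keep answers short and direct."
    elif response_style == "Detailed":
        system_message += " Give thorough, detailed answers."  # assumption: this branch's text is outside the hunks
    elif response_style == "Essay":
        system_message += " Write long, structured, essay-style responses."

    # build conversation: with type="messages", chat_history is already a list of
    # {"role": ..., "content": ...} dicts, so it can be concatenated directly
    messages = [{"role": "system", "content": system_message}] + chat_history
    messages.append({"role": "user", "content": message})

    response = ""
    # assumption: the streaming call itself is unchanged and sits between the hunks
    for msg in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = msg.choices[0].delta.content or ""
        response += token
        # yield new history in messages format
        yield "", chat_history + [
            {"role": "user", "content": message},
            {"role": "assistant", "content": response},
        ]
```

The switch to `type="messages"` is what makes the simpler history handling work: `gr.Chatbot` then stores and expects OpenAI-style role/content dicts rather than `(user, bot)` tuples, so the old unpacking loop and the tuple-based yield are gone, and the clear button now resets the history to an empty list (`lambda: []`) instead of `None`.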