luminoussg committed
Commit 57a76f2 · verified · 1 Parent(s): 28f1fca

Update app.py

Files changed (1): app.py (+29, -26)
app.py CHANGED

@@ -74,10 +74,12 @@ def query_model(model_name: str, messages: List[Dict[str, str]]) -> str:
     except Exception as e:
         return f"{model_name} error: {str(e)}"

-def respond(message: str, history: List[List[str]], session_id: str) -> List[List[str]]:
+def respond(message: str, history: List[List[str]], session_id: str) -> tuple[str, str]:
     """Handle sequential model responses with context preservation"""
-    # Load session history
+    # Load or initialize session
     session = session_manager.load_session(session_id)
+    if not isinstance(session, dict) or "history" not in session:
+        session = {"history": []}

     # Build context from session history
     messages = []
@@ -133,38 +135,39 @@ def respond(message: str, history: List[List[str]], session_id: str) -> List[List[str]]:
     # Save final session state
     session_manager.save_session(session_id, session)

-    # Return responses in Gradio chat format
-    return [[message, response] for response in responses]
+    # Return response as a single tuple for Gradio chat
+    return message, "\n\n".join(responses)

-# Create the Gradio interface with session management
-with gr.Blocks(title="Multi-LLM Collaboration Chat") as demo:
+# Create the Gradio interface
+with gr.Blocks() as demo:
     session_id = gr.State(session_manager.create_session)

-    with gr.Row():
-        gr.Markdown("## Multi-LLM Collaboration Chat")
-        new_session_btn = gr.Button("🆕 New Session", variant="secondary")
+    gr.Markdown("## Multi-LLM Collaboration Chat")
+    gr.Markdown("A group chat with Qwen2.5-72B, Llama3.3-70B, and Qwen2.5-Coder-32B")

-    with gr.Row():
-        gr.Markdown("A group chat with Qwen2.5-72B, Llama3.3-70B, and Qwen2.5-Coder-32B")
+    chatbot = gr.Chatbot()
+    msg = gr.Textbox(label="Message")
+    clear = gr.Button("Clear")

-    chat_interface = gr.ChatInterface(
-        respond,
-        examples=[
-            ["How can I optimize Python code?"],
-            ["Explain quantum computing basics"]
-        ],
-        additional_inputs=[session_id]
-    )
+    def user(message, history, session_id):
+        return "", history + [[message, None]]

-    def create_new_session():
-        new_id = session_manager.create_session()
-        return new_id, None
+    def bot(history, session_id):
+        if history[-1][1] is None:
+            message = history[-1][0]
+            _, response = respond(message, history[:-1], session_id)
+            history[-1][1] = response
+            return history
+        return history

-    new_session_btn.click(
-        fn=create_new_session,
-        outputs=[session_id, chat_interface.chatbot],
-        show_progress=False
+    msg.submit(user, [msg, chatbot, session_id], [msg, chatbot]).then(
+        bot, [chatbot, session_id], [chatbot]
     )
+
+    clear.click(lambda: (session_manager.create_session(), None, []),
+                None,
+                [session_id, msg, chatbot],
+                queue=False)

 if __name__ == "__main__":
     demo.launch(share=True)
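
For context, respond() and the UI callbacks in this diff depend on a session_manager object defined earlier in app.py, outside the changed hunks. A minimal sketch of the interface the changed lines assume is below: the method names (create_session, load_session, save_session) and the {"history": []} shape come from the diff itself, while the in-memory dict storage is an illustrative assumption, not the app's actual implementation.

# Minimal sketch of the session-manager interface this diff relies on.
# The real SessionManager is defined elsewhere in app.py and may persist
# sessions differently; storage here is an illustrative assumption.
import uuid
from typing import Any, Dict

class SessionManager:
    def __init__(self) -> None:
        self._sessions: Dict[str, Dict[str, Any]] = {}

    def create_session(self) -> str:
        # Fresh id with an empty history, matching the {"history": []}
        # fallback shape the updated respond() expects.
        session_id = str(uuid.uuid4())
        self._sessions[session_id] = {"history": []}
        return session_id

    def load_session(self, session_id: str) -> Dict[str, Any]:
        # May return an empty dict for unknown ids; the new
        # isinstance/"history" guard in respond() covers that case.
        return self._sessions.get(session_id, {})

    def save_session(self, session_id: str, session: Dict[str, Any]) -> None:
        self._sessions[session_id] = session

session_manager = SessionManager()

As for the UI change itself: replacing gr.ChatInterface with an explicit gr.Chatbot splits message handling into two chained steps. msg.submit(user, ...) immediately clears the textbox and appends the user message with a None placeholder reply, and .then(bot, ...) fills that placeholder with the joined model responses from respond(). The Clear button resets all three pieces of state at once: a fresh session id, an empty textbox, and an empty chat history.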