KingNish committed
Commit 20b047b · verified · 1 Parent(s): ce2ebf6

Update app.py

Files changed (1):
  1. app.py +12 -12
app.py CHANGED
@@ -79,15 +79,18 @@ def process_chat_streaming(user_message, chatbot_display, messages_list, image_p
         stop_strings=["<|endoftext|>", "User:"] # Add stop strings to prevent over-generation
     )
 
-    # Create generation kwargs for the thread
-    generation_kwargs = dict(
-        inputs,
-        streamer=streamer,
-        generation_config=generation_config
+    # *** THE FIX IS HERE ***
+    # We must pass 'inputs' as a positional argument for 'batch'
+    # and the rest as keyword arguments.
+    thread = Thread(
+        target=model.generate_from_batch,
+        args=[inputs], # Pass `inputs` as the first positional argument ('batch')
+        kwargs={ # Pass the rest as keyword arguments
+            "generation_config": generation_config,
+            "tokenizer": processor.tokenizer,
+            "streamer": streamer,
+        }
     )
-
-    # Run generation in a separate thread
-    thread = Thread(target=model.generate_from_batch, kwargs=generation_kwargs)
     thread.start()
 
     # Yield updates to the Gradio UI
@@ -136,8 +139,6 @@ with gr.Blocks(theme=gr.themes.Default(primary_hue="blue", secondary_hue="neutra
             scale=4,
             container=False
         )
-        # The submit button is now primarily for show; Enter key is the main way to submit
-        # but we will wire it up anyway.
 
     # --- Event Listeners ---
 
@@ -146,7 +147,6 @@ with gr.Blocks(theme=gr.themes.Default(primary_hue="blue", secondary_hue="neutra
         fn=process_chat_streaming,
         inputs=[user_textbox, chatbot_display, messages_list, image_input],
         outputs=[chatbot_display, messages_list],
-        # queue=False # Set queue to False for faster interaction with streaming
     )
 
     # Chain the action to also clear the textbox after submission
@@ -167,4 +167,4 @@ with gr.Blocks(theme=gr.themes.Default(primary_hue="blue", secondary_hue="neutra
 
 
 if __name__ == "__main__":
-    demo.launch(debug=True, mcp_server=True)
+    demo.launch(mcp_server=True)
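The first hunk settles on one pattern: run model.generate_from_batch on a background thread, passing `inputs` positionally as the `batch` argument and everything else (generation config, tokenizer, streamer) as keyword arguments, then consume the streamer on the main thread. The old version spread `inputs` into individual keyword arguments via dict(inputs, ...), so the positional `batch` never arrived as a single dict. Below is a minimal, self-contained sketch of that pattern; the checkpoint name, the stream_reply helper, and the generation settings are illustrative assumptions rather than code taken from app.py.

```python
# Minimal sketch of the threaded streaming call this commit fixes.
# The checkpoint name and generation settings are assumptions for
# illustration; only the Thread(args/kwargs) split mirrors the diff.
from threading import Thread

from transformers import (
    AutoModelForCausalLM,
    AutoProcessor,
    GenerationConfig,
    TextIteratorStreamer,
)

MODEL_ID = "allenai/Molmo-7B-D-0924"  # assumed model; app.py may use another

processor = AutoProcessor.from_pretrained(MODEL_ID, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(MODEL_ID, trust_remote_code=True)


def stream_reply(inputs: dict) -> str:
    """Generate in a background thread and collect tokens as they arrive."""
    streamer = TextIteratorStreamer(
        processor.tokenizer, skip_prompt=True, skip_special_tokens=True
    )
    generation_config = GenerationConfig(
        max_new_tokens=512,
        stop_strings=["<|endoftext|>", "User:"],
    )

    # The fix: `inputs` is the positional `batch` argument; the streamer,
    # tokenizer, and generation config go in as keyword arguments.
    thread = Thread(
        target=model.generate_from_batch,
        args=[inputs],
        kwargs={
            "generation_config": generation_config,
            "tokenizer": processor.tokenizer,
            "streamer": streamer,
        },
    )
    thread.start()

    reply = ""
    for chunk in streamer:  # yields decoded text pieces as generation proceeds
        reply += chunk
    thread.join()
    return reply
```

Reading from the TextIteratorStreamer on the caller's thread is what lets the Gradio handler yield partial replies while generation is still running in the background thread.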