Update app.py
app.py (changed)
@@ -237,7 +237,6 @@ def add_message(history, message):
     global chat_image_num
     if not history:
         history = []
-    if len(history) == 0:
     our_chatbot = InferenceDemo(
         args, model_path, tokenizer, model, image_processor, context_len
     )
@@ -262,7 +261,7 @@ def add_message(history, message):
 
         print("### Not bigger than one history", history)
         print("### Not bigger than one conv", our_chatbot.conversation)
-        return history, gr.MultimodalTextbox(value=None, interactive=False)
+        return history, gr.MultimodalTextbox(value=None, interactive=False)
     else:
         for x in message["files"]:
             history.append(((x,), None))
@@ -271,7 +270,7 @@ def add_message(history, message):
 
         print("### Bigger than one history", history)
         print("### Bigger than one conv", our_chatbot.conversation)
-        return history, gr.MultimodalTextbox(value=None, interactive=False)
+        return history, gr.MultimodalTextbox(value=None, interactive=False)
 
 
 @spaces.GPU
@@ -553,8 +552,8 @@ with gr.Blocks(
     # add_message, [chatbot, chat_input], [chatbot, chat_input]
     # ).then(bot, [chatbot,temperature, top_p, max_output_tokens], chatbot, api_name="bot_response").then(lambda: gr.MultimodalTextbox(interactive=True), None, [chat_input])
     chat_input.submit(
-        add_message, [chatbot, chat_input], [chatbot, chat_input
-    ).then(bot, [
+        add_message, [chatbot, chat_input], [chatbot, chat_input]
+    ).then(bot, [chatbot, temperature, top_p, max_output_tokens], chatbot, api_name="bot_response").then(lambda: gr.MultimodalTextbox(interactive=True), None, [chat_input])
 
     # chatbot.like(print_like_dislike, None, None)
     clear_btn.click(
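For context, the repaired chat_input.submit(...) call is the standard Gradio chained-event pattern: append the user's message and lock the input box, run the bot, then re-enable the box. Below is a minimal, self-contained sketch of that pattern with stand-in handlers; the Space's real add_message and bot call into InferenceDemo and the loaded model, so everything inside these functions is placeholder logic, not this repo's code.

import gradio as gr

def add_message(history, message):
    # Append any uploaded files and the text prompt to the chat history,
    # then clear and lock the input box until the bot has answered.
    history = history or []
    for path in message["files"]:
        history.append(((path,), None))
    if message["text"]:
        history.append((message["text"], None))
    return history, gr.MultimodalTextbox(value=None, interactive=False)

def bot(history, temperature, top_p, max_output_tokens):
    # Placeholder response; the real demo generates model output here.
    if history:
        history[-1] = (history[-1][0], f"(echo: T={temperature}, top_p={top_p}, max_tokens={max_output_tokens})")
    return history

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    chat_input = gr.MultimodalTextbox(file_types=["image"], interactive=True)
    temperature = gr.Slider(0.0, 1.0, value=0.2, label="temperature")
    top_p = gr.Slider(0.0, 1.0, value=0.7, label="top_p")
    max_output_tokens = gr.Slider(64, 4096, value=1024, step=64, label="max_output_tokens")

    # Same chain as the fixed lines: add_message -> bot -> re-enable the textbox.
    chat_input.submit(
        add_message, [chatbot, chat_input], [chatbot, chat_input]
    ).then(
        bot, [chatbot, temperature, top_p, max_output_tokens], chatbot, api_name="bot_response"
    ).then(
        lambda: gr.MultimodalTextbox(interactive=True), None, [chat_input]
    )

demo.launch()

Returning gr.MultimodalTextbox(value=None, interactive=False) from add_message clears and disables the textbox while generation runs; the final .then re-enables it, which is the part the previously truncated submit call had left broken.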