init
Browse files
app.py
CHANGED
@@ -32,14 +32,29 @@ You will ask the customer a single question at a time, which is relevant, and yo
|
|
32 |
model_path = "gemini-1.5-flash"
|
33 |
FoodSafetyAssistant = genai.GenerativeModel(model_path, system_instruction=system_instruction)
|
34 |
|
|
|
|
|
|
|
35 |
# Define the function to handle the chat
|
36 |
-
def respond(usertxt, chat_history
|
|
|
37 |
chat = FoodSafetyAssistant.start_chat(history=chat_history)
|
|
|
|
|
38 |
response = chat.send_message(usertxt)
|
39 |
-
|
|
|
|
|
|
|
|
|
|
|
40 |
|
41 |
# Gradio interface
|
42 |
-
|
|
|
|
|
|
|
|
|
43 |
|
44 |
# Launch the Gradio app
|
45 |
if __name__ == "__main__":
|
|
|
# Model selection: Gemini 1.5 Flash (low-latency variant) with the
# system_instruction defined earlier in this file.
model_path = "gemini-1.5-flash"
FoodSafetyAssistant = genai.GenerativeModel(model_path, system_instruction=system_instruction)

# Track chat history globally
# NOTE(review): this module-level list is shadowed by the `chat_history`
# parameter inside respond(); it only serves as a default/initial value.
chat_history = []
|
38 |
# Define the function to handle the chat
def respond(usertxt, chat_history):
    """Send a user message to the assistant and return its reply.

    Args:
        usertxt: The user's message text.
        chat_history: List of prior turns in google-generativeai format
            ({"role": "user"|"model", "parts": [...]}). May be None or
            empty on the first turn.

    Returns:
        Tuple of (reply_text, updated_history).
    """
    # Guard: Gradio passes None as the initial state value; appending to
    # None would raise AttributeError.
    if chat_history is None:
        chat_history = []

    # Initialize chat with the previous history
    chat = FoodSafetyAssistant.start_chat(history=chat_history)

    # Get response from the assistant
    response = chat.send_message(usertxt)

    # BUG FIX: google-generativeai expects history entries shaped as
    # {"role": "user"|"model", "parts": [...]}. The previous
    # {"role": "assistant", "content": ...} entries would be rejected by
    # start_chat() on the very next turn ("assistant" is not a valid role
    # and "content" is not a recognized key).
    chat_history.append({"role": "user", "parts": [usertxt]})
    chat_history.append({"role": "model", "parts": [response.text]})

    return response.text, chat_history
|
51 |
|
52 |
# Gradio interface
def gradio_chat(usertxt, chat_history):
    """Gradio-facing wrapper around respond().

    Args:
        usertxt: Message typed by the user ("text" input component).
        chat_history: Current value of the "state" component.

    Returns:
        Tuple of (reply_text, updated_history) feeding the "text" and
        "state" output components.
    """
    response, updated_history = respond(usertxt, chat_history)
    return response, updated_history


# BUG FIX: gr.ChatInterface() accepts neither an `inputs` nor an `outputs`
# keyword argument (those belong to gr.Interface), and its fn must return a
# plain string — so the previous call raised TypeError at startup.
# gr.Interface with a "state" component matches gradio_chat's
# (text, state) -> (text, state) signature exactly.
demo = gr.Interface(
    fn=gradio_chat,
    inputs=["text", "state"],
    outputs=["text", "state"],
)
|
58 |
|
59 |
# Launch the Gradio app
|
60 |
if __name__ == "__main__":
|