Update app.py
app.py CHANGED
@@ -9,8 +9,8 @@ def prompt_from_messages(messages):
     prompt = ''
     for message in messages:
         prompt += f"<|start_header_id|>{message['role']}<|end_header_id|>\n\n"
-        prompt += f"{message['content']}<|eot_id
-    prompt = prompt[:-10]
+        prompt += f"{message['content']}<|eot_id|>{{}}"  # Corrected here
+    prompt = prompt[:-10]  # Adjust the slicing accordingly
     return prompt
 
 # Initialize the Llama model
@@ -30,7 +30,7 @@ messages = [
 ]
 
 # Function to handle user input and generate a response
-def chat_with_physics_master(user_input
+def chat_with_physics_master(user_input):
     global messages  # Ensure we can modify the global messages variable
 
     # Append user message
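The diff only shows the first few lines of chat_with_physics_master, so the following is a sketch of a plausible completion, assuming the Space drives a local model through llama-cpp-python; the model path, generation parameters, and the Physics Master system prompt are assumptions for illustration, not taken from the repository.

from llama_cpp import Llama

# Initialize the Llama model (path and context size are placeholders).
llm = Llama(model_path="model.gguf", n_ctx=4096)

# Conversation history; the system prompt here is illustrative only.
messages = [
    {'role': 'system', 'content': 'You are Physics Master, a friendly physics tutor.'},
]

# Function to handle user input and generate a response
def chat_with_physics_master(user_input):
    global messages  # Ensure we can modify the global messages variable

    # Append user message, plus an empty assistant turn that
    # prompt_from_messages trims into a generation cue.
    messages.append({'role': 'user', 'content': user_input})
    messages.append({'role': 'assistant', 'content': ''})

    prompt = prompt_from_messages(messages)
    output = llm(prompt, max_tokens=512, stop=["<|eot_id|>"])
    reply = output['choices'][0]['text'].strip()

    # Store the reply so the next turn sees the full conversation.
    messages[-1]['content'] = reply
    return reply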