Update app.py
app.py CHANGED
@@ -2429,13 +2429,15 @@ supermassive_nn = ConsciousSupermassiveNN20()
 def respond(message, history, max_tokens, temperature, top_p):
     messages = [{"role": "system", "content": system_prompt}]
     for val in history:
-        if val[0]:
-            messages.append({"role": "user", "content": val[0]})
-        if val[1]:
-            messages.append({"role": "assistant", "content": val[1]})
+        if val.get("role") == "user" and val.get("content"):
+            messages.append({"role": "user", "content": val["content"]})
+        if val.get("role") == "assistant" and val.get("content"):
+            messages.append({"role": "assistant", "content": val["content"]})
     messages.append({"role": "user", "content": message})
     response = ""
-    for message in client.chat_completion(messages, max_tokens=max_tokens, stream=True, temperature=temperature, top_p=top_p):
+    for message in client.chat_completion(
+        messages, max_tokens=max_tokens, stream=True, temperature=temperature, top_p=top_p
+    ):
         token = message.choices[0].delta.content
         response += token
         yield response
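
The updated loop reads `history` as role/content dicts rather than (user, assistant) tuples, which is the format Gradio's "messages"-style chat history delivers, and it streams tokens from `client.chat_completion`. Below is a minimal, self-contained sketch of how such a `respond` generator is typically wired up, assuming `client` is a `huggingface_hub.InferenceClient` and the UI is a `gr.ChatInterface` with `type="messages"`; the model id, system prompt, and slider defaults are placeholders, not values taken from this Space's app.py.

```python
# Minimal sketch (not this Space's full app.py): wiring a streaming respond()
# generator to a Gradio ChatInterface that passes history as role/content dicts.
# The model id, system prompt, and slider defaults below are assumptions.
import gradio as gr
from huggingface_hub import InferenceClient

client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")  # placeholder model id
system_prompt = "You are a helpful assistant."            # placeholder prompt

def respond(message, history, max_tokens, temperature, top_p):
    messages = [{"role": "system", "content": system_prompt}]
    for val in history:  # each item is a {"role": ..., "content": ...} dict
        if val.get("role") == "user" and val.get("content"):
            messages.append({"role": "user", "content": val["content"]})
        if val.get("role") == "assistant" and val.get("content"):
            messages.append({"role": "assistant", "content": val["content"]})
    messages.append({"role": "user", "content": message})
    response = ""
    for chunk in client.chat_completion(
        messages, max_tokens=max_tokens, stream=True, temperature=temperature, top_p=top_p
    ):
        token = chunk.choices[0].delta.content
        if token:                 # the final stream event may carry no content
            response += token
        yield response            # stream the partial reply back to the UI

demo = gr.ChatInterface(
    respond,
    type="messages",  # delivers history as role/content dicts, matching the loop above
    additional_inputs=[
        gr.Slider(1, 2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(0.1, 2.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(0.1, 1.0, value=0.95, step=0.05, label="Top-p"),
    ],
)

if __name__ == "__main__":
    demo.launch()
```

In the sketch the loop variable is renamed to `chunk` and guarded against `None` delta content; the Space's own code reuses the `message` name and appends the token directly, as shown in the diff above.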