Update app.py
app.py CHANGED
@@ -23,20 +23,23 @@ def respond(
 
     messages.append({"role": "user", "content": message})
 
-
-
-
-
-
-
-
-
-
-
+    response = ""
+
+    # Stream the response from the model
+    for message in client.chat_completion(
+        messages,
+        max_tokens=max_tokens,
+        stream=True,
+        temperature=temperature,
+        top_p=top_p,
+    ):
+        # Ensure that the chunk has the required fields
+        if 'choices' in message and len(message.choices) > 0 and 'delta' in message.choices[0] and 'content' in message.choices[0].delta:
             token = message.choices[0].delta.content
             response += token
             yield response
 
+
 # Create a custom ChatInterface with additional inputs and an interactive title
 demo = gr.ChatInterface(
     fn=respond,
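For context, below is a minimal, self-contained sketch of how app.py could read with this change applied. It assumes the stock Gradio chat template this Space appears to be based on: huggingface_hub's InferenceClient, tuple-style chat history, and slider inputs for max_tokens, temperature, and top_p. The model name, system prompt, slider defaults, and the attribute-based guard are illustrative assumptions, not part of the diff; only the streaming loop and the ChatInterface wiring come from the change above.

# Sketch only: assumes the standard Gradio chat template with huggingface_hub's
# InferenceClient. Model name, system prompt, and slider values are placeholders.
import gradio as gr
from huggingface_hub import InferenceClient

client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")  # placeholder model


def respond(message, history, system_message, max_tokens, temperature, top_p):
    # Assumes tuple-format history (list of [user, assistant] pairs),
    # which is the default for gr.ChatInterface without type="messages".
    messages = [{"role": "system", "content": system_message}]
    for user_msg, bot_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if bot_msg:
            messages.append({"role": "assistant", "content": bot_msg})
    messages.append({"role": "user", "content": message})

    response = ""
    # Stream the response and skip any chunk that arrives without usable
    # delta content, mirroring the guard introduced in this commit.
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        if chunk.choices and chunk.choices[0].delta.content is not None:
            response += chunk.choices[0].delta.content
            yield response


# The "custom ChatInterface with additional inputs" referenced in the diff;
# the exact widgets are assumptions based on the stock template.
demo = gr.ChatInterface(
    fn=respond,
    additional_inputs=[
        gr.Textbox(value="You are a friendly chatbot.", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p"),
    ],
    title="Chat Demo",
)

if __name__ == "__main__":
    demo.launch()

Note that in recent huggingface_hub versions the streamed chunks are dataclass objects rather than dicts, so the sketch checks attributes (chunk.choices and delta.content) instead of using the 'in' operator as the committed code does.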