黄腾 committed
Commit 970a3e8 · 1 Parent(s): 36b496a
fix OpenAI llm return bug (#1728)
### What problem does this PR solve?
fix OpenAI llm return bug
### Type of change
- [x] Bug Fix (non-breaking change which fixes an issue)
- rag/llm/chat_model.py +2 -0
rag/llm/chat_model.py
CHANGED
@@ -62,6 +62,8 @@ class Base(ABC):
                 **gen_conf)
             for resp in response:
                 if not resp.choices:continue
+                if not resp.choices[0].delta.content:
+                    resp.choices[0].delta.content = ""
                 ans += resp.choices[0].delta.content
                 total_tokens = (
                     (
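The guard added above matters because, with the OpenAI Python SDK's streaming API, some chunks carry no text: `delta.content` is `None` on role-only or final chunks, so concatenating it directly raises a `TypeError`. Below is a minimal sketch of the failure mode and the patched loop, assuming the v1 SDK; the client setup and model name are illustrative, not the project's actual configuration.

```python
from openai import OpenAI

client = OpenAI()  # illustrative setup; the real code builds its client from configured key/base URL

response = client.chat.completions.create(
    model="gpt-4o-mini",  # model name is only an example
    messages=[{"role": "user", "content": "hello"}],
    stream=True,
)

ans = ""
for resp in response:
    if not resp.choices:
        continue
    # delta.content can be None (e.g. the role-only first chunk or the final chunk);
    # without the guard, `ans += None` raises:
    #   TypeError: can only concatenate str (not "NoneType") to str
    if not resp.choices[0].delta.content:
        resp.choices[0].delta.content = ""
    ans += resp.choices[0].delta.content
```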