Fixed increase_usage for builtin models (#3748)
### What problem does this PR solve?
Fixed `increase_usage` for builtin models. Close #1803
### Type of change
- [x] Bug Fix (non-breaking change which fixes an issue)
api/db/services/llm_service.py (changed)

```diff
@@ -172,12 +172,17 @@ class TenantLLMService(CommonService):
 
         num = 0
         try:
-
-
-
+            tenant_llms = cls.query(tenant_id=tenant_id, llm_name=llm_name)
+            if tenant_llms:
+                tenant_llm = tenant_llms[0]
+                num = cls.model.update(used_tokens=tenant_llm.used_tokens + used_tokens)\
+                    .where(cls.model.tenant_id == tenant_id, cls.model.llm_factory == tenant_llm.llm_factory, cls.model.llm_name == llm_name)\
                     .execute()
-
-
+            else:
+                llm_factory = llm_name.split("/")[0] if "/" in llm_name else llm_name
+                num = cls.model.create(tenant_id=tenant_id, llm_factory=llm_factory, llm_name=llm_name, used_tokens=used_tokens)
+        except Exception:
+            logging.exception("TenantLLMService.increase_usage got exception")
         return num
 
     @classmethod
```
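For readers skimming the diff, the `else` branch is the part that handles builtin models: when no `tenant_llm` record exists yet, the factory is derived from the model name before a usage row is created. Below is a minimal, hypothetical sketch of just that derivation; the helper name and the model names are illustrative and do not appear in the PR.

```python
# Hypothetical sketch of the llm_factory fallback added in this PR:
# if llm_name contains "/", the prefix is treated as the factory;
# otherwise the full name doubles as the factory.
def derive_llm_factory(llm_name: str) -> str:
    return llm_name.split("/")[0] if "/" in llm_name else llm_name

# Illustrative inputs only (not taken from the PR):
assert derive_llm_factory("BAAI/bge-large-zh-v1.5") == "BAAI"
assert derive_llm_factory("glm-4") == "glm-4"
```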