黄腾 aopstudio committed
Commit 07dead3 · Parent: 174de9f

fix LocalAI add bug (#1851)


### What problem does this PR solve?

#1848

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)

---------

Co-authored-by: Zhedong Cen <[email protected]>
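
The root cause visible in the diffs below: the constructors assigned the normalized URL to `self.base_url`, while the local variable `base_url` (the value actually passed to `OpenAI(...)` or `super().__init__(...)`) was left unchanged, so the appended `/v1` never reached the client. A minimal sketch of that failure mode (the class names and URL are illustrative only, not from the repository; `os.path.join` is used the same way the repository code uses it and assumes a POSIX path separator):

```python
import os

# Illustrative only: minimal stand-ins for the affected constructors.
class Before:
    def __init__(self, base_url="http://localhost:8080"):
        if base_url.split("/")[-1] != "v1":
            self.base_url = os.path.join(base_url, "v1")  # only the attribute changes
        self.url_passed_on = base_url  # still "http://localhost:8080": the "/v1" is lost

class After:
    def __init__(self, base_url="http://localhost:8080"):
        if base_url.split("/")[-1] != "v1":
            base_url = os.path.join(base_url, "v1")  # rebind the local variable instead
        self.url_passed_on = base_url  # now "http://localhost:8080/v1"

print(Before().url_passed_on)  # http://localhost:8080
print(After().url_passed_on)   # http://localhost:8080/v1
```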

rag/llm/chat_model.py CHANGED
@@ -102,7 +102,7 @@ class XinferenceChat(Base):
         if not base_url:
             raise ValueError("Local llm url cannot be None")
         if base_url.split("/")[-1] != "v1":
-            self.base_url = os.path.join(base_url, "v1")
+            base_url = os.path.join(base_url, "v1")
         key = "xxx"
         super().__init__(key, model_name, base_url)

@@ -373,8 +373,8 @@ class LocalAIChat(Base):
         if not base_url:
             raise ValueError("Local llm url cannot be None")
         if base_url.split("/")[-1] != "v1":
-            self.base_url = os.path.join(base_url, "v1")
-        self.client = OpenAI(api_key="empty", base_url=self.base_url)
+            base_url = os.path.join(base_url, "v1")
+        self.client = OpenAI(api_key="empty", base_url=base_url)
         self.model_name = model_name.split("___")[0]

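
For reference, a quick standalone check of the `os.path.join(base_url, "v1")` normalization used in both hunks above (the URLs are made up, and the output shown assumes a POSIX path separator, which is what the repository code relies on):

```python
import os

for url in ("http://localhost:8080", "http://localhost:8080/v1"):
    if url.split("/")[-1] != "v1":
        url = os.path.join(url, "v1")  # appends "/v1" only when it is missing
    print(url)
# Both iterations print: http://localhost:8080/v1
```
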
rag/llm/embedding_model.py CHANGED
@@ -510,8 +510,8 @@ class LmStudioEmbed(LocalAIEmbed):
         if not base_url:
             raise ValueError("Local llm url cannot be None")
         if base_url.split("/")[-1] != "v1":
-            self.base_url = os.path.join(base_url, "v1")
-        self.client = OpenAI(api_key="lm-studio", base_url=self.base_url)
+            base_url = os.path.join(base_url, "v1")
+        self.client = OpenAI(api_key="lm-studio", base_url=base_url)
         self.model_name = model_name

@@ -520,6 +520,6 @@ class OpenAI_APIEmbed(OpenAIEmbed):
         if not base_url:
             raise ValueError("url cannot be None")
         if base_url.split("/")[-1] != "v1":
-            self.base_url = os.path.join(base_url, "v1")
+            base_url = os.path.join(base_url, "v1")
         self.client = OpenAI(api_key=key, base_url=base_url)
         self.model_name = model_name.split("___")[0]
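
As a final sanity check of the embedding-side change, a self-contained sketch rather than the real classes: `FakeClient`, the URL, and the model name below are hypothetical stand-ins for the `OpenAI` client and arguments that the actual `LmStudioEmbed` uses.

```python
import os

class FakeClient:
    """Hypothetical stand-in for the OpenAI client; records the base_url it receives."""
    def __init__(self, api_key, base_url):
        self.base_url = base_url

class LmStudioEmbedSketch:
    """Mirrors the fixed constructor pattern from the diff above."""
    def __init__(self, key, model_name, base_url="http://localhost:1234"):
        if not base_url:
            raise ValueError("Local llm url cannot be None")
        if base_url.split("/")[-1] != "v1":
            base_url = os.path.join(base_url, "v1")  # rebind, as in the fix
        self.client = FakeClient(api_key="lm-studio", base_url=base_url)
        self.model_name = model_name

emb = LmStudioEmbedSketch(key="xxx", model_name="demo-embedding")
print(emb.client.base_url)  # http://localhost:1234/v1  (the client now sees the "/v1" suffix)
```

With the pre-fix code, the same construction would have handed the client the bare base URL, so requests would have been sent to the wrong path.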