JobSmithManipulation committed
Commit 19806c7
1 Parent(s): 3ca15a2

support chat model in huggingface (#2802)

### What problem does this PR solve?

#2794

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
rag/llm/__init__.py CHANGED

@@ -107,6 +107,7 @@ ChatModel = {
     "BaiduYiyan": BaiduYiyanChat,
     "Anthropic": AnthropicChat,
     "Google Cloud": GoogleChat,
+    "HuggingFace": HuggingFaceChat,
 }
 
 RerankModel = {
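The registry change is what makes the new class reachable at runtime: a chat model can now be constructed from its factory name alone. Below is a minimal sketch of that lookup, assuming ChatModel is consumed by factory name as elsewhere in rag; the model id and local URL are placeholders, not values from this PR.

```python
# Sketch only (not part of the PR): resolve the new registry entry by factory name.
from rag.llm import ChatModel

factory = "HuggingFace"            # factory name stored with the model configuration
chat_cls = ChatModel[factory]      # -> HuggingFaceChat after this PR
chat_mdl = chat_cls(
    key=None,                                        # HuggingFaceChat accepts key=None (see chat_model.py diff)
    model_name="Qwen/Qwen2.5-7B-Instruct",           # hypothetical model id
    base_url="http://127.0.0.1:8080",                # local endpoint; the class appends "/v1" if missing
)
```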
rag/llm/chat_model.py CHANGED

@@ -104,7 +104,13 @@ class XinferenceChat(Base):
         if base_url.split("/")[-1] != "v1":
             base_url = os.path.join(base_url, "v1")
         super().__init__(key, model_name, base_url)
-
+class HuggingFaceChat(Base):
+    def __init__(self, key=None, model_name="", base_url=""):
+        if not base_url:
+            raise ValueError("Local llm url cannot be None")
+        if base_url.split("/")[-1] != "v1":
+            base_url = os.path.join(base_url, "v1")
+        super().__init__(key, model_name, base_url)
 
 class DeepSeekChat(Base):
     def __init__(self, key, model_name="deepseek-chat", base_url="https://api.deepseek.com/v1"):
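HuggingFaceChat adds no request logic of its own: it validates that a local URL was supplied, normalizes it to end in "/v1", and delegates everything else to the OpenAI-compatible Base client, exactly like XinferenceChat above it. The sketch below shows one way it might be used under that assumption; the endpoint URL and model name are placeholders, and the chat() call mirrors how the other *Chat classes are driven, but its exact signature lives in Base, not in this diff.

```python
# Sketch only: point HuggingFaceChat at a locally served model that exposes an
# OpenAI-compatible /v1 route (e.g. text-generation-inference or similar).
from rag.llm.chat_model import HuggingFaceChat

chat_mdl = HuggingFaceChat(
    key=None,                                   # no API key needed for a local endpoint
    model_name="Qwen/Qwen2.5-7B-Instruct",      # hypothetical model id
    base_url="http://127.0.0.1:8080",           # "/v1" is appended by the constructor
)

# Hypothetical call, assuming Base follows the (system, history, gen_conf)
# pattern used by the other chat wrappers; not defined in this diff.
answer, used_tokens = chat_mdl.chat(
    system="You are a helpful assistant.",
    history=[{"role": "user", "content": "Say hello."}],
    gen_conf={"temperature": 0.1},
)
print(answer)
```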
web/src/pages/user-setting/setting-model/ollama-modal/index.tsx CHANGED

@@ -54,7 +54,10 @@ const OllamaModal = ({
     llmFactoryToUrlMap[llmFactory as LlmFactory] ||
     'https://github.com/infiniflow/ragflow/blob/main/docs/guides/deploy_local_llm.mdx';
   const optionsMap = {
-    HuggingFace: [{ value: 'embedding', label: 'embedding' }],
+    HuggingFace: [
+      { value: 'embedding', label: 'embedding' },
+      { value: 'chat', label: 'chat' },
+    ],
     Xinference: [
       { value: 'chat', label: 'chat' },
       { value: 'embedding', label: 'embedding' },