Tadashi committed
fix: update flowsettings
flowsettings.py (+21 -10)
@@ -90,6 +90,7 @@ KH_VECTORSTORE = {
 }
 KH_LLMS = {}
 KH_EMBEDDINGS = {}
+KH_RERANKINGS = {}
 
 # populate options from config
 if config("AZURE_OPENAI_API_KEY", default="") and config(
@@ -189,14 +190,14 @@ KH_LLMS["claude"] = {
     },
     "default": False,
 }
-
-
-
-
-
-
-
-
+KH_LLMS["gemini"] = {
+    "spec": {
+        "__type__": "kotaemon.llms.chats.LCGeminiChat",
+        "model_name": "gemini-1.5-pro",
+        "api_key": "your-key",
+    },
+    "default": False,
+}
 KH_LLMS["groq"] = {
     "spec": {
         "__type__": "kotaemon.llms.ChatOpenAI",
@@ -210,7 +211,7 @@ KH_LLMS["cohere"] = {
     "spec": {
         "__type__": "kotaemon.llms.chats.LCCohereChat",
         "model_name": "command-r-plus-08-2024",
-        "api_key": "your-key",
+        "api_key": config("COHERE_API_KEY", default="your-key"),
     },
     "default": False,
 }
@@ -220,7 +221,7 @@ KH_EMBEDDINGS["cohere"] = {
     "spec": {
         "__type__": "kotaemon.embeddings.LCCohereEmbeddings",
         "model": "embed-multilingual-v3.0",
-        "cohere_api_key": "your-key",
+        "cohere_api_key": config("COHERE_API_KEY", default="your-key"),
         "user_agent": "default",
     },
     "default": False,
@@ -233,6 +234,16 @@ KH_EMBEDDINGS["cohere"] = {
     # "default": False,
     # }
 
+# default reranking models
+KH_RERANKINGS["cohere"] = {
+    "spec": {
+        "__type__": "kotaemon.rerankings.CohereReranking",
+        "model_name": "rerank-multilingual-v2.0",
+        "cohere_api_key": config("COHERE_API_KEY", default="your-key"),
+    },
+    "default": True,
+}
+
 KH_REASONINGS = [
     "ktem.reasoning.simple.FullQAPipeline",
     "ktem.reasoning.simple.FullDecomposeQAPipeline",