黄腾 aopstudio committed on
Commit
c6cd38e
·
1 Parent(s): cbe40d3

fix OpenRouter add bug and the way to add OpenRouter model (#2364)

Browse files

### What problem does this PR solve?

#2359 fix OpenRouter add bug and the way to add OpenRouter model

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)

---------

Co-authored-by: Zhedong Cen <[email protected]>

conf/llm_factories.json CHANGED
@@ -937,1034 +937,7 @@
937
  "logo": "",
938
  "tags": "LLM,IMAGE2TEXT",
939
  "status": "1",
940
- "llm": [
941
- {
942
- "llm_name": "nousresearch/hermes-2-theta-llama-3-8b",
943
- "tags": "LLM CHAT 16K",
944
- "max_tokens": 16384,
945
- "model_type": "chat"
946
- },
947
- {
948
- "llm_name": "alpindale/magnum-72b",
949
- "tags": "LLM CHAT 16K",
950
- "max_tokens": 16384,
951
- "model_type": "chat"
952
- },
953
- {
954
- "llm_name": "google/gemma-2-9b-it",
955
- "tags": "LLM CHAT 8K",
956
- "max_tokens": 8192,
957
- "model_type": "chat"
958
- },
959
- {
960
- "llm_name": "google/gemma-2-9b-it:free",
961
- "tags": "LLM CHAT 8K",
962
- "max_tokens": 8192,
963
- "model_type": "chat"
964
- },
965
- {
966
- "llm_name": "sao10k/l3-stheno-8b",
967
- "tags": "LLM CHAT 32K",
968
- "max_tokens": 32000,
969
- "model_type": "chat"
970
- },
971
- {
972
- "llm_name": "openrouter/flavor-of-the-week",
973
- "tags": "LLM CHAT 32K",
974
- "max_tokens": 32000,
975
- "model_type": "chat"
976
- },
977
- {
978
- "llm_name": "ai21/jamba-instruct",
979
- "tags": "LLM CHAT 250K",
980
- "max_tokens": 256000,
981
- "model_type": "chat"
982
- },
983
- {
984
- "llm_name": "nvidia/nemotron-4-340b-instruct",
985
- "tags": "LLM CHAT 4K",
986
- "max_tokens": 4096,
987
- "model_type": "chat"
988
- },
989
- {
990
- "llm_name": "anthropic/claude-3.5-sonnet",
991
- "tags": "LLM IMAGE2TEXT 195K",
992
- "max_tokens": 200000,
993
- "model_type": "image2text"
994
- },
995
- {
996
- "llm_name": "anthropic/claude-3.5-sonnet:beta",
997
- "tags": "LLM IMAGE2TEXT 195K",
998
- "max_tokens": 200000,
999
- "model_type": "image2text"
1000
- },
1001
- {
1002
- "llm_name": "sao10k/l3-euryale-70b",
1003
- "tags": "LLM CHAT 8K",
1004
- "max_tokens": 8192,
1005
- "model_type": "chat"
1006
- },
1007
- {
1008
- "llm_name": "microsoft/phi-3-medium-4k-instruct",
1009
- "tags": "LLM CHAT 4K",
1010
- "max_tokens": 4000,
1011
- "model_type": "chat"
1012
- },
1013
- {
1014
- "llm_name": "cognitivecomputations/dolphin-mixtral-8x22b",
1015
- "tags": "LLM CHAT 64K",
1016
- "max_tokens": 65536,
1017
- "model_type": "chat"
1018
- },
1019
- {
1020
- "llm_name": "qwen/qwen-2-72b-instruct",
1021
- "tags": "LLM CHAT 32K",
1022
- "max_tokens": 32768,
1023
- "model_type": "chat"
1024
- },
1025
- {
1026
- "llm_name": "openchat/openchat-8b",
1027
- "tags": "LLM CHAT 8K",
1028
- "max_tokens": 8192,
1029
- "model_type": "chat"
1030
- },
1031
- {
1032
- "llm_name": "mistralai/mistral-7b-instruct",
1033
- "tags": "LLM CHAT 32K",
1034
- "max_tokens": 32768,
1035
- "model_type": "chat"
1036
- },
1037
- {
1038
- "llm_name": "mistralai/mistral-7b-instruct-v0.3",
1039
- "tags": "LLM CHAT 32K",
1040
- "max_tokens": 32768,
1041
- "model_type": "chat"
1042
- },
1043
- {
1044
- "llm_name": "nousresearch/hermes-2-pro-llama-3-8b",
1045
- "tags": "LLM CHAT 8K",
1046
- "max_tokens": 8192,
1047
- "model_type": "chat"
1048
- },
1049
- {
1050
- "llm_name": "microsoft/phi-3-mini-128k-instruct",
1051
- "tags": "LLM CHAT 125K",
1052
- "max_tokens": 128000,
1053
- "model_type": "chat"
1054
- },
1055
- {
1056
- "llm_name": "microsoft/phi-3-mini-128k-instruct:free",
1057
- "tags": "LLM CHAT 125K",
1058
- "max_tokens": 128000,
1059
- "model_type": "chat"
1060
- },
1061
- {
1062
- "llm_name": "microsoft/phi-3-medium-128k-instruct",
1063
- "tags": "LLM CHAT 125K",
1064
- "max_tokens": 128000,
1065
- "model_type": "chat"
1066
- },
1067
- {
1068
- "llm_name": "microsoft/phi-3-medium-128k-instruct:free",
1069
- "tags": "LLM CHAT 125K",
1070
- "max_tokens": 128000,
1071
- "model_type": "chat"
1072
- },
1073
- {
1074
- "llm_name": "neversleep/llama-3-lumimaid-70b",
1075
- "tags": "LLM CHAT 8K",
1076
- "max_tokens": 8192,
1077
- "model_type": "chat"
1078
- },
1079
- {
1080
- "llm_name": "google/gemini-flash-1.5",
1081
- "tags": "LLM IMAGE2TEXT 2734K",
1082
- "max_tokens": 2800000,
1083
- "model_type": "image2text"
1084
- },
1085
- {
1086
- "llm_name": "perplexity/llama-3-sonar-small-32k-chat",
1087
- "tags": "LLM CHAT 32K",
1088
- "max_tokens": 32768,
1089
- "model_type": "chat"
1090
- },
1091
- {
1092
- "llm_name": "perplexity/llama-3-sonar-small-32k-online",
1093
- "tags": "LLM CHAT 28K",
1094
- "max_tokens": 28000,
1095
- "model_type": "chat"
1096
- },
1097
- {
1098
- "llm_name": "perplexity/llama-3-sonar-large-32k-chat",
1099
- "tags": "LLM CHAT 32K",
1100
- "max_tokens": 32768,
1101
- "model_type": "chat"
1102
- },
1103
- {
1104
- "llm_name": "perplexity/llama-3-sonar-large-32k-online",
1105
- "tags": "LLM CHAT 28K",
1106
- "max_tokens": 28000,
1107
- "model_type": "chat"
1108
- },
1109
- {
1110
- "llm_name": "deepseek/deepseek-chat",
1111
- "tags": "LLM CHAT 125K",
1112
- "max_tokens": 128000,
1113
- "model_type": "chat"
1114
- },
1115
- {
1116
- "llm_name": "deepseek/deepseek-coder",
1117
- "tags": "LLM CHAT 125K",
1118
- "max_tokens": 128000,
1119
- "model_type": "chat"
1120
- },
1121
- {
1122
- "llm_name": "openai/gpt-4o",
1123
- "tags": "LLM IMAGE2TEXT 125K",
1124
- "max_tokens": 128000,
1125
- "model_type": "image2text"
1126
- },
1127
- {
1128
- "llm_name": "openai/gpt-4o-2024-05-13",
1129
- "tags": "LLM IMAGE2TEXT 125K",
1130
- "max_tokens": 128000,
1131
- "model_type": "image2text"
1132
- },
1133
- {
1134
- "llm_name": "meta-llama/llama-3-8b",
1135
- "tags": "LLM CHAT 8K",
1136
- "max_tokens": 8192,
1137
- "model_type": "chat"
1138
- },
1139
- {
1140
- "llm_name": "meta-llama/llama-3-70b",
1141
- "tags": "LLM CHAT 8K",
1142
- "max_tokens": 8192,
1143
- "model_type": "chat"
1144
- },
1145
- {
1146
- "llm_name": "meta-llama/llama-guard-2-8b",
1147
- "tags": "LLM CHAT 8K",
1148
- "max_tokens": 8192,
1149
- "model_type": "chat"
1150
- },
1151
- {
1152
- "llm_name": "liuhaotian/llava-yi-34b",
1153
- "tags": "LLM IMAGE2TEXT 4K",
1154
- "max_tokens": 4096,
1155
- "model_type": "image2text"
1156
- },
1157
- {
1158
- "llm_name": "allenai/olmo-7b-instruct",
1159
- "tags": "LLM CHAT 2K",
1160
- "max_tokens": 2048,
1161
- "model_type": "chat"
1162
- },
1163
- {
1164
- "llm_name": "qwen/qwen-110b-chat",
1165
- "tags": "LLM CHAT 32K",
1166
- "max_tokens": 32768,
1167
- "model_type": "chat"
1168
- },
1169
- {
1170
- "llm_name": "qwen/qwen-72b-chat",
1171
- "tags": "LLM CHAT 32K",
1172
- "max_tokens": 32768,
1173
- "model_type": "chat"
1174
- },
1175
- {
1176
- "llm_name": "qwen/qwen-32b-chat",
1177
- "tags": "LLM CHAT 32K",
1178
- "max_tokens": 32768,
1179
- "model_type": "chat"
1180
- },
1181
- {
1182
- "llm_name": "qwen/qwen-14b-chat",
1183
- "tags": "LLM CHAT 32K",
1184
- "max_tokens": 32768,
1185
- "model_type": "chat"
1186
- },
1187
- {
1188
- "llm_name": "qwen/qwen-7b-chat",
1189
- "tags": "LLM CHAT 32K",
1190
- "max_tokens": 32768,
1191
- "model_type": "chat"
1192
- },
1193
- {
1194
- "llm_name": "qwen/qwen-4b-chat",
1195
- "tags": "LLM CHAT 32K",
1196
- "max_tokens": 32768,
1197
- "model_type": "chat"
1198
- },
1199
- {
1200
- "llm_name": "meta-llama/llama-3-8b-instruct:free",
1201
- "tags": "LLM CHAT 8K",
1202
- "max_tokens": 8192,
1203
- "model_type": "chat"
1204
- },
1205
- {
1206
- "llm_name": "neversleep/llama-3-lumimaid-8b",
1207
- "tags": "LLM CHAT 24K",
1208
- "max_tokens": 24576,
1209
- "model_type": "chat"
1210
- },
1211
- {
1212
- "llm_name": "neversleep/llama-3-lumimaid-8b:extended",
1213
- "tags": "LLM CHAT 24K",
1214
- "max_tokens": 24576,
1215
- "model_type": "chat"
1216
- },
1217
- {
1218
- "llm_name": "snowflake/snowflake-arctic-instruct",
1219
- "tags": "LLM CHAT 4K",
1220
- "max_tokens": 4096,
1221
- "model_type": "chat"
1222
- },
1223
- {
1224
- "llm_name": "fireworks/firellava-13b",
1225
- "tags": "LLM IMAGE2TEXT 4K",
1226
- "max_tokens": 4096,
1227
- "model_type": "image2text"
1228
- },
1229
- {
1230
- "llm_name": "lynn/soliloquy-l3",
1231
- "tags": "LLM CHAT 24K",
1232
- "max_tokens": 24576,
1233
- "model_type": "chat"
1234
- },
1235
- {
1236
- "llm_name": "sao10k/fimbulvetr-11b-v2",
1237
- "tags": "LLM CHAT 8K",
1238
- "max_tokens": 8192,
1239
- "model_type": "chat"
1240
- },
1241
- {
1242
- "llm_name": "meta-llama/llama-3-8b-instruct:extended",
1243
- "tags": "LLM CHAT 16K",
1244
- "max_tokens": 16384,
1245
- "model_type": "chat"
1246
- },
1247
- {
1248
- "llm_name": "meta-llama/llama-3-8b-instruct:nitro",
1249
- "tags": "LLM CHAT 8K",
1250
- "max_tokens": 8192,
1251
- "model_type": "chat"
1252
- },
1253
- {
1254
- "llm_name": "meta-llama/llama-3-70b-instruct:nitro",
1255
- "tags": "LLM CHAT 8K",
1256
- "max_tokens": 8192,
1257
- "model_type": "chat"
1258
- },
1259
- {
1260
- "llm_name": "meta-llama/llama-3-8b-instruct",
1261
- "tags": "LLM CHAT 8K",
1262
- "max_tokens": 8192,
1263
- "model_type": "chat"
1264
- },
1265
- {
1266
- "llm_name": "meta-llama/llama-3-70b-instruct",
1267
- "tags": "LLM CHAT 8K",
1268
- "max_tokens": 8192,
1269
- "model_type": "chat"
1270
- },
1271
- {
1272
- "llm_name": "mistralai/mixtral-8x22b-instruct",
1273
- "tags": "LLM CHAT 64K",
1274
- "max_tokens": 65536,
1275
- "model_type": "chat"
1276
- },
1277
- {
1278
- "llm_name": "microsoft/wizardlm-2-8x22b",
1279
- "tags": "LLM CHAT 64K",
1280
- "max_tokens": 65536,
1281
- "model_type": "chat"
1282
- },
1283
- {
1284
- "llm_name": "microsoft/wizardlm-2-7b",
1285
- "tags": "LLM CHAT 32K",
1286
- "max_tokens": 32000,
1287
- "model_type": "chat"
1288
- },
1289
- {
1290
- "llm_name": "undi95/toppy-m-7b:nitro",
1291
- "tags": "LLM CHAT 4K",
1292
- "max_tokens": 4096,
1293
- "model_type": "chat"
1294
- },
1295
- {
1296
- "llm_name": "mistralai/mixtral-8x22b",
1297
- "tags": "LLM CHAT 64K",
1298
- "max_tokens": 65536,
1299
- "model_type": "chat"
1300
- },
1301
- {
1302
- "llm_name": "openai/gpt-4-turbo",
1303
- "tags": "LLM IMAGE2TEXT 125K",
1304
- "max_tokens": 128000,
1305
- "model_type": "image2text"
1306
- },
1307
- {
1308
- "llm_name": "google/gemini-pro-1.5",
1309
- "tags": "LLM IMAGE2TEXT 2734K",
1310
- "max_tokens": 2800000,
1311
- "model_type": "image2text"
1312
- },
1313
- {
1314
- "llm_name": "cohere/command-r-plus",
1315
- "tags": "LLM CHAT 125K",
1316
- "max_tokens": 128000,
1317
- "model_type": "chat"
1318
- },
1319
- {
1320
- "llm_name": "databricks/dbrx-instruct",
1321
- "tags": "LLM CHAT 32K",
1322
- "max_tokens": 32768,
1323
- "model_type": "chat"
1324
- },
1325
- {
1326
- "llm_name": "sophosympatheia/midnight-rose-70b",
1327
- "tags": "LLM CHAT 4K",
1328
- "max_tokens": 4096,
1329
- "model_type": "chat"
1330
- },
1331
- {
1332
- "llm_name": "cohere/command",
1333
- "tags": "LLM CHAT 4K",
1334
- "max_tokens": 4096,
1335
- "model_type": "chat"
1336
- },
1337
- {
1338
- "llm_name": "cohere/command-r",
1339
- "tags": "LLM CHAT 125K",
1340
- "max_tokens": 128000,
1341
- "model_type": "chat"
1342
- },
1343
- {
1344
- "llm_name": "anthropic/claude-3-haiku",
1345
- "tags": "LLM IMAGE2TEXT 195K",
1346
- "max_tokens": 200000,
1347
- "model_type": "image2text"
1348
- },
1349
- {
1350
- "llm_name": "anthropic/claude-3-haiku:beta",
1351
- "tags": "LLM IMAGE2TEXT 195K",
1352
- "max_tokens": 200000,
1353
- "model_type": "image2text"
1354
- },
1355
- {
1356
- "llm_name": "google/gemma-7b-it:nitro",
1357
- "tags": "LLM CHAT 8K",
1358
- "max_tokens": 8192,
1359
- "model_type": "chat"
1360
- },
1361
- {
1362
- "llm_name": "mistralai/mixtral-8x7b-instruct:nitro",
1363
- "tags": "LLM CHAT 32K",
1364
- "max_tokens": 32768,
1365
- "model_type": "chat"
1366
- },
1367
- {
1368
- "llm_name": "mistralai/mistral-7b-instruct:nitro",
1369
- "tags": "LLM CHAT 32K",
1370
- "max_tokens": 32768,
1371
- "model_type": "chat"
1372
- },
1373
- {
1374
- "llm_name": "meta-llama/llama-2-70b-chat:nitro",
1375
- "tags": "LLM CHAT 4K",
1376
- "max_tokens": 4096,
1377
- "model_type": "chat"
1378
- },
1379
- {
1380
- "llm_name": "gryphe/mythomax-l2-13b:nitro",
1381
- "tags": "LLM CHAT 4K",
1382
- "max_tokens": 4096,
1383
- "model_type": "chat"
1384
- },
1385
- {
1386
- "llm_name": "anthropic/claude-3-opus",
1387
- "tags": "LLM IMAGE2TEXT 195K",
1388
- "max_tokens": 200000,
1389
- "model_type": "image2text"
1390
- },
1391
- {
1392
- "llm_name": "anthropic/claude-3-sonnet",
1393
- "tags": "LLM IMAGE2TEXT 195K",
1394
- "max_tokens": 200000,
1395
- "model_type": "image2text"
1396
- },
1397
- {
1398
- "llm_name": "anthropic/claude-3-opus:beta",
1399
- "tags": "LLM IMAGE2TEXT 195K",
1400
- "max_tokens": 200000,
1401
- "model_type": "image2text"
1402
- },
1403
- {
1404
- "llm_name": "anthropic/claude-3-sonnet:beta",
1405
- "tags": "LLM IMAGE2TEXT 195K",
1406
- "max_tokens": 200000,
1407
- "model_type": "image2text"
1408
- },
1409
- {
1410
- "llm_name": "mistralai/mistral-large",
1411
- "tags": "LLM CHAT 32K",
1412
- "max_tokens": 32000,
1413
- "model_type": "chat"
1414
- },
1415
- {
1416
- "llm_name": "google/gemma-7b-it",
1417
- "tags": "LLM CHAT 8K",
1418
- "max_tokens": 8192,
1419
- "model_type": "chat"
1420
- },
1421
- {
1422
- "llm_name": "google/gemma-7b-it:free",
1423
- "tags": "LLM CHAT 8K",
1424
- "max_tokens": 8192,
1425
- "model_type": "chat"
1426
- },
1427
- {
1428
- "llm_name": "nousresearch/nous-hermes-2-mistral-7b-dpo",
1429
- "tags": "LLM CHAT 8K",
1430
- "max_tokens": 8192,
1431
- "model_type": "chat"
1432
- },
1433
- {
1434
- "llm_name": "meta-llama/codellama-70b-instruct",
1435
- "tags": "LLM CHAT 2K",
1436
- "max_tokens": 2048,
1437
- "model_type": "chat"
1438
- },
1439
- {
1440
- "llm_name": "recursal/eagle-7b",
1441
- "tags": "LLM CHAT 9K",
1442
- "max_tokens": 10000,
1443
- "model_type": "chat"
1444
- },
1445
- {
1446
- "llm_name": "openai/gpt-3.5-turbo-0613",
1447
- "tags": "LLM CHAT 4K",
1448
- "max_tokens": 4095,
1449
- "model_type": "chat"
1450
- },
1451
- {
1452
- "llm_name": "openai/gpt-4-turbo-preview",
1453
- "tags": "LLM CHAT 125K",
1454
- "max_tokens": 128000,
1455
- "model_type": "chat"
1456
- },
1457
- {
1458
- "llm_name": "undi95/remm-slerp-l2-13b:extended",
1459
- "tags": "LLM CHAT 6K",
1460
- "max_tokens": 6144,
1461
- "model_type": "chat"
1462
- },
1463
- {
1464
- "llm_name": "nousresearch/nous-hermes-2-mixtral-8x7b-dpo",
1465
- "tags": "LLM CHAT 32K",
1466
- "max_tokens": 32768,
1467
- "model_type": "chat"
1468
- },
1469
- {
1470
- "llm_name": "nousresearch/nous-hermes-2-mixtral-8x7b-sft",
1471
- "tags": "LLM CHAT 32K",
1472
- "max_tokens": 32768,
1473
- "model_type": "chat"
1474
- },
1475
- {
1476
- "llm_name": "mistralai/mistral-tiny",
1477
- "tags": "LLM CHAT 32K",
1478
- "max_tokens": 32000,
1479
- "model_type": "chat"
1480
- },
1481
- {
1482
- "llm_name": "mistralai/mistral-small",
1483
- "tags": "LLM CHAT 32K",
1484
- "max_tokens": 32000,
1485
- "model_type": "chat"
1486
- },
1487
- {
1488
- "llm_name": "mistralai/mistral-medium",
1489
- "tags": "LLM CHAT 32K",
1490
- "max_tokens": 32000,
1491
- "model_type": "chat"
1492
- },
1493
- {
1494
- "llm_name": "austism/chronos-hermes-13b",
1495
- "tags": "LLM CHAT 4K",
1496
- "max_tokens": 4096,
1497
- "model_type": "chat"
1498
- },
1499
- {
1500
- "llm_name": "neversleep/noromaid-mixtral-8x7b-instruct",
1501
- "tags": "LLM CHAT 8K",
1502
- "max_tokens": 8000,
1503
- "model_type": "chat"
1504
- },
1505
- {
1506
- "llm_name": "nousresearch/nous-hermes-yi-34b",
1507
- "tags": "LLM CHAT 4K",
1508
- "max_tokens": 4096,
1509
- "model_type": "chat"
1510
- },
1511
- {
1512
- "llm_name": "mistralai/mistral-7b-instruct-v0.2",
1513
- "tags": "LLM CHAT 32K",
1514
- "max_tokens": 32768,
1515
- "model_type": "chat"
1516
- },
1517
- {
1518
- "llm_name": "cognitivecomputations/dolphin-mixtral-8x7b",
1519
- "tags": "LLM CHAT 32K",
1520
- "max_tokens": 32768,
1521
- "model_type": "chat"
1522
- },
1523
- {
1524
- "llm_name": "google/gemini-pro",
1525
- "tags": "LLM CHAT 89K",
1526
- "max_tokens": 91728,
1527
- "model_type": "chat"
1528
- },
1529
- {
1530
- "llm_name": "google/gemini-pro-vision",
1531
- "tags": "LLM IMAGE2TEXT 44K",
1532
- "max_tokens": 45875,
1533
- "model_type": "image2text"
1534
- },
1535
- {
1536
- "llm_name": "mistralai/mixtral-8x7b",
1537
- "tags": "LLM CHAT 32K",
1538
- "max_tokens": 32768,
1539
- "model_type": "chat"
1540
- },
1541
- {
1542
- "llm_name": "mistralai/mixtral-8x7b-instruct",
1543
- "tags": "LLM CHAT 32K",
1544
- "max_tokens": 32768,
1545
- "model_type": "chat"
1546
- },
1547
- {
1548
- "llm_name": "rwkv/rwkv-5-world-3b",
1549
- "tags": "LLM CHAT 9K",
1550
- "max_tokens": 10000,
1551
- "model_type": "chat"
1552
- },
1553
- {
1554
- "llm_name": "recursal/rwkv-5-3b-ai-town",
1555
- "tags": "LLM CHAT 9K",
1556
- "max_tokens": 10000,
1557
- "model_type": "chat"
1558
- },
1559
- {
1560
- "llm_name": "togethercomputer/stripedhyena-nous-7b",
1561
- "tags": "LLM CHAT 32K",
1562
- "max_tokens": 32768,
1563
- "model_type": "chat"
1564
- },
1565
- {
1566
- "llm_name": "togethercomputer/stripedhyena-hessian-7b",
1567
- "tags": "LLM CHAT 32K",
1568
- "max_tokens": 32768,
1569
- "model_type": "chat"
1570
- },
1571
- {
1572
- "llm_name": "koboldai/psyfighter-13b-2",
1573
- "tags": "LLM CHAT 4K",
1574
- "max_tokens": 4096,
1575
- "model_type": "chat"
1576
- },
1577
- {
1578
- "llm_name": "gryphe/mythomist-7b",
1579
- "tags": "LLM CHAT 32K",
1580
- "max_tokens": 32768,
1581
- "model_type": "chat"
1582
- },
1583
- {
1584
- "llm_name": "openrouter/cinematika-7b",
1585
- "tags": "LLM CHAT 8K",
1586
- "max_tokens": 8000,
1587
- "model_type": "chat"
1588
- },
1589
- {
1590
- "llm_name": "nousresearch/nous-capybara-7b",
1591
- "tags": "LLM CHAT 8K",
1592
- "max_tokens": 8192,
1593
- "model_type": "chat"
1594
- },
1595
- {
1596
- "llm_name": "nousresearch/nous-capybara-7b:free",
1597
- "tags": "LLM CHAT 8K",
1598
- "max_tokens": 8192,
1599
- "model_type": "chat"
1600
- },
1601
- {
1602
- "llm_name": "openchat/openchat-7b",
1603
- "tags": "LLM CHAT 8K",
1604
- "max_tokens": 8192,
1605
- "model_type": "chat"
1606
- },
1607
- {
1608
- "llm_name": "openchat/openchat-7b:free",
1609
- "tags": "LLM CHAT 8K",
1610
- "max_tokens": 8192,
1611
- "model_type": "chat"
1612
- },
1613
- {
1614
- "llm_name": "neversleep/noromaid-20b",
1615
- "tags": "LLM CHAT 8K",
1616
- "max_tokens": 8192,
1617
- "model_type": "chat"
1618
- },
1619
- {
1620
- "llm_name": "gryphe/mythomist-7b:free",
1621
- "tags": "LLM CHAT 32K",
1622
- "max_tokens": 32768,
1623
- "model_type": "chat"
1624
- },
1625
- {
1626
- "llm_name": "intel/neural-chat-7b",
1627
- "tags": "LLM CHAT 4K",
1628
- "max_tokens": 4096,
1629
- "model_type": "chat"
1630
- },
1631
- {
1632
- "llm_name": "anthropic/claude-2",
1633
- "tags": "LLM CHAT 195K",
1634
- "max_tokens": 200000,
1635
- "model_type": "chat"
1636
- },
1637
- {
1638
- "llm_name": "anthropic/claude-2.1",
1639
- "tags": "LLM CHAT 195K",
1640
- "max_tokens": 200000,
1641
- "model_type": "chat"
1642
- },
1643
- {
1644
- "llm_name": "anthropic/claude-instant-1.1",
1645
- "tags": "LLM CHAT 98K",
1646
- "max_tokens": 100000,
1647
- "model_type": "chat"
1648
- },
1649
- {
1650
- "llm_name": "anthropic/claude-2:beta",
1651
- "tags": "LLM CHAT 195K",
1652
- "max_tokens": 200000,
1653
- "model_type": "chat"
1654
- },
1655
- {
1656
- "llm_name": "anthropic/claude-2.1:beta",
1657
- "tags": "LLM CHAT 195K",
1658
- "max_tokens": 200000,
1659
- "model_type": "chat"
1660
- },
1661
- {
1662
- "llm_name": "teknium/openhermes-2.5-mistral-7b",
1663
- "tags": "LLM CHAT 4K",
1664
- "max_tokens": 4096,
1665
- "model_type": "chat"
1666
- },
1667
- {
1668
- "llm_name": "nousresearch/nous-capybara-34b",
1669
- "tags": "LLM CHAT 195K",
1670
- "max_tokens": 200000,
1671
- "model_type": "chat"
1672
- },
1673
- {
1674
- "llm_name": "openai/gpt-4-vision-preview",
1675
- "tags": "LLM IMAGE2TEXT 125K",
1676
- "max_tokens": 128000,
1677
- "model_type": "image2text"
1678
- },
1679
- {
1680
- "llm_name": "lizpreciatior/lzlv-70b-fp16-hf",
1681
- "tags": "LLM CHAT 4K",
1682
- "max_tokens": 4096,
1683
- "model_type": "chat"
1684
- },
1685
- {
1686
- "llm_name": "undi95/toppy-m-7b",
1687
- "tags": "LLM CHAT 4K",
1688
- "max_tokens": 4096,
1689
- "model_type": "chat"
1690
- },
1691
- {
1692
- "llm_name": "alpindale/goliath-120b",
1693
- "tags": "LLM CHAT 6K",
1694
- "max_tokens": 6144,
1695
- "model_type": "chat"
1696
- },
1697
- {
1698
- "llm_name": "undi95/toppy-m-7b:free",
1699
- "tags": "LLM CHAT 4K",
1700
- "max_tokens": 4096,
1701
- "model_type": "chat"
1702
- },
1703
- {
1704
- "llm_name": "openrouter/auto",
1705
- "tags": "LLM CHAT 195K",
1706
- "max_tokens": 200000,
1707
- "model_type": "chat"
1708
- },
1709
- {
1710
- "llm_name": "openai/gpt-3.5-turbo-1106",
1711
- "tags": "LLM CHAT 16K",
1712
- "max_tokens": 16385,
1713
- "model_type": "chat"
1714
- },
1715
- {
1716
- "llm_name": "openai/gpt-4-1106-preview",
1717
- "tags": "LLM CHAT 125K",
1718
- "max_tokens": 128000,
1719
- "model_type": "chat"
1720
- },
1721
- {
1722
- "llm_name": "huggingfaceh4/zephyr-7b-beta:free",
1723
- "tags": "LLM CHAT 4K",
1724
- "max_tokens": 4096,
1725
- "model_type": "chat"
1726
- },
1727
- {
1728
- "llm_name": "google/palm-2-chat-bison-32k",
1729
- "tags": "LLM CHAT 89K",
1730
- "max_tokens": 91750,
1731
- "model_type": "chat"
1732
- },
1733
- {
1734
- "llm_name": "google/palm-2-codechat-bison-32k",
1735
- "tags": "LLM CHAT 89K",
1736
- "max_tokens": 91750,
1737
- "model_type": "chat"
1738
- },
1739
- {
1740
- "llm_name": "teknium/openhermes-2-mistral-7b",
1741
- "tags": "LLM CHAT 8K",
1742
- "max_tokens": 8192,
1743
- "model_type": "chat"
1744
- },
1745
- {
1746
- "llm_name": "open-orca/mistral-7b-openorca",
1747
- "tags": "LLM CHAT 8K",
1748
- "max_tokens": 8192,
1749
- "model_type": "chat"
1750
- },
1751
- {
1752
- "llm_name": "gryphe/mythomax-l2-13b:extended",
1753
- "tags": "LLM CHAT 8K",
1754
- "max_tokens": 8192,
1755
- "model_type": "chat"
1756
- },
1757
- {
1758
- "llm_name": "xwin-lm/xwin-lm-70b",
1759
- "tags": "LLM CHAT 8K",
1760
- "max_tokens": 8192,
1761
- "model_type": "chat"
1762
- },
1763
- {
1764
- "llm_name": "openai/gpt-3.5-turbo-instruct",
1765
- "tags": "LLM CHAT 4K",
1766
- "max_tokens": 4095,
1767
- "model_type": "chat"
1768
- },
1769
- {
1770
- "llm_name": "mistralai/mistral-7b-instruct-v0.1",
1771
- "tags": "LLM CHAT 4K",
1772
- "max_tokens": 4096,
1773
- "model_type": "chat"
1774
- },
1775
- {
1776
- "llm_name": "mistralai/mistral-7b-instruct:free",
1777
- "tags": "LLM CHAT 32K",
1778
- "max_tokens": 32768,
1779
- "model_type": "chat"
1780
- },
1781
- {
1782
- "llm_name": "pygmalionai/mythalion-13b",
1783
- "tags": "LLM CHAT 8K",
1784
- "max_tokens": 8192,
1785
- "model_type": "chat"
1786
- },
1787
- {
1788
- "llm_name": "openai/gpt-3.5-turbo-16k",
1789
- "tags": "LLM CHAT 16K",
1790
- "max_tokens": 16385,
1791
- "model_type": "chat"
1792
- },
1793
- {
1794
- "llm_name": "openai/gpt-4-32k",
1795
- "tags": "LLM CHAT 32K",
1796
- "max_tokens": 32767,
1797
- "model_type": "chat"
1798
- },
1799
- {
1800
- "llm_name": "openai/gpt-4-32k-0314",
1801
- "tags": "LLM CHAT 32K",
1802
- "max_tokens": 32767,
1803
- "model_type": "chat"
1804
- },
1805
- {
1806
- "llm_name": "meta-llama/codellama-34b-instruct",
1807
- "tags": "LLM CHAT 8K",
1808
- "max_tokens": 8192,
1809
- "model_type": "chat"
1810
- },
1811
- {
1812
- "llm_name": "phind/phind-codellama-34b",
1813
- "tags": "LLM CHAT 4K",
1814
- "max_tokens": 4096,
1815
- "model_type": "chat"
1816
- },
1817
- {
1818
- "llm_name": "nousresearch/nous-hermes-llama2-13b",
1819
- "tags": "LLM CHAT 4K",
1820
- "max_tokens": 4096,
1821
- "model_type": "chat"
1822
- },
1823
- {
1824
- "llm_name": "mancer/weaver",
1825
- "tags": "LLM CHAT 8K",
1826
- "max_tokens": 8000,
1827
- "model_type": "chat"
1828
- },
1829
- {
1830
- "llm_name": "anthropic/claude-2.0",
1831
- "tags": "LLM CHAT 98K",
1832
- "max_tokens": 100000,
1833
- "model_type": "chat"
1834
- },
1835
- {
1836
- "llm_name": "anthropic/claude-instant-1",
1837
- "tags": "LLM CHAT 98K",
1838
- "max_tokens": 100000,
1839
- "model_type": "chat"
1840
- },
1841
- {
1842
- "llm_name": "anthropic/claude-1",
1843
- "tags": "LLM CHAT 98K",
1844
- "max_tokens": 100000,
1845
- "model_type": "chat"
1846
- },
1847
- {
1848
- "llm_name": "anthropic/claude-1.2",
1849
- "tags": "LLM CHAT 98K",
1850
- "max_tokens": 100000,
1851
- "model_type": "chat"
1852
- },
1853
- {
1854
- "llm_name": "anthropic/claude-instant-1.0",
1855
- "tags": "LLM CHAT 98K",
1856
- "max_tokens": 100000,
1857
- "model_type": "chat"
1858
- },
1859
- {
1860
- "llm_name": "anthropic/claude-2.0:beta",
1861
- "tags": "LLM CHAT 98K",
1862
- "max_tokens": 100000,
1863
- "model_type": "chat"
1864
- },
1865
- {
1866
- "llm_name": "anthropic/claude-instant-1:beta",
1867
- "tags": "LLM CHAT 98K",
1868
- "max_tokens": 100000,
1869
- "model_type": "chat"
1870
- },
1871
- {
1872
- "llm_name": "undi95/remm-slerp-l2-13b",
1873
- "tags": "LLM CHAT 4K",
1874
- "max_tokens": 4096,
1875
- "model_type": "chat"
1876
- },
1877
- {
1878
- "llm_name": "google/palm-2-chat-bison",
1879
- "tags": "LLM CHAT 25K",
1880
- "max_tokens": 25804,
1881
- "model_type": "chat"
1882
- },
1883
- {
1884
- "llm_name": "google/palm-2-codechat-bison",
1885
- "tags": "LLM CHAT 19K",
1886
- "max_tokens": 20070,
1887
- "model_type": "chat"
1888
- },
1889
- {
1890
- "llm_name": "gryphe/mythomax-l2-13b",
1891
- "tags": "LLM CHAT 4K",
1892
- "max_tokens": 4096,
1893
- "model_type": "chat"
1894
- },
1895
- {
1896
- "llm_name": "meta-llama/llama-2-13b-chat",
1897
- "tags": "LLM CHAT 4K",
1898
- "max_tokens": 4096,
1899
- "model_type": "chat"
1900
- },
1901
- {
1902
- "llm_name": "meta-llama/llama-2-70b-chat",
1903
- "tags": "LLM CHAT 4K",
1904
- "max_tokens": 4096,
1905
- "model_type": "chat"
1906
- },
1907
- {
1908
- "llm_name": "openai/gpt-3.5-turbo",
1909
- "tags": "LLM CHAT 16K",
1910
- "max_tokens": 16385,
1911
- "model_type": "chat"
1912
- },
1913
- {
1914
- "llm_name": "openai/gpt-3.5-turbo-0125",
1915
- "tags": "LLM CHAT 16K",
1916
- "max_tokens": 16385,
1917
- "model_type": "chat"
1918
- },
1919
- {
1920
- "llm_name": "openai/gpt-3.5-turbo-0301",
1921
- "tags": "LLM CHAT 4K",
1922
- "max_tokens": 4095,
1923
- "model_type": "chat"
1924
- },
1925
- {
1926
- "llm_name": "openai/gpt-4",
1927
- "tags": "LLM CHAT 8K",
1928
- "max_tokens": 8191,
1929
- "model_type": "chat"
1930
- },
1931
- {
1932
- "llm_name": "openai/gpt-4-0314",
1933
- "tags": "LLM CHAT 8K",
1934
- "max_tokens": 8191,
1935
- "model_type": "chat"
1936
- },
1937
- {
1938
- "llm_name": "01-ai/yi-large",
1939
- "tags": "LLM CHAT 32K",
1940
- "max_tokens": 32768,
1941
- "model_type": "chat"
1942
- },
1943
- {
1944
- "llm_name": "01-ai/yi-34b-200k",
1945
- "tags": "LLM CHAT 195K",
1946
- "max_tokens": 200000,
1947
- "model_type": "chat"
1948
- },
1949
- {
1950
- "llm_name": "01-ai/yi-34b-chat",
1951
- "tags": "LLM CHAT 4K",
1952
- "max_tokens": 4096,
1953
- "model_type": "chat"
1954
- },
1955
- {
1956
- "llm_name": "01-ai/yi-34b",
1957
- "tags": "LLM CHAT 4K",
1958
- "max_tokens": 4096,
1959
- "model_type": "chat"
1960
- },
1961
- {
1962
- "llm_name": "01-ai/yi-6b",
1963
- "tags": "LLM CHAT 4K",
1964
- "max_tokens": 4096,
1965
- "model_type": "chat"
1966
- }
1967
- ]
1968
  },
1969
  {
1970
  "name": "StepFun",
 
937
  "logo": "",
938
  "tags": "LLM,IMAGE2TEXT",
939
  "status": "1",
940
+ "llm": []
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
941
  },
942
  {
943
  "name": "StepFun",
web/src/pages/user-setting/constants.tsx CHANGED
@@ -17,4 +17,13 @@ export const UserSettingIconMap = {
17
 
18
  export * from '@/constants/setting';
19
 
20
- export const LocalLlmFactories = ['Ollama', 'Xinference','LocalAI','LM-Studio',"OpenAI-API-Compatible",'TogetherAI','Replicate'];
 
 
 
 
 
 
 
 
 
 
17
 
18
  export * from '@/constants/setting';
19
 
20
+ export const LocalLlmFactories = [
21
+ 'Ollama',
22
+ 'Xinference',
23
+ 'LocalAI',
24
+ 'LM-Studio',
25
+ 'OpenAI-API-Compatible',
26
+ 'TogetherAI',
27
+ 'Replicate',
28
+ 'OpenRouter',
29
+ ];