models:
  - model: mistralai/Mistral-7B-Instruct-v0.1
  - model: BioMistral/BioMistral-7B-TIES
    parameters:
      density: 0.5
      weight: 1.0
  - model: openchat/openchat-3.5-0106
    parameters:
      density: 0.5
      weight: 1.0
  - model: liminerity/M7-7b
    parameters:
      density: 0.5
      weight: 1.0
merge_method: ties
base_model: mistralai/Mistral-7B-Instruct-v0.1
parameters:
  normalize: true
dtype: bfloat16
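One way to run this TIES merge is the mergekit-yaml command line tool (for example, mergekit-yaml config.yml ./merged-model), or mergekit's Python entry points. The sketch below is a minimal example under stated assumptions: the config file path (config.yml), the output directory (./merged-model), and the chosen MergeOptions values are illustrative and not part of the original file.

import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Load the merge configuration shown above (the file name "config.yml" is an assumption).
with open("config.yml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Execute the TIES merge and write the merged model to ./merged-model
# (output path and option values are illustrative assumptions).
run_merge(
    merge_config,
    out_path="./merged-model",
    options=MergeOptions(cuda=False, copy_tokenizer=True),
)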