---
# mergekit "passthrough" recipe (layer-interleave / frankenmerge):
# stacks alternating layer slices from two SOLAR-10.7B-derived models
# into a single taller model, with no weight averaging.
#
# NOTE(review): assuming mergekit's end-exclusive layer_range semantics,
# Hermes layer 16 and Fimbulvetr layers 24 and 40 fall in no slice —
# confirm these gaps are intentional before re-running the merge.
slices:
  # Hermes layers 0-15 form the base of the stack.
  - sources:
      - model: "NousResearch/Nous-Hermes-2-SOLAR-10.7B"
        layer_range: [0, 16]
  # Fimbulvetr layers 8-23 (overlaps Hermes 8-15 by design of the interleave).
  - sources:
      - model: "Sao10K/Fimbulvetr-11B-v2"
        layer_range: [8, 24]
  # Hermes layers 17-31.
  - sources:
      - model: "NousResearch/Nous-Hermes-2-SOLAR-10.7B"
        layer_range: [17, 32]
  # Fimbulvetr layers 25-39.
  - sources:
      - model: "Sao10K/Fimbulvetr-11B-v2"
        layer_range: [25, 40]
  # Hermes layers 41-47 cap the stack.
  - sources:
      - model: "NousResearch/Nous-Hermes-2-SOLAR-10.7B"
        layer_range: [41, 48]

# Concatenate the slices in listed order; no interpolation of weights.
merge_method: passthrough
# Precision of the written output tensors.
dtype: float16