# Passthrough (frankenmerge) configuration: stacks three consecutive 16-layer
# slices of Sao10K/Fimbulvetr-11B-v2 (layers 0-48) into a single model.
#
# NOTE(review): this file arrived garbled by a markdown-table extraction
# (trailing "| |" on every line, indentation flattened). The structure below
# is reconstructed; the nesting of `parameters` under each source entry is
# the most plausible reading — confirm against the original config.
#
# NOTE(review): the attention/mlp/regularization parameter keys here
# (grouped_qk_proj, clusters, attention_type, significance, ...) are not
# standard mergekit options, and passthrough merges generally ignore
# per-tensor parameters — verify the consuming tool actually honors them.
slices:
  - sources:
      - model: Sao10K/Fimbulvetr-11B-v2
        layer_range: [0, 16]
        parameters:
          attention:
            - filter: grouped_qk_proj
              clusters: 8
              value: 1.2
              attention_type: hierarchical
              local_attention: 0.5
              global_attention: 1.5
              dynamic_weighting: true
              rope_scaling: 10000
              significance: 0.85
          mlp:
            filter: intermediate_proj
            value: 14336
            compression: true
            dynamic_capacity: true
          regularization:
            filter: attention_dropout
            value: 0.0
  - sources:
      - model: Sao10K/Fimbulvetr-11B-v2
        layer_range: [16, 32]
        parameters:
          attention:
            - filter: grouped_qk_proj
              clusters: 8
              value: 1.3
              attention_type: hierarchical
              local_attention: 0.6
              global_attention: 1.4
              dynamic_weighting: true
              rope_scaling: 10000
              significance: 0.80
          mlp:
            filter: intermediate_proj
            value: 14336
            compression: true
            dynamic_capacity: true
          regularization:
            filter: attention_dropout
            value: 0.0
  - sources:
      - model: Sao10K/Fimbulvetr-11B-v2
        layer_range: [32, 48]
        parameters:
          attention:
            - filter: grouped_qk_proj
              clusters: 8
              value: 1.5
              attention_type: hierarchical
              local_attention: 0.7
              global_attention: 1.6
              dynamic_weighting: true
              rope_scaling: 10000
              significance: 0.9
          mlp:
            filter: intermediate_proj
            value: 14336
            compression: true
            dynamic_capacity: true
          regularization:
            filter: attention_dropout
            value: 0.0
merge_method: passthrough
dtype: bfloat16