# Inspire-7B-slerp_0.2 / mergekit_config.yml
slices:
  - sources:
      - model: mlabonne/AlphaMonarch-7B # mistralai/Mistral-7B-Instruct-v0.2
        layer_range: [0, 32]
      # - model: EmbeddedLLM/Mistral-7B-Merge-14-v0.1
      #   layer_range: [0, 32]
      # - model: mlabonne/AlphaMonarch-7B
      #   layer_range: [0, 32]
      - model: cognitivecomputations/dolphin-2.8-mistral-7b-v02
        layer_range: [0, 32]
# mlabonne/AlphaMonarch-7B
merge_method: slerp
base_model: cognitivecomputations/dolphin-2.8-mistral-7b-v02 # mistralai/Mistral-7B-Instruct-v0.2
parameters:
  # t is the slerp interpolation factor: t=0 keeps the base model
  # (dolphin-2.8-mistral-7b-v02), t=1 takes the other model (AlphaMonarch-7B).
  # A list of values is interpolated into a gradient across layer depth.
  t:
    - filter: self_attn
      value: [0, 0.5, 0.3, 0.7, 1]
    - filter: mlp
      value: [1, 0.5, 0.7, 0.3, 0]
    # Default t for all remaining tensors.
    - value: 0.5
dtype: bfloat16
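
# Usage sketch: with mergekit installed (https://github.com/arcee-ai/mergekit),
# a merge like the one above can be produced by pointing the mergekit-yaml CLI
# at this file. The output directory name below is illustrative, not part of
# this repo; drop --cuda to merge on CPU.
#
#   mergekit-yaml mergekit_config.yml ./Inspire-7B-slerp_0.2 --cuda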