models:
  - model: shibiyaj/lawGPT-chat
    parameters:
      density: 0.5
      weight: 0.5
merge_method: ties
base_model: AdityaXPV/Mistral-7B-law-sage-v0.3
parameters:
  normalize: false
  int8_mask: true
dtype: float16
models:
  - model: shibiyaj/lawGPT-chat
    parameters:
      density: 0.5
      weight: 0.5
merge_method: ties
base_model: AdityaXPV/Mistral-7B-law-sage-v0.3
parameters:
  normalize: false
  int8_mask: true
dtype: float16