models:
  - model: meta-llama/Meta-Llama-3-8B-Instruct
  - model: elyza/Llama-3-ELYZA-JP-8B
    parameters:
      density: 0.5
      weight: 0.3
  - model: instruction-pretrain/finance-Llama3-8B
    parameters:
      density: 0.5
      weight: 0.3
merge_method: dare_ties
base_model: meta-llama/Meta-Llama-3-8B-Instruct
parameters:
  normalize: true
dtype: float16
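
This config merges the Japanese-tuned ELYZA model and the finance-domain model into Meta-Llama-3-8B-Instruct with the DARE-TIES method, where density is the fraction of each model's delta parameters retained and weight scales its contribution. It is typically saved to a file (e.g. config.yml, a hypothetical name) and run with mergekit's mergekit-yaml CLI, which writes the merged weights to an output directory. Below is a minimal sketch of loading that output with Hugging Face transformers and running a quick generation check; the output path ./merged-llama3-8b is an assumption, not something specified by the config.

# Minimal smoke test for the merged model (path is hypothetical; use your
# mergekit output directory, e.g. the one passed to `mergekit-yaml config.yml <out_dir>`).
from transformers import AutoModelForCausalLM, AutoTokenizer

merged_dir = "./merged-llama3-8b"  # assumed output directory from the merge step

tokenizer = AutoTokenizer.from_pretrained(merged_dir)
model = AutoModelForCausalLM.from_pretrained(
    merged_dir,
    torch_dtype="auto",   # keep the float16 weights produced by the merge
    device_map="auto",    # requires the accelerate package
)

# The merge targets both Japanese instruction-following and finance knowledge,
# so a Japanese finance prompt is a reasonable sanity check.
prompt = "日本の金融市場について簡潔に説明してください。"
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=128)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))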