---
# mergekit merge configuration.
# Merges three Llama-3.3-70B-class finetunes into one model using the
# SCE merge method, with meta-llama/Llama-3.3-70B-Instruct as the base.
# NOTE(review): file was collapsed onto a single line; structure restored
# per the mergekit config schema — verify against the original intent.

models:
  # Each entry contributes to the merge with its own per-model parameters.
  # select_topk is an SCE-specific knob; presumably it keeps the top 35% of
  # parameter deltas per model — confirm against mergekit's SCE docs.
  - model: SicariusSicariiStuff/Negative_LLAMA_70B
    parameters:
      select_topk: 0.35
  - model: Sao10K/70B-L3.3-mhnnn-x1
    parameters:
      select_topk: 0.35
  - model: Sao10K/L3-70B-Euryale-v2.1
    parameters:
      select_topk: 0.35

merge_method: sce
base_model: meta-llama/Llama-3.3-70B-Instruct

# Global merge parameters (apply to the merge as a whole).
parameters:
  int8_mask: true

chat_template: llama3

tokenizer:
  # Build the output tokenizer as the union of all source tokenizers.
  source: union

# Compute in float32 for merge precision; write the result as bfloat16.
dtype: float32
out_dtype: bfloat16