---
# mergekit configuration: SCE merge of five Qwen2.5-14B-derived models.
# NOTE: the original file was collapsed onto one line, which is not valid
# YAML; this restores the conventional mergekit block structure.

# Model whose config/tokenizer layout anchors the merge.
base_model: /root/LLM/NQLSG-Qwen2.5-14B-Base2

# Auto-detect the chat template from the source models.
chat_template: auto

# Output weight dtype.
dtype: bfloat16

# SCE (Select, Calculate, Erase) merge method.
merge_method: sce

parameters:
  # Apply int8 masking during the merge (1.0 = enabled).
  int8_mask: 1.0

# All sources contribute their full layer stack (layers 0-47 of a
# 48-layer model; layer_range is [start, end) per mergekit convention).
slices:
  - sources:
      - layer_range: [0, 48]
        model: /root/LLM/NQLSG-Qwen2.5-14B-Base1
      - layer_range: [0, 48]
        model: /root/LLM/NQLSG-Qwen2.5-14B-Base2
      - layer_range: [0, 48]
        model: /root/LLM/NQLSG-Qwen2.5-14B-Base3
      - layer_range: [0, 48]
        model: Lunzima/NQLSG-Qwen2.5-14B-MegaFusion-v6
      - layer_range: [0, 48]
        model: suayptalha/Lamarckvergence-14B

# Empty mapping: use default tokenizer handling (taken from base_model).
tokenizer: {}