---
# mergekit recipe: della_linear merge of four Llama-3.1-70B fine-tunes
# onto the Tess-3 base. Per-model `weight.value` lists are layer-wise
# weight gradients (interpolated across the model's layer range);
# `density` controls the fraction of delta parameters retained by DELLA.
models:
  - model: migtissera/Tess-3-Llama-3.1-70B
    parameters:
      density: 0.7
      weight:
        # Scalar weight: applied uniformly to every layer.
        - value: 0.75
  - model: HODACHI/Llama-3.1-70B-EZO-1.1-it
    parameters:
      density: 0.2
      weight:
        # Layer-wise gradient: strong at the ends, zeroed mid-stack.
        - value: [1, 0.75, 0.5, 0.25, 0, 0, 0, 0, 0.0, 0.5, 1]
  - model: shenzhi-wang/Llama3.1-70B-Chinese-Chat
    parameters:
      density: 0.2
      weight:
        - value: [1, 0.75, 0.5, 0.25, 0, 0, 0, 0, 0.0, 0.5, 1]
  - model: Saxo/Linkbricks-Horizon-AI-Korean-llama3.1-sft-dpo-70B
    parameters:
      density: 0.2
      weight:
        - value: [1, 0.75, 0.5, 0.25, 0, 0, 0, 0, 0.0, 0.5, 1]

merge_method: della_linear
base_model: migtissera/Tess-3-Llama-3.1-70B
parameters:
  # Normalize the per-layer weight sums so merged weights stay in scale.
  normalize: true
dtype: bfloat16