---
# mergekit configuration: TIES-merge two Llama-3.1-8B instruct variants
# onto the Meta-Llama-3.1-8B base model.
base_model: meta-llama/Meta-Llama-3.1-8B
chat_template: auto
dtype: float16
merge_method: ties

# Models to merge; each contributes with equal density and weight.
models:
  - model: mlabonne/Meta-Llama-3.1-8B-Instruct-abliterated
    parameters:
      density: 0.5
      weight: 0.5
  - model: meta-llama/Meta-Llama-3.1-8B-Instruct
    parameters:
      density: 0.5
      weight: 0.5

# Global merge parameters.
parameters:
  int8_mask: true
  normalize: false