```yaml
models:
  - model: meta-llama/CodeLlama-7b-hf
  - model: meta-llama/CodeLlama-7b-Instruct-hf
    parameters:
      density: 0.55
      weight: 1.0
merge_method: dare_ties
base_model: meta-llama/CodeLlama-7b-hf
parameters:
  int8_mask: true
dtype: bfloat16
```
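This is a mergekit-style DARE-TIES configuration: the Instruct variant is merged onto the base CodeLlama-7b model with a parameter density of 0.55 and weight 1.0, using an int8 mask and bfloat16 weights. Below is a minimal sketch of loading the merged checkpoint with Hugging Face Transformers; the output directory name is an assumption (it would be whatever path you pass when running the merge, e.g. with mergekit's `mergekit-yaml` CLI), not something defined by the config above.

```python
# Minimal usage sketch: load and prompt the merged model.
# Assumes the merge was already run and written to ./codellama-7b-dare-ties
# (hypothetical output path chosen at merge time).
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

merged_dir = "./codellama-7b-dare-ties"  # assumed output directory of the merge

tokenizer = AutoTokenizer.from_pretrained(merged_dir)
model = AutoModelForCausalLM.from_pretrained(
    merged_dir,
    torch_dtype=torch.bfloat16,  # matches the dtype declared in the config
)

prompt = "def fibonacci(n):"
inputs = tokenizer(prompt, return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```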