base_model: mlabonne/NeuralBeagle14-7B
dtype: bfloat16
merge_method: dare_ties
models:
  - model: mlabonne/NeuralBeagle14-7B
  - model: mlabonne/AlphaMonarch-7B
    parameters:
      density: 0.53
      weight: 0.4
  - model: Intel/neural-chat-7b-v3-1
    parameters:
      density: 0.53
      weight: 0.3
  - model: HuggingFaceH4/zephyr-7b-beta
    parameters:
      density: 0.53
      weight: 0.3
parameters:
  int8_mask: true
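
A config like this is normally executed with mergekit, either through the `mergekit-yaml` CLI or its Python interface. The sketch below uses the Python route; it assumes the YAML above is saved as `dare_ties.yaml` (a hypothetical filename) and that the installed mergekit version exposes `MergeConfiguration`, `MergeOptions`, and `run_merge` as in its documented examples, so check the version you have installed.

```python
# Sketch: run the DARE-TIES merge defined above with mergekit.
# Assumes mergekit is installed and the config is saved as "dare_ties.yaml"
# (hypothetical filename); exact options may differ between mergekit versions.
import torch
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_PATH = "dare_ties.yaml"   # the YAML config shown above
OUTPUT_PATH = "./merged-model"   # directory for the merged weights

# Parse and validate the merge configuration.
with open(CONFIG_PATH, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Execute the merge; copy_tokenizer carries over the base model's tokenizer,
# lazy_unpickle trades speed for lower peak memory while loading checkpoints.
run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),
        copy_tokenizer=True,
        lazy_unpickle=False,
    ),
)
```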