# Neo_7b-merge1 / mergekit_config.yml
---
# mergekit SLERP self-merge of m-a-p/neo_7b.
slices:
  - sources:
      - model: m-a-p/neo_7b
        # NOTE(review): mergekit's layer_range end is exclusive, so [0, 27]
        # selects layers 0-26 (27 layers). The original comment claimed
        # "28 layers (0-27)" and the t-vectors below have 28 entries — if all
        # 28 layers are intended, this should be [0, 28]; confirm before use.
        layer_range: [0, 27]  # 28 layers (0-27)
merge_method: slerp
base_model: m-a-p/neo_7b
parameters:
  # Per-component SLERP interpolation factors (one entry per layer).
  t:
    - filter: self_attn
      value: [0.5, 0.5, 0.5, 0.75, 0.5, 0.5, 0.5, 0.75, 0.5, 0.5, 0.5, 0.75,
              0.5, 0.5, 0.5, 0.75, 0.5, 0.5, 0.5, 0.75, 0.5, 0.5, 0.5, 0.75,
              0.5, 0.5, 0.5, 0.75]
    - filter: mlp
      value: [0.5, 0.5, 0.5, 0.75, 0.5, 0.5, 0.5, 0.75, 0.5, 0.5, 0.5, 0.75,
              0.5, 0.5, 0.5, 0.75, 0.5, 0.5, 0.5, 0.75, 0.5, 0.5, 0.5, 0.75,
              0.5, 0.5, 0.5, 0.75]
    - value: 0.5  # Default value for other components
dtype: bfloat16
output_path: ./merged_reduced_neo_7b