# PsyMedLewd_v4 / mergekit_config.yml
slices:
  - sources:
      - model: Undi95/PsyMedRP-v1-20B
        layer_range: [0, 62] # PsyMedRP has 62 layers
      - model: Undi95/MXLewd-L2-20B
        layer_range: [0, 62] # MXLewd has 62 layers
merge_method: slerp # spherical linear interpolation (SLERP)
base_model: Undi95/PsyMedRP-v1-20B # Focus on reasoning from PsyMedRP
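# Note on t (per mergekit's SLERP convention, added here for clarity): t = 0
# keeps the base model's weights (PsyMedRP), t = 1 takes MXLewd's, and values
# in between blend along the arc between the two; the lists below vary t
# across layer depth for the matching tensors.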
parameters:
  t:
    - filter: self_attn
      value: [0.3, 0.6, 0.9, 0.6, 0.3] # smooth gradient of focus across layer depth
    - filter: mlp
      value: [0.3, 0.6, 0.9, 0.6, 0.3] # consistent level of creativity and abstract reasoning
    - value: 0.639 # fallback t for all other tensors
dtype: bfloat16 # merge and save the output weights in bfloat16
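
# To apply this config locally, a minimal sketch (the output directory name is
# an assumption, and flag availability may vary across mergekit versions):
#   pip install mergekit
#   mergekit-yaml mergekit_config.yml ./PsyMedLewd_v4 --cuda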