dtype: float16
merge_method: passthrough
slices:
- sources:
  - model: yam-peleg/Experiment30-7B
    layer_range: [0, 16]
- sources:
  - model: Endevor/EndlessRP-v3-7B
    layer_range: [8, 24]
- sources:
  - model: SanjiWatsuki/Kunoichi-DPO-v2-7B
    layer_range: [17, 24]
- sources:
  - model: undi95/Toppy-M-7B
    layer_range: [20, 28]
- sources:
  - model: sanjiwatsuki/Loyal-Toppy-Bruins-Maid-7B-DARE
    layer_range: [28, 30]
- sources:
  - model: yam-peleg/Experiment30-7B
    layer_range: [29, 32]
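
This is a mergekit passthrough configuration: the listed layer ranges are stacked in order, splicing slices from the six source models into a single frankenmerge. A minimal sketch of how such a config is typically applied with the mergekit CLI is shown below; config.yml and ./merged-model are placeholder paths, and the --cuda flag is optional:

mergekit-yaml config.yml ./merged-model --cuda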