zyh990312 committed
Commit: eda610d
1 Parent(s): 9801556

Training in progress, epoch 1

adapter_config.json CHANGED
@@ -19,13 +19,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "down_proj",
+    "v_proj",
+    "up_proj",
+    "q_proj",
     "k_proj",
-    "o_proj",
     "gate_proj",
-    "q_proj",
-    "v_proj",
-    "up_proj"
+    "down_proj",
+    "o_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_rslora": false
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f058d85944e854256be9693581efea8b8cefac3b9597ea9830a3bd16595ca7cf
+oid sha256:3a0ca0caf49ab55cc1342db02f9c5915a34b0e42e25f625f44b1e74f06ea71db
 size 5449517272
runs/Mar04_04-40-20_f47fdcc61c5e/events.out.tfevents.1709527244.f47fdcc61c5e.2732.2 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6b7e06c7f3728a5d8ff54032c39beadba9f86afb15bf36e8f8c250e9d920ae59
+size 88
runs/Mar04_04-42-03_f47fdcc61c5e/events.out.tfevents.1709527388.f47fdcc61c5e.6339.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fa21315cd92092c86f2ccd4c5961a9255fde1b62aa91dc0da402f27d390fc8cb
+size 20048
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:494437ccea9ad4b2269a6485aa43d1e169b89dd834770fe5b47bf4c94656b710
-size 4920
+oid sha256:498984671ffe01e139b9470002331e316a5d5e5a860e4060c28362daa7a983a8
+size 4984
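The binary files touched in this commit (adapter weights, TensorBoard event files, training_args.bin) are stored as Git LFS pointers. Once pulled, the updated adapter checkpoint can be applied to its base model with PEFT; the sketch below is a hedged example, and both model identifiers are placeholders since the commit does not name the base model or the Hub repository.

```python
# Hypothetical sketch of loading the adapter weights pushed in this commit.
# "base-model-id" and "zyh990312/adapter-repo" are placeholders, not taken
# from this commit.
from transformers import AutoModelForCausalLM
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained("base-model-id")
model = PeftModel.from_pretrained(base, "zyh990312/adapter-repo")
model.eval()  # weights from adapter_model.safetensors are now applied
```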