zyh990312 committed on
Commit
47833b5
1 Parent(s): a0b4aa4

Training in progress, epoch 0

adapter_config.json CHANGED
@@ -20,12 +20,12 @@
   "revision": null,
   "target_modules": [
     "down_proj",
-    "v_proj",
-    "q_proj",
-    "up_proj",
     "k_proj",
     "o_proj",
-    "gate_proj"
+    "gate_proj",
+    "q_proj",
+    "v_proj",
+    "up_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_rslora": false
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:92c5d18876fbc2f0ad9fccb6e861aa9a3af1f71815788fe5358e61e117fc8b37
+oid sha256:be867bcd1674040034465cfa480fee2bc69b927b0c9c32a26724044015047b21
 size 5449517272
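The binary files in this commit are stored through Git LFS, so the diff shows only the pointer file: `oid sha256:` is the SHA-256 digest of the actual payload and `size` is its byte length. A minimal sketch for checking that a pulled file matches the new pointer (assumes the file has been fetched with `git lfs pull`):

```python
import hashlib
import os

# Recompute the digest of the checked-out file and compare it with the pointer
# shown above. Chunked reading avoids loading the ~5.4 GB file into memory at once.
path = "adapter_model.safetensors"
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

print(h.hexdigest() == "be867bcd1674040034465cfa480fee2bc69b927b0c9c32a26724044015047b21")
print(os.path.getsize(path) == 5449517272)
```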
runs/Mar03_01-52-58_056e44a10e79/events.out.tfevents.1709430799.056e44a10e79.4023.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4e07f1d712aa71289d22afd310a92188b65b59ed6467877a53681055306cb9cd
+size 18897
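The added `events.out.tfevents.*` file is the TensorBoard log written during this training run (about 18 kB of scalars). One way to inspect it offline is TensorBoard's `EventAccumulator`; the `train/loss` tag below is an assumption based on the usual Hugging Face Trainer naming, so check the available tags first:

```python
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Point the accumulator at the run directory (or at the event file itself).
ea = EventAccumulator("runs/Mar03_01-52-58_056e44a10e79")
ea.Reload()

print(ea.Tags()["scalars"])             # names of the logged scalar series
for event in ea.Scalars("train/loss"):  # tag name is an assumption; verify via Tags()
    print(event.step, event.value)
```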
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d15e4d429cd53bda135401c8eb17f109419dbb11043871d5eba21eeb5783b863
+oid sha256:494437ccea9ad4b2269a6485aa43d1e169b89dd834770fe5b47bf4c94656b710
 size 4920
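`training_args.bin` is the pickled `TrainingArguments` object the Trainer saves with its checkpoints; only its hash changes here, the size stays at 4920 bytes. A sketch for inspecting it, assuming a recent PyTorch where `weights_only=False` must be passed for full pickles (only do this for files you trust):

```python
import torch

# Load the serialized TrainingArguments to see the hyperparameters used for this run.
# weights_only=False is needed on newer PyTorch because the file is an arbitrary pickle.
training_args = torch.load("training_args.bin", weights_only=False)
print(training_args)
```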