Vishal24 committed
Commit fabdfcc
1 Parent(s): d0db4ac

Upload model

Files changed (2):
  1. adapter_config.json +9 -9
  2. adapter_model.bin +1 -1
adapter_config.json CHANGED
@@ -20,18 +20,18 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "mlp.down_proj",
+    "mlp.up_proj",
+    "self_attn.k_proj",
+    "self_attn.v_proj",
     "lm_head.weight",
-    "mlp.gate_proj",
-    "post_attention_layernorm.weight",
-    "self_attn.q_proj",
-    "self_attn.o_proj",
     "input_layernorm.weight",
-    "self_attn.v_proj",
-    "self_attn.k_proj",
+    "self_attn.q_proj",
+    "post_attention_layernorm.weight",
+    "mlp.gate_proj",
+    "mlp.down_proj",
+    "model.norm.weight",
     "self_attn.rotary_emb.inv_freq",
-    "mlp.up_proj",
-    "model.norm.weight"
+    "self_attn.o_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ea388ee8a3e39281fb16b6b2d121b6e008adc5236a0d4067dec5befa3a352613
+oid sha256:3a4920fe7351a48b7068a27932cd4dcefdf86106b1b02175d38a6d06eec7b18c
 size 1208679250
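adapter_model.bin is tracked with Git LFS, so the commit updates only the pointer file: the sha256 oid changes while the size stays 1208679250 bytes. A hedged sketch (assuming the LFS object has been downloaded into the working directory) for checking a local copy against the new oid:

```python
# Sketch only: verify a downloaded adapter_model.bin against the new
# LFS pointer's sha256 oid from this commit. The file path is assumed.
import hashlib

EXPECTED_OID = "3a4920fe7351a48b7068a27932cd4dcefdf86106b1b02175d38a6d06eec7b18c"

digest = hashlib.sha256()
with open("adapter_model.bin", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        digest.update(chunk)

assert digest.hexdigest() == EXPECTED_OID, "checksum mismatch"
print("adapter_model.bin matches the committed LFS pointer")
```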