Liu-Xiang committed
Commit f67b37f
1 parent: f441c0e

Upload adapter_config.json with huggingface_hub

Files changed (1): adapter_config.json (+6 -4)
adapter_config.json CHANGED
@@ -6,6 +6,7 @@
   "fan_in_fan_out": false,
   "inference_mode": true,
   "init_lora_weights": true,
+  "layer_replication": null,
   "layers_pattern": null,
   "layers_to_transform": null,
   "loftq_config": {},
@@ -24,13 +25,14 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "q_proj",
+    "k_proj",
     "down_proj",
-    "v_proj",
+    "q_proj",
+    "up_proj",
     "gate_proj",
-    "k_proj",
-    "up_proj"
+    "v_proj"
   ],
   "task_type": "CAUSAL_LM",
+  "use_dora": false,
   "use_rslora": false
 }
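
The substantive additions are "layer_replication": null and "use_dora": false; both are serialized defaults, so the commit likely reflects the config being re-written by a newer peft release rather than a behavioral change. The reordering of target_modules has no effect, since PEFT matches module names against the list as a set. For reference, here is a minimal sketch of the same settings expressed as a peft.LoraConfig; only keys visible in this diff are set, and the remaining fields (r, lora_alpha, dropout, the base model, and so on) are assumed to keep whatever the full adapter_config.json specifies:

from peft import LoraConfig

# Sketch of the updated adapter settings; only keys visible in the diff
# above are set explicitly. Other fields fall back to LoraConfig defaults,
# which may differ from the values in the full adapter_config.json.
config = LoraConfig(
    task_type="CAUSAL_LM",
    target_modules=["k_proj", "down_proj", "q_proj", "up_proj", "gate_proj", "v_proj"],
    fan_in_fan_out=False,
    init_lora_weights=True,
    layer_replication=None,  # key added in this commit; None is the default
    use_dora=False,          # key added in this commit; False is the default
    use_rslora=False,
)

Note that layer_replication and use_dora are only recognized by recent peft versions, so loading this adapter with an older peft installation may fail or silently drop those keys.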