ibadami committed on
Commit 84be664
1 Parent(s): fbdf05e

Upload 5 files

adapter_config.json ADDED
@@ -0,0 +1,31 @@
+{
+  "alpha_pattern": {},
+  "auto_mapping": null,
+  "base_model_name_or_path": "microsoft/Phi-3-mini-128k-instruct",
+  "bias": "none",
+  "fan_in_fan_out": false,
+  "inference_mode": true,
+  "init_lora_weights": true,
+  "layer_replication": null,
+  "layers_pattern": null,
+  "layers_to_transform": null,
+  "loftq_config": {},
+  "lora_alpha": 256,
+  "lora_dropout": 0.05,
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "modules_to_save": null,
+  "peft_type": "LORA",
+  "r": 128,
+  "rank_pattern": {},
+  "revision": null,
+  "target_modules": [
+    "o_proj",
+    "gate_up_proj",
+    "qkv_proj",
+    "down_proj"
+  ],
+  "task_type": "CAUSAL_LM",
+  "use_dora": false,
+  "use_rslora": false
+}
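This is a standard PEFT LoRA configuration: rank r=128 with lora_alpha=256 (an effective scaling of alpha/r = 2), dropout 0.05, applied to all of Phi-3's fused attention and MLP projections (qkv_proj, o_proj, gate_up_proj, down_proj). A minimal loading sketch follows, assuming the peft and transformers libraries; the adapter repo id is a hypothetical placeholder, since this commit does not show it. Note also that config.json below declares model_type "llava_phi", so the full multimodal model may additionally require the LLaVA-style code it was trained with.

import torch
from transformers import AutoModelForCausalLM
from peft import PeftModel

# Base model named in "base_model_name_or_path" above.
base = AutoModelForCausalLM.from_pretrained(
    "microsoft/Phi-3-mini-128k-instruct",
    torch_dtype=torch.bfloat16,
    trust_remote_code=True,
)

# "user/adapter-repo" is a hypothetical placeholder for this repository's id;
# PeftModel attaches adapter_config.json + adapter_model.safetensors from it.
model = PeftModel.from_pretrained(base, "user/adapter-repo")
model = model.merge_and_unload()  # optionally fold the LoRA deltas into the base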
adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c4cda55153d26ccc463cf547c783cc978a4833c28d3c2dedddaa07e3fdeb8c29
+size 402688552
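As with all large binaries on the Hub, the repository stores only this Git LFS pointer; the ~403 MB safetensors blob lives in LFS storage. A downloaded copy can be checked against the pointer's oid, for example:

import hashlib

def sha256_of(path, chunk_size=1 << 20):
    # Stream the file so the ~403 MB blob never sits fully in memory.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "c4cda55153d26ccc463cf547c783cc978a4833c28d3c2dedddaa07e3fdeb8c29"
assert sha256_of("adapter_model.safetensors") == expected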
config.json ADDED
@@ -0,0 +1,152 @@
+{
+  "_name_or_path": "microsoft/Phi-3-mini-128k-instruct",
+  "architectures": [
+    "Phi3ForCausalLM"
+  ],
+  "attention_dropout": 0.0,
+  "auto_map": {
+    "AutoConfig": "microsoft/Phi-3-mini-128k-instruct--configuration_phi3.Phi3Config",
+    "AutoModelForCausalLM": "microsoft/Phi-3-mini-128k-instruct--modeling_phi3.Phi3ForCausalLM"
+  },
+  "bos_token_id": 1,
+  "embd_pdrop": 0.0,
+  "eos_token_id": 32000,
+  "freeze_mm_mlp_adapter": false,
+  "hidden_act": "silu",
+  "hidden_size": 3072,
+  "image_aspect_ratio": "pad",
+  "initializer_range": 0.02,
+  "intermediate_size": 8192,
+  "max_position_embeddings": 131072,
+  "mm_hidden_size": 1024,
+  "mm_patch_merge_type": "flat",
+  "mm_projector_lr": 2e-05,
+  "mm_projector_type": "mlp2x_gelu",
+  "mm_use_im_patch_token": false,
+  "mm_use_im_start_end": false,
+  "mm_vision_select_feature": "patch",
+  "mm_vision_select_layer": -2,
+  "mm_vision_tower": "openai/clip-vit-large-patch14-336",
+  "model_type": "llava_phi",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 32,
+  "num_key_value_heads": 32,
+  "original_max_position_embeddings": 4096,
+  "pad_token_id": 32000,
+  "resid_pdrop": 0.0,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": {
+    "long_factor": [
+      1.0299999713897705,
+      1.0499999523162842,
+      1.0499999523162842,
+      1.0799999237060547,
+      1.2299998998641968,
+      1.2299998998641968,
+      1.2999999523162842,
+      1.4499999284744263,
+      1.5999999046325684,
+      1.6499998569488525,
+      1.8999998569488525,
+      2.859999895095825,
+      3.68999981880188,
+      5.419999599456787,
+      5.489999771118164,
+      5.489999771118164,
+      9.09000015258789,
+      11.579999923706055,
+      15.65999984741211,
+      15.769999504089355,
+      15.789999961853027,
+      18.360000610351562,
+      21.989999771118164,
+      23.079999923706055,
+      30.009998321533203,
+      32.35000228881836,
+      32.590003967285156,
+      35.56000518798828,
+      39.95000457763672,
+      53.840003967285156,
+      56.20000457763672,
+      57.95000457763672,
+      59.29000473022461,
+      59.77000427246094,
+      59.920005798339844,
+      61.190006256103516,
+      61.96000671386719,
+      62.50000762939453,
+      63.3700065612793,
+      63.48000717163086,
+      63.48000717163086,
+      63.66000747680664,
+      63.850006103515625,
+      64.08000946044922,
+      64.760009765625,
+      64.80001068115234,
+      64.81001281738281,
+      64.81001281738281
+    ],
+    "short_factor": [
+      1.05,
+      1.05,
+      1.05,
+      1.1,
+      1.1,
+      1.1500000000000001,
+      1.2000000000000002,
+      1.2500000000000002,
+      1.3000000000000003,
+      1.3500000000000003,
+      1.5000000000000004,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.0500000000000007,
+      2.0500000000000007,
+      2.0500000000000007,
+      2.1000000000000005,
+      2.1000000000000005,
+      2.1000000000000005,
+      2.1500000000000004,
+      2.1500000000000004,
+      2.3499999999999996,
+      2.549999999999999,
+      2.5999999999999988,
+      2.5999999999999988,
+      2.7499999999999982,
+      2.849999999999998,
+      2.849999999999998,
+      2.9499999999999975
+    ],
+    "type": "su"
+  },
+  "rope_theta": 10000.0,
+  "sliding_window": 262144,
+  "tie_word_embeddings": false,
+  "tokenizer_model_max_length": 2048,
+  "tokenizer_padding_side": "right",
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.37.2",
+  "tune_mm_mlp_adapter": false,
+  "use_cache": true,
+  "use_mm_proj": true,
+  "vocab_size": 32064
+}
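Several of these fields cross-check each other. The head dimension is hidden_size / num_attention_heads = 3072 / 32 = 96, so the rotary embedding uses 48 frequencies, which is exactly why long_factor and short_factor each carry 48 per-frequency entries; and max_position_embeddings / original_max_position_embeddings = 131072 / 4096 gives the 32x context extension that the "su" (LongRoPE-style) scaling implements. A quick sanity-check sketch:

# Arithmetic implied by config.json (a sanity check, not library code).
hidden_size = 3072
num_attention_heads = 32
head_dim = hidden_size // num_attention_heads  # 96
rotary_freqs = head_dim // 2                   # 48 -> one entry per frequency
                                               # in long_factor / short_factor
scale = 131072 // 4096                         # 32x context extension
print(head_dim, rotary_freqs, scale)           # 96 48 32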
non_lora_trainables.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:562b1d2d01126796927c7cc639738055b09cf9cc587af2fefb761f1a8eb0aa0f
+size 25180336
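In LLaVA-style LoRA checkpoints, non_lora_trainables.bin conventionally holds the weights trained with full gradients outside the adapter, typically the multimodal projector (mm_projector_type "mlp2x_gelu" in config.json); that interpretation is an assumption here, not something this commit documents. A sketch for inspecting it:

import torch

# Plain torch state dict; map to CPU so no GPU is needed for inspection.
# The interpretation of these weights as projector parameters is an assumption.
state = torch.load("non_lora_trainables.bin", map_location="cpu")
for name, tensor in state.items():
    print(name, tuple(tensor.shape), tensor.dtype)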
trainer_state.json ADDED
The diff for this file is too large to render. See raw diff
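trainer_state.json is the standard Hugging Face Trainer artifact; even when the Hub declines to render its diff, the file's "log_history" list can be read directly to recover the training loss curve. A minimal sketch:

import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Each logging step appends a dict; training entries carry a "loss" key.
for entry in state.get("log_history", []):
    if "loss" in entry:
        print(entry.get("step"), entry["loss"])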