praysimanjuntak committed
Commit 498ae41
1 Parent(s): a166c99

Delete config.json

Files changed (1)
config.json +0 -152
config.json DELETED
@@ -1,152 +0,0 @@
- {
-   "_name_or_path": "microsoft/Phi-3-mini-128k-instruct",
-   "architectures": [
-     "Phi3ForCausalLM"
-   ],
-   "attention_dropout": 0.0,
-   "auto_map": {
-     "AutoConfig": "microsoft/Phi-3-mini-128k-instruct--configuration_phi3.Phi3Config",
-     "AutoModelForCausalLM": "microsoft/Phi-3-mini-128k-instruct--modeling_phi3.Phi3ForCausalLM"
-   },
-   "bos_token_id": 1,
-   "embd_pdrop": 0.0,
-   "eos_token_id": 32000,
-   "freeze_mm_mlp_adapter": false,
-   "hidden_act": "silu",
-   "hidden_size": 3072,
-   "image_aspect_ratio": "pad",
-   "initializer_range": 0.02,
-   "intermediate_size": 8192,
-   "max_position_embeddings": 131072,
-   "mm_hidden_size": 1024,
-   "mm_patch_merge_type": "flat",
-   "mm_projector_lr": 2e-05,
-   "mm_projector_type": "mlp2x_gelu",
-   "mm_use_im_patch_token": false,
-   "mm_use_im_start_end": false,
-   "mm_vision_select_feature": "patch",
-   "mm_vision_select_layer": -2,
-   "mm_vision_tower": "openai/clip-vit-large-patch14-336",
-   "model_type": "llava_phi",
-   "num_attention_heads": 32,
-   "num_hidden_layers": 32,
-   "num_key_value_heads": 32,
-   "original_max_position_embeddings": 4096,
-   "pad_token_id": 32000,
-   "resid_pdrop": 0.0,
-   "rms_norm_eps": 1e-05,
-   "rope_scaling": {
-     "long_factor": [
-       1.0299999713897705,
-       1.0499999523162842,
-       1.0499999523162842,
-       1.0799999237060547,
-       1.2299998998641968,
-       1.2299998998641968,
-       1.2999999523162842,
-       1.4499999284744263,
-       1.5999999046325684,
-       1.6499998569488525,
-       1.8999998569488525,
-       2.859999895095825,
-       3.68999981880188,
-       5.419999599456787,
-       5.489999771118164,
-       5.489999771118164,
-       9.09000015258789,
-       11.579999923706055,
-       15.65999984741211,
-       15.769999504089355,
-       15.789999961853027,
-       18.360000610351562,
-       21.989999771118164,
-       23.079999923706055,
-       30.009998321533203,
-       32.35000228881836,
-       32.590003967285156,
-       35.56000518798828,
-       39.95000457763672,
-       53.840003967285156,
-       56.20000457763672,
-       57.95000457763672,
-       59.29000473022461,
-       59.77000427246094,
-       59.920005798339844,
-       61.190006256103516,
-       61.96000671386719,
-       62.50000762939453,
-       63.3700065612793,
-       63.48000717163086,
-       63.48000717163086,
-       63.66000747680664,
-       63.850006103515625,
-       64.08000946044922,
-       64.760009765625,
-       64.80001068115234,
-       64.81001281738281,
-       64.81001281738281
-     ],
-     "short_factor": [
-       1.05,
-       1.05,
-       1.05,
-       1.1,
-       1.1,
-       1.1500000000000001,
-       1.2000000000000002,
-       1.2500000000000002,
-       1.3000000000000003,
-       1.3500000000000003,
-       1.5000000000000004,
-       2.000000000000001,
-       2.000000000000001,
-       2.000000000000001,
-       2.000000000000001,
-       2.000000000000001,
-       2.000000000000001,
-       2.000000000000001,
-       2.000000000000001,
-       2.000000000000001,
-       2.000000000000001,
-       2.000000000000001,
-       2.000000000000001,
-       2.000000000000001,
-       2.000000000000001,
-       2.000000000000001,
-       2.000000000000001,
-       2.000000000000001,
-       2.000000000000001,
-       2.000000000000001,
-       2.000000000000001,
-       2.000000000000001,
-       2.0500000000000007,
-       2.0500000000000007,
-       2.0500000000000007,
-       2.1000000000000005,
-       2.1000000000000005,
-       2.1000000000000005,
-       2.1500000000000004,
-       2.1500000000000004,
-       2.3499999999999996,
-       2.549999999999999,
-       2.5999999999999988,
-       2.5999999999999988,
-       2.7499999999999982,
-       2.849999999999998,
-       2.849999999999998,
-       2.9499999999999975
-     ],
-     "type": "su"
-   },
-   "rope_theta": 10000.0,
-   "sliding_window": 262144,
-   "tie_word_embeddings": false,
-   "tokenizer_model_max_length": 2048,
-   "tokenizer_padding_side": "right",
-   "torch_dtype": "bfloat16",
-   "transformers_version": "4.37.2",
-   "tune_mm_mlp_adapter": false,
-   "use_cache": true,
-   "use_mm_proj": true,
-   "vocab_size": 32064
- }
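
For reference, the deleted file described a LLaVA-style multimodal wrapper around Phi-3-mini (`"model_type": "llava_phi"`, a CLIP vision tower, and an MLP projector). One internal consistency worth checking before restoring a config like this: the `rope_scaling` `long_factor` and `short_factor` arrays each need one entry per rotary dimension pair, i.e. `(hidden_size / num_attention_heads) / 2 = (3072 / 32) / 2 = 48`, which both arrays above satisfy. The sketch below is a minimal illustration of that check; the local `config.json` path is an assumption, not part of this commit:

```python
import json

# Minimal sketch: sanity-check a saved config like the one deleted above.
# The file path is illustrative; point it at a local copy of the config.
with open("config.json") as f:
    cfg = json.load(f)

# Per-head dimension: 3072 hidden units split across 32 attention heads = 96.
head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]

# Rotary embeddings pair up dimensions, so each scaling array needs
# head_dim / 2 = 48 entries.
expected = head_dim // 2

for key in ("long_factor", "short_factor"):
    factors = cfg["rope_scaling"][key]
    assert len(factors) == expected, f"{key}: {len(factors)} != {expected}"

print(f"rope_scaling factor lengths OK ({expected} entries each)")
```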