Leyo committed on
Commit
8254757
1 Parent(s): 20f7712

fix config

Browse files
Files changed (1) hide show
  1. config.json +2 -5
config.json CHANGED
@@ -8,19 +8,16 @@
8
  "AutoModel": "HuggingFaceM4/siglip-so400m-14-384-flash-attn2--modeling_siglip.SiglipModel"
9
  },
10
  "initializer_factor": 1.0,
11
- "logit_scale_init_value": 2.6592,
12
  "model_type": "siglip",
13
- "projection_dim": 512,
14
  "text_config": {
15
  "hidden_size": 1152,
16
  "intermediate_size": 4304,
17
  "model_type": "siglip_text_model",
18
  "num_attention_heads": 16,
19
- "num_hidden_layers": 27,
20
- "vocab_size": 32000
21
  },
22
  "torch_dtype": "float32",
23
- "transformers_version": "4.35.2",
24
  "vision_config": {
25
  "hidden_size": 1152,
26
  "image_size": 384,
 
8
  "AutoModel": "HuggingFaceM4/siglip-so400m-14-384-flash-attn2--modeling_siglip.SiglipModel"
9
  },
10
  "initializer_factor": 1.0,
 
11
  "model_type": "siglip",
 
12
  "text_config": {
13
  "hidden_size": 1152,
14
  "intermediate_size": 4304,
15
  "model_type": "siglip_text_model",
16
  "num_attention_heads": 16,
17
+ "num_hidden_layers": 27
 
18
  },
19
  "torch_dtype": "float32",
20
+ "transformers_version": "4.37.0.dev0",
21
  "vision_config": {
22
  "hidden_size": 1152,
23
  "image_size": 384,