shubham008 committed on
Commit
ba445af
1 Parent(s): 2dd2b12

Update config.json

Files changed (1):
  config.json  +9 -10
config.json CHANGED
@@ -1,13 +1,13 @@
 {
-  "_name_or_path": "shubham008/phi-1_5-finetuned-gsm8k",
+  "_name_or_path": "microsoft/phi-1_5",
   "architectures": [
     "PhiForCausalLM"
   ],
-  "attention_dropout": 0.0,
   "auto_map": {
-    "AutoConfig": "microsoft/phi-1_5--configuration_phi.PhiConfig",
-    "AutoModelForCausalLM": "microsoft/phi-1_5--modeling_phi.PhiForCausalLM"
+    "AutoConfig": "configuration_phi.PhiConfig",
+    "AutoModelForCausalLM": "modeling_phi.PhiForCausalLM"
   },
+  "attention_dropout": 0.0,
   "bos_token_id": null,
   "embd_pdrop": 0.0,
   "eos_token_id": null,
@@ -20,16 +20,15 @@
   "model_type": "phi",
   "num_attention_heads": 32,
   "num_hidden_layers": 24,
-  "num_key_value_heads": 32,
+  "num_key_value_heads": null,
   "partial_rotary_factor": 0.5,
   "qk_layernorm": false,
   "resid_pdrop": 0.0,
   "rope_scaling": null,
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
-  "torch_dtype": "float32",
-  "transformers_version": "4.38.2",
+  "torch_dtype": "float16",
+  "transformers_version": "4.37.0",
   "use_cache": true,
-  "vocab_size": 51200,
-  "trust_remote_code": true
-}
+  "vocab_size": 51200
+}
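
For context, a minimal sketch of how the updated config is consumed at load time, assuming this commit's repo id (shubham008/phi-1_5-finetuned-gsm8k) and the standard transformers loading APIs; nothing below is part of the commit itself. trust_remote_code is a from_pretrained() argument rather than a config.json key, which matches this commit dropping it from the file, and passing torch_dtype="auto" tells transformers to honor the new "torch_dtype": "float16" entry. The shortened "auto_map" paths now resolve configuration_phi.py and modeling_phi.py from this repo itself rather than pulling them from microsoft/phi-1_5.

from transformers import AutoModelForCausalLM, AutoTokenizer

# Assumption: loading from this commit's repo; substitute your own id as needed.
model_id = "shubham008/phi-1_5-finetuned-gsm8k"

tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype="auto",      # pick up the config's "torch_dtype": "float16"
    trust_remote_code=True,  # opt in to the custom code referenced in "auto_map"
)

One note on "num_key_value_heads": null: in the stock PhiConfig, a null value falls back to num_attention_heads, i.e. plain multi-head attention rather than grouped-query attention, so this edit is equivalent to the old explicit value of 32.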