Aryanne committed on
Commit
1aedbf0
1 Parent(s): 668b672

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +6 -1
config.json CHANGED
@@ -5,6 +5,11 @@
5
  "PhiForCausalLM"
6
  ],
7
  "attn_pdrop": 0.0,
 
 
 
 
 
8
  "embd_pdrop": 0.0,
9
  "flash_attn": false,
10
  "flash_rotary": false,
@@ -26,4 +31,4 @@
26
  "transformers_version": "4.35.2",
27
  "use_cache": true,
28
  "vocab_size": 51200
29
- }
 
5
  "PhiForCausalLM"
6
  ],
7
  "attn_pdrop": 0.0,
8
+ "auto_map": {
9
+ "AutoConfig": "Yhyu13/LMCocktail-phi-2-v1--configuration_phi.PhiConfig",
10
+ "AutoModel": "Yhyu13/LMCocktail-phi-2-v1--modeling_phi.PhiForCausalLM",
11
+ "AutoModelForCausalLM": "Yhyu13/LMCocktail-phi-2-v1--modeling_phi.PhiForCausalLM"
12
+ },
13
  "embd_pdrop": 0.0,
14
  "flash_attn": false,
15
  "flash_rotary": false,
 
31
  "transformers_version": "4.35.2",
32
  "use_cache": true,
33
  "vocab_size": 51200
34
+ }