yuhaofeng-shiba committed on
Commit 978ce63
1 Parent(s): 70b9b87

Upload llama_13b_config.json

Files changed (1)
  1. config/llama_13b_config.json +21 -0
config/llama_13b_config.json ADDED
@@ -0,0 +1,21 @@
+ {
+ "emb_size": 5120,
+ "feedforward_size": 13824,
+ "hidden_size": 5120,
+ "hidden_act": "silu",
+ "heads_num": 40,
+ "layers_num": 40,
+ "dropout": 0.1,
+ "data_processor": "lm",
+ "max_seq_length": 2048,
+ "embedding": ["word"],
+ "remove_transformer_bias": true,
+ "remove_embedding_layernorm": true,
+ "rotary_position_embedding": true,
+ "encoder": "transformer",
+ "feed_forward": "gated",
+ "mask": "causal",
+ "layernorm_positioning": "pre",
+ "layernorm": "rms",
+ "target": ["lm"]
+ }
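
For reference, a minimal sketch (not part of this commit) of how a config like this might be loaded and sanity-checked with plain Python; the relative path and the printed summary are assumptions, not an API from this repo.

import json

# Load the uploaded config (path assumed relative to the repo root).
with open("config/llama_13b_config.json") as f:
    config = json.load(f)

# Sanity check: hidden_size must split evenly across attention heads.
assert config["hidden_size"] % config["heads_num"] == 0
head_dim = config["hidden_size"] // config["heads_num"]

# For this file: 40 layers, 40 heads, head_dim = 5120 // 40 = 128.
print(f"layers={config['layers_num']} heads={config['heads_num']} head_dim={head_dim}")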