danielhanchen committed
Commit a838637
1 Parent(s): 0128442

Upload GemmaForCausalLM

Files changed (1):
  1. config.json +1 -1
config.json CHANGED
@@ -17,7 +17,7 @@
   "num_attention_heads": 16,
   "num_hidden_layers": 28,
   "num_key_value_heads": 16,
-  "pad_token_id": 3,
+  "pad_token_id": 0,
   "rms_norm_eps": 1e-06,
   "rope_scaling": null,
   "rope_theta": 10000.0,