KaleiNeely committed on
Commit
77919cc
1 Parent(s): 3a9a7ca

Upload config.json

Files changed (1): config.json +6 -7
config.json CHANGED
@@ -1,25 +1,24 @@
 {
   "architectures": [
-    "RwkvForCausalLM"
+    "Rwkv5ForCausalLM"
   ],
   "auto_map": {
     "AutoConfig": "configuration_rwkv5.Rwkv5Config",
-    "AutoModelForCausalLM": "modeling_rwkv5.RwkvForCausalLM"
+    "AutoModelForCausalLM": "modeling_rwkv5.Rwkv5ForCausalLM"
   },
-  "attention_hidden_size": 2560,
+  "attention_hidden_size": 2048,
   "bos_token_id": 0,
   "context_length": 4096,
   "eos_token_id": 0,
   "head_size": 64,
-  "hidden_size": 2560,
+  "hidden_size": 2048,
   "intermediate_size": null,
   "layer_norm_epsilon": 1e-05,
   "model_type": "rwkv5",
-  "model_version": "5_2",
-  "num_hidden_layers": 32,
+  "num_hidden_layers": 24,
   "rescale_every": 6,
   "tie_word_embeddings": false,
-  "transformers_version": "4.34.0",
+  "transformers_version": "4.33.1",
   "use_cache": true,
   "vocab_size": 65536
 }
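
For reference, a minimal sketch of loading the updated config and model with transformers. The repo id below is a placeholder, not the actual repository name, and trust_remote_code=True is required because auto_map points at the custom configuration_rwkv5.py / modeling_rwkv5.py files shipped alongside this config (the renamed Rwkv5ForCausalLM class must be defined there):

from transformers import AutoConfig, AutoModelForCausalLM

# "your-org/rwkv5-model" is a placeholder repo id for illustration only.
# trust_remote_code=True lets transformers resolve auto_map to the custom
# Rwkv5Config / Rwkv5ForCausalLM classes defined in the repository.
config = AutoConfig.from_pretrained("your-org/rwkv5-model", trust_remote_code=True)
print(config.hidden_size, config.num_hidden_layers)  # 2048, 24 after this commit

model = AutoModelForCausalLM.from_pretrained("your-org/rwkv5-model", trust_remote_code=True)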