system (HF staff) committed
Commit 590af1c
1 parent: 9ad4eca

Update config.json

Files changed (1)
config.json: +5 -9
config.json CHANGED
@@ -7,18 +7,16 @@
   "d_model": 1024,
   "decoder_start_token_id": 0,
   "dropout_rate": 0.1,
-  "finetuning_task": null,
+  "eos_token_id": 1,
   "initializer_factor": 1.0,
-  "is_decoder": false,
+  "is_encoder_decoder": true,
   "layer_norm_epsilon": 1e-06,
+  "model_type": "t5",
   "n_positions": 512,
   "num_heads": 16,
-  "num_labels": 2,
   "num_layers": 24,
-  "output_attentions": false,
-  "output_hidden_states": false,
   "output_past": true,
-  "pruned_heads": {},
+  "pad_token_id": 0,
   "relative_attention_num_buckets": 32,
   "task_specific_params": {
     "summarization": {
@@ -49,7 +47,5 @@
       "prefix": "translate English to Romanian: "
     }
   },
-  "torchscript": false,
-  "use_bfloat16": false,
   "vocab_size": 32128
-}
+}
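
For context, a minimal sketch (not part of the commit) of how the keys added here are consumed by the transformers library. The repository id "t5-large" is an assumption inferred from the hyperparameters in this config (d_model=1024, num_layers=24, num_heads=16); substitute the actual repo id when using it.

from transformers import AutoConfig

# Hypothetical repo id; replace with the id of this repository.
config = AutoConfig.from_pretrained("t5-large")

# The keys introduced in this commit are exposed as config attributes:
print(config.model_type)          # "t5"  -> lets AutoConfig/AutoModel resolve the T5 classes
print(config.is_encoder_decoder)  # True  -> generation follows the encoder-decoder path
print(config.pad_token_id)        # 0     -> default padding token id
print(config.eos_token_id)        # 1     -> default end-of-sequence token id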