tiendung committed on
Commit cbc6860
1 Parent(s): ed871ee

add base_20-percent

base_20-percent/discriminator.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6625c5f902a0861804e27a60f08d3bc0278b64ba11345b86a6e79362f892420c
+ size 247615940
base_20-percent/generator.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0e6a55ce7c6096edaf28572f39e43e27228d6837acd77a1d9e0601162f9fe663
+ size 124844243
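
Both weight files are stored as Git LFS pointers, so only the oid and size above live in the repository; the binaries are fetched separately. A minimal sketch for checking a downloaded copy against the sha256 recorded in the pointer (the local path is an assumption, not part of this commit):

    import hashlib

    # Hash a local copy of generator.bin in chunks and compare it with the oid
    # recorded in the LFS pointer above; the path is a placeholder.
    def sha256_of(path, chunk_size=1 << 20):
        digest = hashlib.sha256()
        with open(path, "rb") as f:
            for block in iter(lambda: f.read(chunk_size), b""):
                digest.update(block)
        return digest.hexdigest()

    expected = "0e6a55ce7c6096edaf28572f39e43e27228d6837acd77a1d9e0601162f9fe663"
    print(sha256_of("base_20-percent/generator.bin") == expected)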
base_20-percent/model_config.json ADDED
@@ -0,0 +1,61 @@
+ {
+   "attention_probs_dropout_prob": 0.1,
+   "discriminator": {
+     "attention_head_size": 64,
+     "attention_probs_dropout_prob": 0.1,
+     "hidden_act": "gelu",
+     "hidden_dropout_prob": 0.1,
+     "hidden_size": 768,
+     "initializer_range": 0.02,
+     "intermediate_size": 3072,
+     "layer_norm_eps": 1e-07,
+     "max_position_embeddings": 512,
+     "max_relative_positions": -1,
+     "norm_rel_ebd": "layer_norm",
+     "num_attention_heads": 12,
+     "num_hidden_layers": 12,
+     "pos_att_type": "p2c|c2p",
+     "position_biased_input": false,
+     "position_buckets": 256,
+     "relative_attention": true,
+     "share_att_key": true,
+     "type_vocab_size": 0,
+     "vocab_size": 24064
+   },
+   "embedding_sharing": "gdes",
+   "generator": {
+     "attention_head_size": 64,
+     "attention_probs_dropout_prob": 0.1,
+     "hidden_act": "gelu",
+     "hidden_dropout_prob": 0.1,
+     "hidden_size": 768,
+     "initializer_range": 0.02,
+     "intermediate_size": 3072,
+     "layer_norm_eps": 1e-07,
+     "max_position_embeddings": 512,
+     "max_relative_positions": -1,
+     "norm_rel_ebd": "layer_norm",
+     "num_attention_heads": 12,
+     "num_hidden_layers": 6,
+     "pos_att_type": "p2c|c2p",
+     "position_biased_input": false,
+     "position_buckets": 256,
+     "relative_attention": true,
+     "share_att_key": true,
+     "type_vocab_size": 0,
+     "vocab_size": 24064
+   },
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 768,
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "layer_norm_eps": 1e-07,
+   "max_position_embeddings": 512,
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "padding_idx": 0,
+   "type_vocab_size": 0,
+   "vocab_size": -1
+ }
+
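
model_config.json bundles two sub-configs, a 12-layer discriminator and a 6-layer generator, with "embedding_sharing": "gdes", which matches the DeBERTaV3-style gradient-disentangled embedding sharing setup. A minimal sketch for inspecting the discriminator half, assuming the nested section can be passed to Hugging Face's DebertaV2Config (the commit itself does not specify a loading path, and the local file path is a placeholder):

    import json
    from transformers import DebertaV2Config, DebertaV2Model

    # Read the nested config committed above and pull out the discriminator section;
    # using DebertaV2Config/DebertaV2Model here is an assumption, not part of this commit.
    with open("base_20-percent/model_config.json") as f:
        nested = json.load(f)

    disc_config = DebertaV2Config(**nested["discriminator"])
    model = DebertaV2Model(disc_config)  # randomly initialised skeleton; trained weights live in discriminator.bin
    print(disc_config.num_hidden_layers, disc_config.vocab_size)  # 12 24064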
base_20-percent/nohup.out ADDED
The diff for this file is too large to render. See raw diff