lailamt committed on
Commit
b9599f5
1 Parent(s): 69591fb

Training in progress, step 100

Browse files
Files changed (4) hide show
  1. README.md +25 -32
  2. config.json +14 -18
  3. model.safetensors +2 -2
  4. training_args.bin +1 -1
README.md CHANGED
@@ -1,6 +1,6 @@
1
  ---
2
  license: mit
3
- base_model: PORTULAN/albertina-ptbr-base
4
  tags:
5
  - generated_from_trainer
6
  model-index:
@@ -13,9 +13,9 @@ should probably proofread and complete it, then remove this comment. -->
13
 
14
  # e3_lr2e-05
15
 
16
- This model is a fine-tuned version of [PORTULAN/albertina-ptbr-base](https://huggingface.co/PORTULAN/albertina-ptbr-base) on an unknown dataset.
17
  It achieves the following results on the evaluation set:
18
- - Loss: 0.9338
19
 
20
  ## Model description
21
 
@@ -49,39 +49,32 @@ The following hyperparameters were used during training:
49
 
50
  | Training Loss | Epoch | Step | Validation Loss |
51
  |:-------------:|:------:|:----:|:---------------:|
52
- | 1.4061 | 0.1040 | 100 | 1.1920 |
53
- | 1.2553 | 0.2080 | 200 | 1.1209 |
54
- | 1.2102 | 0.3120 | 300 | 1.0971 |
55
- | 1.1773 | 0.4160 | 400 | 1.0738 |
56
- | 1.1432 | 0.5200 | 500 | 1.0481 |
57
- | 1.1302 | 0.6240 | 600 | 1.0320 |
58
- | 1.1153 | 0.7280 | 700 | 1.0243 |
59
- | 1.1057 | 0.8320 | 800 | 1.0107 |
60
- | 1.0976 | 0.9360 | 900 | 1.0002 |
61
- | 1.0889 | 1.0400 | 1000 | 0.9907 |
62
- | 1.0797 | 1.1440 | 1100 | 0.9836 |
63
- | 1.0633 | 1.2480 | 1200 | 0.9788 |
64
- | 1.0582 | 1.3521 | 1300 | 0.9761 |
65
- | 1.0578 | 1.4561 | 1400 | 0.9635 |
66
- | 1.0423 | 1.5601 | 1500 | 0.9601 |
67
- | 1.0411 | 1.6641 | 1600 | 0.9578 |
68
- | 1.0406 | 1.7681 | 1700 | 0.9527 |
69
- | 1.0436 | 1.8721 | 1800 | 0.9520 |
70
- | 1.0363 | 1.9761 | 1900 | 0.9443 |
71
- | 1.0274 | 2.0801 | 2000 | 0.9419 |
72
- | 1.03 | 2.1841 | 2100 | 0.9417 |
73
- | 1.0232 | 2.2881 | 2200 | 0.9392 |
74
- | 1.0237 | 2.3921 | 2300 | 0.9374 |
75
- | 1.0199 | 2.4961 | 2400 | 0.9354 |
76
- | 1.0095 | 2.6001 | 2500 | 0.9399 |
77
- | 1.0145 | 2.7041 | 2600 | 0.9343 |
78
- | 1.0179 | 2.8081 | 2700 | 0.9297 |
79
- | 1.0148 | 2.9121 | 2800 | 0.9328 |
80
 
81
 
82
  ### Framework versions
83
 
84
  - Transformers 4.41.2
85
  - Pytorch 2.3.0+cu121
86
- - Datasets 2.20.0
87
  - Tokenizers 0.19.1
 
1
  ---
2
  license: mit
3
+ base_model: FacebookAI/xlm-roberta-base
4
  tags:
5
  - generated_from_trainer
6
  model-index:
 
13
 
14
  # e3_lr2e-05
15
 
16
+ This model is a fine-tuned version of [FacebookAI/xlm-roberta-base](https://huggingface.co/FacebookAI/xlm-roberta-base) on an unknown dataset.
17
  It achieves the following results on the evaluation set:
18
+ - Loss: 1.6436
19
 
20
  ## Model description
21
 
 
49
 
50
  | Training Loss | Epoch | Step | Validation Loss |
51
  |:-------------:|:------:|:----:|:---------------:|
52
+ | 2.9961 | 0.1404 | 100 | 1.9416 |
53
+ | 2.0472 | 0.2808 | 200 | 1.8589 |
54
+ | 1.9766 | 0.4212 | 300 | 1.8095 |
55
+ | 1.9319 | 0.5616 | 400 | 1.7736 |
56
+ | 1.897 | 0.7021 | 500 | 1.7447 |
57
+ | 1.8743 | 0.8425 | 600 | 1.7370 |
58
+ | 1.86 | 0.9829 | 700 | 1.7156 |
59
+ | 1.8431 | 1.1233 | 800 | 1.7071 |
60
+ | 1.8217 | 1.2637 | 900 | 1.6939 |
61
+ | 1.8212 | 1.4041 | 1000 | 1.6900 |
62
+ | 1.8053 | 1.5445 | 1100 | 1.6774 |
63
+ | 1.7899 | 1.6849 | 1200 | 1.6736 |
64
+ | 1.799 | 1.8254 | 1300 | 1.6644 |
65
+ | 1.7845 | 1.9658 | 1400 | 1.6559 |
66
+ | 1.7704 | 2.1062 | 1500 | 1.6531 |
67
+ | 1.776 | 2.2466 | 1600 | 1.6528 |
68
+ | 1.773 | 2.3870 | 1700 | 1.6417 |
69
+ | 1.7632 | 2.5274 | 1800 | 1.6452 |
70
+ | 1.7451 | 2.6678 | 1900 | 1.6460 |
71
+ | 1.7505 | 2.8088 | 2000 | 1.6455 |
72
+ | 1.7602 | 2.9492 | 2100 | 1.6399 |
 
 
 
 
 
 
 
73
 
74
 
75
  ### Framework versions
76
 
77
  - Transformers 4.41.2
78
  - Pytorch 2.3.0+cu121
79
+ - Datasets 2.19.2
80
  - Tokenizers 0.19.1
config.json CHANGED
@@ -1,32 +1,28 @@
1
  {
2
- "_name_or_path": "PORTULAN/albertina-ptbr-base",
3
  "architectures": [
4
- "DebertaForMaskedLM"
5
  ],
6
  "attention_probs_dropout_prob": 0.1,
 
 
 
7
  "hidden_act": "gelu",
8
  "hidden_dropout_prob": 0.1,
9
  "hidden_size": 768,
10
  "initializer_range": 0.02,
11
  "intermediate_size": 3072,
12
- "layer_norm_eps": 1e-07,
13
- "max_position_embeddings": 512,
14
- "max_relative_positions": -1,
15
- "model_type": "deberta",
16
  "num_attention_heads": 12,
17
  "num_hidden_layers": 12,
18
- "pad_token_id": 0,
19
- "pooler_dropout": 0,
20
- "pooler_hidden_act": "gelu",
21
- "pooler_hidden_size": 768,
22
- "pos_att_type": [
23
- "c2p",
24
- "p2c"
25
- ],
26
- "position_biased_input": false,
27
- "relative_attention": true,
28
  "torch_dtype": "float32",
29
  "transformers_version": "4.41.2",
30
- "type_vocab_size": 0,
31
- "vocab_size": 50265
 
32
  }
 
1
  {
2
+ "_name_or_path": "FacebookAI/xlm-roberta-base",
3
  "architectures": [
4
+ "XLMRobertaForMaskedLM"
5
  ],
6
  "attention_probs_dropout_prob": 0.1,
7
+ "bos_token_id": 0,
8
+ "classifier_dropout": null,
9
+ "eos_token_id": 2,
10
  "hidden_act": "gelu",
11
  "hidden_dropout_prob": 0.1,
12
  "hidden_size": 768,
13
  "initializer_range": 0.02,
14
  "intermediate_size": 3072,
15
+ "layer_norm_eps": 1e-05,
16
+ "max_position_embeddings": 514,
17
+ "model_type": "xlm-roberta",
 
18
  "num_attention_heads": 12,
19
  "num_hidden_layers": 12,
20
+ "output_past": true,
21
+ "pad_token_id": 1,
22
+ "position_embedding_type": "absolute",
 
 
 
 
 
 
 
23
  "torch_dtype": "float32",
24
  "transformers_version": "4.41.2",
25
+ "type_vocab_size": 1,
26
+ "use_cache": true,
27
+ "vocab_size": 250002
28
  }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:c5a85a6a8b3af6395d11c94e5f6e370c8e87f7ef519f178ecd69693f7dc3f72c
3
- size 557000804
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ec0c1da0e2c253a93f182aa3b1f765b438375de7a550b7d6156b9bb8eeaae8d5
3
+ size 1113205088
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:6625608206e0211e37aea30e8795c5d9ac17a3078892d73782e5c65f5ae941b0
3
  size 5240
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8176ab32e064b0b381bdbb0bd9a1484f61332adc97cc10eed787cd57ad090710
3
  size 5240