# Fine-tuning setup — presumably token classification on the "ECO" dataset
# (the logged metrics below are precision/recall/F1) — TODO confirm task.
model_id = 'SpanBERT/spanbert-base-cased'

# NOTE: the original snippet had this call collapsed onto a single line, so the
# inline comments commented out the trailing arguments and the closing paren.
training_args = TrainingArguments(
    output_dir='ECO_SPANBERT_CASED_FINAL_2',
    evaluation_strategy="epoch",
    learning_rate=2e-5,
    per_device_train_batch_size=8,  # has been decreased
    per_device_eval_batch_size=8,
    num_train_epochs=32,
    logging_steps=10,
    warmup_steps=500,  # newly added to avoid overfitting
    weight_decay=0.01,
    save_strategy="epoch",  # must match evaluation_strategy for load_best_model_at_end
    load_best_model_at_end=True,
    push_to_hub=True,
)
| Epoch | Training Loss | Validation Loss | Precision | Recall | F1 | Accuracy |
|------:|--------------:|----------------:|----------:|-------:|---:|---------:|
| 1 | 1.519100 | 1.422570 | 0.000000 | 0.000000 | 0.000000 | 0.980338 |
| 2 | 0.234600 | 0.231327 | 0.000000 | 0.000000 | 0.000000 | 0.980338 |
| 3 | 0.190900 | 0.176004 | 0.000000 | 0.000000 | 0.000000 | 0.980338 |
| 4 | 0.171200 | 0.168424 | 0.000000 | 0.000000 | 0.000000 | 0.980338 |
| 5 | 0.110200 | 0.107751 | 0.190217 | 0.009437 | 0.017981 | 0.980422 |
| 6 | 0.089400 | 0.091258 | 0.264650 | 0.224050 | 0.242663 | 0.983040 |
| 7 | 0.068700 | 0.079950 | 0.375423 | 0.328660 | 0.350489 | 0.984175 |
| 8 | 0.043900 | 0.078727 | 0.395198 | 0.377191 | 0.385984 | 0.985377 |
| 9 | 0.038600 | 0.073655 | 0.381506 | 0.389323 | 0.385375 | 0.985542 |
| 10 | 0.029300 | 0.077241 | 0.415283 | 0.424912 | 0.420043 | 0.985482 |
| 11 | 0.027400 | 0.078488 | 0.431647 | 0.432462 | 0.432054 | 0.985610 |
| 12 | 0.027900 | 0.075835 | 0.421053 | 0.450795 | 0.435417 | 0.985396 |
| 13 | 0.021400 | 0.080580 | 0.431228 | 0.456457 | 0.443484 | 0.985846 |
| 14 | 0.018100 | 0.081772 | 0.445903 | 0.448908 | 0.447400 | 0.986270 |
| 15 | 0.012800 | 0.081291 | 0.429383 | 0.448369 | 0.438671 | 0.985848 |
| 16 | 0.014100 | 0.082664 | 0.429111 | 0.459423 | 0.443750 | 0.985404 |
| 17 | 0.011500 | 0.086055 | 0.448276 | 0.480183 | 0.463681 | 0.985750 |
| 18 | 0.009100 | 0.085849 | 0.451182 | 0.473443 | 0.462044 | 0.986091 |
| 19 | 0.006400 | 0.089485 | 0.446993 | 0.472904 | 0.459583 | 0.985992 |
| 20 | 0.006600 | 0.088353 | 0.455716 | 0.477218 | 0.466219 | 0.986067 |
| 21 | 0.007100 | 0.089834 | 0.474251 | 0.486654 | 0.480373 | 0.986350 |
| 22 | 0.005500 | 0.089703 | 0.446708 | 0.484767 | 0.464960 | 0.985970 |
| 23 | 0.003400 | 0.098736 | 0.461124 | 0.484497 | 0.472522 | 0.985809 |
| 24 | 0.004200 | 0.095882 | 0.462573 | 0.491507 | 0.476601 | 0.986134 |
| 25 | 0.005000 | 0.097225 | 0.469533 | 0.479914 | 0.474667 | 0.986032 |
| 26 | 0.003300 | 0.099022 | 0.466177 | 0.496091 | 0.480669 | 0.986213 |
| 27 | 0.004900 | 0.100886 | 0.463742 | 0.493125 | 0.477982 | 0.986165 |
| 28 | 0.003100 | 0.100305 | 0.479481 | 0.497708 | 0.488424 | 0.986468 |
| 29 | 0.003000 | 0.100852 | 0.472539 | 0.491777 | 0.481966 | 0.986455 |
| 30 | 0.003900 | 0.100632 | 0.472002 | 0.497708 | 0.484514 | 0.986548 |
| 31 | 0.003400 | 0.102785 | 0.462159 | 0.493934 | 0.477519 | 0.986141 |
| 32 | 0.003700 | 0.102922 | 0.470964 | 0.496360 | 0.483329 | 0.986299 |
- Downloads last month: 2