{ "best_metric": 0.8157061201962169, "best_model_checkpoint": "/content/result/unsup-simcse-roberta-large-semeval2016-laptops", "epoch": 30.0, "global_step": 1200, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 3.12, "eval_avg_sts": 0.7679912497622495, "eval_sickr_spearman": 0.7276956826491402, "eval_stsb_spearman": 0.8082868168753589, "step": 125 }, { "epoch": 6.25, "eval_avg_sts": 0.7686040961574563, "eval_sickr_spearman": 0.7215020721186955, "eval_stsb_spearman": 0.8157061201962169, "step": 250 }, { "epoch": 9.38, "eval_avg_sts": 0.7750921030285816, "eval_sickr_spearman": 0.740750295864512, "eval_stsb_spearman": 0.8094339101926511, "step": 375 }, { "epoch": 12.5, "learning_rate": 2.916666666666667e-05, "loss": 0.0019, "step": 500 }, { "epoch": 12.5, "eval_avg_sts": 0.7645168841110166, "eval_sickr_spearman": 0.7163253760678987, "eval_stsb_spearman": 0.8127083921541344, "step": 500 }, { "epoch": 15.62, "eval_avg_sts": 0.7631076478210288, "eval_sickr_spearman": 0.7148924642186953, "eval_stsb_spearman": 0.8113228314233623, "step": 625 }, { "epoch": 18.75, "eval_avg_sts": 0.7641477709859748, "eval_sickr_spearman": 0.7158616838146151, "eval_stsb_spearman": 0.8124338581573343, "step": 750 }, { "epoch": 21.88, "eval_avg_sts": 0.7603328876767634, "eval_sickr_spearman": 0.7131653618731838, "eval_stsb_spearman": 0.8075004134803432, "step": 875 }, { "epoch": 25.0, "learning_rate": 8.333333333333334e-06, "loss": 0.0004, "step": 1000 }, { "epoch": 25.0, "eval_avg_sts": 0.7593813987808072, "eval_sickr_spearman": 0.7131961017781041, "eval_stsb_spearman": 0.8055666957835104, "step": 1000 }, { "epoch": 25.0, "eval_avg_sts": 0.7593813987808072, "eval_sickr_spearman": 0.7131961017781041, "eval_stsb_spearman": 0.8055666957835104, "step": 1000 }, { "epoch": 28.12, "eval_avg_sts": 0.7595589061661745, "eval_sickr_spearman": 0.7140072029880896, "eval_stsb_spearman": 0.8051106093442593, "step": 1125 }, { "epoch": 30.0, "step": 1200, "train_runtime": 2594.2478, "train_samples_per_second": 0.463 } ], "max_steps": 1200, "num_train_epochs": 30, "total_flos": 10230181596610560, "trial_name": null, "trial_params": null }