{
    "epoch": 13.0,
    "total_flos": 4.79429356683264e+16,
    "train_loss": 0.810044450592671,
    "train_runtime": 2019.2323,
    "train_samples": 136,
    "train_samples_per_second": 3.368,
    "train_steps_per_second": 0.421
}
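
This appears to be the train_results.json metrics file that the Hugging Face Transformers Trainer writes at the end of a training run. Below is a minimal sketch, assuming the file is saved locally as train_results.json, showing how to load and inspect these metrics; the effective-batch-size line is an inference from the ratio of the reported throughput numbers, not something stated in the file itself.

import json

# Load the Trainer's end-of-run training metrics (hypothetical local path).
with open("train_results.json") as f:
    metrics = json.load(f)

print(f"epochs trained:       {metrics['epoch']:.1f}")
print(f"final training loss:  {metrics['train_loss']:.4f}")
print(f"wall-clock runtime:   {metrics['train_runtime']:.1f} s")

# samples/sec divided by steps/sec gives the effective batch size per optimizer
# step (per-device batch size x gradient accumulation x number of devices).
effective_batch = metrics["train_samples_per_second"] / metrics["train_steps_per_second"]
print(f"effective batch size: {effective_batch:.0f}")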