{
    "epoch": 1.9998993609419817,
    "num_input_tokens_seen": 162790400,
    "total_flos": 1.059416318592e+16,
    "train_loss": 4.377665276304727,
    "train_runtime": 2972.3443,
    "train_samples": 158982,
    "train_samples_per_second": 106.974,
    "train_steps_per_second": 0.836,
    "train_tokens_per_second": 54770.764
}