[
  {
    "loss": 1.6234498291015624,
    "learning_rate": 4.4444444444444447e-05,
    "epoch": 0.2222222222222222,
    "total_flos": 9511281623040000,
    "step": 500
  },
  {
    "loss": 1.4528057861328125,
    "learning_rate": 3.888888888888889e-05,
    "epoch": 0.4444444444444444,
    "total_flos": 19022563246080000,
    "step": 1000
  },
  {
    "loss": 1.400372802734375,
    "learning_rate": 3.3333333333333335e-05,
    "epoch": 0.6666666666666666,
    "total_flos": 28533844869120000,
    "step": 1500
  },
  {
    "loss": 1.364806640625,
    "learning_rate": 2.777777777777778e-05,
    "epoch": 0.8888888888888888,
    "total_flos": 38045126492160000,
    "step": 2000
  },
  {
    "loss": 1.25629931640625,
    "learning_rate": 2.2222222222222223e-05,
    "epoch": 1.1111111111111112,
    "total_flos": 47556408115200000,
    "step": 2500
  },
  {
    "loss": 1.16014453125,
    "learning_rate": 1.6666666666666667e-05,
    "epoch": 1.3333333333333333,
    "total_flos": 57067689738240000,
    "step": 3000
  },
  {
    "loss": 1.15758203125,
    "learning_rate": 1.1111111111111112e-05,
    "epoch": 1.5555555555555556,
    "total_flos": 66578971361280000,
    "step": 3500
  },
  {
    "loss": 1.133671875,
    "learning_rate": 5.555555555555556e-06,
    "epoch": 1.7777777777777777,
    "total_flos": 76090252984320000,
    "step": 4000
  },
  {
    "loss": 1.144353515625,
    "learning_rate": 0.0,
    "epoch": 2.0,
    "total_flos": 85601534607360000,
    "step": 4500
  }
]