{
  "best_metric": 28.965267965695134,
  "best_model_checkpoint": "./checkpoint-5000",
  "epoch": 70.4225352112676,
  "eval_steps": 1000,
  "global_step": 5000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.35,
      "learning_rate": 4.4e-08,
      "loss": 1.4431,
      "step": 25
    },
    {
      "epoch": 0.7,
      "learning_rate": 9.4e-08,
      "loss": 1.4443,
      "step": 50
    },
    {
      "epoch": 1.06,
      "learning_rate": 1.44e-07,
      "loss": 1.4288,
      "step": 75
    },
    {
      "epoch": 1.41,
      "learning_rate": 1.94e-07,
      "loss": 1.3802,
      "step": 100
    },
    {
      "epoch": 1.76,
      "learning_rate": 2.4399999999999996e-07,
      "loss": 1.311,
      "step": 125
    },
    {
      "epoch": 2.11,
      "learning_rate": 2.9399999999999996e-07,
      "loss": 1.2618,
      "step": 150
    },
    {
      "epoch": 2.46,
      "learning_rate": 3.4399999999999996e-07,
      "loss": 1.1683,
      "step": 175
    },
    {
      "epoch": 2.82,
      "learning_rate": 3.94e-07,
      "loss": 1.1132,
      "step": 200
    },
    {
      "epoch": 3.17,
      "learning_rate": 4.44e-07,
      "loss": 1.0068,
      "step": 225
    },
    {
      "epoch": 3.52,
      "learning_rate": 4.94e-07,
      "loss": 0.8763,
      "step": 250
    },
    {
      "epoch": 3.87,
      "learning_rate": 5.44e-07,
      "loss": 0.7522,
      "step": 275
    },
    {
      "epoch": 4.23,
      "learning_rate": 5.939999999999999e-07,
      "loss": 0.6957,
      "step": 300
    },
    {
      "epoch": 4.58,
      "learning_rate": 6.44e-07,
      "loss": 0.6593,
      "step": 325
    },
    {
      "epoch": 4.93,
      "learning_rate": 6.939999999999999e-07,
      "loss": 0.6523,
      "step": 350
    },
    {
      "epoch": 5.28,
      "learning_rate": 7.44e-07,
      "loss": 0.6342,
      "step": 375
    },
    {
      "epoch": 5.63,
      "learning_rate": 7.94e-07,
      "loss": 0.6159,
      "step": 400
    },
    {
      "epoch": 5.99,
      "learning_rate": 8.439999999999999e-07,
      "loss": 0.6051,
      "step": 425
    },
    {
      "epoch": 6.34,
      "learning_rate": 8.939999999999999e-07,
      "loss": 0.6003,
      "step": 450
    },
    {
      "epoch": 6.69,
      "learning_rate": 9.439999999999999e-07,
      "loss": 0.586,
      "step": 475
    },
    {
      "epoch": 7.04,
      "learning_rate": 9.94e-07,
      "loss": 0.5745,
      "step": 500
    },
    {
      "epoch": 7.39,
      "learning_rate": 9.95111111111111e-07,
      "loss": 0.5695,
      "step": 525
    },
    {
      "epoch": 7.75,
      "learning_rate": 9.895555555555554e-07,
      "loss": 0.5616,
      "step": 550
    },
    {
      "epoch": 8.1,
      "learning_rate": 9.84e-07,
      "loss": 0.549,
      "step": 575
    },
    {
      "epoch": 8.45,
      "learning_rate": 9.784444444444444e-07,
      "loss": 0.5459,
      "step": 600
    },
    {
      "epoch": 8.8,
      "learning_rate": 9.728888888888888e-07,
      "loss": 0.5385,
      "step": 625
    },
    {
      "epoch": 9.15,
      "learning_rate": 9.673333333333332e-07,
      "loss": 0.5365,
      "step": 650
    },
    {
      "epoch": 9.51,
      "learning_rate": 9.617777777777776e-07,
      "loss": 0.5282,
      "step": 675
    },
    {
      "epoch": 9.86,
      "learning_rate": 9.562222222222223e-07,
      "loss": 0.5247,
      "step": 700
    },
    {
      "epoch": 10.21,
      "learning_rate": 9.506666666666667e-07,
      "loss": 0.5142,
      "step": 725
    },
    {
      "epoch": 10.56,
      "learning_rate": 9.451111111111111e-07,
      "loss": 0.5033,
      "step": 750
    },
    {
      "epoch": 10.92,
      "learning_rate": 9.395555555555556e-07,
      "loss": 0.5205,
      "step": 775
    },
    {
      "epoch": 11.27,
      "learning_rate": 9.34e-07,
      "loss": 0.4991,
      "step": 800
    },
    {
      "epoch": 11.62,
      "learning_rate": 9.284444444444444e-07,
      "loss": 0.4977,
      "step": 825
    },
    {
      "epoch": 11.97,
      "learning_rate": 9.228888888888888e-07,
      "loss": 0.5025,
      "step": 850
    },
    {
      "epoch": 12.32,
      "learning_rate": 9.173333333333333e-07,
      "loss": 0.4888,
      "step": 875
    },
    {
      "epoch": 12.68,
      "learning_rate": 9.117777777777778e-07,
      "loss": 0.4866,
      "step": 900
    },
    {
      "epoch": 13.03,
      "learning_rate": 9.062222222222222e-07,
      "loss": 0.4818,
      "step": 925
    },
    {
      "epoch": 13.38,
      "learning_rate": 9.006666666666666e-07,
      "loss": 0.4753,
      "step": 950
    },
    {
      "epoch": 13.73,
      "learning_rate": 8.95111111111111e-07,
      "loss": 0.4743,
      "step": 975
    },
    {
      "epoch": 14.08,
      "learning_rate": 8.895555555555555e-07,
      "loss": 0.4763,
      "step": 1000
    },
    {
      "epoch": 14.08,
      "eval_loss": 0.5686116814613342,
      "eval_runtime": 236.519,
      "eval_samples_per_second": 38.356,
      "eval_steps_per_second": 0.3,
      "eval_wer": 31.31140538231525,
      "step": 1000
    },
    {
      "epoch": 14.44,
      "learning_rate": 8.839999999999999e-07,
      "loss": 0.4698,
      "step": 1025
    },
    {
      "epoch": 14.79,
      "learning_rate": 8.784444444444444e-07,
      "loss": 0.4673,
      "step": 1050
    },
    {
      "epoch": 15.14,
      "learning_rate": 8.728888888888889e-07,
      "loss": 0.4527,
      "step": 1075
    },
    {
      "epoch": 15.49,
      "learning_rate": 8.673333333333332e-07,
      "loss": 0.4653,
      "step": 1100
    },
    {
      "epoch": 15.85,
      "learning_rate": 8.617777777777777e-07,
      "loss": 0.457,
      "step": 1125
    },
    {
      "epoch": 16.2,
      "learning_rate": 8.562222222222222e-07,
      "loss": 0.4479,
      "step": 1150
    },
    {
      "epoch": 16.55,
      "learning_rate": 8.506666666666667e-07,
      "loss": 0.4474,
      "step": 1175
    },
    {
      "epoch": 16.9,
      "learning_rate": 8.451111111111111e-07,
      "loss": 0.4468,
      "step": 1200
    },
    {
      "epoch": 17.25,
      "learning_rate": 8.395555555555556e-07,
      "loss": 0.4468,
      "step": 1225
    },
    {
      "epoch": 17.61,
      "learning_rate": 8.34e-07,
      "loss": 0.438,
      "step": 1250
    },
    {
      "epoch": 17.96,
      "learning_rate": 8.284444444444444e-07,
      "loss": 0.4447,
      "step": 1275
    },
    {
      "epoch": 18.31,
      "learning_rate": 8.228888888888889e-07,
      "loss": 0.4327,
      "step": 1300
    },
    {
      "epoch": 18.66,
      "learning_rate": 8.173333333333333e-07,
      "loss": 0.4316,
      "step": 1325
    },
    {
      "epoch": 19.01,
      "learning_rate": 8.117777777777778e-07,
      "loss": 0.4351,
      "step": 1350
    },
    {
      "epoch": 19.37,
      "learning_rate": 8.062222222222221e-07,
      "loss": 0.4282,
      "step": 1375
    },
    {
      "epoch": 19.72,
      "learning_rate": 8.006666666666666e-07,
      "loss": 0.4253,
      "step": 1400
    },
    {
      "epoch": 20.07,
      "learning_rate": 7.95111111111111e-07,
      "loss": 0.4213,
      "step": 1425
    },
    {
      "epoch": 20.42,
      "learning_rate": 7.895555555555555e-07,
      "loss": 0.4166,
      "step": 1450
    },
    {
      "epoch": 20.77,
      "learning_rate": 7.84e-07,
      "loss": 0.4257,
      "step": 1475
    },
    {
      "epoch": 21.13,
      "learning_rate": 7.784444444444444e-07,
      "loss": 0.4163,
      "step": 1500
    },
    {
      "epoch": 21.48,
      "learning_rate": 7.728888888888888e-07,
      "loss": 0.4112,
      "step": 1525
    },
    {
      "epoch": 21.83,
      "learning_rate": 7.673333333333332e-07,
      "loss": 0.4171,
      "step": 1550
    },
    {
      "epoch": 22.18,
      "learning_rate": 7.617777777777778e-07,
      "loss": 0.4077,
      "step": 1575
    },
    {
      "epoch": 22.54,
      "learning_rate": 7.562222222222222e-07,
      "loss": 0.4052,
      "step": 1600
    },
    {
      "epoch": 22.89,
      "learning_rate": 7.506666666666667e-07,
      "loss": 0.4077,
      "step": 1625
    },
    {
      "epoch": 23.24,
      "learning_rate": 7.451111111111111e-07,
      "loss": 0.402,
      "step": 1650
    },
    {
      "epoch": 23.59,
      "learning_rate": 7.395555555555555e-07,
      "loss": 0.4024,
      "step": 1675
    },
    {
      "epoch": 23.94,
      "learning_rate": 7.34e-07,
      "loss": 0.4018,
      "step": 1700
    },
    {
      "epoch": 24.3,
      "learning_rate": 7.284444444444444e-07,
      "loss": 0.4014,
      "step": 1725
    },
    {
      "epoch": 24.65,
      "learning_rate": 7.228888888888889e-07,
      "loss": 0.3975,
      "step": 1750
    },
    {
      "epoch": 25.0,
      "learning_rate": 7.173333333333333e-07,
      "loss": 0.3902,
      "step": 1775
    },
    {
      "epoch": 25.35,
      "learning_rate": 7.117777777777777e-07,
      "loss": 0.3928,
      "step": 1800
    },
    {
      "epoch": 25.7,
      "learning_rate": 7.062222222222222e-07,
      "loss": 0.388,
      "step": 1825
    },
    {
      "epoch": 26.06,
      "learning_rate": 7.006666666666666e-07,
      "loss": 0.387,
      "step": 1850
    },
    {
      "epoch": 26.41,
      "learning_rate": 6.951111111111111e-07,
      "loss": 0.389,
      "step": 1875
    },
    {
      "epoch": 26.76,
      "learning_rate": 6.895555555555555e-07,
      "loss": 0.3828,
      "step": 1900
    },
    {
      "epoch": 27.11,
      "learning_rate": 6.84e-07,
      "loss": 0.3846,
      "step": 1925
    },
    {
      "epoch": 27.46,
      "learning_rate": 6.784444444444443e-07,
      "loss": 0.3796,
      "step": 1950
    },
    {
      "epoch": 27.82,
      "learning_rate": 6.728888888888888e-07,
      "loss": 0.3855,
      "step": 1975
    },
    {
      "epoch": 28.17,
      "learning_rate": 6.673333333333334e-07,
      "loss": 0.3784,
      "step": 2000
    },
    {
      "epoch": 28.17,
      "eval_loss": 0.5349831581115723,
      "eval_runtime": 76.3989,
      "eval_samples_per_second": 118.745,
      "eval_steps_per_second": 0.929,
      "eval_wer": 30.069332632339897,
      "step": 2000
    },
    {
      "epoch": 28.52,
      "learning_rate": 6.617777777777778e-07,
      "loss": 0.3766,
      "step": 2025
    },
    {
      "epoch": 28.87,
      "learning_rate": 6.562222222222223e-07,
      "loss": 0.3725,
      "step": 2050
    },
    {
      "epoch": 29.23,
      "learning_rate": 6.506666666666666e-07,
      "loss": 0.3804,
      "step": 2075
    },
    {
      "epoch": 29.58,
      "learning_rate": 6.451111111111111e-07,
      "loss": 0.3759,
      "step": 2100
    },
    {
      "epoch": 29.93,
      "learning_rate": 6.395555555555555e-07,
      "loss": 0.3679,
      "step": 2125
    },
    {
      "epoch": 30.28,
      "learning_rate": 6.34e-07,
      "loss": 0.3644,
      "step": 2150
    },
    {
      "epoch": 30.63,
      "learning_rate": 6.284444444444445e-07,
      "loss": 0.3696,
      "step": 2175
    },
    {
      "epoch": 30.99,
      "learning_rate": 6.228888888888889e-07,
      "loss": 0.3699,
      "step": 2200
    },
    {
      "epoch": 31.34,
      "learning_rate": 6.173333333333333e-07,
      "loss": 0.3638,
      "step": 2225
    },
    {
      "epoch": 31.69,
      "learning_rate": 6.117777777777777e-07,
      "loss": 0.3619,
      "step": 2250
    },
    {
      "epoch": 32.04,
      "learning_rate": 6.062222222222222e-07,
      "loss": 0.3659,
      "step": 2275
    },
    {
      "epoch": 32.39,
      "learning_rate": 6.006666666666666e-07,
      "loss": 0.3584,
      "step": 2300
    },
    {
      "epoch": 32.75,
      "learning_rate": 5.951111111111111e-07,
      "loss": 0.362,
      "step": 2325
    },
    {
      "epoch": 33.1,
      "learning_rate": 5.895555555555555e-07,
      "loss": 0.3655,
      "step": 2350
    },
    {
      "epoch": 33.45,
      "learning_rate": 5.839999999999999e-07,
      "loss": 0.3552,
      "step": 2375
    },
    {
      "epoch": 33.8,
      "learning_rate": 5.784444444444444e-07,
      "loss": 0.3518,
      "step": 2400
    },
    {
      "epoch": 34.15,
      "learning_rate": 5.728888888888888e-07,
      "loss": 0.3511,
      "step": 2425
    },
    {
      "epoch": 34.51,
      "learning_rate": 5.673333333333334e-07,
      "loss": 0.3578,
      "step": 2450
    },
    {
      "epoch": 34.86,
      "learning_rate": 5.617777777777778e-07,
      "loss": 0.3536,
      "step": 2475
    },
    {
      "epoch": 35.21,
      "learning_rate": 5.562222222222222e-07,
      "loss": 0.3476,
      "step": 2500
    },
    {
      "epoch": 35.56,
      "learning_rate": 5.506666666666666e-07,
      "loss": 0.3422,
      "step": 2525
    },
    {
      "epoch": 35.92,
      "learning_rate": 5.451111111111111e-07,
      "loss": 0.3547,
      "step": 2550
    },
    {
      "epoch": 36.27,
      "learning_rate": 5.395555555555556e-07,
      "loss": 0.3524,
      "step": 2575
    },
    {
      "epoch": 36.62,
      "learning_rate": 5.34e-07,
      "loss": 0.3481,
      "step": 2600
    },
    {
      "epoch": 36.97,
      "learning_rate": 5.284444444444445e-07,
      "loss": 0.3427,
      "step": 2625
    },
    {
      "epoch": 37.32,
      "learning_rate": 5.228888888888888e-07,
      "loss": 0.3436,
      "step": 2650
    },
    {
      "epoch": 37.68,
      "learning_rate": 5.173333333333333e-07,
      "loss": 0.3431,
      "step": 2675
    },
    {
      "epoch": 38.03,
      "learning_rate": 5.117777777777777e-07,
      "loss": 0.3404,
      "step": 2700
    },
    {
      "epoch": 38.38,
      "learning_rate": 5.062222222222222e-07,
      "loss": 0.3384,
      "step": 2725
    },
    {
      "epoch": 38.73,
      "learning_rate": 5.006666666666667e-07,
      "loss": 0.3465,
      "step": 2750
    },
    {
      "epoch": 39.08,
      "learning_rate": 4.951111111111111e-07,
      "loss": 0.3373,
      "step": 2775
    },
    {
      "epoch": 39.44,
      "learning_rate": 4.895555555555555e-07,
      "loss": 0.3346,
      "step": 2800
    },
    {
      "epoch": 39.79,
      "learning_rate": 4.839999999999999e-07,
      "loss": 0.3366,
      "step": 2825
    },
    {
      "epoch": 40.14,
      "learning_rate": 4.784444444444444e-07,
      "loss": 0.3401,
      "step": 2850
    },
    {
      "epoch": 40.49,
      "learning_rate": 4.728888888888889e-07,
      "loss": 0.3309,
      "step": 2875
    },
    {
      "epoch": 40.85,
      "learning_rate": 4.673333333333333e-07,
      "loss": 0.3399,
      "step": 2900
    },
    {
      "epoch": 41.2,
      "learning_rate": 4.6177777777777777e-07,
      "loss": 0.3296,
      "step": 2925
    },
    {
      "epoch": 41.55,
      "learning_rate": 4.5622222222222217e-07,
      "loss": 0.3325,
      "step": 2950
    },
    {
      "epoch": 41.9,
      "learning_rate": 4.506666666666666e-07,
      "loss": 0.3366,
      "step": 2975
    },
    {
      "epoch": 42.25,
      "learning_rate": 4.451111111111111e-07,
      "loss": 0.3286,
      "step": 3000
    },
    {
      "epoch": 42.25,
      "eval_loss": 0.5238807797431946,
      "eval_runtime": 76.2832,
      "eval_samples_per_second": 118.925,
      "eval_steps_per_second": 0.931,
      "eval_wer": 29.24128413235632,
      "step": 3000
    },
    {
      "epoch": 42.61,
      "learning_rate": 4.3955555555555554e-07,
      "loss": 0.3262,
      "step": 3025
    },
    {
      "epoch": 42.96,
      "learning_rate": 4.34e-07,
      "loss": 0.3296,
      "step": 3050
    },
    {
      "epoch": 43.31,
      "learning_rate": 4.2844444444444445e-07,
      "loss": 0.328,
      "step": 3075
    },
    {
      "epoch": 43.66,
      "learning_rate": 4.2288888888888886e-07,
      "loss": 0.3281,
      "step": 3100
    },
    {
      "epoch": 44.01,
      "learning_rate": 4.173333333333333e-07,
      "loss": 0.3267,
      "step": 3125
    },
    {
      "epoch": 44.37,
      "learning_rate": 4.1177777777777777e-07,
      "loss": 0.3241,
      "step": 3150
    },
    {
      "epoch": 44.72,
      "learning_rate": 4.0622222222222217e-07,
      "loss": 0.3257,
      "step": 3175
    },
    {
      "epoch": 45.07,
      "learning_rate": 4.0066666666666663e-07,
      "loss": 0.3238,
      "step": 3200
    },
    {
      "epoch": 45.42,
      "learning_rate": 3.9511111111111114e-07,
      "loss": 0.3222,
      "step": 3225
    },
    {
      "epoch": 45.77,
      "learning_rate": 3.8955555555555554e-07,
      "loss": 0.3244,
      "step": 3250
    },
    {
      "epoch": 46.13,
      "learning_rate": 3.84e-07,
      "loss": 0.3244,
      "step": 3275
    },
    {
      "epoch": 46.48,
      "learning_rate": 3.7844444444444445e-07,
      "loss": 0.3195,
      "step": 3300
    },
    {
      "epoch": 46.83,
      "learning_rate": 3.7288888888888886e-07,
      "loss": 0.3204,
      "step": 3325
    },
    {
      "epoch": 47.18,
      "learning_rate": 3.673333333333333e-07,
      "loss": 0.319,
      "step": 3350
    },
    {
      "epoch": 47.54,
      "learning_rate": 3.617777777777777e-07,
      "loss": 0.3191,
      "step": 3375
    },
    {
      "epoch": 47.89,
      "learning_rate": 3.5622222222222223e-07,
      "loss": 0.3155,
      "step": 3400
    },
    {
      "epoch": 48.24,
      "learning_rate": 3.506666666666667e-07,
      "loss": 0.3172,
      "step": 3425
    },
    {
      "epoch": 48.59,
      "learning_rate": 3.451111111111111e-07,
      "loss": 0.3127,
      "step": 3450
    },
    {
      "epoch": 48.94,
      "learning_rate": 3.3955555555555554e-07,
      "loss": 0.3217,
      "step": 3475
    },
    {
      "epoch": 49.3,
      "learning_rate": 3.34e-07,
      "loss": 0.3158,
      "step": 3500
    },
    {
      "epoch": 49.65,
      "learning_rate": 3.284444444444444e-07,
      "loss": 0.3149,
      "step": 3525
    },
    {
      "epoch": 50.0,
      "learning_rate": 3.2288888888888886e-07,
      "loss": 0.3137,
      "step": 3550
    },
    {
      "epoch": 50.35,
      "learning_rate": 3.173333333333333e-07,
      "loss": 0.3124,
      "step": 3575
    },
    {
      "epoch": 50.7,
      "learning_rate": 3.1177777777777777e-07,
      "loss": 0.3127,
      "step": 3600
    },
    {
      "epoch": 51.06,
      "learning_rate": 3.0622222222222223e-07,
      "loss": 0.3146,
      "step": 3625
    },
    {
      "epoch": 51.41,
      "learning_rate": 3.006666666666667e-07,
      "loss": 0.3094,
      "step": 3650
    },
    {
      "epoch": 51.76,
      "learning_rate": 2.951111111111111e-07,
      "loss": 0.3107,
      "step": 3675
    },
    {
      "epoch": 52.11,
      "learning_rate": 2.8955555555555555e-07,
      "loss": 0.3104,
      "step": 3700
    },
    {
      "epoch": 52.46,
      "learning_rate": 2.8399999999999995e-07,
      "loss": 0.3048,
      "step": 3725
    },
    {
      "epoch": 52.82,
      "learning_rate": 2.784444444444444e-07,
      "loss": 0.311,
      "step": 3750
    },
    {
      "epoch": 53.17,
      "learning_rate": 2.728888888888889e-07,
      "loss": 0.3109,
      "step": 3775
    },
    {
      "epoch": 53.52,
      "learning_rate": 2.673333333333333e-07,
      "loss": 0.3081,
      "step": 3800
    },
    {
      "epoch": 53.87,
      "learning_rate": 2.617777777777778e-07,
      "loss": 0.3097,
      "step": 3825
    },
    {
      "epoch": 54.23,
      "learning_rate": 2.5622222222222223e-07,
      "loss": 0.3127,
      "step": 3850
    },
    {
      "epoch": 54.58,
      "learning_rate": 2.5066666666666663e-07,
      "loss": 0.3042,
      "step": 3875
    },
    {
      "epoch": 54.93,
      "learning_rate": 2.451111111111111e-07,
      "loss": 0.3071,
      "step": 3900
    },
    {
      "epoch": 55.28,
      "learning_rate": 2.3955555555555555e-07,
      "loss": 0.3102,
      "step": 3925
    },
    {
      "epoch": 55.63,
      "learning_rate": 2.34e-07,
      "loss": 0.3041,
      "step": 3950
    },
    {
      "epoch": 55.99,
      "learning_rate": 2.2844444444444443e-07,
      "loss": 0.3064,
      "step": 3975
    },
    {
      "epoch": 56.34,
      "learning_rate": 2.2288888888888886e-07,
      "loss": 0.3073,
      "step": 4000
    },
    {
      "epoch": 56.34,
      "eval_loss": 0.5199663639068604,
      "eval_runtime": 77.0033,
      "eval_samples_per_second": 117.813,
      "eval_steps_per_second": 0.922,
      "eval_wer": 29.413794236519568,
      "step": 4000
    },
    {
      "epoch": 56.69,
      "learning_rate": 2.1733333333333332e-07,
      "loss": 0.3075,
      "step": 4025
    },
    {
      "epoch": 57.04,
      "learning_rate": 2.1177777777777778e-07,
      "loss": 0.2995,
      "step": 4050
    },
    {
      "epoch": 57.39,
      "learning_rate": 2.062222222222222e-07,
      "loss": 0.3036,
      "step": 4075
    },
    {
      "epoch": 57.75,
      "learning_rate": 2.0066666666666666e-07,
      "loss": 0.3047,
      "step": 4100
    },
    {
      "epoch": 58.1,
      "learning_rate": 1.9511111111111112e-07,
      "loss": 0.3024,
      "step": 4125
    },
    {
      "epoch": 58.45,
      "learning_rate": 1.8955555555555555e-07,
      "loss": 0.301,
      "step": 4150
    },
    {
      "epoch": 58.8,
      "learning_rate": 1.8399999999999998e-07,
      "loss": 0.3011,
      "step": 4175
    },
    {
      "epoch": 59.15,
      "learning_rate": 1.7844444444444444e-07,
      "loss": 0.3056,
      "step": 4200
    },
    {
      "epoch": 59.51,
      "learning_rate": 1.728888888888889e-07,
      "loss": 0.3008,
      "step": 4225
    },
    {
      "epoch": 59.86,
      "learning_rate": 1.6733333333333332e-07,
      "loss": 0.3041,
      "step": 4250
    },
    {
      "epoch": 60.21,
      "learning_rate": 1.6177777777777775e-07,
      "loss": 0.2984,
      "step": 4275
    },
    {
      "epoch": 60.56,
      "learning_rate": 1.5622222222222224e-07,
      "loss": 0.299,
      "step": 4300
    },
    {
      "epoch": 60.92,
      "learning_rate": 1.5066666666666667e-07,
      "loss": 0.3024,
      "step": 4325
    },
    {
      "epoch": 61.27,
      "learning_rate": 1.451111111111111e-07,
      "loss": 0.3005,
      "step": 4350
    },
    {
      "epoch": 61.62,
      "learning_rate": 1.3955555555555553e-07,
      "loss": 0.3016,
      "step": 4375
    },
    {
      "epoch": 61.97,
      "learning_rate": 1.34e-07,
      "loss": 0.2983,
      "step": 4400
    },
    {
      "epoch": 62.32,
      "learning_rate": 1.2844444444444444e-07,
      "loss": 0.2989,
      "step": 4425
    },
    {
      "epoch": 62.68,
      "learning_rate": 1.228888888888889e-07,
      "loss": 0.2973,
      "step": 4450
    },
    {
      "epoch": 63.03,
      "learning_rate": 1.1733333333333333e-07,
      "loss": 0.3012,
      "step": 4475
    },
    {
      "epoch": 63.38,
      "learning_rate": 1.1177777777777778e-07,
      "loss": 0.299,
      "step": 4500
    },
    {
      "epoch": 63.73,
      "learning_rate": 1.0622222222222221e-07,
      "loss": 0.2978,
      "step": 4525
    },
    {
      "epoch": 64.08,
      "learning_rate": 1.0066666666666667e-07,
      "loss": 0.2992,
      "step": 4550
    },
    {
      "epoch": 64.44,
      "learning_rate": 9.51111111111111e-08,
      "loss": 0.2988,
      "step": 4575
    },
    {
      "epoch": 64.79,
      "learning_rate": 8.955555555555555e-08,
      "loss": 0.2989,
      "step": 4600
    },
    {
      "epoch": 65.14,
      "learning_rate": 8.4e-08,
      "loss": 0.2966,
      "step": 4625
    },
    {
      "epoch": 65.49,
      "learning_rate": 7.844444444444444e-08,
      "loss": 0.3011,
      "step": 4650
    },
    {
      "epoch": 65.85,
      "learning_rate": 7.288888888888888e-08,
      "loss": 0.2934,
      "step": 4675
    },
    {
      "epoch": 66.2,
      "learning_rate": 6.733333333333333e-08,
      "loss": 0.2994,
      "step": 4700
    },
    {
      "epoch": 66.55,
      "learning_rate": 6.177777777777777e-08,
      "loss": 0.2979,
      "step": 4725
    },
    {
      "epoch": 66.9,
      "learning_rate": 5.622222222222222e-08,
      "loss": 0.2966,
      "step": 4750
    },
    {
      "epoch": 67.25,
      "learning_rate": 5.0666666666666664e-08,
      "loss": 0.2977,
      "step": 4775
    },
    {
      "epoch": 67.61,
      "learning_rate": 4.511111111111111e-08,
      "loss": 0.2968,
      "step": 4800
    },
    {
      "epoch": 67.96,
      "learning_rate": 3.955555555555555e-08,
      "loss": 0.2983,
      "step": 4825
    },
    {
      "epoch": 68.31,
      "learning_rate": 3.4e-08,
      "loss": 0.2943,
      "step": 4850
    },
    {
      "epoch": 68.66,
      "learning_rate": 2.8444444444444443e-08,
      "loss": 0.2969,
      "step": 4875
    },
    {
      "epoch": 69.01,
      "learning_rate": 2.2888888888888887e-08,
      "loss": 0.2974,
      "step": 4900
    },
    {
      "epoch": 69.37,
      "learning_rate": 1.7333333333333333e-08,
      "loss": 0.2977,
      "step": 4925
    },
    {
      "epoch": 69.72,
      "learning_rate": 1.1777777777777778e-08,
      "loss": 0.2947,
      "step": 4950
    },
    {
      "epoch": 70.07,
      "learning_rate": 6.222222222222222e-09,
      "loss": 0.297,
      "step": 4975
    },
    {
      "epoch": 70.42,
      "learning_rate": 6.666666666666666e-10,
      "loss": 0.2971,
      "step": 5000
    },
    {
      "epoch": 70.42,
      "eval_loss": 0.5191043019294739,
      "eval_runtime": 76.5999,
      "eval_samples_per_second": 118.434,
      "eval_steps_per_second": 0.927,
      "eval_wer": 28.965267965695134,
      "step": 5000
    },
    {
      "epoch": 70.42,
      "step": 5000,
      "total_flos": 3.14570266509312e+19,
      "train_loss": 0.422480837726593,
      "train_runtime": 11176.3562,
      "train_samples_per_second": 114.527,
      "train_steps_per_second": 0.447
    }
  ],
  "logging_steps": 25,
  "max_steps": 5000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 71,
  "save_steps": 1000,
  "total_flos": 3.14570266509312e+19,
  "train_batch_size": 256,
  "trial_name": null,
  "trial_params": null
}