|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.9970559371933267, |
|
"eval_steps": 128, |
|
"global_step": 1018, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.001962708537782139, |
|
"grad_norm": 62.355553075278046, |
|
"learning_rate": 1.0000000000000002e-06, |
|
"loss": 0.8187, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.001962708537782139, |
|
"eval_loss": 0.8275482654571533, |
|
"eval_runtime": 246.1117, |
|
"eval_samples_per_second": 123.33, |
|
"eval_steps_per_second": 3.856, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.003925417075564278, |
|
"grad_norm": 62.468487550636745, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 0.8042, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.005888125613346418, |
|
"grad_norm": 4.2406831230151605, |
|
"learning_rate": 3e-06, |
|
"loss": 0.638, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.007850834151128557, |
|
"grad_norm": 0.6232773471755494, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 0.5818, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.009813542688910697, |
|
"grad_norm": 1.2988116476459395, |
|
"learning_rate": 5e-06, |
|
"loss": 0.5652, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.011776251226692836, |
|
"grad_norm": 19.326748341786118, |
|
"learning_rate": 6e-06, |
|
"loss": 0.988, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.013738959764474975, |
|
"grad_norm": 1.2669160024303416, |
|
"learning_rate": 7e-06, |
|
"loss": 0.6037, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.015701668302257114, |
|
"grad_norm": 1.2907800971966077, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 0.6027, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.017664376840039256, |
|
"grad_norm": 0.7888911759146412, |
|
"learning_rate": 9e-06, |
|
"loss": 0.5711, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.019627085377821395, |
|
"grad_norm": 0.7260839669652744, |
|
"learning_rate": 1e-05, |
|
"loss": 0.5624, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.021589793915603533, |
|
"grad_norm": 0.6532838806100564, |
|
"learning_rate": 1.1000000000000001e-05, |
|
"loss": 0.5612, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.023552502453385672, |
|
"grad_norm": 0.7559746737848189, |
|
"learning_rate": 1.2e-05, |
|
"loss": 0.5493, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.02551521099116781, |
|
"grad_norm": 0.5482270203080323, |
|
"learning_rate": 1.3000000000000001e-05, |
|
"loss": 0.5435, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.02747791952894995, |
|
"grad_norm": 0.4970646557650867, |
|
"learning_rate": 1.4e-05, |
|
"loss": 0.5408, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.029440628066732092, |
|
"grad_norm": 0.391725935764316, |
|
"learning_rate": 1.5000000000000002e-05, |
|
"loss": 0.5241, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.03140333660451423, |
|
"grad_norm": 0.43457047378514496, |
|
"learning_rate": 1.6000000000000003e-05, |
|
"loss": 0.5488, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.033366045142296366, |
|
"grad_norm": 0.3789337362265595, |
|
"learning_rate": 1.7e-05, |
|
"loss": 0.5266, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.03532875368007851, |
|
"grad_norm": 0.3331484005850679, |
|
"learning_rate": 1.8e-05, |
|
"loss": 0.5112, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.03729146221786065, |
|
"grad_norm": 0.3444864658048081, |
|
"learning_rate": 1.9e-05, |
|
"loss": 0.5051, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.03925417075564279, |
|
"grad_norm": 0.2887437422361683, |
|
"learning_rate": 2e-05, |
|
"loss": 0.5289, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.04121687929342493, |
|
"grad_norm": 0.278825983002472, |
|
"learning_rate": 1.9999987858045354e-05, |
|
"loss": 0.5289, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.04317958783120707, |
|
"grad_norm": 0.39102343961692493, |
|
"learning_rate": 1.9999951432210905e-05, |
|
"loss": 0.5076, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.045142296368989206, |
|
"grad_norm": 0.5521017110831627, |
|
"learning_rate": 1.9999890722585106e-05, |
|
"loss": 0.5108, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.047105004906771344, |
|
"grad_norm": 0.9336591410925016, |
|
"learning_rate": 1.9999805729315383e-05, |
|
"loss": 0.5316, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.04906771344455348, |
|
"grad_norm": 0.8299578992443886, |
|
"learning_rate": 1.9999696452608135e-05, |
|
"loss": 0.5334, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.05103042198233562, |
|
"grad_norm": 4.751183353378357, |
|
"learning_rate": 1.999956289272873e-05, |
|
"loss": 0.5285, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.05299313052011776, |
|
"grad_norm": 0.860325982956048, |
|
"learning_rate": 1.99994050500015e-05, |
|
"loss": 0.539, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.0549558390578999, |
|
"grad_norm": 0.4897569644596324, |
|
"learning_rate": 1.999922292480975e-05, |
|
"loss": 0.5358, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.05691854759568204, |
|
"grad_norm": 0.5398108233225083, |
|
"learning_rate": 1.9999016517595752e-05, |
|
"loss": 0.5351, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.058881256133464184, |
|
"grad_norm": 0.4780297052699839, |
|
"learning_rate": 1.9998785828860744e-05, |
|
"loss": 0.5185, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.06084396467124632, |
|
"grad_norm": 0.33693365999492547, |
|
"learning_rate": 1.9998530859164926e-05, |
|
"loss": 0.5104, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.06280667320902845, |
|
"grad_norm": 0.4261145101597809, |
|
"learning_rate": 1.9998251609127465e-05, |
|
"loss": 0.5005, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.0647693817468106, |
|
"grad_norm": 0.3067512941150541, |
|
"learning_rate": 1.999794807942649e-05, |
|
"loss": 0.508, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.06673209028459273, |
|
"grad_norm": 0.3756361155229969, |
|
"learning_rate": 1.999762027079909e-05, |
|
"loss": 0.5021, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.06869479882237488, |
|
"grad_norm": 0.3065423142817771, |
|
"learning_rate": 1.9997268184041318e-05, |
|
"loss": 0.5242, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.07065750736015702, |
|
"grad_norm": 0.27326520045279085, |
|
"learning_rate": 1.9996891820008165e-05, |
|
"loss": 0.5133, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.07262021589793916, |
|
"grad_norm": 0.2964526747556581, |
|
"learning_rate": 1.9996491179613597e-05, |
|
"loss": 0.492, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.0745829244357213, |
|
"grad_norm": 0.2446010040256801, |
|
"learning_rate": 1.9996066263830533e-05, |
|
"loss": 0.4952, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.07654563297350343, |
|
"grad_norm": 0.22448559939725016, |
|
"learning_rate": 1.999561707369082e-05, |
|
"loss": 0.4917, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.07850834151128558, |
|
"grad_norm": 0.22417659964598924, |
|
"learning_rate": 1.9995143610285275e-05, |
|
"loss": 0.4978, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.08047105004906771, |
|
"grad_norm": 0.24556309982732719, |
|
"learning_rate": 1.9994645874763657e-05, |
|
"loss": 0.5123, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.08243375858684986, |
|
"grad_norm": 0.2078136882784064, |
|
"learning_rate": 1.9994123868334655e-05, |
|
"loss": 0.5008, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.08439646712463199, |
|
"grad_norm": 0.2126670968682465, |
|
"learning_rate": 1.999357759226591e-05, |
|
"loss": 0.5028, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.08635917566241413, |
|
"grad_norm": 0.19399581978765076, |
|
"learning_rate": 1.9993007047883988e-05, |
|
"loss": 0.4937, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.08832188420019627, |
|
"grad_norm": 0.1873229933356675, |
|
"learning_rate": 1.9992412236574396e-05, |
|
"loss": 0.4798, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.09028459273797841, |
|
"grad_norm": 0.19589361661093946, |
|
"learning_rate": 1.999179315978157e-05, |
|
"loss": 0.4794, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.09224730127576054, |
|
"grad_norm": 0.174199073484297, |
|
"learning_rate": 1.999114981900887e-05, |
|
"loss": 0.4735, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.09421000981354269, |
|
"grad_norm": 0.16944247667745355, |
|
"learning_rate": 1.999048221581858e-05, |
|
"loss": 0.4843, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.09617271835132483, |
|
"grad_norm": 0.1854184220985238, |
|
"learning_rate": 1.9989790351831898e-05, |
|
"loss": 0.4774, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.09813542688910697, |
|
"grad_norm": 0.16797468279664923, |
|
"learning_rate": 1.9989074228728942e-05, |
|
"loss": 0.4684, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.10009813542688911, |
|
"grad_norm": 0.16773676945215363, |
|
"learning_rate": 1.998833384824874e-05, |
|
"loss": 0.4613, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.10206084396467124, |
|
"grad_norm": 0.1645777226690783, |
|
"learning_rate": 1.9987569212189224e-05, |
|
"loss": 0.477, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.10402355250245339, |
|
"grad_norm": 0.17954527682275045, |
|
"learning_rate": 1.998678032240723e-05, |
|
"loss": 0.4718, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.10598626104023552, |
|
"grad_norm": 0.16162280011064276, |
|
"learning_rate": 1.9985967180818493e-05, |
|
"loss": 0.4927, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.10794896957801767, |
|
"grad_norm": 0.16773939638216998, |
|
"learning_rate": 1.9985129789397633e-05, |
|
"loss": 0.4797, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.1099116781157998, |
|
"grad_norm": 0.18424558774001681, |
|
"learning_rate": 1.998426815017817e-05, |
|
"loss": 0.4747, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.11187438665358194, |
|
"grad_norm": 0.1388232795248698, |
|
"learning_rate": 1.9983382265252494e-05, |
|
"loss": 0.4633, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.11383709519136408, |
|
"grad_norm": 0.17752346108818134, |
|
"learning_rate": 1.998247213677188e-05, |
|
"loss": 0.4613, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.11579980372914622, |
|
"grad_norm": 0.14030828634333592, |
|
"learning_rate": 1.9981537766946486e-05, |
|
"loss": 0.4726, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.11776251226692837, |
|
"grad_norm": 0.17867869880789194, |
|
"learning_rate": 1.9980579158045322e-05, |
|
"loss": 0.4659, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.1197252208047105, |
|
"grad_norm": 0.17555178239215463, |
|
"learning_rate": 1.9979596312396258e-05, |
|
"loss": 0.4882, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.12168792934249265, |
|
"grad_norm": 0.1525166881836132, |
|
"learning_rate": 1.9978589232386036e-05, |
|
"loss": 0.4668, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.12365063788027478, |
|
"grad_norm": 0.16104011998686385, |
|
"learning_rate": 1.9977557920460237e-05, |
|
"loss": 0.5001, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.1256133464180569, |
|
"grad_norm": 0.16256381990686963, |
|
"learning_rate": 1.997650237912329e-05, |
|
"loss": 0.4615, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.12757605495583907, |
|
"grad_norm": 0.14954423248271237, |
|
"learning_rate": 1.9975422610938463e-05, |
|
"loss": 0.4686, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.1295387634936212, |
|
"grad_norm": 0.19293534758098466, |
|
"learning_rate": 1.997431861852785e-05, |
|
"loss": 0.4607, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.13150147203140333, |
|
"grad_norm": 0.13976174634255645, |
|
"learning_rate": 1.997319040457238e-05, |
|
"loss": 0.4727, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.13346418056918546, |
|
"grad_norm": 0.18195008776452798, |
|
"learning_rate": 1.9972037971811802e-05, |
|
"loss": 0.4681, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.13542688910696762, |
|
"grad_norm": 0.13607271529544315, |
|
"learning_rate": 1.9970861323044667e-05, |
|
"loss": 0.4747, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.13738959764474976, |
|
"grad_norm": 0.14246714469054536, |
|
"learning_rate": 1.996966046112834e-05, |
|
"loss": 0.457, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.1393523061825319, |
|
"grad_norm": 0.15545571681772094, |
|
"learning_rate": 1.9968435388978986e-05, |
|
"loss": 0.4654, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.14131501472031405, |
|
"grad_norm": 0.1631020324563178, |
|
"learning_rate": 1.996718610957155e-05, |
|
"loss": 0.4663, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.14327772325809618, |
|
"grad_norm": 0.14479401685365606, |
|
"learning_rate": 1.996591262593978e-05, |
|
"loss": 0.4522, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.1452404317958783, |
|
"grad_norm": 0.15470054571534417, |
|
"learning_rate": 1.9964614941176194e-05, |
|
"loss": 0.4596, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.14720314033366044, |
|
"grad_norm": 0.13412246717171877, |
|
"learning_rate": 1.9963293058432066e-05, |
|
"loss": 0.4465, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.1491658488714426, |
|
"grad_norm": 0.14095810281768514, |
|
"learning_rate": 1.9961946980917457e-05, |
|
"loss": 0.4537, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.15112855740922473, |
|
"grad_norm": 0.16396930622944708, |
|
"learning_rate": 1.996057671190116e-05, |
|
"loss": 0.453, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.15309126594700687, |
|
"grad_norm": 0.5108895960415857, |
|
"learning_rate": 1.995918225471073e-05, |
|
"loss": 0.4819, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.155053974484789, |
|
"grad_norm": 0.16436026012077695, |
|
"learning_rate": 1.995776361273245e-05, |
|
"loss": 0.4481, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.15701668302257116, |
|
"grad_norm": 0.18055053584795847, |
|
"learning_rate": 1.9956320789411338e-05, |
|
"loss": 0.4561, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.1589793915603533, |
|
"grad_norm": 0.17300584614095313, |
|
"learning_rate": 1.9954853788251137e-05, |
|
"loss": 0.4626, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.16094210009813542, |
|
"grad_norm": 0.18537091420945745, |
|
"learning_rate": 1.9953362612814294e-05, |
|
"loss": 0.467, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.16290480863591755, |
|
"grad_norm": 0.1831953893733552, |
|
"learning_rate": 1.995184726672197e-05, |
|
"loss": 0.4533, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.1648675171736997, |
|
"grad_norm": 0.1785397387970331, |
|
"learning_rate": 1.9950307753654016e-05, |
|
"loss": 0.4714, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.16683022571148184, |
|
"grad_norm": 0.1696225474699336, |
|
"learning_rate": 1.994874407734897e-05, |
|
"loss": 0.4695, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.16879293424926398, |
|
"grad_norm": 0.20269683913377595, |
|
"learning_rate": 1.994715624160405e-05, |
|
"loss": 0.4648, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.17075564278704614, |
|
"grad_norm": 0.18739457130939605, |
|
"learning_rate": 1.9945544250275147e-05, |
|
"loss": 0.4608, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.17271835132482827, |
|
"grad_norm": 0.21960029028555894, |
|
"learning_rate": 1.99439081072768e-05, |
|
"loss": 0.4576, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.1746810598626104, |
|
"grad_norm": 0.1959495489832016, |
|
"learning_rate": 1.9942247816582206e-05, |
|
"loss": 0.4596, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.17664376840039253, |
|
"grad_norm": 0.17212969050820215, |
|
"learning_rate": 1.9940563382223196e-05, |
|
"loss": 0.4522, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.1786064769381747, |
|
"grad_norm": 0.1663706916020894, |
|
"learning_rate": 1.9938854808290244e-05, |
|
"loss": 0.4504, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.18056918547595682, |
|
"grad_norm": 0.31290735737771297, |
|
"learning_rate": 1.9937122098932428e-05, |
|
"loss": 0.4608, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.18253189401373895, |
|
"grad_norm": 0.23409310633529254, |
|
"learning_rate": 1.9935365258357446e-05, |
|
"loss": 0.4559, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.1844946025515211, |
|
"grad_norm": 0.2596933197806358, |
|
"learning_rate": 1.9933584290831593e-05, |
|
"loss": 0.4698, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.18645731108930325, |
|
"grad_norm": 0.19766333203388936, |
|
"learning_rate": 1.9931779200679754e-05, |
|
"loss": 0.443, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.18842001962708538, |
|
"grad_norm": 0.18968390210321073, |
|
"learning_rate": 1.9929949992285397e-05, |
|
"loss": 0.4623, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.1903827281648675, |
|
"grad_norm": 0.16223204516832543, |
|
"learning_rate": 1.9928096670090552e-05, |
|
"loss": 0.4772, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.19234543670264967, |
|
"grad_norm": 0.18715665258083516, |
|
"learning_rate": 1.992621923859581e-05, |
|
"loss": 0.4771, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.1943081452404318, |
|
"grad_norm": 0.2670823974648894, |
|
"learning_rate": 1.992431770236031e-05, |
|
"loss": 0.4625, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.19627085377821393, |
|
"grad_norm": 0.2389458544722749, |
|
"learning_rate": 1.9922392066001724e-05, |
|
"loss": 0.4496, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.19823356231599606, |
|
"grad_norm": 0.18959477739852718, |
|
"learning_rate": 1.9920442334196248e-05, |
|
"loss": 0.4685, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.20019627085377822, |
|
"grad_norm": 0.1872065955224367, |
|
"learning_rate": 1.99184685116786e-05, |
|
"loss": 0.4724, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.20215897939156036, |
|
"grad_norm": 0.2002235120172999, |
|
"learning_rate": 1.991647060324198e-05, |
|
"loss": 0.456, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.2041216879293425, |
|
"grad_norm": 0.2764357846718622, |
|
"learning_rate": 1.9914448613738107e-05, |
|
"loss": 0.4496, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.20608439646712462, |
|
"grad_norm": 0.1916898969998294, |
|
"learning_rate": 1.991240254807715e-05, |
|
"loss": 0.4584, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.20804710500490678, |
|
"grad_norm": 0.3108126102936448, |
|
"learning_rate": 1.991033241122776e-05, |
|
"loss": 0.4595, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.2100098135426889, |
|
"grad_norm": 0.21101958681311, |
|
"learning_rate": 1.990823820821704e-05, |
|
"loss": 0.4911, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.21197252208047104, |
|
"grad_norm": 0.42110628942523765, |
|
"learning_rate": 1.9906119944130527e-05, |
|
"loss": 0.4605, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.2139352306182532, |
|
"grad_norm": 0.17728266885723962, |
|
"learning_rate": 1.9903977624112204e-05, |
|
"loss": 0.461, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.21589793915603533, |
|
"grad_norm": 0.26344485864001554, |
|
"learning_rate": 1.9901811253364458e-05, |
|
"loss": 0.456, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.21786064769381747, |
|
"grad_norm": 0.24928437921890148, |
|
"learning_rate": 1.989962083714808e-05, |
|
"loss": 0.4565, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.2198233562315996, |
|
"grad_norm": 0.22760146691028113, |
|
"learning_rate": 1.9897406380782262e-05, |
|
"loss": 0.4617, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.22178606476938176, |
|
"grad_norm": 0.33505042507064053, |
|
"learning_rate": 1.9895167889644568e-05, |
|
"loss": 0.4619, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.2237487733071639, |
|
"grad_norm": 0.23380989191893622, |
|
"learning_rate": 1.989290536917093e-05, |
|
"loss": 0.4682, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.22571148184494602, |
|
"grad_norm": 0.16292574235785917, |
|
"learning_rate": 1.9890618824855624e-05, |
|
"loss": 0.4609, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.22767419038272815, |
|
"grad_norm": 0.21123922747241927, |
|
"learning_rate": 1.9888308262251286e-05, |
|
"loss": 0.4535, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.2296368989205103, |
|
"grad_norm": 0.19216925224306786, |
|
"learning_rate": 1.988597368696886e-05, |
|
"loss": 0.4666, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.23159960745829244, |
|
"grad_norm": 0.1978727333868667, |
|
"learning_rate": 1.988361510467761e-05, |
|
"loss": 0.4483, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.23356231599607458, |
|
"grad_norm": 0.2024852375366097, |
|
"learning_rate": 1.988123252110509e-05, |
|
"loss": 0.4575, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.23552502453385674, |
|
"grad_norm": 0.2275812980861413, |
|
"learning_rate": 1.9878825942037147e-05, |
|
"loss": 0.4733, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.23748773307163887, |
|
"grad_norm": 0.16622185526393202, |
|
"learning_rate": 1.98763953733179e-05, |
|
"loss": 0.4638, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.239450441609421, |
|
"grad_norm": 0.1599896679961423, |
|
"learning_rate": 1.9873940820849714e-05, |
|
"loss": 0.4676, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.24141315014720313, |
|
"grad_norm": 0.17524027525754993, |
|
"learning_rate": 1.9871462290593206e-05, |
|
"loss": 0.4671, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.2433758586849853, |
|
"grad_norm": 0.18451386556539923, |
|
"learning_rate": 1.9868959788567213e-05, |
|
"loss": 0.4714, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.24533856722276742, |
|
"grad_norm": 0.20332604727761022, |
|
"learning_rate": 1.9866433320848793e-05, |
|
"loss": 0.4395, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.24730127576054955, |
|
"grad_norm": 0.13917105010229833, |
|
"learning_rate": 1.9863882893573188e-05, |
|
"loss": 0.4568, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.2492639842983317, |
|
"grad_norm": 0.17751758499748868, |
|
"learning_rate": 1.9861308512933846e-05, |
|
"loss": 0.4746, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.2512266928361138, |
|
"grad_norm": 0.15877578177692772, |
|
"learning_rate": 1.985871018518236e-05, |
|
"loss": 0.4515, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.2512266928361138, |
|
"eval_loss": 0.4546394944190979, |
|
"eval_runtime": 245.4163, |
|
"eval_samples_per_second": 123.68, |
|
"eval_steps_per_second": 3.867, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.25318940137389595, |
|
"grad_norm": 0.16746870749832415, |
|
"learning_rate": 1.9856087916628487e-05, |
|
"loss": 0.4564, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.25515210991167814, |
|
"grad_norm": 0.32948890713463225, |
|
"learning_rate": 1.9853441713640123e-05, |
|
"loss": 0.4604, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.25711481844946027, |
|
"grad_norm": 0.19647029953806538, |
|
"learning_rate": 1.985077158264328e-05, |
|
"loss": 0.4439, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.2590775269872424, |
|
"grad_norm": 0.251393507519347, |
|
"learning_rate": 1.9848077530122083e-05, |
|
"loss": 0.4574, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.26104023552502453, |
|
"grad_norm": 0.24198770425177948, |
|
"learning_rate": 1.984535956261874e-05, |
|
"loss": 0.4725, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.26300294406280667, |
|
"grad_norm": 0.19883583006200609, |
|
"learning_rate": 1.9842617686733546e-05, |
|
"loss": 0.4509, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.2649656526005888, |
|
"grad_norm": 0.19609522995459108, |
|
"learning_rate": 1.983985190912484e-05, |
|
"loss": 0.4513, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.26692836113837093, |
|
"grad_norm": 0.18132847552230033, |
|
"learning_rate": 1.9837062236509013e-05, |
|
"loss": 0.4614, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.2688910696761531, |
|
"grad_norm": 0.2617064903935584, |
|
"learning_rate": 1.9834248675660484e-05, |
|
"loss": 0.4729, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.27085377821393525, |
|
"grad_norm": 0.15567291724986626, |
|
"learning_rate": 1.983141123341168e-05, |
|
"loss": 0.4496, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.2728164867517174, |
|
"grad_norm": 0.8337178932954963, |
|
"learning_rate": 1.9828549916653013e-05, |
|
"loss": 0.4464, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.2747791952894995, |
|
"grad_norm": 0.13791288977874855, |
|
"learning_rate": 1.9825664732332886e-05, |
|
"loss": 0.4484, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.27674190382728164, |
|
"grad_norm": 0.15828576470912684, |
|
"learning_rate": 1.9822755687457645e-05, |
|
"loss": 0.4568, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.2787046123650638, |
|
"grad_norm": 0.18001770804302206, |
|
"learning_rate": 1.9819822789091597e-05, |
|
"loss": 0.4651, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.2806673209028459, |
|
"grad_norm": 0.39836155933646317, |
|
"learning_rate": 1.9816866044356968e-05, |
|
"loss": 0.4481, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.2826300294406281, |
|
"grad_norm": 0.15389096751590023, |
|
"learning_rate": 1.981388546043388e-05, |
|
"loss": 0.4604, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.2845927379784102, |
|
"grad_norm": 0.23663992095817404, |
|
"learning_rate": 1.981088104456036e-05, |
|
"loss": 0.4587, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.28655544651619236, |
|
"grad_norm": 0.18263790394872537, |
|
"learning_rate": 1.9807852804032306e-05, |
|
"loss": 0.4606, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.2885181550539745, |
|
"grad_norm": 0.1790389596426215, |
|
"learning_rate": 1.980480074620347e-05, |
|
"loss": 0.4492, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.2904808635917566, |
|
"grad_norm": 0.27451162166681453, |
|
"learning_rate": 1.9801724878485438e-05, |
|
"loss": 0.458, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.29244357212953875, |
|
"grad_norm": 0.17498754542123285, |
|
"learning_rate": 1.9798625208347627e-05, |
|
"loss": 0.4583, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.2944062806673209, |
|
"grad_norm": 0.9166003434108919, |
|
"learning_rate": 1.979550174331724e-05, |
|
"loss": 0.4599, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.296368989205103, |
|
"grad_norm": 1.7151984325661747, |
|
"learning_rate": 1.9792354490979275e-05, |
|
"loss": 0.4652, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.2983316977428852, |
|
"grad_norm": 3.7084263741544903, |
|
"learning_rate": 1.9789183458976485e-05, |
|
"loss": 0.5222, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.30029440628066734, |
|
"grad_norm": 0.23854084460339886, |
|
"learning_rate": 1.9785988655009386e-05, |
|
"loss": 0.4612, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.30225711481844947, |
|
"grad_norm": 0.4504871270083281, |
|
"learning_rate": 1.97827700868362e-05, |
|
"loss": 0.4812, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.3042198233562316, |
|
"grad_norm": 0.4201391978088661, |
|
"learning_rate": 1.9779527762272877e-05, |
|
"loss": 0.4855, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.30618253189401373, |
|
"grad_norm": 0.44160943541930997, |
|
"learning_rate": 1.977626168919305e-05, |
|
"loss": 0.4723, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.30814524043179586, |
|
"grad_norm": 0.4231731091405931, |
|
"learning_rate": 1.977297187552801e-05, |
|
"loss": 0.4648, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.310107948969578, |
|
"grad_norm": 0.3638067041093047, |
|
"learning_rate": 1.9769658329266718e-05, |
|
"loss": 0.4545, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.3120706575073602, |
|
"grad_norm": 0.4239566722192982, |
|
"learning_rate": 1.976632105845576e-05, |
|
"loss": 0.4849, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.3140333660451423, |
|
"grad_norm": 0.3461796288026477, |
|
"learning_rate": 1.9762960071199334e-05, |
|
"loss": 0.4794, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.31599607458292445, |
|
"grad_norm": 0.24887697538008421, |
|
"learning_rate": 1.9759575375659232e-05, |
|
"loss": 0.4814, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.3179587831207066, |
|
"grad_norm": 0.24050744980645108, |
|
"learning_rate": 1.9756166980054812e-05, |
|
"loss": 0.4516, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.3199214916584887, |
|
"grad_norm": 0.2345271899376135, |
|
"learning_rate": 1.9752734892663e-05, |
|
"loss": 0.4779, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.32188420019627084, |
|
"grad_norm": 0.27248178409002094, |
|
"learning_rate": 1.9749279121818235e-05, |
|
"loss": 0.4677, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.323846908734053, |
|
"grad_norm": 0.19021861355008923, |
|
"learning_rate": 1.9745799675912492e-05, |
|
"loss": 0.4491, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.3258096172718351, |
|
"grad_norm": 0.20109086134349027, |
|
"learning_rate": 1.9742296563395218e-05, |
|
"loss": 0.4485, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.3277723258096173, |
|
"grad_norm": 0.19466230218717548, |
|
"learning_rate": 1.9738769792773338e-05, |
|
"loss": 0.4429, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.3297350343473994, |
|
"grad_norm": 0.22618689389367447, |
|
"learning_rate": 1.9735219372611232e-05, |
|
"loss": 0.4776, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.33169774288518156, |
|
"grad_norm": 0.2548001164602319, |
|
"learning_rate": 1.9731645311530718e-05, |
|
"loss": 0.4544, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.3336604514229637, |
|
"grad_norm": 0.1791237461163614, |
|
"learning_rate": 1.9728047618210995e-05, |
|
"loss": 0.4554, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.3356231599607458, |
|
"grad_norm": 0.3948418555429021, |
|
"learning_rate": 1.9724426301388683e-05, |
|
"loss": 0.4638, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.33758586849852795, |
|
"grad_norm": 0.17378090038653812, |
|
"learning_rate": 1.9720781369857747e-05, |
|
"loss": 0.4454, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.3395485770363101, |
|
"grad_norm": 0.3222426037289685, |
|
"learning_rate": 1.971711283246951e-05, |
|
"loss": 0.4535, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.34151128557409227, |
|
"grad_norm": 0.18445752660643416, |
|
"learning_rate": 1.9713420698132614e-05, |
|
"loss": 0.4529, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.3434739941118744, |
|
"grad_norm": 0.16185923929968957, |
|
"learning_rate": 1.9709704975813007e-05, |
|
"loss": 0.4368, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.34543670264965654, |
|
"grad_norm": 1.1894973875213863, |
|
"learning_rate": 1.970596567453391e-05, |
|
"loss": 0.4544, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.34739941118743867, |
|
"grad_norm": 0.16134561772472725, |
|
"learning_rate": 1.9702202803375813e-05, |
|
"loss": 0.4622, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.3493621197252208, |
|
"grad_norm": 0.1970049090626823, |
|
"learning_rate": 1.9698416371476434e-05, |
|
"loss": 0.4671, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.35132482826300293, |
|
"grad_norm": 0.36331006012853106, |
|
"learning_rate": 1.9694606388030715e-05, |
|
"loss": 0.457, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.35328753680078506, |
|
"grad_norm": 0.17936278528599436, |
|
"learning_rate": 1.969077286229078e-05, |
|
"loss": 0.4548, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.35525024533856725, |
|
"grad_norm": 1.9159888836797607, |
|
"learning_rate": 1.9686915803565934e-05, |
|
"loss": 0.4645, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.3572129538763494, |
|
"grad_norm": 10.74627429291283, |
|
"learning_rate": 1.9683035221222617e-05, |
|
"loss": 0.6072, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.3591756624141315, |
|
"grad_norm": 52.13270281331323, |
|
"learning_rate": 1.9679131124684403e-05, |
|
"loss": 1.8264, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.36113837095191365, |
|
"grad_norm": 4.418687411757653, |
|
"learning_rate": 1.9675203523431964e-05, |
|
"loss": 0.5769, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.3631010794896958, |
|
"grad_norm": 0.5887657411717333, |
|
"learning_rate": 1.9671252427003052e-05, |
|
"loss": 0.5168, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.3650637880274779, |
|
"grad_norm": 0.674215963992249, |
|
"learning_rate": 1.9667277844992476e-05, |
|
"loss": 0.5026, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.36702649656526004, |
|
"grad_norm": 1.0528021833617283, |
|
"learning_rate": 1.966327978705207e-05, |
|
"loss": 0.5349, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.3689892051030422, |
|
"grad_norm": 0.9422062018416699, |
|
"learning_rate": 1.9659258262890683e-05, |
|
"loss": 0.5042, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.37095191364082436, |
|
"grad_norm": 0.38055499460304, |
|
"learning_rate": 1.9655213282274153e-05, |
|
"loss": 0.494, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.3729146221786065, |
|
"grad_norm": 0.7565736806021138, |
|
"learning_rate": 1.9651144855025265e-05, |
|
"loss": 0.4757, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.3748773307163886, |
|
"grad_norm": 0.6604416755715676, |
|
"learning_rate": 1.964705299102376e-05, |
|
"loss": 0.4694, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.37684003925417076, |
|
"grad_norm": 0.5198922569733609, |
|
"learning_rate": 1.964293770020628e-05, |
|
"loss": 0.4896, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.3788027477919529, |
|
"grad_norm": 0.3341751217641192, |
|
"learning_rate": 1.9638798992566354e-05, |
|
"loss": 0.482, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.380765456329735, |
|
"grad_norm": 0.34613355879167534, |
|
"learning_rate": 1.9634636878154393e-05, |
|
"loss": 0.4903, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.38272816486751715, |
|
"grad_norm": 0.29136481910346357, |
|
"learning_rate": 1.963045136707763e-05, |
|
"loss": 0.4795, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.38469087340529934, |
|
"grad_norm": 0.34118862521510146, |
|
"learning_rate": 1.962624246950012e-05, |
|
"loss": 0.4826, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.38665358194308147, |
|
"grad_norm": 0.45727416620673994, |
|
"learning_rate": 1.9622010195642724e-05, |
|
"loss": 0.4653, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.3886162904808636, |
|
"grad_norm": 0.312131527349307, |
|
"learning_rate": 1.9617754555783045e-05, |
|
"loss": 0.4761, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.39057899901864573, |
|
"grad_norm": 0.26137988821611824, |
|
"learning_rate": 1.9613475560255445e-05, |
|
"loss": 0.4792, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.39254170755642787, |
|
"grad_norm": 0.2571733191752843, |
|
"learning_rate": 1.9609173219450998e-05, |
|
"loss": 0.4636, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.39450441609421, |
|
"grad_norm": 0.286975647231177, |
|
"learning_rate": 1.9604847543817467e-05, |
|
"loss": 0.4733, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.39646712463199213, |
|
"grad_norm": 0.18272338104532077, |
|
"learning_rate": 1.960049854385929e-05, |
|
"loss": 0.437, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.39842983316977426, |
|
"grad_norm": 0.22862620377890577, |
|
"learning_rate": 1.9596126230137532e-05, |
|
"loss": 0.4634, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.40039254170755645, |
|
"grad_norm": 0.3492075746901584, |
|
"learning_rate": 1.9591730613269878e-05, |
|
"loss": 0.4776, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.4023552502453386, |
|
"grad_norm": 0.21584801782728166, |
|
"learning_rate": 1.9587311703930615e-05, |
|
"loss": 0.45, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.4043179587831207, |
|
"grad_norm": 0.1775551780611007, |
|
"learning_rate": 1.9582869512850576e-05, |
|
"loss": 0.4588, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.40628066732090284, |
|
"grad_norm": 0.19395395115771327, |
|
"learning_rate": 1.9578404050817135e-05, |
|
"loss": 0.4476, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.408243375858685, |
|
"grad_norm": 0.1756543157586606, |
|
"learning_rate": 1.957391532867418e-05, |
|
"loss": 0.4541, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.4102060843964671, |
|
"grad_norm": 0.2033504241274638, |
|
"learning_rate": 1.956940335732209e-05, |
|
"loss": 0.4517, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.41216879293424924, |
|
"grad_norm": 0.17283372188002102, |
|
"learning_rate": 1.956486814771769e-05, |
|
"loss": 0.4388, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.4141315014720314, |
|
"grad_norm": 0.3603837587378834, |
|
"learning_rate": 1.956030971087424e-05, |
|
"loss": 0.4589, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.41609421000981356, |
|
"grad_norm": 0.14136642540755467, |
|
"learning_rate": 1.955572805786141e-05, |
|
"loss": 0.4458, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.4180569185475957, |
|
"grad_norm": 0.5877313778359132, |
|
"learning_rate": 1.9551123199805243e-05, |
|
"loss": 0.4697, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.4200196270853778, |
|
"grad_norm": 0.14159168090968688, |
|
"learning_rate": 1.9546495147888134e-05, |
|
"loss": 0.4615, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.42198233562315995, |
|
"grad_norm": 0.14811573102512038, |
|
"learning_rate": 1.9541843913348804e-05, |
|
"loss": 0.4434, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.4239450441609421, |
|
"grad_norm": 0.1892192591601388, |
|
"learning_rate": 1.953716950748227e-05, |
|
"loss": 0.4502, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.4259077526987242, |
|
"grad_norm": 0.18436679871777018, |
|
"learning_rate": 1.9532471941639816e-05, |
|
"loss": 0.4583, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.4278704612365064, |
|
"grad_norm": 0.16076460592232203, |
|
"learning_rate": 1.9527751227228964e-05, |
|
"loss": 0.4782, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.42983316977428854, |
|
"grad_norm": 0.16974596439970596, |
|
"learning_rate": 1.952300737571346e-05, |
|
"loss": 0.4417, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.43179587831207067, |
|
"grad_norm": 0.1489166172695092, |
|
"learning_rate": 1.9518240398613226e-05, |
|
"loss": 0.45, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.4337585868498528, |
|
"grad_norm": 0.19974086406341826, |
|
"learning_rate": 1.9513450307504346e-05, |
|
"loss": 0.4457, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.43572129538763493, |
|
"grad_norm": 0.14988334374863602, |
|
"learning_rate": 1.9508637114019037e-05, |
|
"loss": 0.4416, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.43768400392541706, |
|
"grad_norm": 0.15223238304202846, |
|
"learning_rate": 1.9503800829845613e-05, |
|
"loss": 0.4463, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.4396467124631992, |
|
"grad_norm": 0.14747133903175189, |
|
"learning_rate": 1.9498941466728462e-05, |
|
"loss": 0.4408, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.44160942100098133, |
|
"grad_norm": 0.1526636764634093, |
|
"learning_rate": 1.9494059036468016e-05, |
|
"loss": 0.4499, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.4435721295387635, |
|
"grad_norm": 0.1628546826649206, |
|
"learning_rate": 1.9489153550920726e-05, |
|
"loss": 0.4439, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.44553483807654565, |
|
"grad_norm": 0.15901123933171535, |
|
"learning_rate": 1.9484225021999032e-05, |
|
"loss": 0.4412, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.4474975466143278, |
|
"grad_norm": 0.1490150024919776, |
|
"learning_rate": 1.947927346167132e-05, |
|
"loss": 0.455, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.4494602551521099, |
|
"grad_norm": 0.5516033577713996, |
|
"learning_rate": 1.9474298881961918e-05, |
|
"loss": 0.4566, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.45142296368989204, |
|
"grad_norm": 0.13967143480519018, |
|
"learning_rate": 1.946930129495106e-05, |
|
"loss": 0.4628, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.4533856722276742, |
|
"grad_norm": 0.1305332529656271, |
|
"learning_rate": 1.9464280712774828e-05, |
|
"loss": 0.4393, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.4553483807654563, |
|
"grad_norm": 0.1345554686370529, |
|
"learning_rate": 1.945923714762516e-05, |
|
"loss": 0.4545, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.4573110893032385, |
|
"grad_norm": 0.136671297145043, |
|
"learning_rate": 1.9454170611749812e-05, |
|
"loss": 0.4538, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.4592737978410206, |
|
"grad_norm": 0.18983035932318434, |
|
"learning_rate": 1.9449081117452304e-05, |
|
"loss": 0.4551, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.46123650637880276, |
|
"grad_norm": 0.13254449859971484, |
|
"learning_rate": 1.9443968677091925e-05, |
|
"loss": 0.4364, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.4631992149165849, |
|
"grad_norm": 0.12020638993133913, |
|
"learning_rate": 1.9438833303083677e-05, |
|
"loss": 0.4442, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.465161923454367, |
|
"grad_norm": 0.13012001270770882, |
|
"learning_rate": 1.9433675007898255e-05, |
|
"loss": 0.4397, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.46712463199214915, |
|
"grad_norm": 0.260116572268421, |
|
"learning_rate": 1.9428493804062013e-05, |
|
"loss": 0.4446, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.4690873405299313, |
|
"grad_norm": 0.1231982496143958, |
|
"learning_rate": 1.9423289704156945e-05, |
|
"loss": 0.4616, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.47105004906771347, |
|
"grad_norm": 0.11787031140608273, |
|
"learning_rate": 1.9418062720820636e-05, |
|
"loss": 0.4438, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.4730127576054956, |
|
"grad_norm": 0.13848283459243596, |
|
"learning_rate": 1.9412812866746248e-05, |
|
"loss": 0.4479, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.47497546614327774, |
|
"grad_norm": 0.12036969167700294, |
|
"learning_rate": 1.9407540154682473e-05, |
|
"loss": 0.4508, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.47693817468105987, |
|
"grad_norm": 0.17542281239029114, |
|
"learning_rate": 1.9402244597433526e-05, |
|
"loss": 0.4393, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.478900883218842, |
|
"grad_norm": 0.1137790859381083, |
|
"learning_rate": 1.9396926207859085e-05, |
|
"loss": 0.4444, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.48086359175662413, |
|
"grad_norm": 0.18308427216283138, |
|
"learning_rate": 1.939158499887428e-05, |
|
"loss": 0.4272, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.48282630029440626, |
|
"grad_norm": 0.13610805029149947, |
|
"learning_rate": 1.9386220983449652e-05, |
|
"loss": 0.4453, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.4847890088321884, |
|
"grad_norm": 0.12057983580084324, |
|
"learning_rate": 1.938083417461113e-05, |
|
"loss": 0.4298, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.4867517173699706, |
|
"grad_norm": 0.11085757126201562, |
|
"learning_rate": 1.9375424585439994e-05, |
|
"loss": 0.4389, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.4887144259077527, |
|
"grad_norm": 0.11914366344102803, |
|
"learning_rate": 1.9369992229072834e-05, |
|
"loss": 0.4427, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.49067713444553485, |
|
"grad_norm": 0.13244864999661263, |
|
"learning_rate": 1.9364537118701542e-05, |
|
"loss": 0.4347, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.492639842983317, |
|
"grad_norm": 0.19302953420237423, |
|
"learning_rate": 1.935905926757326e-05, |
|
"loss": 0.4499, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.4946025515210991, |
|
"grad_norm": 0.11571291217554695, |
|
"learning_rate": 1.935355868899034e-05, |
|
"loss": 0.4375, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.49656526005888124, |
|
"grad_norm": 0.11702904926416031, |
|
"learning_rate": 1.9348035396310352e-05, |
|
"loss": 0.4354, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.4985279685966634, |
|
"grad_norm": 0.12006868637499236, |
|
"learning_rate": 1.9342489402945997e-05, |
|
"loss": 0.4338, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.5004906771344455, |
|
"grad_norm": 0.12190055167354476, |
|
"learning_rate": 1.9336920722365126e-05, |
|
"loss": 0.4434, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.5024533856722276, |
|
"grad_norm": 0.12867582275673506, |
|
"learning_rate": 1.9331329368090664e-05, |
|
"loss": 0.435, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.5024533856722276, |
|
"eval_loss": 0.44365355372428894, |
|
"eval_runtime": 245.6487, |
|
"eval_samples_per_second": 123.563, |
|
"eval_steps_per_second": 3.863, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.5044160942100098, |
|
"grad_norm": 0.1233612138867142, |
|
"learning_rate": 1.932571535370061e-05, |
|
"loss": 0.4417, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.5063788027477919, |
|
"grad_norm": 2.5334855914455368, |
|
"learning_rate": 1.932007869282799e-05, |
|
"loss": 0.4645, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.5083415112855741, |
|
"grad_norm": 0.15972840390446322, |
|
"learning_rate": 1.9314419399160805e-05, |
|
"loss": 0.4328, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.5103042198233563, |
|
"grad_norm": 0.21045316053724383, |
|
"learning_rate": 1.9308737486442045e-05, |
|
"loss": 0.437, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.5122669283611384, |
|
"grad_norm": 0.1353556883894671, |
|
"learning_rate": 1.930303296846961e-05, |
|
"loss": 0.4532, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.5142296368989205, |
|
"grad_norm": 0.13254413062805587, |
|
"learning_rate": 1.9297305859096305e-05, |
|
"loss": 0.452, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.5161923454367027, |
|
"grad_norm": 0.19171081658979544, |
|
"learning_rate": 1.9291556172229784e-05, |
|
"loss": 0.4302, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.5181550539744848, |
|
"grad_norm": 0.13134834595730802, |
|
"learning_rate": 1.9285783921832537e-05, |
|
"loss": 0.4405, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.5201177625122669, |
|
"grad_norm": 0.1301982247003894, |
|
"learning_rate": 1.9279989121921846e-05, |
|
"loss": 0.4462, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.5220804710500491, |
|
"grad_norm": 0.1321466010763688, |
|
"learning_rate": 1.927417178656975e-05, |
|
"loss": 0.4336, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.5240431795878312, |
|
"grad_norm": 0.1495881568007904, |
|
"learning_rate": 1.9268331929903013e-05, |
|
"loss": 0.4365, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.5260058881256133, |
|
"grad_norm": 0.2612207807396482, |
|
"learning_rate": 1.926246956610309e-05, |
|
"loss": 0.4457, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.5279685966633955, |
|
"grad_norm": 0.9154910569331776, |
|
"learning_rate": 1.9256584709406093e-05, |
|
"loss": 0.4553, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.5299313052011776, |
|
"grad_norm": 0.1534831664232736, |
|
"learning_rate": 1.9250677374102752e-05, |
|
"loss": 0.4441, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.5318940137389597, |
|
"grad_norm": 0.14984085964867322, |
|
"learning_rate": 1.9244747574538387e-05, |
|
"loss": 0.4306, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.5338567222767419, |
|
"grad_norm": 0.18760798027966705, |
|
"learning_rate": 1.9238795325112867e-05, |
|
"loss": 0.4538, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.535819430814524, |
|
"grad_norm": 0.1580188625628718, |
|
"learning_rate": 1.923282064028059e-05, |
|
"loss": 0.4443, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.5377821393523062, |
|
"grad_norm": 0.14444473834290128, |
|
"learning_rate": 1.9226823534550418e-05, |
|
"loss": 0.4443, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.5397448478900884, |
|
"grad_norm": 0.7330314247606119, |
|
"learning_rate": 1.9220804022485674e-05, |
|
"loss": 0.4638, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.5417075564278705, |
|
"grad_norm": 0.17268497255370896, |
|
"learning_rate": 1.921476211870408e-05, |
|
"loss": 0.4503, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.5436702649656526, |
|
"grad_norm": 0.18226707950341117, |
|
"learning_rate": 1.920869783787774e-05, |
|
"loss": 0.4383, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.5456329735034348, |
|
"grad_norm": 0.5632696487176844, |
|
"learning_rate": 1.9202611194733107e-05, |
|
"loss": 0.4452, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.5475956820412169, |
|
"grad_norm": 0.14836959675348457, |
|
"learning_rate": 1.9196502204050925e-05, |
|
"loss": 0.4314, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.549558390578999, |
|
"grad_norm": 0.34506199709023483, |
|
"learning_rate": 1.9190370880666206e-05, |
|
"loss": 0.4544, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.5515210991167812, |
|
"grad_norm": 0.16441747745890337, |
|
"learning_rate": 1.9184217239468213e-05, |
|
"loss": 0.4429, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.5534838076545633, |
|
"grad_norm": 0.16385435349212213, |
|
"learning_rate": 1.9178041295400383e-05, |
|
"loss": 0.46, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.5554465161923454, |
|
"grad_norm": 0.16456135455973658, |
|
"learning_rate": 1.917184306346032e-05, |
|
"loss": 0.4399, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.5574092247301276, |
|
"grad_norm": 0.14576272645037303, |
|
"learning_rate": 1.9165622558699763e-05, |
|
"loss": 0.4419, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.5593719332679097, |
|
"grad_norm": 0.15456597025956298, |
|
"learning_rate": 1.9159379796224524e-05, |
|
"loss": 0.449, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.5613346418056918, |
|
"grad_norm": 0.14297456086426616, |
|
"learning_rate": 1.9153114791194475e-05, |
|
"loss": 0.4371, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.563297350343474, |
|
"grad_norm": 0.14012901187447932, |
|
"learning_rate": 1.914682755882349e-05, |
|
"loss": 0.4408, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.5652600588812562, |
|
"grad_norm": 0.13060314277737214, |
|
"learning_rate": 1.9140518114379433e-05, |
|
"loss": 0.4437, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.5672227674190383, |
|
"grad_norm": 0.3090626051049369, |
|
"learning_rate": 1.91341864731841e-05, |
|
"loss": 0.4542, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.5691854759568205, |
|
"grad_norm": 0.2179414551780141, |
|
"learning_rate": 1.912783265061319e-05, |
|
"loss": 0.4408, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.5711481844946026, |
|
"grad_norm": 0.620377656428685, |
|
"learning_rate": 1.912145666209627e-05, |
|
"loss": 0.4879, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.5731108930323847, |
|
"grad_norm": 0.1689086534993839, |
|
"learning_rate": 1.9115058523116734e-05, |
|
"loss": 0.4462, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.5750736015701668, |
|
"grad_norm": 0.14611412522744563, |
|
"learning_rate": 1.910863824921176e-05, |
|
"loss": 0.4255, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.577036310107949, |
|
"grad_norm": 0.1882222162638847, |
|
"learning_rate": 1.9102195855972287e-05, |
|
"loss": 0.4425, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.5789990186457311, |
|
"grad_norm": 0.1656815361127913, |
|
"learning_rate": 1.909573135904296e-05, |
|
"loss": 0.4283, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.5809617271835132, |
|
"grad_norm": 0.13073397914317533, |
|
"learning_rate": 1.908924477412211e-05, |
|
"loss": 0.4558, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.5829244357212954, |
|
"grad_norm": 0.1441943832714689, |
|
"learning_rate": 1.90827361169617e-05, |
|
"loss": 0.4427, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.5848871442590775, |
|
"grad_norm": 0.12583976569411942, |
|
"learning_rate": 1.9076205403367287e-05, |
|
"loss": 0.4223, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.5868498527968596, |
|
"grad_norm": 0.1755315808099223, |
|
"learning_rate": 1.9069652649198004e-05, |
|
"loss": 0.4421, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.5888125613346418, |
|
"grad_norm": 0.12236948996858302, |
|
"learning_rate": 1.9063077870366504e-05, |
|
"loss": 0.4403, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.5907752698724239, |
|
"grad_norm": 0.13958618892557004, |
|
"learning_rate": 1.905648108283891e-05, |
|
"loss": 0.4304, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.592737978410206, |
|
"grad_norm": 0.14279319639440627, |
|
"learning_rate": 1.90498623026348e-05, |
|
"loss": 0.4461, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.5947006869479883, |
|
"grad_norm": 0.3909906198333688, |
|
"learning_rate": 1.9043221545827172e-05, |
|
"loss": 0.4845, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.5966633954857704, |
|
"grad_norm": 0.37297343958892704, |
|
"learning_rate": 1.903655882854237e-05, |
|
"loss": 0.4494, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.5986261040235525, |
|
"grad_norm": 0.8045487515732387, |
|
"learning_rate": 1.9029874166960075e-05, |
|
"loss": 0.4651, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.6005888125613347, |
|
"grad_norm": 0.17675678913043202, |
|
"learning_rate": 1.9023167577313267e-05, |
|
"loss": 0.4342, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.6025515210991168, |
|
"grad_norm": 0.4614395798632603, |
|
"learning_rate": 1.901643907588816e-05, |
|
"loss": 0.4666, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.6045142296368989, |
|
"grad_norm": 0.24112777104201236, |
|
"learning_rate": 1.900968867902419e-05, |
|
"loss": 0.4608, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.6064769381746811, |
|
"grad_norm": 0.16289640981921516, |
|
"learning_rate": 1.900291640311396e-05, |
|
"loss": 0.4347, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.6084396467124632, |
|
"grad_norm": 0.16490807898196183, |
|
"learning_rate": 1.8996122264603202e-05, |
|
"loss": 0.4286, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.6104023552502453, |
|
"grad_norm": 0.31752463155917027, |
|
"learning_rate": 1.8989306279990736e-05, |
|
"loss": 0.4721, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.6123650637880275, |
|
"grad_norm": 0.16490476925454267, |
|
"learning_rate": 1.898246846582844e-05, |
|
"loss": 0.4482, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.6143277723258096, |
|
"grad_norm": 0.15956184991382258, |
|
"learning_rate": 1.897560883872121e-05, |
|
"loss": 0.4417, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.6162904808635917, |
|
"grad_norm": 0.1831073214143379, |
|
"learning_rate": 1.8968727415326885e-05, |
|
"loss": 0.4441, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.6182531894013739, |
|
"grad_norm": 0.3402383015575775, |
|
"learning_rate": 1.896182421235626e-05, |
|
"loss": 0.4643, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.620215897939156, |
|
"grad_norm": 0.15056250902532822, |
|
"learning_rate": 1.895489924657301e-05, |
|
"loss": 0.4472, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.6221786064769381, |
|
"grad_norm": 0.17384419824788128, |
|
"learning_rate": 1.8947952534793663e-05, |
|
"loss": 0.4455, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.6241413150147204, |
|
"grad_norm": 0.18283666562150924, |
|
"learning_rate": 1.894098409388754e-05, |
|
"loss": 0.4425, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.6261040235525025, |
|
"grad_norm": 0.18363348573791377, |
|
"learning_rate": 1.8933993940776753e-05, |
|
"loss": 0.4347, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.6280667320902846, |
|
"grad_norm": 0.2173940703554356, |
|
"learning_rate": 1.8926982092436117e-05, |
|
"loss": 0.4426, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.6300294406280668, |
|
"grad_norm": 0.13338358899432057, |
|
"learning_rate": 1.8919948565893144e-05, |
|
"loss": 0.4348, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.6319921491658489, |
|
"grad_norm": 0.15291106806170288, |
|
"learning_rate": 1.8912893378227984e-05, |
|
"loss": 0.4441, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.633954857703631, |
|
"grad_norm": 0.19828098406270409, |
|
"learning_rate": 1.8905816546573398e-05, |
|
"loss": 0.4498, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.6359175662414132, |
|
"grad_norm": 0.12561855756704574, |
|
"learning_rate": 1.8898718088114688e-05, |
|
"loss": 0.4501, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.6378802747791953, |
|
"grad_norm": 0.413637068467992, |
|
"learning_rate": 1.8891598020089695e-05, |
|
"loss": 0.4522, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.6398429833169774, |
|
"grad_norm": 0.13065710530489547, |
|
"learning_rate": 1.8884456359788725e-05, |
|
"loss": 0.4517, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.6418056918547596, |
|
"grad_norm": 0.1255320805445076, |
|
"learning_rate": 1.887729312455452e-05, |
|
"loss": 0.4328, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.6437684003925417, |
|
"grad_norm": 0.17052107382561416, |
|
"learning_rate": 1.887010833178222e-05, |
|
"loss": 0.4335, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.6457311089303238, |
|
"grad_norm": 0.1463632116200419, |
|
"learning_rate": 1.8862901998919305e-05, |
|
"loss": 0.4489, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.647693817468106, |
|
"grad_norm": 0.20054042393653457, |
|
"learning_rate": 1.8855674143465567e-05, |
|
"loss": 0.4415, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.6496565260058881, |
|
"grad_norm": 0.12528395471108425, |
|
"learning_rate": 1.8848424782973075e-05, |
|
"loss": 0.4251, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.6516192345436702, |
|
"grad_norm": 0.12724271501474496, |
|
"learning_rate": 1.8841153935046098e-05, |
|
"loss": 0.4226, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.6535819430814525, |
|
"grad_norm": 0.12815480846276966, |
|
"learning_rate": 1.8833861617341108e-05, |
|
"loss": 0.4389, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.6555446516192346, |
|
"grad_norm": 0.11881092687539777, |
|
"learning_rate": 1.8826547847566692e-05, |
|
"loss": 0.4301, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.6575073601570167, |
|
"grad_norm": 0.11279466332160643, |
|
"learning_rate": 1.881921264348355e-05, |
|
"loss": 0.4441, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.6594700686947988, |
|
"grad_norm": 0.12334758226552325, |
|
"learning_rate": 1.8811856022904423e-05, |
|
"loss": 0.4333, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.661432777232581, |
|
"grad_norm": 0.1268483867046061, |
|
"learning_rate": 1.8804478003694066e-05, |
|
"loss": 0.4317, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.6633954857703631, |
|
"grad_norm": 0.12443337272902948, |
|
"learning_rate": 1.8797078603769184e-05, |
|
"loss": 0.4282, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.6653581943081452, |
|
"grad_norm": 0.12713474397441266, |
|
"learning_rate": 1.878965784109842e-05, |
|
"loss": 0.4482, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.6673209028459274, |
|
"grad_norm": 0.12143777103722954, |
|
"learning_rate": 1.8782215733702286e-05, |
|
"loss": 0.4342, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.6692836113837095, |
|
"grad_norm": 0.23691651644437595, |
|
"learning_rate": 1.877475229965313e-05, |
|
"loss": 0.4467, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.6712463199214916, |
|
"grad_norm": 0.13966524868065613, |
|
"learning_rate": 1.876726755707508e-05, |
|
"loss": 0.4119, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.6732090284592738, |
|
"grad_norm": 0.1324720435854753, |
|
"learning_rate": 1.875976152414402e-05, |
|
"loss": 0.4327, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.6751717369970559, |
|
"grad_norm": 0.1305963044017511, |
|
"learning_rate": 1.8752234219087538e-05, |
|
"loss": 0.4345, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.677134445534838, |
|
"grad_norm": 0.1256369439378997, |
|
"learning_rate": 1.8744685660184868e-05, |
|
"loss": 0.4373, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.6790971540726202, |
|
"grad_norm": 0.1306844371007657, |
|
"learning_rate": 1.8737115865766865e-05, |
|
"loss": 0.4326, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.6810598626104023, |
|
"grad_norm": 0.122862696340445, |
|
"learning_rate": 1.8729524854215942e-05, |
|
"loss": 0.4166, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.6830225711481845, |
|
"grad_norm": 0.13535420265000028, |
|
"learning_rate": 1.8721912643966055e-05, |
|
"loss": 0.4237, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.6849852796859667, |
|
"grad_norm": 0.11284294465947456, |
|
"learning_rate": 1.8714279253502616e-05, |
|
"loss": 0.4259, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.6869479882237488, |
|
"grad_norm": 0.1193357989589537, |
|
"learning_rate": 1.8706624701362485e-05, |
|
"loss": 0.4467, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.6889106967615309, |
|
"grad_norm": 0.1131129128247003, |
|
"learning_rate": 1.8698949006133903e-05, |
|
"loss": 0.4366, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.6908734052993131, |
|
"grad_norm": 0.111747855862703, |
|
"learning_rate": 1.8691252186456465e-05, |
|
"loss": 0.4433, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.6928361138370952, |
|
"grad_norm": 0.10862949730132815, |
|
"learning_rate": 1.8683534261021058e-05, |
|
"loss": 0.4299, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.6947988223748773, |
|
"grad_norm": 0.12299814873161867, |
|
"learning_rate": 1.8675795248569816e-05, |
|
"loss": 0.4366, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.6967615309126595, |
|
"grad_norm": 0.11664167882155978, |
|
"learning_rate": 1.866803516789609e-05, |
|
"loss": 0.4379, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.6987242394504416, |
|
"grad_norm": 0.12925426587807418, |
|
"learning_rate": 1.866025403784439e-05, |
|
"loss": 0.4333, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.7006869479882237, |
|
"grad_norm": 0.10893935119976447, |
|
"learning_rate": 1.8652451877310337e-05, |
|
"loss": 0.4293, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.7026496565260059, |
|
"grad_norm": 0.1227760342543677, |
|
"learning_rate": 1.8644628705240636e-05, |
|
"loss": 0.4255, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.704612365063788, |
|
"grad_norm": 0.11162933142481653, |
|
"learning_rate": 1.8636784540633002e-05, |
|
"loss": 0.4277, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.7065750736015701, |
|
"grad_norm": 0.11784511277232618, |
|
"learning_rate": 1.862891940253613e-05, |
|
"loss": 0.43, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.7085377821393523, |
|
"grad_norm": 0.19349027686476353, |
|
"learning_rate": 1.8621033310049658e-05, |
|
"loss": 0.4388, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.7105004906771345, |
|
"grad_norm": 0.11461754333230102, |
|
"learning_rate": 1.8613126282324092e-05, |
|
"loss": 0.4459, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.7124631992149166, |
|
"grad_norm": 0.10611440044540593, |
|
"learning_rate": 1.860519833856079e-05, |
|
"loss": 0.4286, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.7144259077526988, |
|
"grad_norm": 1.3919982408500475, |
|
"learning_rate": 1.8597249498011906e-05, |
|
"loss": 0.4271, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.7163886162904809, |
|
"grad_norm": 0.23104503037861346, |
|
"learning_rate": 1.858927977998032e-05, |
|
"loss": 0.4489, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.718351324828263, |
|
"grad_norm": 0.13423371418728916, |
|
"learning_rate": 1.858128920381963e-05, |
|
"loss": 0.4432, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.7203140333660452, |
|
"grad_norm": 0.14035310994358413, |
|
"learning_rate": 1.8573277788934084e-05, |
|
"loss": 0.4436, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.7222767419038273, |
|
"grad_norm": 0.12538177756048358, |
|
"learning_rate": 1.8565245554778516e-05, |
|
"loss": 0.4152, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.7242394504416094, |
|
"grad_norm": 0.1393704340215518, |
|
"learning_rate": 1.855719252085834e-05, |
|
"loss": 0.4599, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.7262021589793916, |
|
"grad_norm": 0.15221198393758803, |
|
"learning_rate": 1.854911870672947e-05, |
|
"loss": 0.4346, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.7281648675171737, |
|
"grad_norm": 0.13205252608835663, |
|
"learning_rate": 1.8541024131998277e-05, |
|
"loss": 0.433, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.7301275760549558, |
|
"grad_norm": 0.1824152481964556, |
|
"learning_rate": 1.8532908816321557e-05, |
|
"loss": 0.4258, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.732090284592738, |
|
"grad_norm": 0.11473500921109103, |
|
"learning_rate": 1.852477277940647e-05, |
|
"loss": 0.4364, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.7340529931305201, |
|
"grad_norm": 0.17550775961163126, |
|
"learning_rate": 1.8516616041010495e-05, |
|
"loss": 0.4311, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.7360157016683022, |
|
"grad_norm": 0.11491081744632524, |
|
"learning_rate": 1.850843862094138e-05, |
|
"loss": 0.4299, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.7379784102060843, |
|
"grad_norm": 0.12893078747082584, |
|
"learning_rate": 1.8500240539057093e-05, |
|
"loss": 0.4375, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.7399411187438666, |
|
"grad_norm": 0.1386244465195304, |
|
"learning_rate": 1.849202181526579e-05, |
|
"loss": 0.4411, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.7419038272816487, |
|
"grad_norm": 0.1363795250121767, |
|
"learning_rate": 1.848378246952574e-05, |
|
"loss": 0.4294, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.7438665358194309, |
|
"grad_norm": 0.11138728492984143, |
|
"learning_rate": 1.8475522521845296e-05, |
|
"loss": 0.4403, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.745829244357213, |
|
"grad_norm": 0.1238513979749556, |
|
"learning_rate": 1.8467241992282842e-05, |
|
"loss": 0.4267, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.7477919528949951, |
|
"grad_norm": 0.12373568951195847, |
|
"learning_rate": 1.845894090094674e-05, |
|
"loss": 0.4312, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.7497546614327772, |
|
"grad_norm": 0.19653148756122196, |
|
"learning_rate": 1.8450619267995283e-05, |
|
"loss": 0.4343, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.7517173699705594, |
|
"grad_norm": 0.1394570505619246, |
|
"learning_rate": 1.8442277113636654e-05, |
|
"loss": 0.4373, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.7536800785083415, |
|
"grad_norm": 0.12251934848515013, |
|
"learning_rate": 1.843391445812886e-05, |
|
"loss": 0.4267, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.7536800785083415, |
|
"eval_loss": 0.43717867136001587, |
|
"eval_runtime": 245.9442, |
|
"eval_samples_per_second": 123.414, |
|
"eval_steps_per_second": 3.859, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.7556427870461236, |
|
"grad_norm": 0.11928267779998, |
|
"learning_rate": 1.84255313217797e-05, |
|
"loss": 0.4411, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.7576054955839058, |
|
"grad_norm": 0.10973043663025576, |
|
"learning_rate": 1.84171277249467e-05, |
|
"loss": 0.4266, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.7595682041216879, |
|
"grad_norm": 0.6751166131440709, |
|
"learning_rate": 1.8408703688037088e-05, |
|
"loss": 0.4442, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.76153091265947, |
|
"grad_norm": 0.11525859230534183, |
|
"learning_rate": 1.8400259231507716e-05, |
|
"loss": 0.4257, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.7634936211972522, |
|
"grad_norm": 0.14081912497481, |
|
"learning_rate": 1.8391794375865025e-05, |
|
"loss": 0.4559, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.7654563297350343, |
|
"grad_norm": 0.1430446337142817, |
|
"learning_rate": 1.8383309141664992e-05, |
|
"loss": 0.4348, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.7674190382728164, |
|
"grad_norm": 0.13701747456448043, |
|
"learning_rate": 1.837480354951308e-05, |
|
"loss": 0.4355, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.7693817468105987, |
|
"grad_norm": 0.15468002573828674, |
|
"learning_rate": 1.83662776200642e-05, |
|
"loss": 0.4247, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.7713444553483808, |
|
"grad_norm": 0.1359423923850112, |
|
"learning_rate": 1.8357731374022635e-05, |
|
"loss": 0.4378, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.7733071638861629, |
|
"grad_norm": 0.12156559871467941, |
|
"learning_rate": 1.8349164832142015e-05, |
|
"loss": 0.433, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.7752698724239451, |
|
"grad_norm": 1.1029116856223753, |
|
"learning_rate": 1.834057801522525e-05, |
|
"loss": 0.456, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.7772325809617272, |
|
"grad_norm": 0.1498532799383752, |
|
"learning_rate": 1.833197094412449e-05, |
|
"loss": 0.4416, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.7791952894995093, |
|
"grad_norm": 0.4425396937410584, |
|
"learning_rate": 1.832334363974107e-05, |
|
"loss": 0.4358, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.7811579980372915, |
|
"grad_norm": 0.14741438537186383, |
|
"learning_rate": 1.8314696123025456e-05, |
|
"loss": 0.4432, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.7831207065750736, |
|
"grad_norm": 1.3635940422702577, |
|
"learning_rate": 1.8306028414977196e-05, |
|
"loss": 0.4473, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.7850834151128557, |
|
"grad_norm": 0.31067568125771744, |
|
"learning_rate": 1.8297340536644877e-05, |
|
"loss": 0.4341, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.7870461236506379, |
|
"grad_norm": 0.23052926471546017, |
|
"learning_rate": 1.8288632509126064e-05, |
|
"loss": 0.4469, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.78900883218842, |
|
"grad_norm": 1.4150298223131084, |
|
"learning_rate": 1.827990435356725e-05, |
|
"loss": 0.4573, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.7909715407262021, |
|
"grad_norm": 1.2166261643274474, |
|
"learning_rate": 1.8271156091163813e-05, |
|
"loss": 0.4623, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.7929342492639843, |
|
"grad_norm": 8.194510529799096, |
|
"learning_rate": 1.826238774315995e-05, |
|
"loss": 0.9405, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.7948969578017664, |
|
"grad_norm": 1.780892469368783, |
|
"learning_rate": 1.8253599330848638e-05, |
|
"loss": 0.6091, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.7968596663395485, |
|
"grad_norm": 7.535603719678218, |
|
"learning_rate": 1.8244790875571582e-05, |
|
"loss": 0.9524, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.7988223748773308, |
|
"grad_norm": 1.7773136251051938, |
|
"learning_rate": 1.823596239871915e-05, |
|
"loss": 0.6362, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.8007850834151129, |
|
"grad_norm": 0.9044661668135322, |
|
"learning_rate": 1.8227113921730336e-05, |
|
"loss": 0.5439, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.802747791952895, |
|
"grad_norm": 2.7139783131315376, |
|
"learning_rate": 1.8218245466092704e-05, |
|
"loss": 0.5663, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.8047105004906772, |
|
"grad_norm": 0.8834413599502067, |
|
"learning_rate": 1.8209357053342325e-05, |
|
"loss": 0.5256, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.8066732090284593, |
|
"grad_norm": 1.1467415784558643, |
|
"learning_rate": 1.8200448705063748e-05, |
|
"loss": 0.5348, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.8086359175662414, |
|
"grad_norm": 0.515184372706671, |
|
"learning_rate": 1.819152044288992e-05, |
|
"loss": 0.5062, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.8105986261040236, |
|
"grad_norm": 0.6461620091644302, |
|
"learning_rate": 1.8182572288502154e-05, |
|
"loss": 0.5077, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.8125613346418057, |
|
"grad_norm": 0.4908151447534473, |
|
"learning_rate": 1.8173604263630066e-05, |
|
"loss": 0.4806, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.8145240431795878, |
|
"grad_norm": 0.3802375716765609, |
|
"learning_rate": 1.8164616390051523e-05, |
|
"loss": 0.4802, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.81648675171737, |
|
"grad_norm": 0.3805715926219177, |
|
"learning_rate": 1.8155608689592604e-05, |
|
"loss": 0.4814, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.8184494602551521, |
|
"grad_norm": 0.3945834169579088, |
|
"learning_rate": 1.814658118412752e-05, |
|
"loss": 0.4724, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.8204121687929342, |
|
"grad_norm": 0.2883495416963042, |
|
"learning_rate": 1.8137533895578585e-05, |
|
"loss": 0.4645, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.8223748773307163, |
|
"grad_norm": 0.2790010413679452, |
|
"learning_rate": 1.8128466845916156e-05, |
|
"loss": 0.4617, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.8243375858684985, |
|
"grad_norm": 0.2786295258327648, |
|
"learning_rate": 1.811938005715857e-05, |
|
"loss": 0.4791, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.8263002944062807, |
|
"grad_norm": 0.23317317430153287, |
|
"learning_rate": 1.81102735513721e-05, |
|
"loss": 0.4434, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.8282630029440629, |
|
"grad_norm": 0.2451241340574092, |
|
"learning_rate": 1.8101147350670905e-05, |
|
"loss": 0.4528, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.830225711481845, |
|
"grad_norm": 0.22624335497909545, |
|
"learning_rate": 1.8092001477216975e-05, |
|
"loss": 0.4644, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.8321884200196271, |
|
"grad_norm": 0.23107219922308564, |
|
"learning_rate": 1.8082835953220055e-05, |
|
"loss": 0.4352, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.8341511285574092, |
|
"grad_norm": 0.21335476872866976, |
|
"learning_rate": 1.8073650800937627e-05, |
|
"loss": 0.4515, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.8361138370951914, |
|
"grad_norm": 0.2076816094768591, |
|
"learning_rate": 1.806444604267483e-05, |
|
"loss": 0.4544, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.8380765456329735, |
|
"grad_norm": 0.19718486185623377, |
|
"learning_rate": 1.805522170078441e-05, |
|
"loss": 0.4428, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.8400392541707556, |
|
"grad_norm": 0.227660993052998, |
|
"learning_rate": 1.8045977797666685e-05, |
|
"loss": 0.4443, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.8420019627085378, |
|
"grad_norm": 0.21176408168289124, |
|
"learning_rate": 1.803671435576946e-05, |
|
"loss": 0.4617, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.8439646712463199, |
|
"grad_norm": 0.16842294987099302, |
|
"learning_rate": 1.8027431397587993e-05, |
|
"loss": 0.462, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.845927379784102, |
|
"grad_norm": 0.2398849119706587, |
|
"learning_rate": 1.8018128945664936e-05, |
|
"loss": 0.4408, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.8478900883218842, |
|
"grad_norm": 0.493692443958903, |
|
"learning_rate": 1.8008807022590283e-05, |
|
"loss": 0.4592, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.8498527968596663, |
|
"grad_norm": 0.544249601841786, |
|
"learning_rate": 1.7999465651001297e-05, |
|
"loss": 0.4476, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.8518155053974484, |
|
"grad_norm": 0.13924680999659736, |
|
"learning_rate": 1.7990104853582494e-05, |
|
"loss": 0.4281, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.8537782139352306, |
|
"grad_norm": 0.173740825747816, |
|
"learning_rate": 1.7980724653065538e-05, |
|
"loss": 0.4269, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.8557409224730128, |
|
"grad_norm": 0.15075725057898212, |
|
"learning_rate": 1.7971325072229227e-05, |
|
"loss": 0.4276, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.8577036310107949, |
|
"grad_norm": 0.1448476326691961, |
|
"learning_rate": 1.7961906133899417e-05, |
|
"loss": 0.4259, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.8596663395485771, |
|
"grad_norm": 0.15099110760456436, |
|
"learning_rate": 1.7952467860948975e-05, |
|
"loss": 0.4385, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.8616290480863592, |
|
"grad_norm": 0.2908525767466193, |
|
"learning_rate": 1.7943010276297717e-05, |
|
"loss": 0.4532, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.8635917566241413, |
|
"grad_norm": 0.19591315110166763, |
|
"learning_rate": 1.7933533402912354e-05, |
|
"loss": 0.4443, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.8655544651619235, |
|
"grad_norm": 0.1427892817158057, |
|
"learning_rate": 1.792403726380644e-05, |
|
"loss": 0.4497, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.8675171736997056, |
|
"grad_norm": 0.1557386790206549, |
|
"learning_rate": 1.791452188204031e-05, |
|
"loss": 0.4455, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.8694798822374877, |
|
"grad_norm": 0.13076931277969595, |
|
"learning_rate": 1.7904987280721037e-05, |
|
"loss": 0.4273, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.8714425907752699, |
|
"grad_norm": 0.17697976434079088, |
|
"learning_rate": 1.7895433483002356e-05, |
|
"loss": 0.4411, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.873405299313052, |
|
"grad_norm": 0.12603127769001743, |
|
"learning_rate": 1.7885860512084622e-05, |
|
"loss": 0.4254, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.8753680078508341, |
|
"grad_norm": 0.7782445781966569, |
|
"learning_rate": 1.7876268391214756e-05, |
|
"loss": 0.4558, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.8773307163886163, |
|
"grad_norm": 0.12647292734359114, |
|
"learning_rate": 1.786665714368617e-05, |
|
"loss": 0.4407, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.8792934249263984, |
|
"grad_norm": 0.20575076444088897, |
|
"learning_rate": 1.785702679283874e-05, |
|
"loss": 0.438, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.8812561334641805, |
|
"grad_norm": 0.19417352083348613, |
|
"learning_rate": 1.7847377362058712e-05, |
|
"loss": 0.4484, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.8832188420019627, |
|
"grad_norm": 0.12756724963135646, |
|
"learning_rate": 1.7837708874778683e-05, |
|
"loss": 0.4348, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.8851815505397449, |
|
"grad_norm": 0.11799761970570792, |
|
"learning_rate": 1.7828021354477515e-05, |
|
"loss": 0.437, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.887144259077527, |
|
"grad_norm": 0.1251248189106141, |
|
"learning_rate": 1.78183148246803e-05, |
|
"loss": 0.4302, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.8891069676153092, |
|
"grad_norm": 0.11474241608076738, |
|
"learning_rate": 1.7808589308958284e-05, |
|
"loss": 0.4343, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.8910696761530913, |
|
"grad_norm": 0.12586354241305017, |
|
"learning_rate": 1.7798844830928818e-05, |
|
"loss": 0.4348, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.8930323846908734, |
|
"grad_norm": 0.19899892032690525, |
|
"learning_rate": 1.778908141425531e-05, |
|
"loss": 0.4416, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.8949950932286556, |
|
"grad_norm": 0.21889266119451378, |
|
"learning_rate": 1.777929908264715e-05, |
|
"loss": 0.431, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.8969578017664377, |
|
"grad_norm": 0.1247721576190928, |
|
"learning_rate": 1.7769497859859664e-05, |
|
"loss": 0.4268, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.8989205103042198, |
|
"grad_norm": 0.1848738794336329, |
|
"learning_rate": 1.775967776969405e-05, |
|
"loss": 0.4278, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.900883218842002, |
|
"grad_norm": 0.14033082433848945, |
|
"learning_rate": 1.774983883599733e-05, |
|
"loss": 0.4202, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.9028459273797841, |
|
"grad_norm": 0.1730390019850834, |
|
"learning_rate": 1.7739981082662275e-05, |
|
"loss": 0.4298, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.9048086359175662, |
|
"grad_norm": 0.11693891835416576, |
|
"learning_rate": 1.773010453362737e-05, |
|
"loss": 0.4448, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.9067713444553483, |
|
"grad_norm": 0.12196405283755252, |
|
"learning_rate": 1.772020921287674e-05, |
|
"loss": 0.4378, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.9087340529931305, |
|
"grad_norm": 0.11586861714009496, |
|
"learning_rate": 1.771029514444008e-05, |
|
"loss": 0.4471, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.9106967615309126, |
|
"grad_norm": 0.12574469171427458, |
|
"learning_rate": 1.7700362352392632e-05, |
|
"loss": 0.4385, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.9126594700686947, |
|
"grad_norm": 0.12237397310589682, |
|
"learning_rate": 1.7690410860855095e-05, |
|
"loss": 0.4309, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.914622178606477, |
|
"grad_norm": 0.11990527876395565, |
|
"learning_rate": 1.7680440693993586e-05, |
|
"loss": 0.4336, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.9165848871442591, |
|
"grad_norm": 0.11451672776029655, |
|
"learning_rate": 1.7670451876019562e-05, |
|
"loss": 0.4298, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.9185475956820413, |
|
"grad_norm": 0.10316051482791179, |
|
"learning_rate": 1.766044443118978e-05, |
|
"loss": 0.4331, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.9205103042198234, |
|
"grad_norm": 0.12496652874488692, |
|
"learning_rate": 1.7650418383806233e-05, |
|
"loss": 0.4354, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.9224730127576055, |
|
"grad_norm": 0.11863765454148498, |
|
"learning_rate": 1.7640373758216075e-05, |
|
"loss": 0.4352, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.9244357212953876, |
|
"grad_norm": 0.11413588741219476, |
|
"learning_rate": 1.763031057881159e-05, |
|
"loss": 0.4302, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.9263984298331698, |
|
"grad_norm": 0.11625749644832431, |
|
"learning_rate": 1.762022887003011e-05, |
|
"loss": 0.4336, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.9283611383709519, |
|
"grad_norm": 0.11492501049851209, |
|
"learning_rate": 1.761012865635396e-05, |
|
"loss": 0.422, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.930323846908734, |
|
"grad_norm": 0.12563889502937342, |
|
"learning_rate": 1.7600009962310417e-05, |
|
"loss": 0.4348, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.9322865554465162, |
|
"grad_norm": 0.12443828665681882, |
|
"learning_rate": 1.758987281247162e-05, |
|
"loss": 0.4242, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.9342492639842983, |
|
"grad_norm": 0.11644777576344677, |
|
"learning_rate": 1.757971723145453e-05, |
|
"loss": 0.4194, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.9362119725220804, |
|
"grad_norm": 0.10793194810684831, |
|
"learning_rate": 1.7569543243920873e-05, |
|
"loss": 0.4345, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.9381746810598626, |
|
"grad_norm": 0.11594061352654564, |
|
"learning_rate": 1.7559350874577066e-05, |
|
"loss": 0.4314, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.9401373895976447, |
|
"grad_norm": 0.10332500150437529, |
|
"learning_rate": 1.754914014817416e-05, |
|
"loss": 0.4296, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.9421000981354269, |
|
"grad_norm": 0.11644279502007197, |
|
"learning_rate": 1.75389110895078e-05, |
|
"loss": 0.429, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.9440628066732091, |
|
"grad_norm": 0.10948538558656842, |
|
"learning_rate": 1.7528663723418137e-05, |
|
"loss": 0.4173, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.9460255152109912, |
|
"grad_norm": 0.0994509640223274, |
|
"learning_rate": 1.7518398074789776e-05, |
|
"loss": 0.4205, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.9479882237487733, |
|
"grad_norm": 0.9276868697631023, |
|
"learning_rate": 1.750811416855173e-05, |
|
"loss": 0.4673, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.9499509322865555, |
|
"grad_norm": 0.1374275322216112, |
|
"learning_rate": 1.7497812029677344e-05, |
|
"loss": 0.4242, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.9519136408243376, |
|
"grad_norm": 0.38132564392520824, |
|
"learning_rate": 1.7487491683184236e-05, |
|
"loss": 0.4371, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.9538763493621197, |
|
"grad_norm": 1.7151988276134096, |
|
"learning_rate": 1.7477153154134244e-05, |
|
"loss": 0.4803, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.9558390578999019, |
|
"grad_norm": 0.32545933630412127, |
|
"learning_rate": 1.7466796467633357e-05, |
|
"loss": 0.4369, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.957801766437684, |
|
"grad_norm": 0.1866079133255275, |
|
"learning_rate": 1.7456421648831658e-05, |
|
"loss": 0.4334, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.9597644749754661, |
|
"grad_norm": 0.3272314043287591, |
|
"learning_rate": 1.7446028722923266e-05, |
|
"loss": 0.4443, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.9617271835132483, |
|
"grad_norm": 0.16996374422654656, |
|
"learning_rate": 1.743561771514626e-05, |
|
"loss": 0.4366, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.9636898920510304, |
|
"grad_norm": 0.15827075993181033, |
|
"learning_rate": 1.7425188650782648e-05, |
|
"loss": 0.436, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.9656526005888125, |
|
"grad_norm": 0.43165470913413256, |
|
"learning_rate": 1.741474155515827e-05, |
|
"loss": 0.4433, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.9676153091265947, |
|
"grad_norm": 1.745564670186826, |
|
"learning_rate": 1.7404276453642755e-05, |
|
"loss": 0.546, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.9695780176643768, |
|
"grad_norm": 0.8077780518520157, |
|
"learning_rate": 1.739379337164946e-05, |
|
"loss": 0.4497, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.971540726202159, |
|
"grad_norm": 0.474709975296865, |
|
"learning_rate": 1.738329233463542e-05, |
|
"loss": 0.4496, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.9735034347399412, |
|
"grad_norm": 0.26095766576170426, |
|
"learning_rate": 1.737277336810124e-05, |
|
"loss": 0.4544, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.9754661432777233, |
|
"grad_norm": 0.2912691308833845, |
|
"learning_rate": 1.7362236497591097e-05, |
|
"loss": 0.4423, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.9774288518155054, |
|
"grad_norm": 0.24976229543285455, |
|
"learning_rate": 1.7351681748692622e-05, |
|
"loss": 0.4398, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.9793915603532876, |
|
"grad_norm": 0.20231287847225168, |
|
"learning_rate": 1.7341109147036873e-05, |
|
"loss": 0.4467, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.9813542688910697, |
|
"grad_norm": 0.2412935338809819, |
|
"learning_rate": 1.7330518718298263e-05, |
|
"loss": 0.4357, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.9833169774288518, |
|
"grad_norm": 0.18003467387447356, |
|
"learning_rate": 1.7319910488194494e-05, |
|
"loss": 0.4318, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.985279685966634, |
|
"grad_norm": 0.20072691034722498, |
|
"learning_rate": 1.7309284482486494e-05, |
|
"loss": 0.4458, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.9872423945044161, |
|
"grad_norm": 0.19649513698293491, |
|
"learning_rate": 1.7298640726978357e-05, |
|
"loss": 0.4514, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.9892051030421982, |
|
"grad_norm": 0.2071441349062203, |
|
"learning_rate": 1.7287979247517285e-05, |
|
"loss": 0.439, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.9911678115799804, |
|
"grad_norm": 0.20373439560246617, |
|
"learning_rate": 1.7277300069993515e-05, |
|
"loss": 0.4205, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.9931305201177625, |
|
"grad_norm": 0.18699843473849642, |
|
"learning_rate": 1.7266603220340273e-05, |
|
"loss": 0.4264, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.9950932286555446, |
|
"grad_norm": 0.1600200894069467, |
|
"learning_rate": 1.725588872453368e-05, |
|
"loss": 0.4277, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.9970559371933267, |
|
"grad_norm": 0.18202529308383952, |
|
"learning_rate": 1.7245156608592727e-05, |
|
"loss": 0.4491, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.9990186457311089, |
|
"grad_norm": 0.15887455063463465, |
|
"learning_rate": 1.7234406898579187e-05, |
|
"loss": 0.4558, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.15887455063463465, |
|
"learning_rate": 1.7223639620597556e-05, |
|
"loss": 0.209, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.0019627085377822, |
|
"grad_norm": 0.1825187909705365, |
|
"learning_rate": 1.7212854800794998e-05, |
|
"loss": 0.3468, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 1.0039254170755643, |
|
"grad_norm": 0.20957758969625895, |
|
"learning_rate": 1.7202052465361268e-05, |
|
"loss": 0.3492, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 1.0039254170755643, |
|
"eval_loss": 0.4533812999725342, |
|
"eval_runtime": 245.7967, |
|
"eval_samples_per_second": 123.488, |
|
"eval_steps_per_second": 3.861, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 1.0058881256133465, |
|
"grad_norm": 0.28069305660927224, |
|
"learning_rate": 1.719123264052866e-05, |
|
"loss": 0.339, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 1.0078508341511285, |
|
"grad_norm": 0.19132375047173436, |
|
"learning_rate": 1.718039535257194e-05, |
|
"loss": 0.3261, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 1.0098135426889108, |
|
"grad_norm": 0.17431307028109386, |
|
"learning_rate": 1.7169540627808276e-05, |
|
"loss": 0.3514, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 1.0117762512266928, |
|
"grad_norm": 0.5111559402715384, |
|
"learning_rate": 1.7158668492597186e-05, |
|
"loss": 0.3241, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 1.013738959764475, |
|
"grad_norm": 0.16901933614697587, |
|
"learning_rate": 1.7147778973340466e-05, |
|
"loss": 0.3228, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 1.015701668302257, |
|
"grad_norm": 0.15876073778651265, |
|
"learning_rate": 1.7136872096482123e-05, |
|
"loss": 0.4057, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 1.0176643768400393, |
|
"grad_norm": 2.1272381042420556, |
|
"learning_rate": 1.7125947888508322e-05, |
|
"loss": 0.3404, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 1.0196270853778213, |
|
"grad_norm": 0.19402112582649061, |
|
"learning_rate": 1.7115006375947304e-05, |
|
"loss": 0.3396, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.0215897939156036, |
|
"grad_norm": 0.184906049478743, |
|
"learning_rate": 1.7104047585369345e-05, |
|
"loss": 0.3335, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 1.0235525024533856, |
|
"grad_norm": 0.2027640255413657, |
|
"learning_rate": 1.7093071543386667e-05, |
|
"loss": 0.3336, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 1.0255152109911678, |
|
"grad_norm": 0.1592888909141134, |
|
"learning_rate": 1.7082078276653392e-05, |
|
"loss": 0.3224, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 1.0274779195289498, |
|
"grad_norm": 0.2560733378030634, |
|
"learning_rate": 1.7071067811865477e-05, |
|
"loss": 0.3356, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 1.029440628066732, |
|
"grad_norm": 0.15861327684708404, |
|
"learning_rate": 1.7060040175760626e-05, |
|
"loss": 0.3237, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.0314033366045143, |
|
"grad_norm": 0.135405942028162, |
|
"learning_rate": 1.7048995395118253e-05, |
|
"loss": 0.3417, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 1.0333660451422964, |
|
"grad_norm": 0.15133569201059896, |
|
"learning_rate": 1.7037933496759404e-05, |
|
"loss": 0.3343, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 1.0353287536800786, |
|
"grad_norm": 0.134069507215934, |
|
"learning_rate": 1.7026854507546694e-05, |
|
"loss": 0.3338, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 1.0372914622178606, |
|
"grad_norm": 18.90339450893893, |
|
"learning_rate": 1.7015758454384234e-05, |
|
"loss": 0.346, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 1.0392541707556429, |
|
"grad_norm": 0.19965137004126413, |
|
"learning_rate": 1.7004645364217584e-05, |
|
"loss": 0.3264, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.0412168792934249, |
|
"grad_norm": 0.16909173895106233, |
|
"learning_rate": 1.699351526403367e-05, |
|
"loss": 0.3292, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 1.0431795878312071, |
|
"grad_norm": 0.2342868214892783, |
|
"learning_rate": 1.698236818086073e-05, |
|
"loss": 0.3262, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 1.0451422963689891, |
|
"grad_norm": 0.18469564769004154, |
|
"learning_rate": 1.6971204141768235e-05, |
|
"loss": 0.3296, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 1.0471050049067714, |
|
"grad_norm": 0.157241496947566, |
|
"learning_rate": 1.6960023173866834e-05, |
|
"loss": 0.3347, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 1.0490677134445534, |
|
"grad_norm": 0.17900244071577498, |
|
"learning_rate": 1.6948825304308293e-05, |
|
"loss": 0.3309, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.0510304219823356, |
|
"grad_norm": 0.14808820544169662, |
|
"learning_rate": 1.693761056028542e-05, |
|
"loss": 0.3302, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 1.0529931305201177, |
|
"grad_norm": 0.13596938535164, |
|
"learning_rate": 1.6926378969031988e-05, |
|
"loss": 0.3311, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 1.0549558390579, |
|
"grad_norm": 0.14432634269560748, |
|
"learning_rate": 1.6915130557822698e-05, |
|
"loss": 0.3305, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.056918547595682, |
|
"grad_norm": 0.1499905448155453, |
|
"learning_rate": 1.6903865353973087e-05, |
|
"loss": 0.32, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 1.0588812561334642, |
|
"grad_norm": 0.15652727350645462, |
|
"learning_rate": 1.689258338483947e-05, |
|
"loss": 0.3289, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.0608439646712464, |
|
"grad_norm": 0.12161904132806402, |
|
"learning_rate": 1.6881284677818892e-05, |
|
"loss": 0.3167, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.0628066732090284, |
|
"grad_norm": 0.12695759397898215, |
|
"learning_rate": 1.686996926034902e-05, |
|
"loss": 0.3227, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.0647693817468107, |
|
"grad_norm": 0.12620759014174157, |
|
"learning_rate": 1.685863715990811e-05, |
|
"loss": 0.3273, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.0667320902845927, |
|
"grad_norm": 0.12300912173446868, |
|
"learning_rate": 1.6847288404014937e-05, |
|
"loss": 0.3234, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.068694798822375, |
|
"grad_norm": 0.13604294154259072, |
|
"learning_rate": 1.6835923020228714e-05, |
|
"loss": 0.3218, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.070657507360157, |
|
"grad_norm": 0.12911000723405103, |
|
"learning_rate": 1.682454103614904e-05, |
|
"loss": 0.3199, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.0726202158979392, |
|
"grad_norm": 0.12394146843939333, |
|
"learning_rate": 1.6813142479415815e-05, |
|
"loss": 0.318, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.0745829244357212, |
|
"grad_norm": 0.11914666168696621, |
|
"learning_rate": 1.6801727377709195e-05, |
|
"loss": 0.3225, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.0765456329735035, |
|
"grad_norm": 0.3745213254445056, |
|
"learning_rate": 1.6790295758749512e-05, |
|
"loss": 0.3244, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.0785083415112855, |
|
"grad_norm": 0.11387952522253666, |
|
"learning_rate": 1.67788476502972e-05, |
|
"loss": 0.327, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.0804710500490677, |
|
"grad_norm": 0.1996105910656074, |
|
"learning_rate": 1.6767383080152744e-05, |
|
"loss": 0.3461, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.0824337585868498, |
|
"grad_norm": 0.12175257973633413, |
|
"learning_rate": 1.6755902076156606e-05, |
|
"loss": 0.3153, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.084396467124632, |
|
"grad_norm": 0.14527489586682849, |
|
"learning_rate": 1.6744404666189146e-05, |
|
"loss": 0.3265, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.0863591756624142, |
|
"grad_norm": 0.15226241276567778, |
|
"learning_rate": 1.6732890878170573e-05, |
|
"loss": 0.3289, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.0883218842001963, |
|
"grad_norm": 0.16932705018976396, |
|
"learning_rate": 1.6721360740060864e-05, |
|
"loss": 0.3256, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.0902845927379785, |
|
"grad_norm": 0.3186308522830804, |
|
"learning_rate": 1.67098142798597e-05, |
|
"loss": 0.3293, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.0922473012757605, |
|
"grad_norm": 0.12034379856239824, |
|
"learning_rate": 1.669825152560641e-05, |
|
"loss": 0.3211, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.0942100098135428, |
|
"grad_norm": 0.2213258321448838, |
|
"learning_rate": 1.668667250537987e-05, |
|
"loss": 0.3157, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.0961727183513248, |
|
"grad_norm": 0.4014643343300227, |
|
"learning_rate": 1.6675077247298475e-05, |
|
"loss": 0.3307, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.098135426889107, |
|
"grad_norm": 0.14678019949020815, |
|
"learning_rate": 1.6663465779520042e-05, |
|
"loss": 0.3341, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.100098135426889, |
|
"grad_norm": 0.11691608425931684, |
|
"learning_rate": 1.665183813024175e-05, |
|
"loss": 0.3207, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.1020608439646713, |
|
"grad_norm": 0.13749331512464025, |
|
"learning_rate": 1.6640194327700087e-05, |
|
"loss": 0.3212, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.1040235525024533, |
|
"grad_norm": 0.11763407718323214, |
|
"learning_rate": 1.6628534400170746e-05, |
|
"loss": 0.3184, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.1059862610402356, |
|
"grad_norm": 0.10763049323804355, |
|
"learning_rate": 1.6616858375968596e-05, |
|
"loss": 0.3255, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.1079489695780176, |
|
"grad_norm": 0.19415854323907672, |
|
"learning_rate": 1.6605166283447587e-05, |
|
"loss": 0.3265, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.1099116781157998, |
|
"grad_norm": 0.11392759854867497, |
|
"learning_rate": 1.659345815100069e-05, |
|
"loss": 0.3207, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.1118743866535818, |
|
"grad_norm": 0.11596195257412814, |
|
"learning_rate": 1.658173400705983e-05, |
|
"loss": 0.3359, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.113837095191364, |
|
"grad_norm": 0.1447901138867746, |
|
"learning_rate": 1.6569993880095807e-05, |
|
"loss": 0.3104, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.1157998037291463, |
|
"grad_norm": 0.11274072609698226, |
|
"learning_rate": 1.6558237798618243e-05, |
|
"loss": 0.3207, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.1177625122669284, |
|
"grad_norm": 0.1066641260817179, |
|
"learning_rate": 1.6546465791175498e-05, |
|
"loss": 0.33, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.1197252208047106, |
|
"grad_norm": 0.10845080737415526, |
|
"learning_rate": 1.6534677886354605e-05, |
|
"loss": 0.3324, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.1216879293424926, |
|
"grad_norm": 0.109524857570649, |
|
"learning_rate": 1.6522874112781213e-05, |
|
"loss": 0.33, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.1236506378802749, |
|
"grad_norm": 0.1086194965309956, |
|
"learning_rate": 1.6511054499119493e-05, |
|
"loss": 0.3318, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.1256133464180569, |
|
"grad_norm": 0.1097485465672784, |
|
"learning_rate": 1.6499219074072087e-05, |
|
"loss": 0.3201, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.1275760549558391, |
|
"grad_norm": 0.12784106789186606, |
|
"learning_rate": 1.6487367866380037e-05, |
|
"loss": 0.3277, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.1295387634936211, |
|
"grad_norm": 0.11285311191327178, |
|
"learning_rate": 1.6475500904822707e-05, |
|
"loss": 0.3249, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.1315014720314034, |
|
"grad_norm": 0.1024337781066779, |
|
"learning_rate": 1.646361821821772e-05, |
|
"loss": 0.3366, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.1334641805691854, |
|
"grad_norm": 0.11906272118674736, |
|
"learning_rate": 1.645171983542088e-05, |
|
"loss": 0.3339, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.1354268891069677, |
|
"grad_norm": 0.11321533330226549, |
|
"learning_rate": 1.6439805785326114e-05, |
|
"loss": 0.3276, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.1373895976447497, |
|
"grad_norm": 0.10571636728133642, |
|
"learning_rate": 1.6427876096865394e-05, |
|
"loss": 0.3289, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.139352306182532, |
|
"grad_norm": 0.22333265835518828, |
|
"learning_rate": 1.6415930799008668e-05, |
|
"loss": 0.325, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.141315014720314, |
|
"grad_norm": 0.10810955681560351, |
|
"learning_rate": 1.640396992076379e-05, |
|
"loss": 0.4055, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.1432777232580962, |
|
"grad_norm": 0.26109788537078404, |
|
"learning_rate": 1.6391993491176445e-05, |
|
"loss": 0.3162, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.1452404317958784, |
|
"grad_norm": 0.11788156500671607, |
|
"learning_rate": 1.6380001539330088e-05, |
|
"loss": 0.3132, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.1472031403336604, |
|
"grad_norm": 0.111527885844509, |
|
"learning_rate": 1.6367994094345864e-05, |
|
"loss": 0.3213, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.1491658488714427, |
|
"grad_norm": 0.10923834950482783, |
|
"learning_rate": 1.6355971185382547e-05, |
|
"loss": 0.3235, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.1511285574092247, |
|
"grad_norm": 0.11352228058272218, |
|
"learning_rate": 1.6343932841636455e-05, |
|
"loss": 0.3242, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.153091265947007, |
|
"grad_norm": 2.442797565842492, |
|
"learning_rate": 1.6331879092341402e-05, |
|
"loss": 0.3605, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.155053974484789, |
|
"grad_norm": 0.1340497478004765, |
|
"learning_rate": 1.631980996676859e-05, |
|
"loss": 0.3231, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.1570166830225712, |
|
"grad_norm": 7.251138564125245, |
|
"learning_rate": 1.6307725494226586e-05, |
|
"loss": 0.464, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.1589793915603532, |
|
"grad_norm": 1.3172662093711907, |
|
"learning_rate": 1.6295625704061204e-05, |
|
"loss": 0.3337, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.1609421000981355, |
|
"grad_norm": 0.31327939692037343, |
|
"learning_rate": 1.6283510625655474e-05, |
|
"loss": 0.3278, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.1629048086359175, |
|
"grad_norm": 4.811713971885485, |
|
"learning_rate": 1.6271380288429535e-05, |
|
"loss": 0.3485, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.1648675171736997, |
|
"grad_norm": 1.2548300965536887, |
|
"learning_rate": 1.6259234721840595e-05, |
|
"loss": 0.333, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.1668302257114818, |
|
"grad_norm": 0.2330658914469722, |
|
"learning_rate": 1.624707395538283e-05, |
|
"loss": 0.3294, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.168792934249264, |
|
"grad_norm": 0.24251502191211774, |
|
"learning_rate": 1.6234898018587336e-05, |
|
"loss": 0.3244, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.170755642787046, |
|
"grad_norm": 0.235968588849149, |
|
"learning_rate": 1.6222706941022054e-05, |
|
"loss": 0.3284, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.1727183513248283, |
|
"grad_norm": 0.2248234241197579, |
|
"learning_rate": 1.6210500752291682e-05, |
|
"loss": 0.3391, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.1746810598626105, |
|
"grad_norm": 0.4542190820206638, |
|
"learning_rate": 1.6198279482037617e-05, |
|
"loss": 0.3306, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.1766437684003925, |
|
"grad_norm": 0.19360720341661192, |
|
"learning_rate": 1.6186043159937884e-05, |
|
"loss": 0.3293, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.1786064769381748, |
|
"grad_norm": 0.33521960291085745, |
|
"learning_rate": 1.6173791815707053e-05, |
|
"loss": 0.3296, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.1805691854759568, |
|
"grad_norm": 0.1676340468101461, |
|
"learning_rate": 1.616152547909618e-05, |
|
"loss": 0.3227, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.182531894013739, |
|
"grad_norm": 0.166589926871346, |
|
"learning_rate": 1.614924417989272e-05, |
|
"loss": 0.3124, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.184494602551521, |
|
"grad_norm": 0.6062179431946622, |
|
"learning_rate": 1.6136947947920477e-05, |
|
"loss": 0.3542, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.1864573110893033, |
|
"grad_norm": 0.3148233399749341, |
|
"learning_rate": 1.6124636813039502e-05, |
|
"loss": 0.3583, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.1884200196270853, |
|
"grad_norm": 0.16441596245061335, |
|
"learning_rate": 1.611231080514605e-05, |
|
"loss": 0.3295, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.1903827281648676, |
|
"grad_norm": 0.16022712151086638, |
|
"learning_rate": 1.609996995417248e-05, |
|
"loss": 0.3271, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.1923454367026496, |
|
"grad_norm": 0.29243754067572564, |
|
"learning_rate": 1.608761429008721e-05, |
|
"loss": 0.3337, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.1943081452404318, |
|
"grad_norm": 0.15303231575175122, |
|
"learning_rate": 1.6075243842894614e-05, |
|
"loss": 0.336, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.1962708537782138, |
|
"grad_norm": 0.14650148016034228, |
|
"learning_rate": 1.606285864263498e-05, |
|
"loss": 0.329, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.198233562315996, |
|
"grad_norm": 0.15261136727699032, |
|
"learning_rate": 1.605045871938441e-05, |
|
"loss": 0.3218, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.2001962708537781, |
|
"grad_norm": 0.21026340819790304, |
|
"learning_rate": 1.6038044103254775e-05, |
|
"loss": 0.3292, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.2021589793915604, |
|
"grad_norm": 0.13503564973923055, |
|
"learning_rate": 1.6025614824393606e-05, |
|
"loss": 0.326, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.2041216879293426, |
|
"grad_norm": 0.1312176723931598, |
|
"learning_rate": 1.601317091298406e-05, |
|
"loss": 0.3258, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.2060843964671246, |
|
"grad_norm": 0.14448732114233953, |
|
"learning_rate": 1.6000712399244813e-05, |
|
"loss": 0.3358, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.2080471050049069, |
|
"grad_norm": 0.12820019901824906, |
|
"learning_rate": 1.5988239313430004e-05, |
|
"loss": 0.3186, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.2100098135426889, |
|
"grad_norm": 0.2613273277747731, |
|
"learning_rate": 1.5975751685829167e-05, |
|
"loss": 0.337, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.2119725220804711, |
|
"grad_norm": 0.1134463713652465, |
|
"learning_rate": 1.5963249546767144e-05, |
|
"loss": 0.3217, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.2139352306182531, |
|
"grad_norm": 0.12611151264418188, |
|
"learning_rate": 1.5950732926604012e-05, |
|
"loss": 0.3182, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.2158979391560354, |
|
"grad_norm": 0.11276212188506385, |
|
"learning_rate": 1.5938201855735017e-05, |
|
"loss": 0.3227, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.2178606476938174, |
|
"grad_norm": 0.12586598314647854, |
|
"learning_rate": 1.5925656364590504e-05, |
|
"loss": 0.3228, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.2198233562315997, |
|
"grad_norm": 0.23240521031529912, |
|
"learning_rate": 1.5913096483635827e-05, |
|
"loss": 0.3173, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.2217860647693817, |
|
"grad_norm": 0.12306625560657121, |
|
"learning_rate": 1.5900522243371283e-05, |
|
"loss": 0.3176, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.223748773307164, |
|
"grad_norm": 0.11574270115288804, |
|
"learning_rate": 1.5887933674332048e-05, |
|
"loss": 0.3362, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.225711481844946, |
|
"grad_norm": 0.113799124679694, |
|
"learning_rate": 1.587533080708809e-05, |
|
"loss": 0.3279, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.2276741903827282, |
|
"grad_norm": 0.11649342842785392, |
|
"learning_rate": 1.5862713672244092e-05, |
|
"loss": 0.3276, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.2296368989205102, |
|
"grad_norm": 0.25352594498528047, |
|
"learning_rate": 1.5850082300439395e-05, |
|
"loss": 0.3241, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.2315996074582924, |
|
"grad_norm": 0.16227315088252345, |
|
"learning_rate": 1.5837436722347902e-05, |
|
"loss": 0.3303, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.2335623159960747, |
|
"grad_norm": 0.1678226345146868, |
|
"learning_rate": 1.5824776968678024e-05, |
|
"loss": 0.3315, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.2355250245338567, |
|
"grad_norm": 0.13075124341701522, |
|
"learning_rate": 1.5812103070172592e-05, |
|
"loss": 0.3108, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.237487733071639, |
|
"grad_norm": 0.11601688723318539, |
|
"learning_rate": 1.5799415057608785e-05, |
|
"loss": 0.3264, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.239450441609421, |
|
"grad_norm": 0.11324740404685361, |
|
"learning_rate": 1.578671296179806e-05, |
|
"loss": 0.3177, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.2414131501472032, |
|
"grad_norm": 0.11840350625128808, |
|
"learning_rate": 1.5773996813586067e-05, |
|
"loss": 0.3354, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.2433758586849852, |
|
"grad_norm": 0.13875834597920514, |
|
"learning_rate": 1.5761266643852587e-05, |
|
"loss": 0.317, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.2453385672227675, |
|
"grad_norm": 0.117375827430945, |
|
"learning_rate": 1.574852248351145e-05, |
|
"loss": 0.3234, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.2473012757605495, |
|
"grad_norm": 0.11942850208082942, |
|
"learning_rate": 1.573576436351046e-05, |
|
"loss": 0.3087, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.2492639842983317, |
|
"grad_norm": 0.1335866448171688, |
|
"learning_rate": 1.572299231483132e-05, |
|
"loss": 0.3296, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.2512266928361138, |
|
"grad_norm": 0.12470555907017847, |
|
"learning_rate": 1.5710206368489555e-05, |
|
"loss": 0.3273, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.253189401373896, |
|
"grad_norm": 0.1757334780641794, |
|
"learning_rate": 1.569740655553444e-05, |
|
"loss": 0.3206, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.2551521099116782, |
|
"grad_norm": 0.11262712029964689, |
|
"learning_rate": 1.5684592907048925e-05, |
|
"loss": 0.3265, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.2551521099116782, |
|
"eval_loss": 0.4366247355937958, |
|
"eval_runtime": 245.4297, |
|
"eval_samples_per_second": 123.673, |
|
"eval_steps_per_second": 3.867, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.2571148184494603, |
|
"grad_norm": 0.13469264864574937, |
|
"learning_rate": 1.5671765454149558e-05, |
|
"loss": 0.3286, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.2590775269872423, |
|
"grad_norm": 0.12053610787587447, |
|
"learning_rate": 1.5658924227986415e-05, |
|
"loss": 0.3275, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.2610402355250245, |
|
"grad_norm": 0.14172736929838212, |
|
"learning_rate": 1.5646069259743007e-05, |
|
"loss": 0.3323, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.2630029440628068, |
|
"grad_norm": 0.13230460831867666, |
|
"learning_rate": 1.563320058063622e-05, |
|
"loss": 0.3406, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.2649656526005888, |
|
"grad_norm": 0.11391731055963321, |
|
"learning_rate": 1.5620318221916245e-05, |
|
"loss": 0.3321, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.2669283611383708, |
|
"grad_norm": 0.20710778775533598, |
|
"learning_rate": 1.560742221486648e-05, |
|
"loss": 0.3233, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.268891069676153, |
|
"grad_norm": 0.12086849482365936, |
|
"learning_rate": 1.5594512590803476e-05, |
|
"loss": 0.3227, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.2708537782139353, |
|
"grad_norm": 0.12570156067267854, |
|
"learning_rate": 1.5581589381076843e-05, |
|
"loss": 0.3219, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.2728164867517173, |
|
"grad_norm": 0.12011193178735968, |
|
"learning_rate": 1.556865261706918e-05, |
|
"loss": 0.3182, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.2747791952894996, |
|
"grad_norm": 0.1648783672119669, |
|
"learning_rate": 1.5555702330196024e-05, |
|
"loss": 0.3297, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.2767419038272816, |
|
"grad_norm": 0.12110445033914359, |
|
"learning_rate": 1.554273855190572e-05, |
|
"loss": 0.3254, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.2787046123650638, |
|
"grad_norm": 0.11020981189703709, |
|
"learning_rate": 1.5529761313679396e-05, |
|
"loss": 0.3235, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.2806673209028459, |
|
"grad_norm": 0.15853842771502125, |
|
"learning_rate": 1.551677064703086e-05, |
|
"loss": 0.3264, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.282630029440628, |
|
"grad_norm": 0.11133610328097518, |
|
"learning_rate": 1.5503766583506522e-05, |
|
"loss": 0.329, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.2845927379784103, |
|
"grad_norm": 0.1193501946144463, |
|
"learning_rate": 1.549074915468534e-05, |
|
"loss": 0.332, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.2865554465161924, |
|
"grad_norm": 0.12269380787533944, |
|
"learning_rate": 1.5477718392178716e-05, |
|
"loss": 0.3314, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.2885181550539744, |
|
"grad_norm": 0.11313715963097541, |
|
"learning_rate": 1.5464674327630437e-05, |
|
"loss": 0.3222, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.2904808635917566, |
|
"grad_norm": 0.12048024103575065, |
|
"learning_rate": 1.545161699271659e-05, |
|
"loss": 0.3294, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.2924435721295389, |
|
"grad_norm": 0.11947900701309468, |
|
"learning_rate": 1.543854641914549e-05, |
|
"loss": 0.3235, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.2944062806673209, |
|
"grad_norm": 0.11207590380323458, |
|
"learning_rate": 1.5425462638657597e-05, |
|
"loss": 0.3266, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.296368989205103, |
|
"grad_norm": 0.12528223605914246, |
|
"learning_rate": 1.5412365683025447e-05, |
|
"loss": 0.3321, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.2983316977428851, |
|
"grad_norm": 0.10633033440100298, |
|
"learning_rate": 1.5399255584053568e-05, |
|
"loss": 0.3165, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.3002944062806674, |
|
"grad_norm": 0.11406876546510498, |
|
"learning_rate": 1.5386132373578405e-05, |
|
"loss": 0.3247, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.3022571148184494, |
|
"grad_norm": 0.11139990576253381, |
|
"learning_rate": 1.5372996083468242e-05, |
|
"loss": 0.3261, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.3042198233562317, |
|
"grad_norm": 0.10272448533365049, |
|
"learning_rate": 1.5359846745623128e-05, |
|
"loss": 0.316, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.3061825318940137, |
|
"grad_norm": 0.10645469773905245, |
|
"learning_rate": 1.5346684391974792e-05, |
|
"loss": 0.3311, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.308145240431796, |
|
"grad_norm": 0.10518169859307575, |
|
"learning_rate": 1.5333509054486583e-05, |
|
"loss": 0.3245, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.310107948969578, |
|
"grad_norm": 0.2242263732415979, |
|
"learning_rate": 1.5320320765153367e-05, |
|
"loss": 0.3109, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.3120706575073602, |
|
"grad_norm": 0.12309952064417946, |
|
"learning_rate": 1.5307119556001463e-05, |
|
"loss": 0.3196, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.3140333660451424, |
|
"grad_norm": 0.1129490904179756, |
|
"learning_rate": 1.529390545908857e-05, |
|
"loss": 0.3222, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.3159960745829244, |
|
"grad_norm": 0.1196632767822302, |
|
"learning_rate": 1.528067850650368e-05, |
|
"loss": 0.3182, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.3179587831207065, |
|
"grad_norm": 0.11338552151174172, |
|
"learning_rate": 1.526743873036701e-05, |
|
"loss": 0.3419, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.3199214916584887, |
|
"grad_norm": 0.17835671344319715, |
|
"learning_rate": 1.5254186162829903e-05, |
|
"loss": 0.3169, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.321884200196271, |
|
"grad_norm": 0.10783382011254725, |
|
"learning_rate": 1.5240920836074777e-05, |
|
"loss": 0.3196, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.323846908734053, |
|
"grad_norm": 0.10869968464668042, |
|
"learning_rate": 1.5227642782315037e-05, |
|
"loss": 0.3208, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.325809617271835, |
|
"grad_norm": 0.10637526513057953, |
|
"learning_rate": 1.5214352033794981e-05, |
|
"loss": 0.3263, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.3277723258096172, |
|
"grad_norm": 0.1239954358141019, |
|
"learning_rate": 1.5201048622789747e-05, |
|
"loss": 0.3262, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.3297350343473995, |
|
"grad_norm": 0.10993374676490457, |
|
"learning_rate": 1.5187732581605217e-05, |
|
"loss": 0.3367, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.3316977428851815, |
|
"grad_norm": 0.12433721542893529, |
|
"learning_rate": 1.5174403942577942e-05, |
|
"loss": 0.3329, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.3336604514229637, |
|
"grad_norm": 0.10189759821643117, |
|
"learning_rate": 1.5161062738075068e-05, |
|
"loss": 0.3191, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.3356231599607458, |
|
"grad_norm": 0.13313656343105662, |
|
"learning_rate": 1.5147709000494258e-05, |
|
"loss": 0.32, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.337585868498528, |
|
"grad_norm": 0.13908249732191438, |
|
"learning_rate": 1.5134342762263606e-05, |
|
"loss": 0.3295, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.33954857703631, |
|
"grad_norm": 0.11489030169841424, |
|
"learning_rate": 1.5120964055841563e-05, |
|
"loss": 0.3316, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.3415112855740923, |
|
"grad_norm": 0.3799510436085688, |
|
"learning_rate": 1.5107572913716859e-05, |
|
"loss": 0.3162, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.3434739941118745, |
|
"grad_norm": 0.11349571041844954, |
|
"learning_rate": 1.509416936840842e-05, |
|
"loss": 0.333, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.3454367026496565, |
|
"grad_norm": 0.11005963527146614, |
|
"learning_rate": 1.5080753452465296e-05, |
|
"loss": 0.3272, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.3473994111874386, |
|
"grad_norm": 0.11959100787233001, |
|
"learning_rate": 1.5067325198466576e-05, |
|
"loss": 0.3311, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.3493621197252208, |
|
"grad_norm": 0.10886477394608883, |
|
"learning_rate": 1.505388463902131e-05, |
|
"loss": 0.3335, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.351324828263003, |
|
"grad_norm": 0.11344016969622332, |
|
"learning_rate": 1.504043180676843e-05, |
|
"loss": 0.3207, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.353287536800785, |
|
"grad_norm": 0.1102062785109799, |
|
"learning_rate": 1.502696673437667e-05, |
|
"loss": 0.3338, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.3552502453385673, |
|
"grad_norm": 0.1134536009046737, |
|
"learning_rate": 1.5013489454544494e-05, |
|
"loss": 0.3212, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.3572129538763493, |
|
"grad_norm": 0.10812804193252123, |
|
"learning_rate": 1.5000000000000002e-05, |
|
"loss": 0.3244, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.3591756624141316, |
|
"grad_norm": 0.10930042393971806, |
|
"learning_rate": 1.4986498403500864e-05, |
|
"loss": 0.3196, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.3611383709519136, |
|
"grad_norm": 0.11325005359482505, |
|
"learning_rate": 1.4972984697834238e-05, |
|
"loss": 0.3283, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.3631010794896958, |
|
"grad_norm": 0.11944962015232705, |
|
"learning_rate": 1.4959458915816681e-05, |
|
"loss": 0.3071, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.3650637880274779, |
|
"grad_norm": 0.11035654504778693, |
|
"learning_rate": 1.4945921090294076e-05, |
|
"loss": 0.3258, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.36702649656526, |
|
"grad_norm": 0.2791207437797803, |
|
"learning_rate": 1.4932371254141562e-05, |
|
"loss": 0.3444, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.3689892051030421, |
|
"grad_norm": 0.17253680739822636, |
|
"learning_rate": 1.4918809440263435e-05, |
|
"loss": 0.3263, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.3709519136408244, |
|
"grad_norm": 0.11509592444315454, |
|
"learning_rate": 1.4905235681593079e-05, |
|
"loss": 0.3159, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.3729146221786066, |
|
"grad_norm": 0.11893850145942958, |
|
"learning_rate": 1.4891650011092896e-05, |
|
"loss": 0.3361, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.3748773307163886, |
|
"grad_norm": 0.11595816082844375, |
|
"learning_rate": 1.4878052461754192e-05, |
|
"loss": 0.3172, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.3768400392541706, |
|
"grad_norm": 0.18006969131556863, |
|
"learning_rate": 1.486444306659714e-05, |
|
"loss": 0.3262, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.3788027477919529, |
|
"grad_norm": 0.11978594076007715, |
|
"learning_rate": 1.4850821858670668e-05, |
|
"loss": 0.3223, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.3807654563297351, |
|
"grad_norm": 0.1214666563188013, |
|
"learning_rate": 1.4837188871052399e-05, |
|
"loss": 0.3287, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.3827281648675172, |
|
"grad_norm": 0.1087134010196951, |
|
"learning_rate": 1.4823544136848554e-05, |
|
"loss": 0.3211, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.3846908734052994, |
|
"grad_norm": 0.11263273643445283, |
|
"learning_rate": 1.4809887689193878e-05, |
|
"loss": 0.321, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.3866535819430814, |
|
"grad_norm": 0.10441056093366385, |
|
"learning_rate": 1.4796219561251569e-05, |
|
"loss": 0.3205, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.3886162904808637, |
|
"grad_norm": 0.11847864217596303, |
|
"learning_rate": 1.4782539786213184e-05, |
|
"loss": 0.3358, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.3905789990186457, |
|
"grad_norm": 0.11231538216944834, |
|
"learning_rate": 1.4768848397298562e-05, |
|
"loss": 0.3265, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 1.392541707556428, |
|
"grad_norm": 0.1033742948772916, |
|
"learning_rate": 1.4755145427755755e-05, |
|
"loss": 0.3295, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.39450441609421, |
|
"grad_norm": 0.11293962071828316, |
|
"learning_rate": 1.4741430910860918e-05, |
|
"loss": 0.3284, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 1.3964671246319922, |
|
"grad_norm": 0.10568902630893928, |
|
"learning_rate": 1.4727704879918272e-05, |
|
"loss": 0.324, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.3984298331697742, |
|
"grad_norm": 0.10454582574341005, |
|
"learning_rate": 1.4713967368259981e-05, |
|
"loss": 0.3294, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 1.4003925417075564, |
|
"grad_norm": 0.11151842422641142, |
|
"learning_rate": 1.4700218409246087e-05, |
|
"loss": 0.3272, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.4023552502453387, |
|
"grad_norm": 0.1918179917026182, |
|
"learning_rate": 1.4686458036264446e-05, |
|
"loss": 0.3157, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.4043179587831207, |
|
"grad_norm": 0.11036284447673995, |
|
"learning_rate": 1.4672686282730622e-05, |
|
"loss": 0.312, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.4062806673209027, |
|
"grad_norm": 0.13510751112765382, |
|
"learning_rate": 1.4658903182087814e-05, |
|
"loss": 0.3382, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 1.408243375858685, |
|
"grad_norm": 0.10984538370082228, |
|
"learning_rate": 1.4645108767806778e-05, |
|
"loss": 0.3186, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.4102060843964672, |
|
"grad_norm": 0.11386113381529725, |
|
"learning_rate": 1.4631303073385745e-05, |
|
"loss": 0.333, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 1.4121687929342492, |
|
"grad_norm": 0.12922683158535153, |
|
"learning_rate": 1.4617486132350343e-05, |
|
"loss": 0.3131, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.4141315014720315, |
|
"grad_norm": 0.11429762711313823, |
|
"learning_rate": 1.4603657978253499e-05, |
|
"loss": 0.3356, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 1.4160942100098135, |
|
"grad_norm": 0.11032902828120253, |
|
"learning_rate": 1.4589818644675378e-05, |
|
"loss": 0.324, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.4180569185475957, |
|
"grad_norm": 0.10896038916378659, |
|
"learning_rate": 1.4575968165223297e-05, |
|
"loss": 0.3273, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 1.4200196270853778, |
|
"grad_norm": 0.11197390514652242, |
|
"learning_rate": 1.4562106573531632e-05, |
|
"loss": 0.3212, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.42198233562316, |
|
"grad_norm": 0.11854344219452063, |
|
"learning_rate": 1.4548233903261746e-05, |
|
"loss": 0.3266, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.423945044160942, |
|
"grad_norm": 0.10788963190927026, |
|
"learning_rate": 1.4534350188101905e-05, |
|
"loss": 0.3261, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 1.4259077526987243, |
|
"grad_norm": 0.10409181843438578, |
|
"learning_rate": 1.45204554617672e-05, |
|
"loss": 0.3267, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 1.4278704612365063, |
|
"grad_norm": 0.11457863412214213, |
|
"learning_rate": 1.4506549757999456e-05, |
|
"loss": 0.3322, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 1.4298331697742885, |
|
"grad_norm": 1.3410888637080636, |
|
"learning_rate": 1.4492633110567155e-05, |
|
"loss": 0.3836, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 1.4317958783120708, |
|
"grad_norm": 0.12437392309356543, |
|
"learning_rate": 1.4478705553265363e-05, |
|
"loss": 0.3244, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.4337585868498528, |
|
"grad_norm": 0.11654470773611598, |
|
"learning_rate": 1.446476711991563e-05, |
|
"loss": 0.334, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 1.4357212953876348, |
|
"grad_norm": 0.295599491807141, |
|
"learning_rate": 1.4450817844365924e-05, |
|
"loss": 0.3292, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 1.437684003925417, |
|
"grad_norm": 0.23594221943762184, |
|
"learning_rate": 1.4436857760490539e-05, |
|
"loss": 0.3244, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 1.4396467124631993, |
|
"grad_norm": 0.13208951730861257, |
|
"learning_rate": 1.4422886902190014e-05, |
|
"loss": 0.3218, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 1.4416094210009813, |
|
"grad_norm": 0.12203894918097015, |
|
"learning_rate": 1.4408905303391054e-05, |
|
"loss": 0.3184, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.4435721295387636, |
|
"grad_norm": 0.12073834849861981, |
|
"learning_rate": 1.4394912998046451e-05, |
|
"loss": 0.3297, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 1.4455348380765456, |
|
"grad_norm": 0.42701652972686566, |
|
"learning_rate": 1.4380910020134988e-05, |
|
"loss": 0.332, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 1.4474975466143278, |
|
"grad_norm": 0.12608912444867507, |
|
"learning_rate": 1.436689640366137e-05, |
|
"loss": 0.3267, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.4494602551521099, |
|
"grad_norm": 0.16001847759687152, |
|
"learning_rate": 1.435287218265614e-05, |
|
"loss": 0.3315, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 1.451422963689892, |
|
"grad_norm": 0.12030660319385979, |
|
"learning_rate": 1.4338837391175582e-05, |
|
"loss": 0.3285, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.4533856722276741, |
|
"grad_norm": 0.1253610461061891, |
|
"learning_rate": 1.4324792063301662e-05, |
|
"loss": 0.3351, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 1.4553483807654564, |
|
"grad_norm": 0.11052209080423725, |
|
"learning_rate": 1.4310736233141926e-05, |
|
"loss": 0.3289, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 1.4573110893032384, |
|
"grad_norm": 0.128573183549157, |
|
"learning_rate": 1.4296669934829425e-05, |
|
"loss": 0.3281, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 1.4592737978410206, |
|
"grad_norm": 0.11507190335793269, |
|
"learning_rate": 1.4282593202522627e-05, |
|
"loss": 0.331, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 1.4612365063788029, |
|
"grad_norm": 0.11860253438573154, |
|
"learning_rate": 1.4268506070405345e-05, |
|
"loss": 0.3278, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.463199214916585, |
|
"grad_norm": 0.10619138540627954, |
|
"learning_rate": 1.4254408572686642e-05, |
|
"loss": 0.3211, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 1.465161923454367, |
|
"grad_norm": 0.2575777832730168, |
|
"learning_rate": 1.424030074360075e-05, |
|
"loss": 0.3263, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 1.4671246319921492, |
|
"grad_norm": 0.11876073393454559, |
|
"learning_rate": 1.4226182617406996e-05, |
|
"loss": 0.3416, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.4690873405299314, |
|
"grad_norm": 0.1093129912760756, |
|
"learning_rate": 1.4212054228389712e-05, |
|
"loss": 0.3233, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 1.4710500490677134, |
|
"grad_norm": 0.11230780423472099, |
|
"learning_rate": 1.4197915610858143e-05, |
|
"loss": 0.32, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.4730127576054957, |
|
"grad_norm": 0.12038548720872048, |
|
"learning_rate": 1.4183766799146383e-05, |
|
"loss": 0.3246, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 1.4749754661432777, |
|
"grad_norm": 0.15570969251778305, |
|
"learning_rate": 1.4169607827613284e-05, |
|
"loss": 0.3261, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 1.47693817468106, |
|
"grad_norm": 0.3408875294488595, |
|
"learning_rate": 1.4155438730642354e-05, |
|
"loss": 0.3372, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 1.478900883218842, |
|
"grad_norm": 0.10372129141329851, |
|
"learning_rate": 1.4141259542641706e-05, |
|
"loss": 0.3275, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 1.4808635917566242, |
|
"grad_norm": 0.10861393950490046, |
|
"learning_rate": 1.4127070298043949e-05, |
|
"loss": 0.3195, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.4828263002944062, |
|
"grad_norm": 0.2287314698384735, |
|
"learning_rate": 1.4112871031306118e-05, |
|
"loss": 0.3392, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 1.4847890088321885, |
|
"grad_norm": 0.14387105066919137, |
|
"learning_rate": 1.4098661776909581e-05, |
|
"loss": 0.3278, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 1.4867517173699705, |
|
"grad_norm": 0.11031577836845029, |
|
"learning_rate": 1.4084442569359964e-05, |
|
"loss": 0.3261, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 1.4887144259077527, |
|
"grad_norm": 0.10372345153909722, |
|
"learning_rate": 1.4070213443187062e-05, |
|
"loss": 0.304, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 1.490677134445535, |
|
"grad_norm": 1.1167059057492386, |
|
"learning_rate": 1.4055974432944753e-05, |
|
"loss": 0.3512, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.492639842983317, |
|
"grad_norm": 0.3507372435965062, |
|
"learning_rate": 1.404172557321092e-05, |
|
"loss": 0.3467, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 1.494602551521099, |
|
"grad_norm": 0.13192221407076998, |
|
"learning_rate": 1.4027466898587375e-05, |
|
"loss": 0.3255, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 1.4965652600588812, |
|
"grad_norm": 0.13116992214786896, |
|
"learning_rate": 1.401319844369974e-05, |
|
"loss": 0.3385, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 1.4985279685966635, |
|
"grad_norm": 0.14515008145396116, |
|
"learning_rate": 1.3998920243197408e-05, |
|
"loss": 0.331, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 1.5004906771344455, |
|
"grad_norm": 0.12599311264669502, |
|
"learning_rate": 1.3984632331753436e-05, |
|
"loss": 0.3184, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.5024533856722275, |
|
"grad_norm": 0.11722994192309723, |
|
"learning_rate": 1.3970334744064451e-05, |
|
"loss": 0.3186, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 1.5044160942100098, |
|
"grad_norm": 0.1297039712182582, |
|
"learning_rate": 1.395602751485059e-05, |
|
"loss": 0.3239, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 1.506378802747792, |
|
"grad_norm": 0.12096476484978132, |
|
"learning_rate": 1.3941710678855396e-05, |
|
"loss": 0.3299, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 1.506378802747792, |
|
"eval_loss": 0.4330715835094452, |
|
"eval_runtime": 245.5392, |
|
"eval_samples_per_second": 123.618, |
|
"eval_steps_per_second": 3.865, |
|
"step": 768 |
|
}, |
|
  { "epoch": 1.508341511285574, "grad_norm": 0.12230386295101764, "learning_rate": 1.3927384270845744e-05, "loss": 0.3251, "step": 769 },
  { "epoch": 1.5103042198233563, "grad_norm": 0.5983535137475166, "learning_rate": 1.391304832561175e-05, "loss": 0.3648, "step": 770 },
  { "epoch": 1.5122669283611385, "grad_norm": 0.11674041507665114, "learning_rate": 1.38987028779667e-05, "loss": 0.3392, "step": 771 },
  { "epoch": 1.5142296368989205, "grad_norm": 0.1729456768990231, "learning_rate": 1.3884347962746949e-05, "loss": 0.3225, "step": 772 },
  { "epoch": 1.5161923454367026, "grad_norm": 0.1252493496614802, "learning_rate": 1.3869983614811837e-05, "loss": 0.3209, "step": 773 },
  { "epoch": 1.5181550539744848, "grad_norm": 0.11700364355947357, "learning_rate": 1.3855609869043618e-05, "loss": 0.3315, "step": 774 },
  { "epoch": 1.520117762512267, "grad_norm": 0.1462155960573189, "learning_rate": 1.384122676034737e-05, "loss": 0.3259, "step": 775 },
  { "epoch": 1.522080471050049, "grad_norm": 0.13319429573990046, "learning_rate": 1.3826834323650899e-05, "loss": 0.3334, "step": 776 },
  { "epoch": 1.524043179587831, "grad_norm": 0.12414522959921269, "learning_rate": 1.381243259390467e-05, "loss": 0.3316, "step": 777 },
  { "epoch": 1.5260058881256133, "grad_norm": 0.12126161126577431, "learning_rate": 1.3798021606081713e-05, "loss": 0.3178, "step": 778 },
  { "epoch": 1.5279685966633956, "grad_norm": 0.11507460737639888, "learning_rate": 1.3783601395177537e-05, "loss": 0.3294, "step": 779 },
  { "epoch": 1.5299313052011776, "grad_norm": 0.11575304901544674, "learning_rate": 1.3769171996210053e-05, "loss": 0.3223, "step": 780 },
  { "epoch": 1.5318940137389596, "grad_norm": 0.131991412651207, "learning_rate": 1.3754733444219488e-05, "loss": 0.3302, "step": 781 },
  { "epoch": 1.5338567222767419, "grad_norm": 0.190709934290855, "learning_rate": 1.3740285774268282e-05, "loss": 0.3371, "step": 782 },
  { "epoch": 1.535819430814524, "grad_norm": 0.47614707484422286, "learning_rate": 1.372582902144103e-05, "loss": 0.3193, "step": 783 },
  { "epoch": 1.5377821393523061, "grad_norm": 0.11311414215315257, "learning_rate": 1.371136322084438e-05, "loss": 0.3147, "step": 784 },
  { "epoch": 1.5397448478900884, "grad_norm": 0.12393935880179471, "learning_rate": 1.3696888407606952e-05, "loss": 0.3294, "step": 785 },
  { "epoch": 1.5417075564278706, "grad_norm": 0.12459873866982599, "learning_rate": 1.3682404616879246e-05, "loss": 0.3523, "step": 786 },
  { "epoch": 1.5436702649656526, "grad_norm": 1.5376937047472998, "learning_rate": 1.3667911883833573e-05, "loss": 0.3232, "step": 787 },
  { "epoch": 1.5456329735034346, "grad_norm": 0.17863323707586204, "learning_rate": 1.3653410243663953e-05, "loss": 0.3294, "step": 788 },
  { "epoch": 1.547595682041217, "grad_norm": 0.13291935976093486, "learning_rate": 1.3638899731586036e-05, "loss": 0.3343, "step": 789 },
  { "epoch": 1.5495583905789991, "grad_norm": 0.19803813677781645, "learning_rate": 1.3624380382837017e-05, "loss": 0.3359, "step": 790 },
  { "epoch": 1.5515210991167812, "grad_norm": 0.1595795029165099, "learning_rate": 1.3609852232675558e-05, "loss": 0.326, "step": 791 },
  { "epoch": 1.5534838076545632, "grad_norm": 0.14037586512565553, "learning_rate": 1.3595315316381676e-05, "loss": 0.3302, "step": 792 },
  { "epoch": 1.5554465161923454, "grad_norm": 0.16013145607494655, "learning_rate": 1.3580769669256695e-05, "loss": 0.3383, "step": 793 },
  { "epoch": 1.5574092247301277, "grad_norm": 0.13377340237611735, "learning_rate": 1.3566215326623131e-05, "loss": 0.3235, "step": 794 },
  { "epoch": 1.5593719332679097, "grad_norm": 0.1385245785766373, "learning_rate": 1.3551652323824617e-05, "loss": 0.3214, "step": 795 },
  { "epoch": 1.5613346418056917, "grad_norm": 0.15040533378760934, "learning_rate": 1.3537080696225815e-05, "loss": 0.3396, "step": 796 },
  { "epoch": 1.563297350343474, "grad_norm": 0.1329758232018135, "learning_rate": 1.3522500479212337e-05, "loss": 0.3358, "step": 797 },
  { "epoch": 1.5652600588812562, "grad_norm": 0.12257059717404521, "learning_rate": 1.3507911708190646e-05, "loss": 0.3176, "step": 798 },
  { "epoch": 1.5672227674190382, "grad_norm": 0.13209253543187308, "learning_rate": 1.3493314418587982e-05, "loss": 0.3314, "step": 799 },
  { "epoch": 1.5691854759568205, "grad_norm": 0.11457657070098855, "learning_rate": 1.3478708645852272e-05, "loss": 0.3306, "step": 800 },
  { "epoch": 1.5711481844946027, "grad_norm": 0.19371352224283817, "learning_rate": 1.3464094425452046e-05, "loss": 0.3145, "step": 801 },
  { "epoch": 1.5731108930323847, "grad_norm": 0.10702419786852241, "learning_rate": 1.3449471792876333e-05, "loss": 0.3227, "step": 802 },
  { "epoch": 1.5750736015701667, "grad_norm": 0.11647703035094253, "learning_rate": 1.3434840783634611e-05, "loss": 0.3246, "step": 803 },
  { "epoch": 1.577036310107949, "grad_norm": 0.14359657153339322, "learning_rate": 1.342020143325669e-05, "loss": 0.3198, "step": 804 },
  { "epoch": 1.5789990186457312, "grad_norm": 0.11430760797920907, "learning_rate": 1.3405553777292627e-05, "loss": 0.3174, "step": 805 },
  { "epoch": 1.5809617271835132, "grad_norm": 0.12045105701279527, "learning_rate": 1.3390897851312667e-05, "loss": 0.3281, "step": 806 },
  { "epoch": 1.5829244357212953, "grad_norm": 0.11152977267946707, "learning_rate": 1.3376233690907126e-05, "loss": 0.3343, "step": 807 },
  { "epoch": 1.5848871442590775, "grad_norm": 0.11046335035659553, "learning_rate": 1.336156133168631e-05, "loss": 0.3222, "step": 808 },
  { "epoch": 1.5868498527968598, "grad_norm": 0.2805415736645124, "learning_rate": 1.3346880809280451e-05, "loss": 0.3285, "step": 809 },
  { "epoch": 1.5888125613346418, "grad_norm": 0.1165026997728163, "learning_rate": 1.3332192159339595e-05, "loss": 0.3327, "step": 810 },
  { "epoch": 1.5907752698724238, "grad_norm": 0.10269266294028667, "learning_rate": 1.3317495417533523e-05, "loss": 0.3288, "step": 811 },
  { "epoch": 1.592737978410206, "grad_norm": 0.10882094664575565, "learning_rate": 1.3302790619551673e-05, "loss": 0.3226, "step": 812 },
  { "epoch": 1.5947006869479883, "grad_norm": 0.10402717390991092, "learning_rate": 1.3288077801103041e-05, "loss": 0.3242, "step": 813 },
  { "epoch": 1.5966633954857703, "grad_norm": 0.11773324479057384, "learning_rate": 1.3273356997916106e-05, "loss": 0.3312, "step": 814 },
  { "epoch": 1.5986261040235525, "grad_norm": 0.10710334700945738, "learning_rate": 1.3258628245738726e-05, "loss": 0.3249, "step": 815 },
  { "epoch": 1.6005888125613348, "grad_norm": 0.10399693106990227, "learning_rate": 1.3243891580338074e-05, "loss": 0.3272, "step": 816 },
  { "epoch": 1.6025515210991168, "grad_norm": 0.107596072668019, "learning_rate": 1.3229147037500534e-05, "loss": 0.3334, "step": 817 },
  { "epoch": 1.6045142296368988, "grad_norm": 0.1069444594286671, "learning_rate": 1.3214394653031616e-05, "loss": 0.318, "step": 818 },
  { "epoch": 1.606476938174681, "grad_norm": 0.2003747727263415, "learning_rate": 1.3199634462755886e-05, "loss": 0.321, "step": 819 },
  { "epoch": 1.6084396467124633, "grad_norm": 0.10473434193181139, "learning_rate": 1.3184866502516846e-05, "loss": 0.3289, "step": 820 },
  { "epoch": 1.6104023552502453, "grad_norm": 0.11035758045532282, "learning_rate": 1.3170090808176883e-05, "loss": 0.3302, "step": 821 },
  { "epoch": 1.6123650637880274, "grad_norm": 0.10519561826066223, "learning_rate": 1.3155307415617156e-05, "loss": 0.3264, "step": 822 },
  { "epoch": 1.6143277723258096, "grad_norm": 0.11015590527571209, "learning_rate": 1.3140516360737523e-05, "loss": 0.3173, "step": 823 },
  { "epoch": 1.6162904808635918, "grad_norm": 0.10092061607132297, "learning_rate": 1.3125717679456447e-05, "loss": 0.331, "step": 824 },
  { "epoch": 1.6182531894013739, "grad_norm": 0.1024320679905342, "learning_rate": 1.3110911407710909e-05, "loss": 0.3177, "step": 825 },
  { "epoch": 1.6202158979391559, "grad_norm": 0.10726261404380205, "learning_rate": 1.309609758145633e-05, "loss": 0.33, "step": 826 },
  { "epoch": 1.6221786064769381, "grad_norm": 0.30696269389558967, "learning_rate": 1.308127623666647e-05, "loss": 0.3217, "step": 827 },
  { "epoch": 1.6241413150147204, "grad_norm": 0.11038918441384825, "learning_rate": 1.3066447409333345e-05, "loss": 0.3428, "step": 828 },
  { "epoch": 1.6261040235525024, "grad_norm": 0.13114032892510316, "learning_rate": 1.3051611135467145e-05, "loss": 0.3217, "step": 829 },
  { "epoch": 1.6280667320902846, "grad_norm": 0.10209817496354183, "learning_rate": 1.3036767451096148e-05, "loss": 0.3169, "step": 830 },
  { "epoch": 1.6300294406280669, "grad_norm": 0.11163305235516735, "learning_rate": 1.3021916392266618e-05, "loss": 0.3215, "step": 831 },
  { "epoch": 1.631992149165849, "grad_norm": 0.10335841003276967, "learning_rate": 1.300705799504273e-05, "loss": 0.3179, "step": 832 },
  { "epoch": 1.633954857703631, "grad_norm": 0.10298399337006449, "learning_rate": 1.2992192295506489e-05, "loss": 0.3342, "step": 833 },
  { "epoch": 1.6359175662414132, "grad_norm": 0.10601682383580134, "learning_rate": 1.2977319329757616e-05, "loss": 0.3189, "step": 834 },
  { "epoch": 1.6378802747791954, "grad_norm": 0.1018229680915178, "learning_rate": 1.296243913391349e-05, "loss": 0.3231, "step": 835 },
  { "epoch": 1.6398429833169774, "grad_norm": 0.1085023326981021, "learning_rate": 1.2947551744109044e-05, "loss": 0.3184, "step": 836 },
  { "epoch": 1.6418056918547594, "grad_norm": 0.10661650845272141, "learning_rate": 1.2932657196496678e-05, "loss": 0.3167, "step": 837 },
  { "epoch": 1.6437684003925417, "grad_norm": 0.11175562549636477, "learning_rate": 1.2917755527246179e-05, "loss": 0.3316, "step": 838 },
  { "epoch": 1.645731108930324, "grad_norm": 0.10523122873271762, "learning_rate": 1.2902846772544625e-05, "loss": 0.3312, "step": 839 },
  { "epoch": 1.647693817468106, "grad_norm": 0.13408743582159743, "learning_rate": 1.28879309685963e-05, "loss": 0.3279, "step": 840 },
  { "epoch": 1.649656526005888, "grad_norm": 0.4305626635372785, "learning_rate": 1.2873008151622606e-05, "loss": 0.3143, "step": 841 },
  { "epoch": 1.6516192345436702, "grad_norm": 0.10214474856379448, "learning_rate": 1.2858078357861979e-05, "loss": 0.3398, "step": 842 },
  { "epoch": 1.6535819430814525, "grad_norm": 0.19415909087113795, "learning_rate": 1.2843141623569792e-05, "loss": 0.3205, "step": 843 },
  { "epoch": 1.6555446516192345, "grad_norm": 0.11011953169021917, "learning_rate": 1.2828197985018276e-05, "loss": 0.3336, "step": 844 },
  { "epoch": 1.6575073601570167, "grad_norm": 0.1503839751469065, "learning_rate": 1.2813247478496428e-05, "loss": 0.3279, "step": 845 },
  { "epoch": 1.659470068694799, "grad_norm": 0.11781800325747578, "learning_rate": 1.2798290140309924e-05, "loss": 0.3254, "step": 846 },
  { "epoch": 1.661432777232581, "grad_norm": 0.10932301796870458, "learning_rate": 1.2783326006781023e-05, "loss": 0.3131, "step": 847 },
  { "epoch": 1.663395485770363, "grad_norm": 0.11978600321445199, "learning_rate": 1.2768355114248493e-05, "loss": 0.3277, "step": 848 },
  { "epoch": 1.6653581943081452, "grad_norm": 0.11039786376284047, "learning_rate": 1.2753377499067522e-05, "loss": 0.3522, "step": 849 },
  { "epoch": 1.6673209028459275, "grad_norm": 0.2497098566113429, "learning_rate": 1.2738393197609602e-05, "loss": 0.3297, "step": 850 },
  { "epoch": 1.6692836113837095, "grad_norm": 0.11865444749257295, "learning_rate": 1.2723402246262484e-05, "loss": 0.3271, "step": 851 },
  { "epoch": 1.6712463199214915, "grad_norm": 0.11095090763334178, "learning_rate": 1.2708404681430054e-05, "loss": 0.3243, "step": 852 },
  { "epoch": 1.6732090284592738, "grad_norm": 0.11867828722172012, "learning_rate": 1.2693400539532263e-05, "loss": 0.3141, "step": 853 },
  { "epoch": 1.675171736997056, "grad_norm": 0.1131423717161425, "learning_rate": 1.2678389857005033e-05, "loss": 0.3256, "step": 854 },
  { "epoch": 1.677134445534838, "grad_norm": 0.8893669616224853, "learning_rate": 1.266337267030017e-05, "loss": 0.3338, "step": 855 },
  { "epoch": 1.67909715407262, "grad_norm": 0.1621276184736285, "learning_rate": 1.2648349015885272e-05, "loss": 0.331, "step": 856 },
  { "epoch": 1.6810598626104023, "grad_norm": 0.12724899204009446, "learning_rate": 1.2633318930243647e-05, "loss": 0.3168, "step": 857 },
  { "epoch": 1.6830225711481845, "grad_norm": 0.15162789739847926, "learning_rate": 1.2618282449874221e-05, "loss": 0.3147, "step": 858 },
  { "epoch": 1.6849852796859666, "grad_norm": 0.14035812455971758, "learning_rate": 1.2603239611291445e-05, "loss": 0.3215, "step": 859 },
  { "epoch": 1.6869479882237488, "grad_norm": 0.46008864002942473, "learning_rate": 1.2588190451025209e-05, "loss": 0.325, "step": 860 },
  { "epoch": 1.688910696761531, "grad_norm": 0.15188638903617155, "learning_rate": 1.2573135005620757e-05, "loss": 0.3091, "step": 861 },
  { "epoch": 1.690873405299313, "grad_norm": 0.1372788892477886, "learning_rate": 1.2558073311638604e-05, "loss": 0.3313, "step": 862 },
  { "epoch": 1.692836113837095, "grad_norm": 0.1317279830573809, "learning_rate": 1.2543005405654418e-05, "loss": 0.3169, "step": 863 },
  { "epoch": 1.6947988223748773, "grad_norm": 0.13349361942014895, "learning_rate": 1.2527931324258975e-05, "loss": 0.3203, "step": 864 },
  { "epoch": 1.6967615309126596, "grad_norm": 0.12909951263188774, "learning_rate": 1.2512851104058038e-05, "loss": 0.3202, "step": 865 },
  { "epoch": 1.6987242394504416, "grad_norm": 0.2566617860841747, "learning_rate": 1.249776478167227e-05, "loss": 0.3295, "step": 866 },
  { "epoch": 1.7006869479882236, "grad_norm": 0.11969281942134942, "learning_rate": 1.2482672393737164e-05, "loss": 0.3342, "step": 867 },
  { "epoch": 1.7026496565260059, "grad_norm": 0.12518924207179494, "learning_rate": 1.2467573976902936e-05, "loss": 0.3333, "step": 868 },
  { "epoch": 1.704612365063788, "grad_norm": 0.21335666453466184, "learning_rate": 1.2452469567834449e-05, "loss": 0.3198, "step": 869 },
  { "epoch": 1.7065750736015701, "grad_norm": 0.12157902223714591, "learning_rate": 1.2437359203211109e-05, "loss": 0.308, "step": 870 },
  { "epoch": 1.7085377821393521, "grad_norm": 0.13207634498097479, "learning_rate": 1.2422242919726786e-05, "loss": 0.3338, "step": 871 },
  { "epoch": 1.7105004906771346, "grad_norm": 0.10700849261671908, "learning_rate": 1.2407120754089733e-05, "loss": 0.3221, "step": 872 },
  { "epoch": 1.7124631992149166, "grad_norm": 0.12604995554733597, "learning_rate": 1.2391992743022472e-05, "loss": 0.3165, "step": 873 },
  { "epoch": 1.7144259077526987, "grad_norm": 0.1613701947168364, "learning_rate": 1.2376858923261732e-05, "loss": 0.3217, "step": 874 },
  { "epoch": 1.716388616290481, "grad_norm": 0.1328873058860125, "learning_rate": 1.2361719331558346e-05, "loss": 0.3262, "step": 875 },
  { "epoch": 1.7183513248282631, "grad_norm": 0.11726688731965726, "learning_rate": 1.2346574004677154e-05, "loss": 0.3298, "step": 876 },
  { "epoch": 1.7203140333660452, "grad_norm": 0.1214011987184142, "learning_rate": 1.2331422979396936e-05, "loss": 0.337, "step": 877 },
  { "epoch": 1.7222767419038272, "grad_norm": 0.12820530343064107, "learning_rate": 1.2316266292510305e-05, "loss": 0.327, "step": 878 },
  { "epoch": 1.7242394504416094, "grad_norm": 0.11451384636314305, "learning_rate": 1.2301103980823619e-05, "loss": 0.3235, "step": 879 },
  { "epoch": 1.7262021589793917, "grad_norm": 0.10873652825630871, "learning_rate": 1.2285936081156897e-05, "loss": 0.311, "step": 880 },
  { "epoch": 1.7281648675171737, "grad_norm": 0.10624002895012581, "learning_rate": 1.2270762630343734e-05, "loss": 0.3245, "step": 881 },
  { "epoch": 1.7301275760549557, "grad_norm": 0.10425222382859203, "learning_rate": 1.2255583665231196e-05, "loss": 0.3211, "step": 882 },
  { "epoch": 1.732090284592738, "grad_norm": 0.10943316735201042, "learning_rate": 1.2240399222679747e-05, "loss": 0.3166, "step": 883 },
  { "epoch": 1.7340529931305202, "grad_norm": 0.108007053530337, "learning_rate": 1.2225209339563144e-05, "loss": 0.3271, "step": 884 },
  { "epoch": 1.7360157016683022, "grad_norm": 0.1806278564221946, "learning_rate": 1.221001405276837e-05, "loss": 0.3342, "step": 885 },
  { "epoch": 1.7379784102060842, "grad_norm": 0.11217999203542753, "learning_rate": 1.2194813399195518e-05, "loss": 0.3174, "step": 886 },
  { "epoch": 1.7399411187438667, "grad_norm": 0.11446924843967385, "learning_rate": 1.217960741575771e-05, "loss": 0.3375, "step": 887 },
  { "epoch": 1.7419038272816487, "grad_norm": 0.11052403929142486, "learning_rate": 1.2164396139381029e-05, "loss": 0.3207, "step": 888 },
  { "epoch": 1.7438665358194307, "grad_norm": 0.12041833183157077, "learning_rate": 1.2149179607004396e-05, "loss": 0.3215, "step": 889 },
  { "epoch": 1.745829244357213, "grad_norm": 0.11164358188191847, "learning_rate": 1.2133957855579501e-05, "loss": 0.3124, "step": 890 },
  { "epoch": 1.7477919528949952, "grad_norm": 0.10293795446688632, "learning_rate": 1.2118730922070707e-05, "loss": 0.323, "step": 891 },
  { "epoch": 1.7497546614327772, "grad_norm": 0.10970003809084786, "learning_rate": 1.210349884345496e-05, "loss": 0.3267, "step": 892 },
  { "epoch": 1.7517173699705593, "grad_norm": 0.10828051591336894, "learning_rate": 1.20882616567217e-05, "loss": 0.329, "step": 893 },
  { "epoch": 1.7536800785083415, "grad_norm": 0.11148013332448067, "learning_rate": 1.2073019398872778e-05, "loss": 0.3201, "step": 894 },
  { "epoch": 1.7556427870461238, "grad_norm": 0.1154913650725476, "learning_rate": 1.205777210692235e-05, "loss": 0.3318, "step": 895 },
  { "epoch": 1.7576054955839058, "grad_norm": 0.10882465073502968, "learning_rate": 1.2042519817896805e-05, "loss": 0.3311, "step": 896 },
  { "epoch": 1.7576054955839058, "eval_loss": 0.4246857762336731, "eval_runtime": 245.6728, "eval_samples_per_second": 123.551, "eval_steps_per_second": 3.863, "step": 896 },
  { "epoch": 1.7595682041216878, "grad_norm": 0.1164747664318698, "learning_rate": 1.202726256883466e-05, "loss": 0.317, "step": 897 },
  { "epoch": 1.76153091265947, "grad_norm": 0.11482257791759276, "learning_rate": 1.2012000396786485e-05, "loss": 0.3361, "step": 898 },
  { "epoch": 1.7634936211972523, "grad_norm": 0.10646048917975168, "learning_rate": 1.1996733338814795e-05, "loss": 0.3241, "step": 899 },
  { "epoch": 1.7654563297350343, "grad_norm": 0.10984226703740471, "learning_rate": 1.1981461431993978e-05, "loss": 0.3133, "step": 900 },
  { "epoch": 1.7674190382728163, "grad_norm": 0.1047308434934182, "learning_rate": 1.1966184713410192e-05, "loss": 0.3335, "step": 901 },
  { "epoch": 1.7693817468105988, "grad_norm": 0.10425069083553949, "learning_rate": 1.1950903220161286e-05, "loss": 0.3226, "step": 902 },
  { "epoch": 1.7713444553483808, "grad_norm": 0.10353476391750117, "learning_rate": 1.1935616989356693e-05, "loss": 0.3341, "step": 903 },
  { "epoch": 1.7733071638861628, "grad_norm": 0.13560691295515553, "learning_rate": 1.1920326058117364e-05, "loss": 0.3233, "step": 904 },
  { "epoch": 1.775269872423945, "grad_norm": 0.10178800675902581, "learning_rate": 1.190503046357565e-05, "loss": 0.3167, "step": 905 },
  { "epoch": 1.7772325809617273, "grad_norm": 0.21889157131254638, "learning_rate": 1.1889730242875243e-05, "loss": 0.3422, "step": 906 },
  { "epoch": 1.7791952894995093, "grad_norm": 0.10614581871424822, "learning_rate": 1.1874425433171055e-05, "loss": 0.309, "step": 907 },
  { "epoch": 1.7811579980372914, "grad_norm": 0.10365379424748211, "learning_rate": 1.1859116071629148e-05, "loss": 0.3137, "step": 908 },
  { "epoch": 1.7831207065750736, "grad_norm": 0.10348421216023944, "learning_rate": 1.1843802195426634e-05, "loss": 0.3225, "step": 909 },
  { "epoch": 1.7850834151128558, "grad_norm": 0.1060682052762049, "learning_rate": 1.1828483841751597e-05, "loss": 0.3139, "step": 910 },
  { "epoch": 1.7870461236506379, "grad_norm": 0.11042414275132055, "learning_rate": 1.1813161047802986e-05, "loss": 0.35, "step": 911 },
  { "epoch": 1.7890088321884199, "grad_norm": 0.11161988036910164, "learning_rate": 1.1797833850790527e-05, "loss": 0.3313, "step": 912 },
  { "epoch": 1.7909715407262021, "grad_norm": 0.11073431733692736, "learning_rate": 1.1782502287934659e-05, "loss": 0.3326, "step": 913 },
  { "epoch": 1.7929342492639844, "grad_norm": 0.10914796174284935, "learning_rate": 1.1767166396466404e-05, "loss": 0.3177, "step": 914 },
  { "epoch": 1.7948969578017664, "grad_norm": 0.10465459851626065, "learning_rate": 1.1751826213627297e-05, "loss": 0.3266, "step": 915 },
  { "epoch": 1.7968596663395484, "grad_norm": 0.10041864035051187, "learning_rate": 1.1736481776669307e-05, "loss": 0.3213, "step": 916 },
  { "epoch": 1.7988223748773309, "grad_norm": 0.11485845865027179, "learning_rate": 1.172113312285472e-05, "loss": 0.3265, "step": 917 },
  { "epoch": 1.800785083415113, "grad_norm": 0.12033881568016211, "learning_rate": 1.1705780289456069e-05, "loss": 0.3197, "step": 918 },
  { "epoch": 1.802747791952895, "grad_norm": 0.11332046772222625, "learning_rate": 1.1690423313756037e-05, "loss": 0.3139, "step": 919 },
  { "epoch": 1.8047105004906772, "grad_norm": 0.10473876200254037, "learning_rate": 1.1675062233047365e-05, "loss": 0.3281, "step": 920 },
  { "epoch": 1.8066732090284594, "grad_norm": 0.1184964198201036, "learning_rate": 1.165969708463276e-05, "loss": 0.3223, "step": 921 },
  { "epoch": 1.8086359175662414, "grad_norm": 0.12474305158961706, "learning_rate": 1.1644327905824808e-05, "loss": 0.3214, "step": 922 },
  { "epoch": 1.8105986261040234, "grad_norm": 0.09933741044605185, "learning_rate": 1.162895473394589e-05, "loss": 0.33, "step": 923 },
  { "epoch": 1.8125613346418057, "grad_norm": 0.25775919840623956, "learning_rate": 1.1613577606328068e-05, "loss": 0.3202, "step": 924 },
  { "epoch": 1.814524043179588, "grad_norm": 0.10094921766955992, "learning_rate": 1.1598196560313024e-05, "loss": 0.3092, "step": 925 },
  { "epoch": 1.81648675171737, "grad_norm": 0.11514280556001413, "learning_rate": 1.1582811633251949e-05, "loss": 0.3109, "step": 926 },
  { "epoch": 1.818449460255152, "grad_norm": 0.13680270431298738, "learning_rate": 1.1567422862505465e-05, "loss": 0.3275, "step": 927 },
  { "epoch": 1.8204121687929342, "grad_norm": 0.10821477293958764, "learning_rate": 1.1552030285443516e-05, "loss": 0.3185, "step": 928 },
  { "epoch": 1.8223748773307165, "grad_norm": 0.1020483099705299, "learning_rate": 1.1536633939445302e-05, "loss": 0.3216, "step": 929 },
  { "epoch": 1.8243375858684985, "grad_norm": 0.11310324846120763, "learning_rate": 1.1521233861899168e-05, "loss": 0.312, "step": 930 },
  { "epoch": 1.8263002944062807, "grad_norm": 0.10125352819285569, "learning_rate": 1.1505830090202524e-05, "loss": 0.3202, "step": 931 },
  { "epoch": 1.828263002944063, "grad_norm": 0.10518388367230969, "learning_rate": 1.1490422661761744e-05, "loss": 0.3161, "step": 932 },
  { "epoch": 1.830225711481845, "grad_norm": 0.10596564037766877, "learning_rate": 1.1475011613992097e-05, "loss": 0.3208, "step": 933 },
  { "epoch": 1.832188420019627, "grad_norm": 0.10832549491208261, "learning_rate": 1.1459596984317622e-05, "loss": 0.3198, "step": 934 },
  { "epoch": 1.8341511285574092, "grad_norm": 0.10340633509154877, "learning_rate": 1.1444178810171074e-05, "loss": 0.3347, "step": 935 },
  { "epoch": 1.8361138370951915, "grad_norm": 0.11187292516624683, "learning_rate": 1.1428757128993801e-05, "loss": 0.3159, "step": 936 },
  { "epoch": 1.8380765456329735, "grad_norm": 0.10201292596555332, "learning_rate": 1.1413331978235677e-05, "loss": 0.3166, "step": 937 },
  { "epoch": 1.8400392541707555, "grad_norm": 0.10804358781472774, "learning_rate": 1.1397903395354996e-05, "loss": 0.323, "step": 938 },
  { "epoch": 1.8420019627085378, "grad_norm": 0.10468656385490047, "learning_rate": 1.138247141781839e-05, "loss": 0.3185, "step": 939 },
  { "epoch": 1.84396467124632, "grad_norm": 0.21138099918352082, "learning_rate": 1.1367036083100735e-05, "loss": 0.3321, "step": 940 },
  { "epoch": 1.845927379784102, "grad_norm": 0.1007451545158691, "learning_rate": 1.1351597428685055e-05, "loss": 0.3201, "step": 941 },
  { "epoch": 1.847890088321884, "grad_norm": 0.10166102006184587, "learning_rate": 1.1336155492062439e-05, "loss": 0.3217, "step": 942 },
  { "epoch": 1.8498527968596663, "grad_norm": 0.10712467451663077, "learning_rate": 1.132071031073195e-05, "loss": 0.3207, "step": 943 },
  { "epoch": 1.8518155053974485, "grad_norm": 0.11530938712137909, "learning_rate": 1.130526192220052e-05, "loss": 0.3265, "step": 944 },
  { "epoch": 1.8537782139352306, "grad_norm": 0.102223465331792, "learning_rate": 1.1289810363982875e-05, "loss": 0.3109, "step": 945 },
  { "epoch": 1.8557409224730128, "grad_norm": 0.10435635169915954, "learning_rate": 1.1274355673601446e-05, "loss": 0.3215, "step": 946 },
  { "epoch": 1.857703631010795, "grad_norm": 0.10323000509883015, "learning_rate": 1.1258897888586256e-05, "loss": 0.3245, "step": 947 },
  { "epoch": 1.859666339548577, "grad_norm": 0.2356211384064762, "learning_rate": 1.1243437046474854e-05, "loss": 0.325, "step": 948 },
  { "epoch": 1.861629048086359, "grad_norm": 0.09975701738920462, "learning_rate": 1.1227973184812207e-05, "loss": 0.3125, "step": 949 },
  { "epoch": 1.8635917566241413, "grad_norm": 0.10475942757691821, "learning_rate": 1.1212506341150615e-05, "loss": 0.3168, "step": 950 },
  { "epoch": 1.8655544651619236, "grad_norm": 0.4497324410418576, "learning_rate": 1.1197036553049626e-05, "loss": 0.3325, "step": 951 },
  { "epoch": 1.8675171736997056, "grad_norm": 0.10885422432097863, "learning_rate": 1.118156385807593e-05, "loss": 0.3134, "step": 952 },
  { "epoch": 1.8694798822374876, "grad_norm": 0.9319115703562274, "learning_rate": 1.1166088293803276e-05, "loss": 0.3329, "step": 953 },
  { "epoch": 1.8714425907752699, "grad_norm": 0.1123785787687025, "learning_rate": 1.1150609897812387e-05, "loss": 0.311, "step": 954 },
  { "epoch": 1.873405299313052, "grad_norm": 0.10319256076853457, "learning_rate": 1.1135128707690862e-05, "loss": 0.3156, "step": 955 },
  { "epoch": 1.8753680078508341, "grad_norm": 0.7095695854915799, "learning_rate": 1.1119644761033079e-05, "loss": 0.3579, "step": 956 },
  { "epoch": 1.8773307163886161, "grad_norm": 0.167595727369702, "learning_rate": 1.1104158095440115e-05, "loss": 0.3277, "step": 957 },
  { "epoch": 1.8792934249263984, "grad_norm": 0.270060302607285, "learning_rate": 1.1088668748519646e-05, "loss": 0.3242, "step": 958 },
  { "epoch": 1.8812561334641806, "grad_norm": 0.15698959445501914, "learning_rate": 1.1073176757885866e-05, "loss": 0.361, "step": 959 },
  { "epoch": 1.8832188420019627, "grad_norm": 0.3221946038337756, "learning_rate": 1.105768216115938e-05, "loss": 0.3188, "step": 960 },
  { "epoch": 1.885181550539745, "grad_norm": 0.1406486088615483, "learning_rate": 1.1042184995967127e-05, "loss": 0.3312, "step": 961 },
  { "epoch": 1.8871442590775271, "grad_norm": 0.13397810433694357, "learning_rate": 1.1026685299942286e-05, "loss": 0.3261, "step": 962 },
  { "epoch": 1.8891069676153092, "grad_norm": 1.4841344998774781, "learning_rate": 1.1011183110724173e-05, "loss": 0.3272, "step": 963 },
  { "epoch": 1.8910696761530912, "grad_norm": 0.1659981257856718, "learning_rate": 1.0995678465958168e-05, "loss": 0.3288, "step": 964 },
  { "epoch": 1.8930323846908734, "grad_norm": 0.16778967117736607, "learning_rate": 1.098017140329561e-05, "loss": 0.3232, "step": 965 },
  { "epoch": 1.8949950932286557, "grad_norm": 0.21080374151960482, "learning_rate": 1.0964661960393703e-05, "loss": 0.3274, "step": 966 },
  { "epoch": 1.8969578017664377, "grad_norm": 0.16463715782689323, "learning_rate": 1.0949150174915441e-05, "loss": 0.3352, "step": 967 },
  { "epoch": 1.8989205103042197, "grad_norm": 0.2267681805465417, "learning_rate": 1.0933636084529507e-05, "loss": 0.3388, "step": 968 },
  { "epoch": 1.900883218842002, "grad_norm": 0.14488446998635424, "learning_rate": 1.0918119726910175e-05, "loss": 0.3202, "step": 969 },
  { "epoch": 1.9028459273797842, "grad_norm": 0.18140470030881337, "learning_rate": 1.0902601139737225e-05, "loss": 0.3329, "step": 970 },
  { "epoch": 1.9048086359175662, "grad_norm": 0.1546698941699075, "learning_rate": 1.0887080360695855e-05, "loss": 0.3306, "step": 971 },
  { "epoch": 1.9067713444553482, "grad_norm": 1.2221913272883775, "learning_rate": 1.0871557427476585e-05, "loss": 0.3399, "step": 972 },
  { "epoch": 1.9087340529931305, "grad_norm": 0.15426601384648553, "learning_rate": 1.0856032377775161e-05, "loss": 0.3358, "step": 973 },
  { "epoch": 1.9106967615309127, "grad_norm": 1.1114760751159305, "learning_rate": 1.0840505249292477e-05, "loss": 0.3356, "step": 974 },
  { "epoch": 1.9126594700686947, "grad_norm": 0.22900624172519007, "learning_rate": 1.0824976079734472e-05, "loss": 0.3134, "step": 975 },
  { "epoch": 1.914622178606477, "grad_norm": 0.9459775335785819, "learning_rate": 1.0809444906812034e-05, "loss": 0.3235, "step": 976 },
  { "epoch": 1.9165848871442592, "grad_norm": 0.17887149210801348, "learning_rate": 1.079391176824093e-05, "loss": 0.3304, "step": 977 },
  { "epoch": 1.9185475956820413, "grad_norm": 0.17484413383294636, "learning_rate": 1.0778376701741688e-05, "loss": 0.3298, "step": 978 },
  { "epoch": 1.9205103042198233, "grad_norm": 0.15868896830006096, "learning_rate": 1.0762839745039526e-05, "loss": 0.325, "step": 979 },
  { "epoch": 1.9224730127576055, "grad_norm": 0.13952635278363976, "learning_rate": 1.0747300935864245e-05, "loss": 0.3246, "step": 980 },
  { "epoch": 1.9244357212953878, "grad_norm": 0.14676838005524964, "learning_rate": 1.073176031195015e-05, "loss": 0.3367, "step": 981 },
  { "epoch": 1.9263984298331698, "grad_norm": 0.1393769904490578, "learning_rate": 1.0716217911035952e-05, "loss": 0.331, "step": 982 },
  { "epoch": 1.9283611383709518, "grad_norm": 0.12073516486793993, "learning_rate": 1.0700673770864673e-05, "loss": 0.3215, "step": 983 },
  { "epoch": 1.930323846908734, "grad_norm": 0.17261593509801376, "learning_rate": 1.0685127929183567e-05, "loss": 0.3377, "step": 984 },
  { "epoch": 1.9322865554465163, "grad_norm": 0.1288861939566597, "learning_rate": 1.0669580423744014e-05, "loss": 0.3268, "step": 985 },
  { "epoch": 1.9342492639842983, "grad_norm": 0.12242177876640145, "learning_rate": 1.0654031292301432e-05, "loss": 0.3331, "step": 986 },
  { "epoch": 1.9362119725220803, "grad_norm": 0.135438680122695, "learning_rate": 1.063848057261519e-05, "loss": 0.3276, "step": 987 },
  { "epoch": 1.9381746810598626, "grad_norm": 0.12113837642836334, "learning_rate": 1.0622928302448523e-05, "loss": 0.3196, "step": 988 },
  { "epoch": 1.9401373895976448, "grad_norm": 0.117144755644724, "learning_rate": 1.0607374519568412e-05, "loss": 0.3247, "step": 989 },
  { "epoch": 1.9421000981354268, "grad_norm": 0.11697508630127905, "learning_rate": 1.0591819261745528e-05, "loss": 0.3223, "step": 990 },
  { "epoch": 1.944062806673209, "grad_norm": 0.11238405496608947, "learning_rate": 1.0576262566754121e-05, "loss": 0.3212, "step": 991 },
  { "epoch": 1.9460255152109913, "grad_norm": 0.1139647888181511, "learning_rate": 1.0560704472371919e-05, "loss": 0.3253, "step": 992 },
  { "epoch": 1.9479882237487733, "grad_norm": 0.11466955150117668, "learning_rate": 1.0545145016380065e-05, "loss": 0.3273, "step": 993 },
  { "epoch": 1.9499509322865554, "grad_norm": 0.10970406400371188, "learning_rate": 1.0529584236562995e-05, "loss": 0.3254, "step": 994 },
  { "epoch": 1.9519136408243376, "grad_norm": 0.11114557334217089, "learning_rate": 1.0514022170708374e-05, "loss": 0.3246, "step": 995 },
  { "epoch": 1.9538763493621198, "grad_norm": 0.11801065397328833, "learning_rate": 1.0498458856606972e-05, "loss": 0.3294, "step": 996 },
  { "epoch": 1.9558390578999019, "grad_norm": 0.10405757613653188, "learning_rate": 1.0482894332052607e-05, "loss": 0.3261, "step": 997 },
  { "epoch": 1.9578017664376839, "grad_norm": 0.1103740005321899, "learning_rate": 1.0467328634842024e-05, "loss": 0.3289, "step": 998 },
  { "epoch": 1.9597644749754661, "grad_norm": 0.09987852489854114, "learning_rate": 1.0451761802774824e-05, "loss": 0.3172, "step": 999 },
  { "epoch": 1.9617271835132484, "grad_norm": 0.10893453828326174, "learning_rate": 1.0436193873653362e-05, "loss": 0.3228, "step": 1000 },
  { "epoch": 1.9636898920510304, "grad_norm": 0.10174600038711792, "learning_rate": 1.0420624885282653e-05, "loss": 0.3225, "step": 1001 },
  { "epoch": 1.9656526005888124, "grad_norm": 0.11640082237562227, "learning_rate": 1.0405054875470287e-05, "loss": 0.3193, "step": 1002 },
  { "epoch": 1.9676153091265947, "grad_norm": 0.15766357183117252, "learning_rate": 1.0389483882026334e-05, "loss": 0.3216, "step": 1003 },
  { "epoch": 1.969578017664377, "grad_norm": 0.10997358802993172, "learning_rate": 1.037391194276326e-05, "loss": 0.3142, "step": 1004 },
  { "epoch": 1.971540726202159, "grad_norm": 0.10152791127501247, "learning_rate": 1.0358339095495811e-05, "loss": 0.3169, "step": 1005 },
  { "epoch": 1.9735034347399412, "grad_norm": 0.20219624773435688, "learning_rate": 1.0342765378040953e-05, "loss": 0.324, "step": 1006 },
  { "epoch": 1.9754661432777234, "grad_norm": 0.10297719458762343, "learning_rate": 1.0327190828217763e-05, "loss": 0.3181, "step": 1007 },
  { "epoch": 1.9774288518155054, "grad_norm": 0.11052971601307274, "learning_rate": 1.0311615483847333e-05, "loss": 0.3245, "step": 1008 },
  { "epoch": 1.9793915603532874, "grad_norm": 0.09914734230876245, "learning_rate": 1.0296039382752687e-05, "loss": 0.324, "step": 1009 },
  { "epoch": 1.9813542688910697, "grad_norm": 0.10159497435434399, "learning_rate": 1.028046256275869e-05, "loss": 0.3119, "step": 1010 },
  { "epoch": 1.983316977428852, "grad_norm": 0.11056760688752829, "learning_rate": 1.0264885061691954e-05, "loss": 0.323, "step": 1011 },
  { "epoch": 1.985279685966634, "grad_norm": 0.10804474565070168, "learning_rate": 1.0249306917380731e-05, "loss": 0.3113, "step": 1012 },
  { "epoch": 1.987242394504416, "grad_norm": 0.13940608144653047, "learning_rate": 1.023372816765485e-05, "loss": 0.3123, "step": 1013 },
  { "epoch": 1.9892051030421982, "grad_norm": 0.10238052023308335, "learning_rate": 1.0218148850345613e-05, "loss": 0.3235, "step": 1014 },
  { "epoch": 1.9911678115799805, "grad_norm": 0.12527000090641732, "learning_rate": 1.0202569003285683e-05, "loss": 0.3337, "step": 1015 },
  { "epoch": 1.9931305201177625, "grad_norm": 0.11514202583090338, "learning_rate": 1.0186988664309023e-05, "loss": 0.333, "step": 1016 },
  { "epoch": 1.9950932286555445, "grad_norm": 0.11172112347247365, "learning_rate": 1.017140787125079e-05, "loss": 0.3227, "step": 1017 },
  { "epoch": 1.9970559371933267, "grad_norm": 0.15649181628225953, "learning_rate": 1.0155826661947232e-05, "loss": 0.3208, "step": 1018 }
  ],
  "logging_steps": 1,
  "max_steps": 2036,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 509,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 6.450464370500567e+18,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}