{
  "best_metric": 0.8061292767524719,
  "best_model_checkpoint": "miner_id_24/checkpoint-500",
  "epoch": 0.2516039753428104,
  "eval_steps": 100,
  "global_step": 500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0005032079506856208,
      "grad_norm": 0.38297849893569946,
      "learning_rate": 5e-06,
      "loss": 0.5872,
      "step": 1
    },
    {
      "epoch": 0.0005032079506856208,
      "eval_loss": 1.0933938026428223,
      "eval_runtime": 104.1253,
      "eval_samples_per_second": 32.144,
      "eval_steps_per_second": 8.038,
      "step": 1
    },
    {
      "epoch": 0.0010064159013712416,
      "grad_norm": 0.4977751076221466,
      "learning_rate": 1e-05,
      "loss": 0.7907,
      "step": 2
    },
    {
      "epoch": 0.0015096238520568624,
      "grad_norm": 0.4053085148334503,
      "learning_rate": 1.5e-05,
      "loss": 0.7258,
      "step": 3
    },
    {
      "epoch": 0.0020128318027424832,
      "grad_norm": 0.3187407851219177,
      "learning_rate": 2e-05,
      "loss": 0.6408,
      "step": 4
    },
    {
      "epoch": 0.002516039753428104,
      "grad_norm": 0.3603987395763397,
      "learning_rate": 2.5e-05,
      "loss": 0.8964,
      "step": 5
    },
    {
      "epoch": 0.003019247704113725,
      "grad_norm": 0.4167505204677582,
      "learning_rate": 3e-05,
      "loss": 0.7977,
      "step": 6
    },
    {
      "epoch": 0.0035224556547993457,
      "grad_norm": 0.33529505133628845,
      "learning_rate": 3.5e-05,
      "loss": 0.741,
      "step": 7
    },
    {
      "epoch": 0.0040256636054849665,
      "grad_norm": 0.3075070083141327,
      "learning_rate": 4e-05,
      "loss": 0.69,
      "step": 8
    },
    {
      "epoch": 0.004528871556170587,
      "grad_norm": 0.374713271856308,
      "learning_rate": 4.5e-05,
      "loss": 0.7692,
      "step": 9
    },
    {
      "epoch": 0.005032079506856208,
      "grad_norm": 0.42237210273742676,
      "learning_rate": 5e-05,
      "loss": 0.8035,
      "step": 10
    },
    {
      "epoch": 0.005535287457541829,
      "grad_norm": 0.44411763548851013,
      "learning_rate": 5.500000000000001e-05,
      "loss": 0.8701,
      "step": 11
    },
    {
      "epoch": 0.00603849540822745,
      "grad_norm": 0.41626623272895813,
      "learning_rate": 6e-05,
      "loss": 0.8624,
      "step": 12
    },
    {
      "epoch": 0.0065417033589130705,
      "grad_norm": 0.40315064787864685,
      "learning_rate": 6.500000000000001e-05,
      "loss": 0.8356,
      "step": 13
    },
    {
      "epoch": 0.007044911309598691,
      "grad_norm": 0.40788719058036804,
      "learning_rate": 7e-05,
      "loss": 0.9516,
      "step": 14
    },
    {
      "epoch": 0.007548119260284312,
      "grad_norm": 0.40332308411598206,
      "learning_rate": 7.500000000000001e-05,
      "loss": 0.9631,
      "step": 15
    },
    {
      "epoch": 0.008051327210969933,
      "grad_norm": 0.4551430344581604,
      "learning_rate": 8e-05,
      "loss": 0.9224,
      "step": 16
    },
    {
      "epoch": 0.008554535161655554,
      "grad_norm": 0.4207383096218109,
      "learning_rate": 8.5e-05,
      "loss": 0.8631,
      "step": 17
    },
    {
      "epoch": 0.009057743112341175,
      "grad_norm": 0.45070353150367737,
      "learning_rate": 9e-05,
      "loss": 0.867,
      "step": 18
    },
    {
      "epoch": 0.009560951063026795,
      "grad_norm": 0.41204211115837097,
      "learning_rate": 9.5e-05,
      "loss": 0.8123,
      "step": 19
    },
    {
      "epoch": 0.010064159013712416,
      "grad_norm": 0.4532608985900879,
      "learning_rate": 0.0001,
      "loss": 0.8152,
      "step": 20
    },
    {
      "epoch": 0.010567366964398037,
      "grad_norm": 0.42522719502449036,
      "learning_rate": 9.999892908320647e-05,
      "loss": 0.9145,
      "step": 21
    },
    {
      "epoch": 0.011070574915083658,
      "grad_norm": 0.43336769938468933,
      "learning_rate": 9.999571637870036e-05,
      "loss": 0.9232,
      "step": 22
    },
    {
      "epoch": 0.011573782865769279,
      "grad_norm": 0.43486812710762024,
      "learning_rate": 9.999036202410325e-05,
      "loss": 0.9012,
      "step": 23
    },
    {
      "epoch": 0.0120769908164549,
      "grad_norm": 0.43434756994247437,
      "learning_rate": 9.998286624877786e-05,
      "loss": 0.8315,
      "step": 24
    },
    {
      "epoch": 0.01258019876714052,
      "grad_norm": 0.4306849539279938,
      "learning_rate": 9.997322937381829e-05,
      "loss": 0.9571,
      "step": 25
    },
    {
      "epoch": 0.013083406717826141,
      "grad_norm": 0.4672311246395111,
      "learning_rate": 9.996145181203615e-05,
      "loss": 0.8713,
      "step": 26
    },
    {
      "epoch": 0.013586614668511762,
      "grad_norm": 0.4402594268321991,
      "learning_rate": 9.994753406794301e-05,
      "loss": 0.8639,
      "step": 27
    },
    {
      "epoch": 0.014089822619197383,
      "grad_norm": 0.45539745688438416,
      "learning_rate": 9.99314767377287e-05,
      "loss": 0.8925,
      "step": 28
    },
    {
      "epoch": 0.014593030569883004,
      "grad_norm": 0.48189201951026917,
      "learning_rate": 9.991328050923581e-05,
      "loss": 0.9734,
      "step": 29
    },
    {
      "epoch": 0.015096238520568624,
      "grad_norm": 0.44766247272491455,
      "learning_rate": 9.989294616193017e-05,
      "loss": 0.8371,
      "step": 30
    },
    {
      "epoch": 0.015599446471254245,
      "grad_norm": 0.5342682003974915,
      "learning_rate": 9.98704745668676e-05,
      "loss": 0.9424,
      "step": 31
    },
    {
      "epoch": 0.016102654421939866,
      "grad_norm": 0.5079587697982788,
      "learning_rate": 9.98458666866564e-05,
      "loss": 0.8545,
      "step": 32
    },
    {
      "epoch": 0.01660586237262549,
      "grad_norm": 0.5153644680976868,
      "learning_rate": 9.981912357541627e-05,
      "loss": 0.8507,
      "step": 33
    },
    {
      "epoch": 0.017109070323311108,
      "grad_norm": 0.5304479002952576,
      "learning_rate": 9.97902463787331e-05,
      "loss": 0.7365,
      "step": 34
    },
    {
      "epoch": 0.01761227827399673,
      "grad_norm": 0.5784561634063721,
      "learning_rate": 9.975923633360985e-05,
      "loss": 0.8531,
      "step": 35
    },
    {
      "epoch": 0.01811548622468235,
      "grad_norm": 0.6525458097457886,
      "learning_rate": 9.972609476841367e-05,
      "loss": 0.9531,
      "step": 36
    },
    {
      "epoch": 0.018618694175367972,
      "grad_norm": 0.59874427318573,
      "learning_rate": 9.969082310281891e-05,
      "loss": 0.8825,
      "step": 37
    },
    {
      "epoch": 0.01912190212605359,
      "grad_norm": 0.6657208800315857,
      "learning_rate": 9.965342284774632e-05,
      "loss": 1.013,
      "step": 38
    },
    {
      "epoch": 0.019625110076739213,
      "grad_norm": 0.673851490020752,
      "learning_rate": 9.961389560529836e-05,
      "loss": 1.0122,
      "step": 39
    },
    {
      "epoch": 0.020128318027424832,
      "grad_norm": 0.7437425255775452,
      "learning_rate": 9.957224306869053e-05,
      "loss": 0.9996,
      "step": 40
    },
    {
      "epoch": 0.020631525978110455,
      "grad_norm": 0.6888524293899536,
      "learning_rate": 9.952846702217886e-05,
      "loss": 0.9479,
      "step": 41
    },
    {
      "epoch": 0.021134733928796074,
      "grad_norm": 0.665365993976593,
      "learning_rate": 9.948256934098352e-05,
      "loss": 0.8076,
      "step": 42
    },
    {
      "epoch": 0.021637941879481697,
      "grad_norm": 0.9384682774543762,
      "learning_rate": 9.943455199120837e-05,
      "loss": 1.1964,
      "step": 43
    },
    {
      "epoch": 0.022141149830167316,
      "grad_norm": 1.06331205368042,
      "learning_rate": 9.938441702975689e-05,
      "loss": 1.1962,
      "step": 44
    },
    {
      "epoch": 0.022644357780852938,
      "grad_norm": 1.0956038236618042,
      "learning_rate": 9.933216660424395e-05,
      "loss": 1.0293,
      "step": 45
    },
    {
      "epoch": 0.023147565731538557,
      "grad_norm": 1.0600272417068481,
      "learning_rate": 9.927780295290389e-05,
      "loss": 1.2817,
      "step": 46
    },
    {
      "epoch": 0.02365077368222418,
      "grad_norm": 1.032386064529419,
      "learning_rate": 9.922132840449459e-05,
      "loss": 1.1316,
      "step": 47
    },
    {
      "epoch": 0.0241539816329098,
      "grad_norm": 1.1418523788452148,
      "learning_rate": 9.916274537819775e-05,
      "loss": 0.9598,
      "step": 48
    },
    {
      "epoch": 0.02465718958359542,
      "grad_norm": 1.591659426689148,
      "learning_rate": 9.91020563835152e-05,
      "loss": 1.1625,
      "step": 49
    },
    {
      "epoch": 0.02516039753428104,
      "grad_norm": 1.7732504606246948,
      "learning_rate": 9.903926402016153e-05,
      "loss": 1.0553,
      "step": 50
    },
    {
      "epoch": 0.025663605484966663,
      "grad_norm": 0.5284194946289062,
      "learning_rate": 9.897437097795257e-05,
      "loss": 0.6376,
      "step": 51
    },
    {
      "epoch": 0.026166813435652282,
      "grad_norm": 0.6791232824325562,
      "learning_rate": 9.890738003669029e-05,
      "loss": 0.5686,
      "step": 52
    },
    {
      "epoch": 0.026670021386337905,
      "grad_norm": 0.4423704445362091,
      "learning_rate": 9.883829406604363e-05,
      "loss": 0.6714,
      "step": 53
    },
    {
      "epoch": 0.027173229337023524,
      "grad_norm": 0.4226953387260437,
      "learning_rate": 9.876711602542563e-05,
      "loss": 0.7254,
      "step": 54
    },
    {
      "epoch": 0.027676437287709146,
      "grad_norm": 0.3696565628051758,
      "learning_rate": 9.869384896386668e-05,
      "loss": 0.8249,
      "step": 55
    },
    {
      "epoch": 0.028179645238394765,
      "grad_norm": 0.2843206822872162,
      "learning_rate": 9.861849601988383e-05,
      "loss": 0.6783,
      "step": 56
    },
    {
      "epoch": 0.028682853189080388,
      "grad_norm": 0.3041861057281494,
      "learning_rate": 9.854106042134641e-05,
      "loss": 0.7137,
      "step": 57
    },
    {
      "epoch": 0.029186061139766007,
      "grad_norm": 0.39011338353157043,
      "learning_rate": 9.846154548533773e-05,
      "loss": 0.8799,
      "step": 58
    },
    {
      "epoch": 0.02968926909045163,
      "grad_norm": 0.40477269887924194,
      "learning_rate": 9.837995461801299e-05,
      "loss": 0.8491,
      "step": 59
    },
    {
      "epoch": 0.03019247704113725,
      "grad_norm": 0.3500373363494873,
      "learning_rate": 9.829629131445342e-05,
      "loss": 0.7849,
      "step": 60
    },
    {
      "epoch": 0.03069568499182287,
      "grad_norm": 0.3607657849788666,
      "learning_rate": 9.821055915851647e-05,
      "loss": 0.8036,
      "step": 61
    },
    {
      "epoch": 0.03119889294250849,
      "grad_norm": 0.3407435417175293,
      "learning_rate": 9.812276182268236e-05,
      "loss": 0.8677,
      "step": 62
    },
    {
      "epoch": 0.03170210089319411,
      "grad_norm": 0.34367769956588745,
      "learning_rate": 9.803290306789676e-05,
      "loss": 0.7972,
      "step": 63
    },
    {
      "epoch": 0.03220530884387973,
      "grad_norm": 0.3393014371395111,
      "learning_rate": 9.794098674340965e-05,
      "loss": 0.7821,
      "step": 64
    },
    {
      "epoch": 0.03270851679456535,
      "grad_norm": 0.399403840303421,
      "learning_rate": 9.784701678661045e-05,
      "loss": 0.9752,
      "step": 65
    },
    {
      "epoch": 0.03321172474525098,
      "grad_norm": 0.336550772190094,
      "learning_rate": 9.775099722285935e-05,
      "loss": 0.8358,
      "step": 66
    },
    {
      "epoch": 0.033714932695936596,
      "grad_norm": 0.42282477021217346,
      "learning_rate": 9.765293216531486e-05,
      "loss": 0.8486,
      "step": 67
    },
    {
      "epoch": 0.034218140646622215,
      "grad_norm": 0.3811139464378357,
      "learning_rate": 9.755282581475769e-05,
      "loss": 0.8015,
      "step": 68
    },
    {
      "epoch": 0.034721348597307834,
      "grad_norm": 0.38650673627853394,
      "learning_rate": 9.74506824594107e-05,
      "loss": 0.8441,
      "step": 69
    },
    {
      "epoch": 0.03522455654799346,
      "grad_norm": 0.39965903759002686,
      "learning_rate": 9.73465064747553e-05,
      "loss": 0.9519,
      "step": 70
    },
    {
      "epoch": 0.03572776449867908,
      "grad_norm": 0.3865853548049927,
      "learning_rate": 9.724030232334391e-05,
      "loss": 0.8355,
      "step": 71
    },
    {
      "epoch": 0.0362309724493647,
      "grad_norm": 0.3812100887298584,
      "learning_rate": 9.713207455460894e-05,
      "loss": 0.815,
      "step": 72
    },
    {
      "epoch": 0.03673418040005032,
      "grad_norm": 0.40402311086654663,
      "learning_rate": 9.702182780466775e-05,
      "loss": 0.7945,
      "step": 73
    },
    {
      "epoch": 0.037237388350735943,
      "grad_norm": 0.43912094831466675,
      "learning_rate": 9.690956679612421e-05,
      "loss": 0.9382,
      "step": 74
    },
    {
      "epoch": 0.03774059630142156,
      "grad_norm": 0.48043292760849,
      "learning_rate": 9.67952963378663e-05,
      "loss": 0.9166,
      "step": 75
    },
    {
      "epoch": 0.03824380425210718,
      "grad_norm": 0.47702670097351074,
      "learning_rate": 9.667902132486009e-05,
      "loss": 0.8208,
      "step": 76
    },
    {
      "epoch": 0.0387470122027928,
      "grad_norm": 0.4606092870235443,
      "learning_rate": 9.656074673794018e-05,
      "loss": 0.7812,
      "step": 77
    },
    {
      "epoch": 0.03925022015347843,
      "grad_norm": 0.410383403301239,
      "learning_rate": 9.644047764359622e-05,
      "loss": 0.7814,
      "step": 78
    },
    {
      "epoch": 0.039753428104164046,
      "grad_norm": 0.43723535537719727,
      "learning_rate": 9.631821919375591e-05,
      "loss": 0.7831,
      "step": 79
    },
    {
      "epoch": 0.040256636054849665,
      "grad_norm": 0.47842466831207275,
      "learning_rate": 9.619397662556435e-05,
      "loss": 0.8328,
      "step": 80
    },
    {
      "epoch": 0.04075984400553529,
      "grad_norm": 0.5565553903579712,
      "learning_rate": 9.606775526115963e-05,
      "loss": 0.8564,
      "step": 81
    },
    {
      "epoch": 0.04126305195622091,
      "grad_norm": 0.5389965772628784,
      "learning_rate": 9.593956050744492e-05,
      "loss": 0.9588,
      "step": 82
    },
    {
      "epoch": 0.04176625990690653,
      "grad_norm": 0.5707739591598511,
      "learning_rate": 9.580939785585681e-05,
      "loss": 0.8665,
      "step": 83
    },
    {
      "epoch": 0.04226946785759215,
      "grad_norm": 0.5401300191879272,
      "learning_rate": 9.567727288213005e-05,
      "loss": 0.8702,
      "step": 84
    },
    {
      "epoch": 0.042772675808277774,
      "grad_norm": 0.5842973589897156,
      "learning_rate": 9.554319124605879e-05,
      "loss": 0.8452,
      "step": 85
    },
    {
      "epoch": 0.04327588375896339,
      "grad_norm": 0.526501476764679,
      "learning_rate": 9.540715869125407e-05,
      "loss": 0.8399,
      "step": 86
    },
    {
      "epoch": 0.04377909170964901,
      "grad_norm": 0.609856128692627,
      "learning_rate": 9.526918104489777e-05,
      "loss": 1.1022,
      "step": 87
    },
    {
      "epoch": 0.04428229966033463,
      "grad_norm": 0.6386977434158325,
      "learning_rate": 9.512926421749304e-05,
      "loss": 0.9333,
      "step": 88
    },
    {
      "epoch": 0.04478550761102026,
      "grad_norm": 0.6148085594177246,
      "learning_rate": 9.498741420261108e-05,
      "loss": 0.8722,
      "step": 89
    },
    {
      "epoch": 0.045288715561705876,
      "grad_norm": 0.610058069229126,
      "learning_rate": 9.484363707663442e-05,
      "loss": 0.9483,
      "step": 90
    },
    {
      "epoch": 0.045791923512391496,
      "grad_norm": 0.6424174904823303,
      "learning_rate": 9.469793899849661e-05,
      "loss": 0.9546,
      "step": 91
    },
    {
      "epoch": 0.046295131463077115,
      "grad_norm": 0.6571527719497681,
      "learning_rate": 9.45503262094184e-05,
      "loss": 0.84,
      "step": 92
    },
    {
      "epoch": 0.04679833941376274,
      "grad_norm": 0.9009180665016174,
      "learning_rate": 9.440080503264037e-05,
      "loss": 1.2002,
      "step": 93
    },
    {
      "epoch": 0.04730154736444836,
      "grad_norm": 0.8972412347793579,
      "learning_rate": 9.42493818731521e-05,
      "loss": 0.9899,
      "step": 94
    },
    {
      "epoch": 0.04780475531513398,
      "grad_norm": 0.8759328722953796,
      "learning_rate": 9.409606321741775e-05,
      "loss": 0.9973,
      "step": 95
    },
    {
      "epoch": 0.0483079632658196,
      "grad_norm": 0.880595326423645,
      "learning_rate": 9.394085563309827e-05,
      "loss": 0.9668,
      "step": 96
    },
    {
      "epoch": 0.048811171216505224,
      "grad_norm": 1.0916500091552734,
      "learning_rate": 9.378376576876999e-05,
      "loss": 1.0941,
      "step": 97
    },
    {
      "epoch": 0.04931437916719084,
      "grad_norm": 0.9800620675086975,
      "learning_rate": 9.362480035363986e-05,
      "loss": 1.0244,
      "step": 98
    },
    {
      "epoch": 0.04981758711787646,
      "grad_norm": 1.3642239570617676,
      "learning_rate": 9.34639661972572e-05,
      "loss": 1.1874,
      "step": 99
    },
    {
      "epoch": 0.05032079506856208,
      "grad_norm": 1.7741835117340088,
      "learning_rate": 9.330127018922194e-05,
      "loss": 1.1594,
      "step": 100
    },
    {
      "epoch": 0.05032079506856208,
      "eval_loss": 0.8690917491912842,
      "eval_runtime": 104.3719,
      "eval_samples_per_second": 32.068,
      "eval_steps_per_second": 8.019,
      "step": 100
    },
    {
      "epoch": 0.05082400301924771,
      "grad_norm": 0.33244970440864563,
      "learning_rate": 9.31367192988896e-05,
      "loss": 0.6948,
      "step": 101
    },
    {
      "epoch": 0.051327210969933326,
      "grad_norm": 0.3542371392250061,
      "learning_rate": 9.297032057507264e-05,
      "loss": 0.6499,
      "step": 102
    },
    {
      "epoch": 0.051830418920618945,
      "grad_norm": 0.31864750385284424,
      "learning_rate": 9.280208114573859e-05,
      "loss": 0.5946,
      "step": 103
    },
    {
      "epoch": 0.052333626871304564,
      "grad_norm": 0.31783977150917053,
      "learning_rate": 9.263200821770461e-05,
      "loss": 0.7341,
      "step": 104
    },
    {
      "epoch": 0.05283683482199019,
      "grad_norm": 0.27859652042388916,
      "learning_rate": 9.246010907632895e-05,
      "loss": 0.6446,
      "step": 105
    },
    {
      "epoch": 0.05334004277267581,
      "grad_norm": 0.29917001724243164,
      "learning_rate": 9.228639108519868e-05,
      "loss": 0.735,
      "step": 106
    },
    {
      "epoch": 0.05384325072336143,
      "grad_norm": 0.2935086190700531,
      "learning_rate": 9.211086168581433e-05,
      "loss": 0.6817,
      "step": 107
    },
    {
      "epoch": 0.05434645867404705,
      "grad_norm": 0.35257166624069214,
      "learning_rate": 9.193352839727121e-05,
      "loss": 0.7623,
      "step": 108
    },
    {
      "epoch": 0.054849666624732674,
      "grad_norm": 0.37438997626304626,
      "learning_rate": 9.175439881593716e-05,
      "loss": 0.8425,
      "step": 109
    },
    {
      "epoch": 0.05535287457541829,
      "grad_norm": 0.3695034980773926,
      "learning_rate": 9.157348061512727e-05,
      "loss": 0.7458,
      "step": 110
    },
    {
      "epoch": 0.05585608252610391,
      "grad_norm": 0.3363094925880432,
      "learning_rate": 9.139078154477512e-05,
      "loss": 0.8181,
      "step": 111
    },
    {
      "epoch": 0.05635929047678953,
      "grad_norm": 0.3058965504169464,
      "learning_rate": 9.120630943110077e-05,
      "loss": 0.6645,
      "step": 112
    },
    {
      "epoch": 0.05686249842747516,
      "grad_norm": 0.31257951259613037,
      "learning_rate": 9.102007217627568e-05,
      "loss": 0.7269,
      "step": 113
    },
    {
      "epoch": 0.057365706378160776,
      "grad_norm": 0.3484824299812317,
      "learning_rate": 9.083207775808396e-05,
      "loss": 0.8735,
      "step": 114
    },
    {
      "epoch": 0.057868914328846395,
      "grad_norm": 0.3367786109447479,
      "learning_rate": 9.064233422958077e-05,
      "loss": 0.766,
      "step": 115
    },
    {
      "epoch": 0.058372122279532014,
      "grad_norm": 0.371697336435318,
      "learning_rate": 9.045084971874738e-05,
      "loss": 0.7952,
      "step": 116
    },
    {
      "epoch": 0.05887533023021764,
      "grad_norm": 0.3939005136489868,
      "learning_rate": 9.025763242814291e-05,
      "loss": 0.895,
      "step": 117
    },
    {
      "epoch": 0.05937853818090326,
      "grad_norm": 0.40599775314331055,
      "learning_rate": 9.006269063455304e-05,
      "loss": 0.9353,
      "step": 118
    },
    {
      "epoch": 0.05988174613158888,
      "grad_norm": 0.4013155698776245,
      "learning_rate": 8.986603268863536e-05,
      "loss": 0.9384,
      "step": 119
    },
    {
      "epoch": 0.0603849540822745,
      "grad_norm": 0.41658180952072144,
      "learning_rate": 8.966766701456177e-05,
      "loss": 0.9412,
      "step": 120
    },
    {
      "epoch": 0.06088816203296012,
      "grad_norm": 0.38339975476264954,
      "learning_rate": 8.94676021096575e-05,
      "loss": 0.8672,
      "step": 121
    },
    {
      "epoch": 0.06139136998364574,
      "grad_norm": 0.4122770428657532,
      "learning_rate": 8.926584654403724e-05,
      "loss": 0.9392,
      "step": 122
    },
    {
      "epoch": 0.06189457793433136,
      "grad_norm": 0.4064811170101166,
      "learning_rate": 8.906240896023794e-05,
      "loss": 0.8886,
      "step": 123
    },
    {
      "epoch": 0.06239778588501698,
      "grad_norm": 0.40755781531333923,
      "learning_rate": 8.885729807284856e-05,
      "loss": 0.9177,
      "step": 124
    },
    {
      "epoch": 0.0629009938357026,
      "grad_norm": 0.4159265160560608,
      "learning_rate": 8.865052266813685e-05,
      "loss": 0.8445,
      "step": 125
    },
    {
      "epoch": 0.06340420178638823,
      "grad_norm": 0.403283953666687,
      "learning_rate": 8.844209160367299e-05,
      "loss": 0.8543,
      "step": 126
    },
    {
      "epoch": 0.06390740973707384,
      "grad_norm": 0.47934430837631226,
      "learning_rate": 8.823201380795001e-05,
      "loss": 0.8296,
      "step": 127
    },
    {
      "epoch": 0.06441061768775946,
      "grad_norm": 0.47067782282829285,
      "learning_rate": 8.802029828000156e-05,
      "loss": 0.936,
      "step": 128
    },
    {
      "epoch": 0.06491382563844508,
      "grad_norm": 0.501133382320404,
      "learning_rate": 8.780695408901613e-05,
      "loss": 0.8566,
      "step": 129
    },
    {
      "epoch": 0.0654170335891307,
      "grad_norm": 0.4946805536746979,
      "learning_rate": 8.759199037394887e-05,
      "loss": 0.9518,
      "step": 130
    },
    {
      "epoch": 0.06592024153981633,
      "grad_norm": 0.47323423624038696,
      "learning_rate": 8.737541634312985e-05,
      "loss": 0.8213,
      "step": 131
    },
    {
      "epoch": 0.06642344949050195,
      "grad_norm": 0.4894852340221405,
      "learning_rate": 8.715724127386972e-05,
      "loss": 0.8871,
      "step": 132
    },
    {
      "epoch": 0.06692665744118757,
      "grad_norm": 0.5523199439048767,
      "learning_rate": 8.693747451206232e-05,
      "loss": 0.9287,
      "step": 133
    },
    {
      "epoch": 0.06742986539187319,
      "grad_norm": 0.5813436508178711,
      "learning_rate": 8.671612547178428e-05,
      "loss": 0.8782,
      "step": 134
    },
    {
      "epoch": 0.06793307334255881,
      "grad_norm": 0.5135183334350586,
      "learning_rate": 8.649320363489179e-05,
      "loss": 0.7929,
      "step": 135
    },
    {
      "epoch": 0.06843628129324443,
      "grad_norm": 0.6729757189750671,
      "learning_rate": 8.626871855061438e-05,
      "loss": 1.014,
      "step": 136
    },
    {
      "epoch": 0.06893948924393005,
      "grad_norm": 0.5609135031700134,
      "learning_rate": 8.604267983514594e-05,
      "loss": 0.8092,
      "step": 137
    },
    {
      "epoch": 0.06944269719461567,
      "grad_norm": 0.5602811574935913,
      "learning_rate": 8.581509717123273e-05,
      "loss": 0.8407,
      "step": 138
    },
    {
      "epoch": 0.0699459051453013,
      "grad_norm": 0.5880002379417419,
      "learning_rate": 8.558598030775857e-05,
      "loss": 0.8237,
      "step": 139
    },
    {
      "epoch": 0.07044911309598692,
      "grad_norm": 0.6810115575790405,
      "learning_rate": 8.535533905932738e-05,
      "loss": 0.9806,
      "step": 140
    },
    {
      "epoch": 0.07095232104667254,
      "grad_norm": 0.7699592113494873,
      "learning_rate": 8.51231833058426e-05,
      "loss": 0.9699,
      "step": 141
    },
    {
      "epoch": 0.07145552899735816,
      "grad_norm": 0.7383102178573608,
      "learning_rate": 8.488952299208401e-05,
      "loss": 0.9188,
      "step": 142
    },
    {
      "epoch": 0.07195873694804378,
      "grad_norm": 0.784283459186554,
      "learning_rate": 8.46543681272818e-05,
      "loss": 0.9962,
      "step": 143
    },
    {
      "epoch": 0.0724619448987294,
      "grad_norm": 0.8139053583145142,
      "learning_rate": 8.44177287846877e-05,
      "loss": 0.9982,
      "step": 144
    },
    {
      "epoch": 0.07296515284941502,
      "grad_norm": 0.9453422427177429,
      "learning_rate": 8.417961510114356e-05,
      "loss": 1.1149,
      "step": 145
    },
    {
      "epoch": 0.07346836080010063,
      "grad_norm": 0.8534306883811951,
      "learning_rate": 8.39400372766471e-05,
      "loss": 1.1168,
      "step": 146
    },
    {
      "epoch": 0.07397156875078627,
      "grad_norm": 1.0746036767959595,
      "learning_rate": 8.36990055739149e-05,
      "loss": 1.1099,
      "step": 147
    },
    {
      "epoch": 0.07447477670147189,
      "grad_norm": 1.0740514993667603,
      "learning_rate": 8.345653031794292e-05,
      "loss": 1.0318,
      "step": 148
    },
    {
      "epoch": 0.0749779846521575,
      "grad_norm": 1.3433160781860352,
      "learning_rate": 8.321262189556409e-05,
      "loss": 1.2778,
      "step": 149
    },
    {
      "epoch": 0.07548119260284313,
      "grad_norm": 1.709749698638916,
      "learning_rate": 8.296729075500344e-05,
      "loss": 0.9436,
      "step": 150
    },
    {
      "epoch": 0.07598440055352874,
      "grad_norm": 0.2686924636363983,
      "learning_rate": 8.272054740543052e-05,
      "loss": 0.5435,
      "step": 151
    },
    {
      "epoch": 0.07648760850421436,
      "grad_norm": 0.3222774565219879,
      "learning_rate": 8.247240241650918e-05,
      "loss": 0.6375,
      "step": 152
    },
    {
      "epoch": 0.07699081645489998,
      "grad_norm": 0.30507364869117737,
      "learning_rate": 8.222286641794488e-05,
      "loss": 0.7499,
      "step": 153
    },
    {
      "epoch": 0.0774940244055856,
      "grad_norm": 0.269182950258255,
      "learning_rate": 8.197195009902924e-05,
      "loss": 0.5893,
      "step": 154
    },
    {
      "epoch": 0.07799723235627123,
      "grad_norm": 0.2954126298427582,
      "learning_rate": 8.171966420818228e-05,
      "loss": 0.6649,
      "step": 155
    },
    {
      "epoch": 0.07850044030695685,
      "grad_norm": 0.3091728091239929,
      "learning_rate": 8.146601955249188e-05,
      "loss": 0.6618,
      "step": 156
    },
    {
      "epoch": 0.07900364825764247,
      "grad_norm": 0.2694949805736542,
      "learning_rate": 8.121102699725089e-05,
      "loss": 0.6362,
      "step": 157
    },
    {
      "epoch": 0.07950685620832809,
      "grad_norm": 0.3082178235054016,
      "learning_rate": 8.095469746549172e-05,
      "loss": 0.6229,
      "step": 158
    },
    {
      "epoch": 0.08001006415901371,
      "grad_norm": 0.3117753565311432,
      "learning_rate": 8.069704193751832e-05,
      "loss": 0.7307,
      "step": 159
    },
    {
      "epoch": 0.08051327210969933,
      "grad_norm": 0.32263603806495667,
      "learning_rate": 8.043807145043604e-05,
      "loss": 0.7396,
      "step": 160
    },
    {
      "epoch": 0.08101648006038495,
      "grad_norm": 0.406573086977005,
      "learning_rate": 8.017779709767858e-05,
      "loss": 0.924,
      "step": 161
    },
    {
      "epoch": 0.08151968801107058,
      "grad_norm": 0.3405095040798187,
      "learning_rate": 7.991623002853296e-05,
      "loss": 0.7669,
      "step": 162
    },
    {
      "epoch": 0.0820228959617562,
      "grad_norm": 0.3500613272190094,
      "learning_rate": 7.965338144766186e-05,
      "loss": 0.7785,
      "step": 163
    },
    {
      "epoch": 0.08252610391244182,
      "grad_norm": 0.3334035575389862,
      "learning_rate": 7.938926261462366e-05,
      "loss": 0.7282,
      "step": 164
    },
    {
      "epoch": 0.08302931186312744,
      "grad_norm": 0.3296736180782318,
      "learning_rate": 7.912388484339012e-05,
      "loss": 0.714,
      "step": 165
    },
    {
      "epoch": 0.08353251981381306,
      "grad_norm": 0.3632432222366333,
      "learning_rate": 7.88572595018617e-05,
      "loss": 0.7388,
      "step": 166
    },
    {
      "epoch": 0.08403572776449868,
      "grad_norm": 0.38017383217811584,
      "learning_rate": 7.858939801138061e-05,
      "loss": 0.8051,
      "step": 167
    },
    {
      "epoch": 0.0845389357151843,
      "grad_norm": 0.41629263758659363,
      "learning_rate": 7.832031184624164e-05,
      "loss": 0.9006,
      "step": 168
    },
    {
      "epoch": 0.08504214366586992,
      "grad_norm": 0.40468400716781616,
      "learning_rate": 7.80500125332005e-05,
      "loss": 0.9159,
      "step": 169
    },
    {
      "epoch": 0.08554535161655555,
      "grad_norm": 0.4251197576522827,
      "learning_rate": 7.777851165098012e-05,
      "loss": 0.9539,
      "step": 170
    },
    {
      "epoch": 0.08604855956724117,
      "grad_norm": 0.3863302171230316,
      "learning_rate": 7.750582082977467e-05,
      "loss": 0.7704,
      "step": 171
    },
    {
      "epoch": 0.08655176751792679,
      "grad_norm": 0.38953644037246704,
      "learning_rate": 7.723195175075136e-05,
      "loss": 0.8375,
      "step": 172
    },
    {
      "epoch": 0.0870549754686124,
      "grad_norm": 0.39719176292419434,
      "learning_rate": 7.695691614555003e-05,
      "loss": 0.8864,
      "step": 173
    },
    {
      "epoch": 0.08755818341929802,
      "grad_norm": 0.4050215482711792,
      "learning_rate": 7.668072579578058e-05,
      "loss": 0.8163,
      "step": 174
    },
    {
      "epoch": 0.08806139136998364,
      "grad_norm": 0.42585161328315735,
      "learning_rate": 7.64033925325184e-05,
      "loss": 0.8266,
      "step": 175
    },
    {
      "epoch": 0.08856459932066926,
      "grad_norm": 0.4436732232570648,
      "learning_rate": 7.612492823579745e-05,
      "loss": 0.9099,
      "step": 176
    },
    {
      "epoch": 0.08906780727135488,
      "grad_norm": 0.41037729382514954,
      "learning_rate": 7.584534483410137e-05,
      "loss": 0.9768,
      "step": 177
    },
    {
      "epoch": 0.08957101522204051,
      "grad_norm": 0.413972407579422,
      "learning_rate": 7.55646543038526e-05,
      "loss": 0.761,
      "step": 178
    },
    {
      "epoch": 0.09007422317272613,
      "grad_norm": 0.445989727973938,
      "learning_rate": 7.528286866889924e-05,
      "loss": 0.8737,
      "step": 179
    },
    {
      "epoch": 0.09057743112341175,
      "grad_norm": 0.4886978566646576,
      "learning_rate": 7.500000000000001e-05,
      "loss": 0.8799,
      "step": 180
    },
    {
      "epoch": 0.09108063907409737,
      "grad_norm": 0.515007495880127,
      "learning_rate": 7.471606041430723e-05,
      "loss": 0.9363,
      "step": 181
    },
    {
      "epoch": 0.09158384702478299,
      "grad_norm": 0.5086027979850769,
      "learning_rate": 7.443106207484776e-05,
      "loss": 0.8112,
      "step": 182
    },
    {
      "epoch": 0.09208705497546861,
      "grad_norm": 0.5021002292633057,
      "learning_rate": 7.414501719000187e-05,
      "loss": 0.8402,
      "step": 183
    },
    {
      "epoch": 0.09259026292615423,
      "grad_norm": 0.5450283288955688,
      "learning_rate": 7.385793801298042e-05,
      "loss": 0.7513,
      "step": 184
    },
    {
      "epoch": 0.09309347087683985,
      "grad_norm": 0.5646865963935852,
      "learning_rate": 7.35698368412999e-05,
      "loss": 0.847,
      "step": 185
    },
    {
      "epoch": 0.09359667882752548,
      "grad_norm": 0.5946149230003357,
      "learning_rate": 7.328072601625557e-05,
      "loss": 0.8666,
      "step": 186
    },
    {
      "epoch": 0.0940998867782111,
      "grad_norm": 0.5803913474082947,
      "learning_rate": 7.2990617922393e-05,
      "loss": 0.9721,
      "step": 187
    },
    {
      "epoch": 0.09460309472889672,
      "grad_norm": 0.6186138391494751,
      "learning_rate": 7.269952498697734e-05,
      "loss": 0.8006,
      "step": 188
    },
    {
      "epoch": 0.09510630267958234,
      "grad_norm": 0.6833581328392029,
      "learning_rate": 7.240745967946113e-05,
      "loss": 0.9072,
      "step": 189
    },
    {
      "epoch": 0.09560951063026796,
      "grad_norm": 0.804831326007843,
      "learning_rate": 7.211443451095007e-05,
      "loss": 0.8989,
      "step": 190
    },
    {
      "epoch": 0.09611271858095358,
      "grad_norm": 0.7454904317855835,
      "learning_rate": 7.18204620336671e-05,
      "loss": 0.9415,
      "step": 191
    },
    {
      "epoch": 0.0966159265316392,
      "grad_norm": 0.7386621236801147,
      "learning_rate": 7.152555484041476e-05,
      "loss": 1.1014,
      "step": 192
    },
    {
      "epoch": 0.09711913448232481,
      "grad_norm": 0.8027065396308899,
      "learning_rate": 7.122972556403567e-05,
      "loss": 0.9651,
      "step": 193
    },
    {
      "epoch": 0.09762234243301045,
      "grad_norm": 0.843412458896637,
      "learning_rate": 7.09329868768714e-05,
      "loss": 0.9255,
      "step": 194
    },
    {
      "epoch": 0.09812555038369607,
      "grad_norm": 0.8859095573425293,
      "learning_rate": 7.063535149021973e-05,
      "loss": 1.0216,
      "step": 195
    },
    {
      "epoch": 0.09862875833438169,
      "grad_norm": 0.9696882367134094,
      "learning_rate": 7.033683215379002e-05,
      "loss": 1.0431,
      "step": 196
    },
    {
      "epoch": 0.0991319662850673,
      "grad_norm": 0.9128344655036926,
      "learning_rate": 7.003744165515705e-05,
      "loss": 0.9979,
      "step": 197
    },
    {
      "epoch": 0.09963517423575292,
      "grad_norm": 1.0365312099456787,
      "learning_rate": 6.973719281921335e-05,
      "loss": 1.0488,
      "step": 198
    },
    {
      "epoch": 0.10013838218643854,
      "grad_norm": 1.1797194480895996,
      "learning_rate": 6.943609850761979e-05,
      "loss": 1.0059,
      "step": 199
    },
    {
      "epoch": 0.10064159013712416,
      "grad_norm": 2.792616367340088,
      "learning_rate": 6.91341716182545e-05,
      "loss": 0.9833,
      "step": 200
    },
    {
      "epoch": 0.10064159013712416,
      "eval_loss": 0.8332151174545288,
      "eval_runtime": 104.6119,
      "eval_samples_per_second": 31.994,
      "eval_steps_per_second": 8.001,
      "step": 200
    },
    {
      "epoch": 0.10114479808780978,
      "grad_norm": 0.22519482672214508,
      "learning_rate": 6.883142508466054e-05,
      "loss": 0.4304,
      "step": 201
    },
    {
      "epoch": 0.10164800603849541,
      "grad_norm": 0.29026907682418823,
      "learning_rate": 6.852787187549182e-05,
      "loss": 0.5447,
      "step": 202
    },
    {
      "epoch": 0.10215121398918103,
      "grad_norm": 0.24368657171726227,
      "learning_rate": 6.82235249939575e-05,
      "loss": 0.5302,
      "step": 203
    },
    {
      "epoch": 0.10265442193986665,
      "grad_norm": 0.34774377942085266,
      "learning_rate": 6.7918397477265e-05,
      "loss": 0.6745,
      "step": 204
    },
    {
      "epoch": 0.10315762989055227,
      "grad_norm": 0.3001360595226288,
      "learning_rate": 6.761250239606169e-05,
      "loss": 0.7076,
      "step": 205
    },
    {
      "epoch": 0.10366083784123789,
      "grad_norm": 0.299600750207901,
      "learning_rate": 6.730585285387465e-05,
      "loss": 0.7122,
      "step": 206
    },
    {
      "epoch": 0.10416404579192351,
      "grad_norm": 0.291136771440506,
      "learning_rate": 6.699846198654971e-05,
      "loss": 0.6374,
      "step": 207
    },
    {
      "epoch": 0.10466725374260913,
      "grad_norm": 0.2981280982494354,
      "learning_rate": 6.669034296168855e-05,
      "loss": 0.7013,
      "step": 208
    },
    {
      "epoch": 0.10517046169329475,
      "grad_norm": 0.3012051582336426,
      "learning_rate": 6.638150897808468e-05,
      "loss": 0.7481,
      "step": 209
    },
    {
      "epoch": 0.10567366964398038,
      "grad_norm": 0.31151890754699707,
      "learning_rate": 6.607197326515808e-05,
      "loss": 0.6905,
      "step": 210
    },
    {
      "epoch": 0.106176877594666,
      "grad_norm": 0.3481593132019043,
      "learning_rate": 6.57617490823885e-05,
      "loss": 0.8155,
      "step": 211
    },
    {
      "epoch": 0.10668008554535162,
      "grad_norm": 0.3384002149105072,
      "learning_rate": 6.545084971874738e-05,
      "loss": 0.6831,
      "step": 212
    },
    {
      "epoch": 0.10718329349603724,
      "grad_norm": 0.3902130722999573,
      "learning_rate": 6.513928849212873e-05,
      "loss": 0.8484,
      "step": 213
    },
    {
      "epoch": 0.10768650144672286,
      "grad_norm": 0.32079294323921204,
      "learning_rate": 6.482707874877854e-05,
      "loss": 0.7408,
      "step": 214
    },
    {
      "epoch": 0.10818970939740848,
      "grad_norm": 0.3088681101799011,
      "learning_rate": 6.451423386272312e-05,
      "loss": 0.7263,
      "step": 215
    },
    {
      "epoch": 0.1086929173480941,
      "grad_norm": 0.33241790533065796,
      "learning_rate": 6.420076723519614e-05,
      "loss": 0.7808,
      "step": 216
    },
    {
      "epoch": 0.10919612529877971,
      "grad_norm": 0.36814364790916443,
      "learning_rate": 6.388669229406462e-05,
      "loss": 0.8254,
      "step": 217
    },
    {
      "epoch": 0.10969933324946535,
      "grad_norm": 0.367464542388916,
      "learning_rate": 6.357202249325371e-05,
      "loss": 0.9331,
      "step": 218
    },
    {
      "epoch": 0.11020254120015097,
      "grad_norm": 0.3247632086277008,
      "learning_rate": 6.32567713121704e-05,
      "loss": 0.8049,
      "step": 219
    },
    {
      "epoch": 0.11070574915083659,
      "grad_norm": 0.3686283230781555,
      "learning_rate": 6.294095225512603e-05,
      "loss": 0.8438,
      "step": 220
    },
    {
      "epoch": 0.1112089571015222,
      "grad_norm": 0.35872358083724976,
      "learning_rate": 6.26245788507579e-05,
      "loss": 0.8997,
      "step": 221
    },
    {
      "epoch": 0.11171216505220782,
      "grad_norm": 0.4434812068939209,
      "learning_rate": 6.230766465144967e-05,
      "loss": 0.8421,
      "step": 222
    },
    {
      "epoch": 0.11221537300289344,
      "grad_norm": 0.3434706926345825,
      "learning_rate": 6.199022323275083e-05,
      "loss": 0.7275,
      "step": 223
    },
    {
      "epoch": 0.11271858095357906,
      "grad_norm": 0.3942730724811554,
      "learning_rate": 6.167226819279528e-05,
      "loss": 0.7783,
      "step": 224
    },
    {
      "epoch": 0.11322178890426468,
      "grad_norm": 0.3891355097293854,
      "learning_rate": 6.135381315171867e-05,
      "loss": 0.7945,
      "step": 225
    },
    {
      "epoch": 0.11372499685495031,
      "grad_norm": 0.41171687841415405,
      "learning_rate": 6.103487175107507e-05,
      "loss": 0.9494,
      "step": 226
    },
    {
      "epoch": 0.11422820480563593,
      "grad_norm": 0.41461703181266785,
      "learning_rate": 6.071545765325254e-05,
      "loss": 0.7896,
      "step": 227
    },
    {
      "epoch": 0.11473141275632155,
      "grad_norm": 0.4718533754348755,
      "learning_rate": 6.0395584540887963e-05,
      "loss": 0.8729,
      "step": 228
    },
    {
      "epoch": 0.11523462070700717,
      "grad_norm": 0.49657323956489563,
      "learning_rate": 6.007526611628086e-05,
      "loss": 0.8849,
      "step": 229
    },
    {
      "epoch": 0.11573782865769279,
      "grad_norm": 0.4768148958683014,
      "learning_rate": 5.9754516100806423e-05,
      "loss": 0.9346,
      "step": 230
    },
    {
      "epoch": 0.11624103660837841,
      "grad_norm": 0.4757716953754425,
      "learning_rate": 5.9433348234327765e-05,
      "loss": 0.8979,
      "step": 231
    },
    {
      "epoch": 0.11674424455906403,
      "grad_norm": 0.5215921998023987,
      "learning_rate": 5.911177627460739e-05,
      "loss": 0.7759,
      "step": 232
    },
    {
      "epoch": 0.11724745250974965,
      "grad_norm": 0.5007186532020569,
      "learning_rate": 5.8789813996717736e-05,
      "loss": 0.7752,
      "step": 233
    },
    {
      "epoch": 0.11775066046043528,
      "grad_norm": 0.5211498737335205,
      "learning_rate": 5.8467475192451226e-05,
      "loss": 0.7625,
      "step": 234
    },
    {
      "epoch": 0.1182538684111209,
      "grad_norm": 0.5580277442932129,
      "learning_rate": 5.814477366972945e-05,
      "loss": 0.8294,
      "step": 235
    },
    {
      "epoch": 0.11875707636180652,
      "grad_norm": 0.6667461395263672,
      "learning_rate": 5.782172325201155e-05,
      "loss": 0.9982,
      "step": 236
    },
    {
      "epoch": 0.11926028431249214,
      "grad_norm": 0.6224911212921143,
      "learning_rate": 5.749833777770225e-05,
      "loss": 0.9011,
      "step": 237
    },
    {
      "epoch": 0.11976349226317776,
      "grad_norm": 0.6415894627571106,
      "learning_rate": 5.717463109955896e-05,
      "loss": 0.8895,
      "step": 238
    },
    {
      "epoch": 0.12026670021386338,
      "grad_norm": 0.6843252778053284,
      "learning_rate": 5.685061708409841e-05,
      "loss": 0.9872,
      "step": 239
    },
    {
      "epoch": 0.120769908164549,
      "grad_norm": 0.6954467296600342,
      "learning_rate": 5.6526309611002594e-05,
      "loss": 0.8853,
      "step": 240
    },
    {
      "epoch": 0.12127311611523463,
      "grad_norm": 0.7757971286773682,
      "learning_rate": 5.6201722572524275e-05,
      "loss": 0.911,
      "step": 241
    },
    {
      "epoch": 0.12177632406592025,
      "grad_norm": 0.7121040225028992,
      "learning_rate": 5.587686987289189e-05,
      "loss": 0.8526,
      "step": 242
    },
    {
      "epoch": 0.12227953201660587,
      "grad_norm": 0.7902366518974304,
      "learning_rate": 5.5551765427713884e-05,
      "loss": 0.9972,
      "step": 243
    },
    {
      "epoch": 0.12278273996729148,
      "grad_norm": 0.8224020600318909,
      "learning_rate": 5.522642316338268e-05,
      "loss": 0.993,
      "step": 244
    },
    {
      "epoch": 0.1232859479179771,
      "grad_norm": 0.9266722798347473,
      "learning_rate": 5.490085701647805e-05,
      "loss": 1.0755,
      "step": 245
    },
    {
      "epoch": 0.12378915586866272,
      "grad_norm": 1.0139940977096558,
      "learning_rate": 5.457508093317013e-05,
      "loss": 1.1419,
      "step": 246
    },
    {
      "epoch": 0.12429236381934834,
      "grad_norm": 1.0336477756500244,
      "learning_rate": 5.4249108868622086e-05,
      "loss": 1.0416,
      "step": 247
    },
    {
      "epoch": 0.12479557177003396,
      "grad_norm": 1.122233510017395,
      "learning_rate": 5.392295478639225e-05,
      "loss": 1.0255,
      "step": 248
    },
    {
      "epoch": 0.1252987797207196,
      "grad_norm": 1.1999965906143188,
      "learning_rate": 5.359663265783598e-05,
      "loss": 1.035,
      "step": 249
    },
    {
      "epoch": 0.1258019876714052,
      "grad_norm": 1.8675717115402222,
      "learning_rate": 5.327015646150716e-05,
      "loss": 1.2304,
      "step": 250
    },
    {
      "epoch": 0.12630519562209083,
      "grad_norm": 0.25763022899627686,
      "learning_rate": 5.294354018255945e-05,
      "loss": 0.5091,
      "step": 251
    },
    {
      "epoch": 0.12680840357277645,
      "grad_norm": 0.3074914216995239,
      "learning_rate": 5.26167978121472e-05,
      "loss": 0.5562,
      "step": 252
    },
    {
      "epoch": 0.12731161152346207,
      "grad_norm": 0.2566604018211365,
      "learning_rate": 5.228994334682604e-05,
      "loss": 0.6086,
      "step": 253
    },
    {
      "epoch": 0.1278148194741477,
      "grad_norm": 0.30195143818855286,
      "learning_rate": 5.196299078795344e-05,
      "loss": 0.6226,
      "step": 254
    },
    {
      "epoch": 0.1283180274248333,
      "grad_norm": 0.22067229449748993,
      "learning_rate": 5.1635954141088813e-05,
      "loss": 0.5459,
      "step": 255
    },
    {
      "epoch": 0.12882123537551893,
      "grad_norm": 0.25044503808021545,
      "learning_rate": 5.1308847415393666e-05,
      "loss": 0.6079,
      "step": 256
    },
    {
      "epoch": 0.12932444332620455,
      "grad_norm": 0.3060043454170227,
      "learning_rate": 5.0981684623031415e-05,
      "loss": 0.6744,
      "step": 257
    },
    {
      "epoch": 0.12982765127689017,
      "grad_norm": 0.312330961227417,
      "learning_rate": 5.0654479778567223e-05,
      "loss": 0.7216,
      "step": 258
    },
    {
      "epoch": 0.13033085922757578,
      "grad_norm": 0.29661911725997925,
      "learning_rate": 5.0327246898367597e-05,
      "loss": 0.6644,
      "step": 259
    },
    {
      "epoch": 0.1308340671782614,
      "grad_norm": 0.32180383801460266,
      "learning_rate": 5e-05,
      "loss": 0.8465,
      "step": 260
    },
    {
      "epoch": 0.13133727512894705,
      "grad_norm": 0.34142372012138367,
      "learning_rate": 4.9672753101632415e-05,
      "loss": 0.8359,
      "step": 261
    },
    {
      "epoch": 0.13184048307963267,
      "grad_norm": 0.31370797753334045,
      "learning_rate": 4.934552022143279e-05,
      "loss": 0.7117,
      "step": 262
    },
    {
      "epoch": 0.1323436910303183,
      "grad_norm": 0.31379300355911255,
      "learning_rate": 4.901831537696859e-05,
      "loss": 0.7076,
      "step": 263
    },
    {
      "epoch": 0.1328468989810039,
      "grad_norm": 0.2920902669429779,
      "learning_rate": 4.869115258460635e-05,
      "loss": 0.6233,
      "step": 264
    },
    {
      "epoch": 0.13335010693168953,
      "grad_norm": 0.3320544362068176,
      "learning_rate": 4.83640458589112e-05,
      "loss": 0.8579,
      "step": 265
    },
    {
      "epoch": 0.13385331488237515,
      "grad_norm": 0.3355720341205597,
      "learning_rate": 4.8037009212046586e-05,
      "loss": 0.7268,
      "step": 266
    },
    {
      "epoch": 0.13435652283306077,
      "grad_norm": 0.3696479797363281,
      "learning_rate": 4.7710056653173976e-05,
      "loss": 0.8072,
      "step": 267
    },
    {
      "epoch": 0.13485973078374638,
      "grad_norm": 0.36613941192626953,
      "learning_rate": 4.738320218785281e-05,
      "loss": 0.7789,
      "step": 268
    },
    {
      "epoch": 0.135362938734432,
      "grad_norm": 0.4317779541015625,
      "learning_rate": 4.7056459817440544e-05,
      "loss": 0.8655,
      "step": 269
    },
    {
      "epoch": 0.13586614668511762,
      "grad_norm": 0.3596924841403961,
      "learning_rate": 4.6729843538492847e-05,
      "loss": 0.7526,
      "step": 270
    },
    {
      "epoch": 0.13636935463580324,
      "grad_norm": 0.39818912744522095,
      "learning_rate": 4.640336734216403e-05,
      "loss": 0.8869,
      "step": 271
    },
    {
      "epoch": 0.13687256258648886,
      "grad_norm": 0.39517346024513245,
      "learning_rate": 4.607704521360776e-05,
      "loss": 0.7689,
      "step": 272
    },
    {
      "epoch": 0.13737577053717448,
      "grad_norm": 0.37257814407348633,
      "learning_rate": 4.575089113137792e-05,
      "loss": 0.8039,
      "step": 273
    },
    {
      "epoch": 0.1378789784878601,
      "grad_norm": 0.39023667573928833,
      "learning_rate": 4.542491906682989e-05,
      "loss": 0.8586,
      "step": 274
    },
    {
      "epoch": 0.13838218643854572,
      "grad_norm": 0.41638848185539246,
      "learning_rate": 4.509914298352197e-05,
      "loss": 0.8507,
      "step": 275
    },
    {
      "epoch": 0.13888539438923134,
      "grad_norm": 0.3964393436908722,
      "learning_rate": 4.477357683661734e-05,
      "loss": 0.7734,
      "step": 276
    },
    {
      "epoch": 0.13938860233991698,
      "grad_norm": 0.4602956473827362,
      "learning_rate": 4.444823457228612e-05,
      "loss": 0.9757,
      "step": 277
    },
    {
      "epoch": 0.1398918102906026,
      "grad_norm": 0.420833557844162,
      "learning_rate": 4.412313012710813e-05,
      "loss": 0.7962,
      "step": 278
    },
    {
      "epoch": 0.14039501824128822,
      "grad_norm": 0.4108290374279022,
      "learning_rate": 4.379827742747575e-05,
      "loss": 0.7666,
      "step": 279
    },
    {
      "epoch": 0.14089822619197384,
      "grad_norm": 0.44668057560920715,
      "learning_rate": 4.347369038899744e-05,
      "loss": 0.9031,
      "step": 280
    },
    {
      "epoch": 0.14140143414265946,
      "grad_norm": 0.4494163990020752,
      "learning_rate": 4.3149382915901606e-05,
      "loss": 0.7926,
      "step": 281
    },
    {
      "epoch": 0.14190464209334508,
      "grad_norm": 0.46361368894577026,
      "learning_rate": 4.282536890044104e-05,
      "loss": 0.8512,
      "step": 282
    },
    {
      "epoch": 0.1424078500440307,
      "grad_norm": 0.4348432421684265,
      "learning_rate": 4.250166222229774e-05,
      "loss": 0.7417,
      "step": 283
    },
    {
      "epoch": 0.14291105799471632,
      "grad_norm": 0.5306094288825989,
      "learning_rate": 4.2178276747988446e-05,
      "loss": 0.791,
      "step": 284
    },
    {
      "epoch": 0.14341426594540194,
      "grad_norm": 0.5647591948509216,
      "learning_rate": 4.185522633027057e-05,
      "loss": 0.8814,
      "step": 285
    },
    {
      "epoch": 0.14391747389608756,
      "grad_norm": 0.6206047534942627,
      "learning_rate": 4.153252480754877e-05,
      "loss": 0.9626,
      "step": 286
    },
    {
      "epoch": 0.14442068184677317,
      "grad_norm": 0.5752646923065186,
      "learning_rate": 4.1210186003282275e-05,
      "loss": 0.9403,
      "step": 287
    },
    {
      "epoch": 0.1449238897974588,
      "grad_norm": 0.6283671259880066,
      "learning_rate": 4.088822372539263e-05,
      "loss": 0.8473,
      "step": 288
    },
    {
      "epoch": 0.1454270977481444,
      "grad_norm": 0.598839282989502,
      "learning_rate": 4.0566651765672246e-05,
      "loss": 0.7755,
      "step": 289
    },
    {
      "epoch": 0.14593030569883003,
      "grad_norm": 0.6881972551345825,
      "learning_rate": 4.0245483899193595e-05,
      "loss": 0.9936,
      "step": 290
    },
    {
      "epoch": 0.14643351364951565,
      "grad_norm": 0.7141215801239014,
      "learning_rate": 3.992473388371915e-05,
      "loss": 0.9047,
      "step": 291
    },
    {
      "epoch": 0.14693672160020127,
      "grad_norm": 0.6933887600898743,
      "learning_rate": 3.960441545911204e-05,
      "loss": 0.8822,
      "step": 292
    },
    {
      "epoch": 0.14743992955088692,
      "grad_norm": 0.8213725090026855,
      "learning_rate": 3.928454234674747e-05,
      "loss": 0.9608,
      "step": 293
    },
    {
      "epoch": 0.14794313750157254,
      "grad_norm": 0.7432293891906738,
      "learning_rate": 3.896512824892495e-05,
      "loss": 0.9556,
      "step": 294
    },
    {
      "epoch": 0.14844634545225815,
      "grad_norm": 0.8309504389762878,
      "learning_rate": 3.864618684828134e-05,
      "loss": 1.0111,
      "step": 295
    },
    {
      "epoch": 0.14894955340294377,
      "grad_norm": 0.9679251313209534,
      "learning_rate": 3.832773180720475e-05,
      "loss": 1.0554,
      "step": 296
    },
    {
      "epoch": 0.1494527613536294,
      "grad_norm": 0.952207088470459,
      "learning_rate": 3.800977676724919e-05,
      "loss": 1.1046,
      "step": 297
    },
    {
      "epoch": 0.149955969304315,
      "grad_norm": 1.2538658380508423,
      "learning_rate": 3.769233534855035e-05,
      "loss": 1.0074,
      "step": 298
    },
    {
      "epoch": 0.15045917725500063,
      "grad_norm": 1.117653250694275,
      "learning_rate": 3.73754211492421e-05,
      "loss": 1.1258,
      "step": 299
    },
    {
      "epoch": 0.15096238520568625,
      "grad_norm": 2.037066698074341,
      "learning_rate": 3.705904774487396e-05,
      "loss": 0.9562,
      "step": 300
    },
    {
      "epoch": 0.15096238520568625,
      "eval_loss": 0.8160807490348816,
      "eval_runtime": 104.4474,
      "eval_samples_per_second": 32.045,
      "eval_steps_per_second": 8.014,
      "step": 300
    },
    {
      "epoch": 0.15146559315637187,
      "grad_norm": 0.26211196184158325,
      "learning_rate": 3.6743228687829595e-05,
      "loss": 0.5602,
      "step": 301
    },
    {
      "epoch": 0.1519688011070575,
      "grad_norm": 0.26436537504196167,
      "learning_rate": 3.642797750674629e-05,
      "loss": 0.4846,
      "step": 302
    },
    {
      "epoch": 0.1524720090577431,
      "grad_norm": 0.2748914361000061,
      "learning_rate": 3.6113307705935396e-05,
      "loss": 0.6021,
      "step": 303
    },
    {
      "epoch": 0.15297521700842873,
      "grad_norm": 0.22355616092681885,
      "learning_rate": 3.579923276480387e-05,
      "loss": 0.4879,
      "step": 304
    },
    {
      "epoch": 0.15347842495911435,
      "grad_norm": 0.27402985095977783,
      "learning_rate": 3.5485766137276894e-05,
      "loss": 0.7196,
      "step": 305
    },
    {
      "epoch": 0.15398163290979996,
      "grad_norm": 0.23475830256938934,
      "learning_rate": 3.5172921251221455e-05,
      "loss": 0.5369,
      "step": 306
    },
    {
      "epoch": 0.15448484086048558,
      "grad_norm": 0.28415268659591675,
      "learning_rate": 3.486071150787128e-05,
      "loss": 0.6017,
      "step": 307
    },
    {
      "epoch": 0.1549880488111712,
      "grad_norm": 0.27458059787750244,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 0.6019,
      "step": 308
    },
    {
      "epoch": 0.15549125676185685,
      "grad_norm": 0.32678642868995667,
      "learning_rate": 3.423825091761153e-05,
      "loss": 0.8207,
      "step": 309
    },
    {
      "epoch": 0.15599446471254247,
      "grad_norm": 0.3003508746623993,
      "learning_rate": 3.392802673484193e-05,
      "loss": 0.7746,
      "step": 310
    },
    {
      "epoch": 0.1564976726632281,
      "grad_norm": 0.321545273065567,
      "learning_rate": 3.361849102191533e-05,
      "loss": 0.8657,
      "step": 311
    },
    {
      "epoch": 0.1570008806139137,
      "grad_norm": 0.3106718361377716,
      "learning_rate": 3.330965703831146e-05,
      "loss": 0.6584,
      "step": 312
    },
    {
      "epoch": 0.15750408856459933,
      "grad_norm": 0.32135796546936035,
      "learning_rate": 3.300153801345028e-05,
      "loss": 0.7068,
      "step": 313
    },
    {
      "epoch": 0.15800729651528495,
      "grad_norm": 0.3328196108341217,
      "learning_rate": 3.2694147146125345e-05,
      "loss": 0.809,
      "step": 314
    },
    {
      "epoch": 0.15851050446597056,
      "grad_norm": 0.33247265219688416,
      "learning_rate": 3.2387497603938326e-05,
      "loss": 0.7724,
      "step": 315
    },
    {
      "epoch": 0.15901371241665618,
      "grad_norm": 0.3445248305797577,
      "learning_rate": 3.2081602522734986e-05,
      "loss": 0.729,
      "step": 316
    },
    {
      "epoch": 0.1595169203673418,
      "grad_norm": 0.3597284257411957,
      "learning_rate": 3.177647500604252e-05,
      "loss": 0.8055,
      "step": 317
    },
    {
      "epoch": 0.16002012831802742,
      "grad_norm": 0.36206623911857605,
      "learning_rate": 3.147212812450819e-05,
      "loss": 0.8012,
      "step": 318
    },
    {
      "epoch": 0.16052333626871304,
      "grad_norm": 0.34603044390678406,
      "learning_rate": 3.116857491533947e-05,
      "loss": 0.8477,
      "step": 319
    },
    {
      "epoch": 0.16102654421939866,
      "grad_norm": 0.3707107901573181,
      "learning_rate": 3.086582838174551e-05,
      "loss": 0.7539,
      "step": 320
    },
    {
      "epoch": 0.16152975217008428,
      "grad_norm": 0.3770596385002136,
      "learning_rate": 3.056390149238022e-05,
      "loss": 0.8248,
      "step": 321
    },
    {
      "epoch": 0.1620329601207699,
      "grad_norm": 0.3955375552177429,
      "learning_rate": 3.0262807180786647e-05,
      "loss": 0.8374,
      "step": 322
    },
    {
      "epoch": 0.16253616807145552,
      "grad_norm": 0.3951384127140045,
      "learning_rate": 2.996255834484296e-05,
      "loss": 0.8669,
      "step": 323
    },
    {
      "epoch": 0.16303937602214116,
      "grad_norm": 0.39075663685798645,
      "learning_rate": 2.9663167846209998e-05,
      "loss": 0.8174,
      "step": 324
    },
    {
      "epoch": 0.16354258397282678,
      "grad_norm": 0.41376301646232605,
      "learning_rate": 2.936464850978027e-05,
      "loss": 0.87,
      "step": 325
    },
    {
      "epoch": 0.1640457919235124,
      "grad_norm": 0.37442007660865784,
      "learning_rate": 2.9067013123128613e-05,
      "loss": 0.7344,
      "step": 326
    },
    {
      "epoch": 0.16454899987419802,
      "grad_norm": 0.4042944610118866,
      "learning_rate": 2.8770274435964355e-05,
      "loss": 0.7885,
      "step": 327
    },
    {
      "epoch": 0.16505220782488364,
      "grad_norm": 0.4240545332431793,
      "learning_rate": 2.8474445159585235e-05,
      "loss": 0.8146,
      "step": 328
    },
    {
      "epoch": 0.16555541577556926,
      "grad_norm": 0.4908733665943146,
      "learning_rate": 2.8179537966332887e-05,
      "loss": 0.9611,
      "step": 329
    },
    {
      "epoch": 0.16605862372625488,
      "grad_norm": 0.46376368403434753,
      "learning_rate": 2.7885565489049946e-05,
      "loss": 0.7945,
      "step": 330
    },
    {
      "epoch": 0.1665618316769405,
      "grad_norm": 0.5365279912948608,
      "learning_rate": 2.759254032053888e-05,
      "loss": 0.9014,
      "step": 331
    },
    {
      "epoch": 0.16706503962762612,
      "grad_norm": 0.5090306997299194,
      "learning_rate": 2.7300475013022663e-05,
      "loss": 0.8049,
      "step": 332
    },
    {
      "epoch": 0.16756824757831174,
      "grad_norm": 0.536597728729248,
      "learning_rate": 2.700938207760701e-05,
      "loss": 0.7892,
      "step": 333
    },
    {
      "epoch": 0.16807145552899735,
      "grad_norm": 0.5863428115844727,
      "learning_rate": 2.671927398374443e-05,
      "loss": 0.7805,
      "step": 334
    },
    {
      "epoch": 0.16857466347968297,
      "grad_norm": 0.49151116609573364,
      "learning_rate": 2.6430163158700115e-05,
      "loss": 0.7139,
      "step": 335
    },
    {
      "epoch": 0.1690778714303686,
      "grad_norm": 0.5732700824737549,
      "learning_rate": 2.6142061987019577e-05,
      "loss": 0.7674,
      "step": 336
    },
    {
      "epoch": 0.1695810793810542,
      "grad_norm": 0.7063980102539062,
      "learning_rate": 2.5854982809998153e-05,
      "loss": 0.8623,
      "step": 337
    },
    {
      "epoch": 0.17008428733173983,
      "grad_norm": 0.5727605223655701,
      "learning_rate": 2.556893792515227e-05,
      "loss": 0.7973,
      "step": 338
    },
    {
      "epoch": 0.17058749528242545,
      "grad_norm": 0.6519417762756348,
      "learning_rate": 2.5283939585692783e-05,
      "loss": 0.9101,
      "step": 339
    },
    {
      "epoch": 0.1710907032331111,
      "grad_norm": 0.6710662245750427,
      "learning_rate": 2.500000000000001e-05,
      "loss": 0.9189,
      "step": 340
    },
    {
      "epoch": 0.17159391118379672,
      "grad_norm": 0.7912317514419556,
      "learning_rate": 2.471713133110078e-05,
      "loss": 0.9465,
      "step": 341
    },
    {
      "epoch": 0.17209711913448233,
      "grad_norm": 0.9441129565238953,
      "learning_rate": 2.4435345696147403e-05,
      "loss": 1.2199,
      "step": 342
    },
    {
      "epoch": 0.17260032708516795,
      "grad_norm": 0.80022794008255,
      "learning_rate": 2.4154655165898627e-05,
      "loss": 0.9379,
      "step": 343
    },
    {
      "epoch": 0.17310353503585357,
      "grad_norm": 0.7774207592010498,
      "learning_rate": 2.3875071764202563e-05,
      "loss": 0.8809,
      "step": 344
    },
    {
      "epoch": 0.1736067429865392,
      "grad_norm": 0.8378373980522156,
      "learning_rate": 2.3596607467481603e-05,
      "loss": 0.9557,
      "step": 345
    },
    {
      "epoch": 0.1741099509372248,
      "grad_norm": 0.8967252373695374,
      "learning_rate": 2.3319274204219428e-05,
      "loss": 0.8607,
      "step": 346
    },
    {
      "epoch": 0.17461315888791043,
      "grad_norm": 0.870495080947876,
      "learning_rate": 2.3043083854449988e-05,
      "loss": 0.8724,
      "step": 347
    },
    {
      "epoch": 0.17511636683859605,
      "grad_norm": 1.1094945669174194,
      "learning_rate": 2.2768048249248648e-05,
      "loss": 0.8732,
      "step": 348
    },
    {
      "epoch": 0.17561957478928167,
      "grad_norm": 1.1429485082626343,
      "learning_rate": 2.2494179170225333e-05,
      "loss": 0.8457,
      "step": 349
    },
    {
      "epoch": 0.1761227827399673,
      "grad_norm": 1.5905135869979858,
      "learning_rate": 2.2221488349019903e-05,
      "loss": 0.9063,
      "step": 350
    },
    {
      "epoch": 0.1766259906906529,
      "grad_norm": 0.23821896314620972,
      "learning_rate": 2.194998746679952e-05,
      "loss": 0.4251,
      "step": 351
    },
    {
      "epoch": 0.17712919864133853,
      "grad_norm": 0.23636475205421448,
      "learning_rate": 2.167968815375837e-05,
      "loss": 0.3922,
      "step": 352
    },
    {
      "epoch": 0.17763240659202414,
      "grad_norm": 0.25628796219825745,
      "learning_rate": 2.1410601988619394e-05,
      "loss": 0.5204,
      "step": 353
    },
    {
      "epoch": 0.17813561454270976,
      "grad_norm": 0.2599431574344635,
      "learning_rate": 2.1142740498138324e-05,
      "loss": 0.683,
      "step": 354
    },
    {
      "epoch": 0.17863882249339538,
      "grad_norm": 0.26432743668556213,
      "learning_rate": 2.08761151566099e-05,
      "loss": 0.6276,
      "step": 355
    },
    {
      "epoch": 0.17914203044408103,
      "grad_norm": 0.23032639920711517,
      "learning_rate": 2.061073738537635e-05,
      "loss": 0.5113,
      "step": 356
    },
    {
      "epoch": 0.17964523839476665,
      "grad_norm": 0.27185848355293274,
      "learning_rate": 2.034661855233815e-05,
      "loss": 0.6249,
      "step": 357
    },
    {
      "epoch": 0.18014844634545227,
      "grad_norm": 0.3025369942188263,
      "learning_rate": 2.008376997146705e-05,
      "loss": 0.753,
      "step": 358
    },
    {
      "epoch": 0.1806516542961379,
      "grad_norm": 0.31796345114707947,
      "learning_rate": 1.982220290232143e-05,
      "loss": 0.6984,
      "step": 359
    },
    {
      "epoch": 0.1811548622468235,
      "grad_norm": 0.31694695353507996,
      "learning_rate": 1.9561928549563968e-05,
      "loss": 0.7719,
      "step": 360
    },
    {
      "epoch": 0.18165807019750912,
      "grad_norm": 0.30704811215400696,
      "learning_rate": 1.9302958062481673e-05,
      "loss": 0.6763,
      "step": 361
    },
    {
      "epoch": 0.18216127814819474,
      "grad_norm": 0.32100924849510193,
      "learning_rate": 1.9045302534508297e-05,
      "loss": 0.6178,
      "step": 362
    },
    {
      "epoch": 0.18266448609888036,
      "grad_norm": 0.3147682845592499,
      "learning_rate": 1.8788973002749112e-05,
      "loss": 0.7238,
      "step": 363
    },
    {
      "epoch": 0.18316769404956598,
      "grad_norm": 0.34390130639076233,
      "learning_rate": 1.8533980447508137e-05,
|
"loss": 0.793, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.1836709020002516, |
|
"grad_norm": 0.36561378836631775, |
|
"learning_rate": 1.8280335791817733e-05, |
|
"loss": 0.7969, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.18417410995093722, |
|
"grad_norm": 0.3845183551311493, |
|
"learning_rate": 1.8028049900970767e-05, |
|
"loss": 0.8577, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.18467731790162284, |
|
"grad_norm": 0.35048288106918335, |
|
"learning_rate": 1.777713358205514e-05, |
|
"loss": 0.7802, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.18518052585230846, |
|
"grad_norm": 0.40498775243759155, |
|
"learning_rate": 1.7527597583490822e-05, |
|
"loss": 0.9165, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.18568373380299408, |
|
"grad_norm": 0.38008689880371094, |
|
"learning_rate": 1.7279452594569483e-05, |
|
"loss": 0.8561, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.1861869417536797, |
|
"grad_norm": 0.39767763018608093, |
|
"learning_rate": 1.703270924499656e-05, |
|
"loss": 0.7432, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.18669014970436532, |
|
"grad_norm": 0.40831029415130615, |
|
"learning_rate": 1.678737810443593e-05, |
|
"loss": 0.8613, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.18719335765505096, |
|
"grad_norm": 0.37755683064460754, |
|
"learning_rate": 1.6543469682057106e-05, |
|
"loss": 0.9187, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.18769656560573658, |
|
"grad_norm": 0.41120558977127075, |
|
"learning_rate": 1.6300994426085103e-05, |
|
"loss": 0.851, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.1881997735564222, |
|
"grad_norm": 0.3905021846294403, |
|
"learning_rate": 1.605996272335291e-05, |
|
"loss": 0.7259, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.18870298150710782, |
|
"grad_norm": 0.8140775561332703, |
|
"learning_rate": 1.5820384898856434e-05, |
|
"loss": 0.9068, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.18920618945779344, |
|
"grad_norm": 0.40314650535583496, |
|
"learning_rate": 1.5582271215312294e-05, |
|
"loss": 0.7781, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.18970939740847906, |
|
"grad_norm": 0.4199778735637665, |
|
"learning_rate": 1.5345631872718214e-05, |
|
"loss": 0.8189, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.19021260535916468, |
|
"grad_norm": 0.44229626655578613, |
|
"learning_rate": 1.5110477007916001e-05, |
|
"loss": 0.8987, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.1907158133098503, |
|
"grad_norm": 0.4618576467037201, |
|
"learning_rate": 1.4876816694157419e-05, |
|
"loss": 0.7793, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.19121902126053592, |
|
"grad_norm": 0.47276002168655396, |
|
"learning_rate": 1.4644660940672627e-05, |
|
"loss": 0.787, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.19172222921122153, |
|
"grad_norm": 0.50196772813797, |
|
"learning_rate": 1.4414019692241437e-05, |
|
"loss": 0.8903, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.19222543716190715, |
|
"grad_norm": 0.4731937348842621, |
|
"learning_rate": 1.4184902828767287e-05, |
|
"loss": 0.7957, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.19272864511259277, |
|
"grad_norm": 0.5056723952293396, |
|
"learning_rate": 1.3957320164854059e-05, |
|
"loss": 0.8921, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.1932318530632784, |
|
"grad_norm": 0.5013747215270996, |
|
"learning_rate": 1.373128144938563e-05, |
|
"loss": 0.7901, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.193735061013964, |
|
"grad_norm": 0.521843671798706, |
|
"learning_rate": 1.3506796365108232e-05, |
|
"loss": 0.7771, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.19423826896464963, |
|
"grad_norm": 0.5642167329788208, |
|
"learning_rate": 1.3283874528215733e-05, |
|
"loss": 0.7303, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.19474147691533525, |
|
"grad_norm": 0.5664670467376709, |
|
"learning_rate": 1.3062525487937699e-05, |
|
"loss": 0.7348, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.1952446848660209, |
|
"grad_norm": 0.6890117526054382, |
|
"learning_rate": 1.2842758726130283e-05, |
|
"loss": 0.9545, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.19574789281670651, |
|
"grad_norm": 0.6803514957427979, |
|
"learning_rate": 1.2624583656870154e-05, |
|
"loss": 0.9202, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.19625110076739213, |
|
"grad_norm": 0.6082613468170166, |
|
"learning_rate": 1.2408009626051137e-05, |
|
"loss": 0.8661, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.19675430871807775, |
|
"grad_norm": 0.70882648229599, |
|
"learning_rate": 1.2193045910983863e-05, |
|
"loss": 1.0095, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.19725751666876337, |
|
"grad_norm": 0.6985040307044983, |
|
"learning_rate": 1.1979701719998453e-05, |
|
"loss": 0.8489, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.197760724619449, |
|
"grad_norm": 0.6823087930679321, |
|
"learning_rate": 1.1767986192049984e-05, |
|
"loss": 0.8537, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.1982639325701346, |
|
"grad_norm": 0.8139995336532593, |
|
"learning_rate": 1.1557908396327028e-05, |
|
"loss": 0.9329, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.19876714052082023, |
|
"grad_norm": 0.8146170377731323, |
|
"learning_rate": 1.134947733186315e-05, |
|
"loss": 1.0423, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.19927034847150585, |
|
"grad_norm": 0.8821521401405334, |
|
"learning_rate": 1.1142701927151456e-05, |
|
"loss": 0.9563, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.19977355642219147, |
|
"grad_norm": 1.0392780303955078, |
|
"learning_rate": 1.0937591039762085e-05, |
|
"loss": 1.01, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.2002767643728771, |
|
"grad_norm": 0.8813369870185852, |
|
"learning_rate": 1.0734153455962765e-05, |
|
"loss": 1.0518, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.2007799723235627, |
|
"grad_norm": 1.0717155933380127, |
|
"learning_rate": 1.0532397890342505e-05, |
|
"loss": 0.9553, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.20128318027424832, |
|
"grad_norm": 1.5049222707748413, |
|
"learning_rate": 1.0332332985438248e-05, |
|
"loss": 0.9136, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.20128318027424832, |
|
"eval_loss": 0.8091070055961609, |
|
"eval_runtime": 104.4834, |
|
"eval_samples_per_second": 32.034, |
|
"eval_steps_per_second": 8.011, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.20178638822493394, |
|
"grad_norm": 0.2686651647090912, |
|
"learning_rate": 1.013396731136465e-05, |
|
"loss": 0.5779, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.20228959617561956, |
|
"grad_norm": 0.3213135600090027, |
|
"learning_rate": 9.937309365446973e-06, |
|
"loss": 0.5277, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.2027928041263052, |
|
"grad_norm": 0.22786889970302582, |
|
"learning_rate": 9.742367571857091e-06, |
|
"loss": 0.595, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.20329601207699083, |
|
"grad_norm": 0.23553386330604553, |
|
"learning_rate": 9.549150281252633e-06, |
|
"loss": 0.5259, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.20379922002767645, |
|
"grad_norm": 0.21647228300571442, |
|
"learning_rate": 9.357665770419244e-06, |
|
"loss": 0.4914, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.20430242797836207, |
|
"grad_norm": 0.24146980047225952, |
|
"learning_rate": 9.167922241916055e-06, |
|
"loss": 0.5754, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.20480563592904769, |
|
"grad_norm": 0.25054699182510376, |
|
"learning_rate": 8.97992782372432e-06, |
|
"loss": 0.6017, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.2053088438797333, |
|
"grad_norm": 0.3012691140174866, |
|
"learning_rate": 8.793690568899216e-06, |
|
"loss": 0.7198, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.20581205183041892, |
|
"grad_norm": 0.3064156174659729, |
|
"learning_rate": 8.609218455224893e-06, |
|
"loss": 0.7218, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.20631525978110454, |
|
"grad_norm": 0.3092438578605652, |
|
"learning_rate": 8.426519384872733e-06, |
|
"loss": 0.7895, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.20681846773179016, |
|
"grad_norm": 0.3434111475944519, |
|
"learning_rate": 8.245601184062852e-06, |
|
"loss": 0.7085, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.20732167568247578, |
|
"grad_norm": 0.37145957350730896, |
|
"learning_rate": 8.066471602728803e-06, |
|
"loss": 0.7766, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.2078248836331614, |
|
"grad_norm": 0.32149067521095276, |
|
"learning_rate": 7.889138314185678e-06, |
|
"loss": 0.6546, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.20832809158384702, |
|
"grad_norm": 0.33787834644317627, |
|
"learning_rate": 7.71360891480134e-06, |
|
"loss": 0.7035, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.20883129953453264, |
|
"grad_norm": 0.35734379291534424, |
|
"learning_rate": 7.539890923671062e-06, |
|
"loss": 0.7837, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.20933450748521826, |
|
"grad_norm": 0.40482255816459656, |
|
"learning_rate": 7.367991782295391e-06, |
|
"loss": 0.7856, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.20983771543590388, |
|
"grad_norm": 0.35036709904670715, |
|
"learning_rate": 7.197918854261432e-06, |
|
"loss": 0.7238, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.2103409233865895, |
|
"grad_norm": 0.33897969126701355, |
|
"learning_rate": 7.029679424927365e-06, |
|
"loss": 0.7213, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.21084413133727514, |
|
"grad_norm": 0.4156653881072998, |
|
"learning_rate": 6.863280701110408e-06, |
|
"loss": 0.8968, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.21134733928796076, |
|
"grad_norm": 0.4062703251838684, |
|
"learning_rate": 6.698729810778065e-06, |
|
"loss": 0.8871, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.21185054723864638, |
|
"grad_norm": 0.3825936019420624, |
|
"learning_rate": 6.536033802742813e-06, |
|
"loss": 0.8309, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.212353755189332, |
|
"grad_norm": 0.3812710642814636, |
|
"learning_rate": 6.375199646360142e-06, |
|
"loss": 0.7463, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.21285696314001762, |
|
"grad_norm": 0.39211493730545044, |
|
"learning_rate": 6.216234231230012e-06, |
|
"loss": 0.8161, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.21336017109070324, |
|
"grad_norm": 0.38219356536865234, |
|
"learning_rate": 6.059144366901736e-06, |
|
"loss": 0.7659, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.21386337904138886, |
|
"grad_norm": 0.4277362823486328, |
|
"learning_rate": 5.903936782582253e-06, |
|
"loss": 0.7985, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.21436658699207448, |
|
"grad_norm": 0.43157392740249634, |
|
"learning_rate": 5.750618126847912e-06, |
|
"loss": 0.9045, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.2148697949427601, |
|
"grad_norm": 0.3815480172634125, |
|
"learning_rate": 5.599194967359639e-06, |
|
"loss": 0.6918, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.21537300289344571, |
|
"grad_norm": 0.43258634209632874, |
|
"learning_rate": 5.449673790581611e-06, |
|
"loss": 0.8096, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.21587621084413133, |
|
"grad_norm": 0.4353589415550232, |
|
"learning_rate": 5.302061001503394e-06, |
|
"loss": 0.8579, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.21637941879481695, |
|
"grad_norm": 0.4448375999927521, |
|
"learning_rate": 5.156362923365588e-06, |
|
"loss": 0.7577, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.21688262674550257, |
|
"grad_norm": 0.5607813000679016, |
|
"learning_rate": 5.012585797388936e-06, |
|
"loss": 0.9384, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.2173858346961882, |
|
"grad_norm": 0.514840304851532, |
|
"learning_rate": 4.87073578250698e-06, |
|
"loss": 0.9132, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.2178890426468738, |
|
"grad_norm": 0.4784408211708069, |
|
"learning_rate": 4.730818955102234e-06, |
|
"loss": 0.7861, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.21839225059755943, |
|
"grad_norm": 0.5339136123657227, |
|
"learning_rate": 4.592841308745932e-06, |
|
"loss": 0.8646, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.21889545854824508, |
|
"grad_norm": 0.5781549215316772, |
|
"learning_rate": 4.456808753941205e-06, |
|
"loss": 0.8948, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.2193986664989307, |
|
"grad_norm": 0.5680283308029175, |
|
"learning_rate": 4.322727117869951e-06, |
|
"loss": 0.8141, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.2199018744496163, |
|
"grad_norm": 0.5979980826377869, |
|
"learning_rate": 4.190602144143207e-06, |
|
"loss": 0.8601, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.22040508240030193, |
|
"grad_norm": 0.60759037733078, |
|
"learning_rate": 4.06043949255509e-06, |
|
"loss": 0.7886, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.22090829035098755, |
|
"grad_norm": 0.5867761373519897, |
|
"learning_rate": 3.932244738840379e-06, |
|
"loss": 0.785, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.22141149830167317, |
|
"grad_norm": 0.6567158699035645, |
|
"learning_rate": 3.8060233744356633e-06, |
|
"loss": 0.7419, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.2219147062523588, |
|
"grad_norm": 0.6796500086784363, |
|
"learning_rate": 3.681780806244095e-06, |
|
"loss": 0.8693, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.2224179142030444, |
|
"grad_norm": 0.9281523823738098, |
|
"learning_rate": 3.5595223564037884e-06, |
|
"loss": 1.019, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.22292112215373003, |
|
"grad_norm": 0.8401932120323181, |
|
"learning_rate": 3.4392532620598216e-06, |
|
"loss": 1.0439, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.22342433010441565, |
|
"grad_norm": 0.8805630207061768, |
|
"learning_rate": 3.3209786751399187e-06, |
|
"loss": 1.0998, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.22392753805510127, |
|
"grad_norm": 0.9491791725158691, |
|
"learning_rate": 3.2047036621337236e-06, |
|
"loss": 1.0169, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.22443074600578689, |
|
"grad_norm": 0.8668059706687927, |
|
"learning_rate": 3.0904332038757977e-06, |
|
"loss": 0.8529, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.2249339539564725, |
|
"grad_norm": 0.9326903223991394, |
|
"learning_rate": 2.978172195332263e-06, |
|
"loss": 1.1405, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.22543716190715812, |
|
"grad_norm": 1.0572550296783447, |
|
"learning_rate": 2.8679254453910785e-06, |
|
"loss": 0.9234, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.22594036985784374, |
|
"grad_norm": 1.223471760749817, |
|
"learning_rate": 2.759697676656098e-06, |
|
"loss": 0.8797, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.22644357780852936, |
|
"grad_norm": 1.5660356283187866, |
|
"learning_rate": 2.653493525244721e-06, |
|
"loss": 0.8981, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.226946785759215, |
|
"grad_norm": 0.23308148980140686, |
|
"learning_rate": 2.549317540589308e-06, |
|
"loss": 0.4266, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.22744999370990063, |
|
"grad_norm": 0.25011494755744934, |
|
"learning_rate": 2.4471741852423237e-06, |
|
"loss": 0.4767, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.22795320166058625, |
|
"grad_norm": 0.28921380639076233, |
|
"learning_rate": 2.3470678346851518e-06, |
|
"loss": 0.6835, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.22845640961127187, |
|
"grad_norm": 0.23749534785747528, |
|
"learning_rate": 2.2490027771406687e-06, |
|
"loss": 0.6596, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.22895961756195748, |
|
"grad_norm": 0.2435358464717865, |
|
"learning_rate": 2.152983213389559e-06, |
|
"loss": 0.585, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.2294628255126431, |
|
"grad_norm": 0.26918700337409973, |
|
"learning_rate": 2.0590132565903476e-06, |
|
"loss": 0.5973, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.22996603346332872, |
|
"grad_norm": 0.2611250877380371, |
|
"learning_rate": 1.9670969321032407e-06, |
|
"loss": 0.59, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.23046924141401434, |
|
"grad_norm": 0.26343515515327454, |
|
"learning_rate": 1.8772381773176417e-06, |
|
"loss": 0.5985, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.23097244936469996, |
|
"grad_norm": 0.27780017256736755, |
|
"learning_rate": 1.7894408414835362e-06, |
|
"loss": 0.6332, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.23147565731538558, |
|
"grad_norm": 0.3137127161026001, |
|
"learning_rate": 1.70370868554659e-06, |
|
"loss": 0.6877, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.2319788652660712, |
|
"grad_norm": 0.2799319624900818, |
|
"learning_rate": 1.620045381987012e-06, |
|
"loss": 0.6106, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.23248207321675682, |
|
"grad_norm": 0.33448776602745056, |
|
"learning_rate": 1.5384545146622852e-06, |
|
"loss": 0.8003, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.23298528116744244, |
|
"grad_norm": 0.3402840793132782, |
|
"learning_rate": 1.4589395786535953e-06, |
|
"loss": 0.7416, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.23348848911812806, |
|
"grad_norm": 0.3084582984447479, |
|
"learning_rate": 1.3815039801161721e-06, |
|
"loss": 0.7153, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.23399169706881368, |
|
"grad_norm": 0.348308265209198, |
|
"learning_rate": 1.3061510361333185e-06, |
|
"loss": 0.8734, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.2344949050194993, |
|
"grad_norm": 0.35534948110580444, |
|
"learning_rate": 1.232883974574367e-06, |
|
"loss": 0.8548, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.23499811297018494, |
|
"grad_norm": 0.35696539282798767, |
|
"learning_rate": 1.1617059339563807e-06, |
|
"loss": 0.8498, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.23550132092087056, |
|
"grad_norm": 0.4020044207572937, |
|
"learning_rate": 1.0926199633097157e-06, |
|
"loss": 0.818, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.23600452887155618, |
|
"grad_norm": 0.38172829151153564, |
|
"learning_rate": 1.0256290220474307e-06, |
|
"loss": 0.8157, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.2365077368222418, |
|
"grad_norm": 0.3517870604991913, |
|
"learning_rate": 9.607359798384785e-07, |
|
"loss": 0.7573, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.23701094477292742, |
|
"grad_norm": 0.3962482213973999, |
|
"learning_rate": 8.979436164848088e-07, |
|
"loss": 0.7767, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.23751415272361304, |
|
"grad_norm": 0.42771947383880615, |
|
"learning_rate": 8.372546218022747e-07, |
|
"loss": 0.9145, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.23801736067429866, |
|
"grad_norm": 0.3869607746601105, |
|
"learning_rate": 7.786715955054203e-07, |
|
"loss": 0.8365, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.23852056862498427, |
|
"grad_norm": 0.3934659957885742, |
|
"learning_rate": 7.221970470961125e-07, |
|
"loss": 0.8322, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.2390237765756699, |
|
"grad_norm": 0.3955768048763275, |
|
"learning_rate": 6.678333957560512e-07, |
|
"loss": 0.8821, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.2395269845263555, |
|
"grad_norm": 0.4069364368915558, |
|
"learning_rate": 6.15582970243117e-07, |
|
"loss": 0.8928, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.24003019247704113, |
|
"grad_norm": 0.47845911979675293, |
|
"learning_rate": 5.654480087916303e-07, |
|
"loss": 0.8978, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.24053340042772675, |
|
"grad_norm": 0.46929192543029785, |
|
"learning_rate": 5.174306590164879e-07, |
|
"loss": 0.887, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.24103660837841237, |
|
"grad_norm": 0.42585262656211853, |
|
"learning_rate": 4.715329778211375e-07, |
|
"loss": 0.7955, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.241539816329098, |
|
"grad_norm": 0.47433245182037354, |
|
"learning_rate": 4.277569313094809e-07, |
|
"loss": 0.8784, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.2420430242797836, |
|
"grad_norm": 0.4917643368244171, |
|
"learning_rate": 3.8610439470164737e-07, |
|
"loss": 0.8199, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.24254623223046926, |
|
"grad_norm": 0.5189266204833984, |
|
"learning_rate": 3.465771522536854e-07, |
|
"loss": 0.8448, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.24304944018115487, |
|
"grad_norm": 0.5650250315666199, |
|
"learning_rate": 3.09176897181096e-07, |
|
"loss": 0.8662, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.2435526481318405, |
|
"grad_norm": 0.5354907512664795, |
|
"learning_rate": 2.7390523158633554e-07, |
|
"loss": 0.8206, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.2440558560825261, |
|
"grad_norm": 0.526403546333313, |
|
"learning_rate": 2.407636663901591e-07, |
|
"loss": 0.7682, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.24455906403321173, |
|
"grad_norm": 0.5894873738288879, |
|
"learning_rate": 2.0975362126691712e-07, |
|
"loss": 0.8226, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.24506227198389735, |
|
"grad_norm": 0.6889277696609497, |
|
"learning_rate": 1.8087642458373134e-07, |
|
"loss": 0.8529, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.24556547993458297, |
|
"grad_norm": 0.6041770577430725, |
|
"learning_rate": 1.5413331334360182e-07, |
|
"loss": 0.839, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.2460686878852686, |
|
"grad_norm": 0.6730844974517822, |
|
"learning_rate": 1.2952543313240472e-07, |
|
"loss": 0.9139, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.2465718958359542, |
|
"grad_norm": 0.5944534540176392, |
|
"learning_rate": 1.0705383806982606e-07, |
|
"loss": 0.7731, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.24707510378663983, |
|
"grad_norm": 0.6421075463294983, |
|
"learning_rate": 8.671949076420882e-08, |
|
"loss": 0.8907, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.24757831173732545, |
|
"grad_norm": 0.704002857208252, |
|
"learning_rate": 6.852326227130834e-08, |
|
"loss": 0.8232, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.24808151968801107, |
|
"grad_norm": 0.8100038766860962, |
|
"learning_rate": 5.246593205699424e-08, |
|
"loss": 0.8716, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.24858472763869668, |
|
"grad_norm": 0.7674500942230225, |
|
"learning_rate": 3.8548187963854956e-08, |
|
"loss": 0.8986, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.2490879355893823, |
|
"grad_norm": 0.7667043209075928, |
|
"learning_rate": 2.6770626181715773e-08, |
|
"loss": 1.008, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.24959114354006792, |
|
"grad_norm": 0.891828179359436, |
|
"learning_rate": 1.7133751222137007e-08, |
|
"loss": 0.9345, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.25009435149075354, |
|
"grad_norm": 0.8518345952033997, |
|
"learning_rate": 9.637975896759077e-09, |
|
"loss": 0.968, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.2505975594414392, |
|
"grad_norm": 1.0205941200256348, |
|
"learning_rate": 4.2836212996499865e-09, |
|
"loss": 0.8592, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.2511007673921248, |
|
"grad_norm": 1.1440129280090332, |
|
"learning_rate": 1.0709167935385455e-09, |
|
"loss": 0.9679, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.2516039753428104, |
|
"grad_norm": 1.352176547050476, |
|
"learning_rate": 0.0, |
|
"loss": 0.8527, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.2516039753428104, |
|
"eval_loss": 0.8061292767524719, |
|
"eval_runtime": 104.5326, |
|
"eval_samples_per_second": 32.019, |
|
"eval_steps_per_second": 8.007, |
|
"step": 500 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 500, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.0 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 0 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.4280794742718464e+17, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|