{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.992407324698526,
  "eval_steps": 500,
  "global_step": 837,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01786511835640911,
      "grad_norm": 0.5766569375991821,
      "learning_rate": 4.999559763441755e-05,
      "loss": 1.5453,
      "step": 5
    },
    {
      "epoch": 0.03573023671281822,
      "grad_norm": 0.4148772656917572,
      "learning_rate": 4.998239208813602e-05,
      "loss": 1.4724,
      "step": 10
    },
    {
      "epoch": 0.053595355069227336,
      "grad_norm": 0.3131537139415741,
      "learning_rate": 4.9960388012006784e-05,
      "loss": 1.2985,
      "step": 15
    },
    {
      "epoch": 0.07146047342563644,
      "grad_norm": 0.26158425211906433,
      "learning_rate": 4.992959315562887e-05,
      "loss": 1.3255,
      "step": 20
    },
    {
      "epoch": 0.08932559178204555,
      "grad_norm": 0.23404298722743988,
      "learning_rate": 4.9890018364619516e-05,
      "loss": 1.245,
      "step": 25
    },
    {
      "epoch": 0.10719071013845467,
      "grad_norm": 0.19169265031814575,
      "learning_rate": 4.984167757679458e-05,
      "loss": 1.2509,
      "step": 30
    },
    {
      "epoch": 0.12505582849486377,
      "grad_norm": 0.29275861382484436,
      "learning_rate": 4.9784587817259674e-05,
      "loss": 1.2937,
      "step": 35
    },
    {
      "epoch": 0.14292094685127288,
      "grad_norm": 0.1952601969242096,
      "learning_rate": 4.971876919241422e-05,
      "loss": 1.1818,
      "step": 40
    },
    {
      "epoch": 0.160786065207682,
      "grad_norm": 0.223506361246109,
      "learning_rate": 4.964424488287009e-05,
      "loss": 1.2293,
      "step": 45
    },
    {
      "epoch": 0.1786511835640911,
      "grad_norm": 0.23478031158447266,
      "learning_rate": 4.956104113528776e-05,
      "loss": 1.2531,
      "step": 50
    },
    {
      "epoch": 0.19651630192050024,
      "grad_norm": 0.26763561367988586,
      "learning_rate": 4.9469187253132356e-05,
      "loss": 1.2131,
      "step": 55
    },
    {
      "epoch": 0.21438142027690935,
      "grad_norm": 0.2691376507282257,
      "learning_rate": 4.936871558635346e-05,
      "loss": 1.2497,
      "step": 60
    },
    {
      "epoch": 0.23224653863331846,
      "grad_norm": 0.2545244097709656,
      "learning_rate": 4.925966151999168e-05,
      "loss": 1.1778,
      "step": 65
    },
    {
      "epoch": 0.25011165698972754,
      "grad_norm": 0.30710119009017944,
      "learning_rate": 4.914206346171651e-05,
      "loss": 1.2123,
      "step": 70
    },
    {
      "epoch": 0.2679767753461367,
      "grad_norm": 0.23636460304260254,
      "learning_rate": 4.901596282829948e-05,
      "loss": 1.2007,
      "step": 75
    },
    {
      "epoch": 0.28584189370254576,
      "grad_norm": 0.2662898600101471,
      "learning_rate": 4.888140403102768e-05,
      "loss": 1.1745,
      "step": 80
    },
    {
      "epoch": 0.3037070120589549,
      "grad_norm": 0.308378130197525,
      "learning_rate": 4.8738434460062524e-05,
      "loss": 1.1981,
      "step": 85
    },
    {
      "epoch": 0.321572130415364,
      "grad_norm": 0.23143641650676727,
      "learning_rate": 4.858710446774951e-05,
      "loss": 1.1297,
      "step": 90
    },
    {
      "epoch": 0.3394372487717731,
      "grad_norm": 0.2781310975551605,
      "learning_rate": 4.842746735088461e-05,
      "loss": 1.2359,
      "step": 95
    },
    {
      "epoch": 0.3573023671281822,
      "grad_norm": 0.29989567399024963,
      "learning_rate": 4.825957933194376e-05,
      "loss": 1.1798,
      "step": 100
    },
    {
      "epoch": 0.37516748548459133,
      "grad_norm": 0.3551138937473297,
      "learning_rate": 4.808349953928184e-05,
      "loss": 1.2055,
      "step": 105
    },
    {
      "epoch": 0.39303260384100047,
      "grad_norm": 0.3111821413040161,
      "learning_rate": 4.789928998630838e-05,
      "loss": 1.2145,
      "step": 110
    },
    {
      "epoch": 0.41089772219740955,
      "grad_norm": 0.27980926632881165,
      "learning_rate": 4.770701554964706e-05,
      "loss": 1.1673,
      "step": 115
    },
    {
      "epoch": 0.4287628405538187,
      "grad_norm": 0.2745010256767273,
      "learning_rate": 4.750674394628687e-05,
      "loss": 1.1494,
      "step": 120
    },
    {
      "epoch": 0.4466279589102278,
      "grad_norm": 0.2852526307106018,
      "learning_rate": 4.729854570973289e-05,
      "loss": 1.1219,
      "step": 125
    },
    {
      "epoch": 0.4644930772666369,
      "grad_norm": 0.35505807399749756,
      "learning_rate": 4.7082494165165216e-05,
      "loss": 1.1953,
      "step": 130
    },
    {
      "epoch": 0.482358195623046,
      "grad_norm": 0.32421571016311646,
      "learning_rate": 4.685866540361456e-05,
      "loss": 1.0871,
      "step": 135
    },
    {
      "epoch": 0.5002233139794551,
      "grad_norm": 0.27323105931282043,
      "learning_rate": 4.662713825516379e-05,
      "loss": 1.1604,
      "step": 140
    },
    {
      "epoch": 0.5180884323358642,
      "grad_norm": 0.3120843470096588,
      "learning_rate": 4.638799426118492e-05,
      "loss": 1.1202,
      "step": 145
    },
    {
      "epoch": 0.5359535506922734,
      "grad_norm": 0.26333290338516235,
      "learning_rate": 4.6141317645621e-05,
      "loss": 1.1539,
      "step": 150
    },
    {
      "epoch": 0.5538186690486825,
      "grad_norm": 0.270474374294281,
      "learning_rate": 4.588719528532342e-05,
      "loss": 1.1343,
      "step": 155
    },
    {
      "epoch": 0.5716837874050915,
      "grad_norm": 0.2665104866027832,
      "learning_rate": 4.5625716679454787e-05,
      "loss": 1.2117,
      "step": 160
    },
    {
      "epoch": 0.5895489057615007,
      "grad_norm": 0.29763564467430115,
      "learning_rate": 4.535697391796832e-05,
      "loss": 1.1323,
      "step": 165
    },
    {
      "epoch": 0.6074140241179098,
      "grad_norm": 0.36548730731010437,
      "learning_rate": 4.50810616491747e-05,
      "loss": 1.1848,
      "step": 170
    },
    {
      "epoch": 0.6252791424743189,
      "grad_norm": 0.32450002431869507,
      "learning_rate": 4.479807704640802e-05,
      "loss": 1.1829,
      "step": 175
    },
    {
      "epoch": 0.643144260830728,
      "grad_norm": 0.4330536425113678,
      "learning_rate": 4.45081197738023e-05,
      "loss": 1.1881,
      "step": 180
    },
    {
      "epoch": 0.6610093791871371,
      "grad_norm": 0.3612164258956909,
      "learning_rate": 4.421129195119094e-05,
      "loss": 1.23,
      "step": 185
    },
    {
      "epoch": 0.6788744975435462,
      "grad_norm": 0.3616299033164978,
      "learning_rate": 4.390769811814116e-05,
      "loss": 1.1593,
      "step": 190
    },
    {
      "epoch": 0.6967396158999554,
      "grad_norm": 0.3271953761577606,
      "learning_rate": 4.359744519713628e-05,
      "loss": 1.1454,
      "step": 195
    },
    {
      "epoch": 0.7146047342563644,
      "grad_norm": 0.3824130594730377,
      "learning_rate": 4.3280642455918806e-05,
      "loss": 1.1633,
      "step": 200
    },
    {
      "epoch": 0.7324698526127735,
      "grad_norm": 0.36359068751335144,
      "learning_rate": 4.2957401469007495e-05,
      "loss": 1.1841,
      "step": 205
    },
    {
      "epoch": 0.7503349709691827,
      "grad_norm": 0.41856512427330017,
      "learning_rate": 4.262783607840199e-05,
      "loss": 1.1567,
      "step": 210
    },
    {
      "epoch": 0.7682000893255918,
      "grad_norm": 0.2762792408466339,
      "learning_rate": 4.229206235348891e-05,
      "loss": 1.1147,
      "step": 215
    },
    {
      "epoch": 0.7860652076820009,
      "grad_norm": 0.37006062269210815,
      "learning_rate": 4.195019855016346e-05,
      "loss": 1.1194,
      "step": 220
    },
    {
      "epoch": 0.80393032603841,
      "grad_norm": 0.33580338954925537,
      "learning_rate": 4.160236506918098e-05,
      "loss": 1.1884,
      "step": 225
    },
    {
      "epoch": 0.8217954443948191,
      "grad_norm": 0.33786651492118835,
      "learning_rate": 4.124868441375307e-05,
      "loss": 1.1687,
      "step": 230
    },
    {
      "epoch": 0.8396605627512282,
      "grad_norm": 0.3432258665561676,
      "learning_rate": 4.08892811464033e-05,
      "loss": 1.1512,
      "step": 235
    },
    {
      "epoch": 0.8575256811076374,
      "grad_norm": 0.3321971595287323,
      "learning_rate": 4.052428184509762e-05,
      "loss": 1.1417,
      "step": 240
    },
    {
      "epoch": 0.8753907994640464,
      "grad_norm": 0.371055543422699,
      "learning_rate": 4.0153815058664976e-05,
      "loss": 1.1595,
      "step": 245
    },
    {
      "epoch": 0.8932559178204555,
      "grad_norm": 0.3127325773239136,
      "learning_rate": 3.977801126152376e-05,
      "loss": 1.1661,
      "step": 250
    },
    {
      "epoch": 0.9111210361768647,
      "grad_norm": 0.28069669008255005,
      "learning_rate": 3.9397002807730166e-05,
      "loss": 1.1271,
      "step": 255
    },
    {
      "epoch": 0.9289861545332738,
      "grad_norm": 0.39312201738357544,
      "learning_rate": 3.9010923884364467e-05,
      "loss": 1.0852,
      "step": 260
    },
    {
      "epoch": 0.9468512728896828,
      "grad_norm": 0.44475504755973816,
      "learning_rate": 3.861991046427182e-05,
      "loss": 1.0884,
      "step": 265
    },
    {
      "epoch": 0.964716391246092,
      "grad_norm": 0.3146402835845947,
      "learning_rate": 3.822410025817406e-05,
      "loss": 1.1159,
      "step": 270
    },
    {
      "epoch": 0.9825815096025011,
      "grad_norm": 0.4495191276073456,
      "learning_rate": 3.782363266616946e-05,
      "loss": 1.1642,
      "step": 275
    },
    {
      "epoch": 1.0013398838767307,
      "grad_norm": 0.30039986968040466,
      "learning_rate": 3.741864872863754e-05,
      "loss": 1.1599,
      "step": 280
    },
    {
      "epoch": 1.0192050022331398,
      "grad_norm": 0.3826463222503662,
      "learning_rate": 3.700929107656614e-05,
      "loss": 1.1167,
      "step": 285
    },
    {
      "epoch": 1.037070120589549,
      "grad_norm": 0.38993003964424133,
      "learning_rate": 3.659570388131832e-05,
      "loss": 1.087,
      "step": 290
    },
    {
      "epoch": 1.054935238945958,
      "grad_norm": 0.4054009020328522,
      "learning_rate": 3.61780328038568e-05,
      "loss": 1.0912,
      "step": 295
    },
    {
      "epoch": 1.0728003573023672,
      "grad_norm": 0.47545984387397766,
      "learning_rate": 3.575642494344365e-05,
      "loss": 1.0991,
      "step": 300
    },
    {
      "epoch": 1.0906654756587761,
      "grad_norm": 0.4191313683986664,
      "learning_rate": 3.533102878583361e-05,
      "loss": 1.1219,
      "step": 305
    },
    {
      "epoch": 1.1085305940151853,
      "grad_norm": 0.3124043643474579,
      "learning_rate": 3.490199415097892e-05,
      "loss": 1.1198,
      "step": 310
    },
    {
      "epoch": 1.1263957123715944,
      "grad_norm": 0.36894044280052185,
      "learning_rate": 3.44694721402644e-05,
      "loss": 1.1193,
      "step": 315
    },
    {
      "epoch": 1.1442608307280036,
      "grad_norm": 0.5683015584945679,
      "learning_rate": 3.4033615083291135e-05,
      "loss": 1.142,
      "step": 320
    },
    {
      "epoch": 1.1621259490844127,
      "grad_norm": 0.371493399143219,
      "learning_rate": 3.3594576484227655e-05,
      "loss": 1.128,
      "step": 325
    },
    {
      "epoch": 1.1799910674408218,
      "grad_norm": 0.38947540521621704,
      "learning_rate": 3.315251096774737e-05,
      "loss": 1.1046,
      "step": 330
    },
    {
      "epoch": 1.197856185797231,
      "grad_norm": 0.3288847804069519,
      "learning_rate": 3.2707574224571495e-05,
      "loss": 1.1209,
      "step": 335
    },
    {
      "epoch": 1.21572130415364,
      "grad_norm": 0.36680731177330017,
      "learning_rate": 3.225992295663639e-05,
      "loss": 1.0932,
      "step": 340
    },
    {
      "epoch": 1.233586422510049,
      "grad_norm": 0.4268665909767151,
      "learning_rate": 3.1809714821904834e-05,
      "loss": 1.1161,
      "step": 345
    },
    {
      "epoch": 1.2514515408664582,
      "grad_norm": 0.37560388445854187,
      "learning_rate": 3.1357108378840616e-05,
      "loss": 1.1264,
      "step": 350
    },
    {
      "epoch": 1.2693166592228673,
      "grad_norm": 0.35783258080482483,
      "learning_rate": 3.0902263030565925e-05,
      "loss": 1.1226,
      "step": 355
    },
    {
      "epoch": 1.2871817775792764,
      "grad_norm": 0.3974683880805969,
      "learning_rate": 3.0445338968721287e-05,
      "loss": 1.0651,
      "step": 360
    },
    {
      "epoch": 1.3050468959356856,
      "grad_norm": 0.36973434686660767,
      "learning_rate": 2.9986497117047797e-05,
      "loss": 1.1052,
      "step": 365
    },
    {
      "epoch": 1.3229120142920947,
      "grad_norm": 0.40590983629226685,
      "learning_rate": 2.9525899074711506e-05,
      "loss": 1.0979,
      "step": 370
    },
    {
      "epoch": 1.3407771326485038,
      "grad_norm": 0.3565100431442261,
      "learning_rate": 2.906370705938991e-05,
      "loss": 1.1254,
      "step": 375
    },
    {
      "epoch": 1.358642251004913,
      "grad_norm": 0.4463523328304291,
      "learning_rate": 2.8600083850140692e-05,
      "loss": 1.1275,
      "step": 380
    },
    {
      "epoch": 1.3765073693613221,
      "grad_norm": 0.4577709436416626,
      "learning_rate": 2.8135192730072598e-05,
      "loss": 1.0984,
      "step": 385
    },
    {
      "epoch": 1.3943724877177313,
      "grad_norm": 0.3673183023929596,
      "learning_rate": 2.7669197428838972e-05,
      "loss": 1.1126,
      "step": 390
    },
    {
      "epoch": 1.4122376060741402,
      "grad_norm": 0.3968072235584259,
      "learning_rate": 2.7202262064973873e-05,
      "loss": 1.1271,
      "step": 395
    },
    {
      "epoch": 1.4301027244305493,
      "grad_norm": 0.5759745240211487,
      "learning_rate": 2.6734551088091293e-05,
      "loss": 1.1135,
      "step": 400
    },
    {
      "epoch": 1.4479678427869584,
      "grad_norm": 0.4903964698314667,
      "learning_rate": 2.6266229220967818e-05,
      "loss": 1.1163,
      "step": 405
    },
    {
      "epoch": 1.4658329611433676,
      "grad_norm": 0.4729979932308197,
      "learning_rate": 2.5797461401529e-05,
      "loss": 1.1011,
      "step": 410
    },
    {
      "epoch": 1.4836980794997767,
      "grad_norm": 0.45003455877304077,
      "learning_rate": 2.5328412724759976e-05,
      "loss": 1.1506,
      "step": 415
    },
    {
      "epoch": 1.5015631978561856,
      "grad_norm": 0.4313727021217346,
      "learning_rate": 2.485924838456086e-05,
      "loss": 1.1257,
      "step": 420
    },
    {
      "epoch": 1.5194283162125948,
      "grad_norm": 0.39038538932800293,
      "learning_rate": 2.4390133615567136e-05,
      "loss": 1.0858,
      "step": 425
    },
    {
      "epoch": 1.537293434569004,
      "grad_norm": 0.49707311391830444,
      "learning_rate": 2.3921233634955868e-05,
      "loss": 1.1602,
      "step": 430
    },
    {
      "epoch": 1.555158552925413,
      "grad_norm": 0.46587345004081726,
      "learning_rate": 2.3452713584257955e-05,
      "loss": 1.1144,
      "step": 435
    },
    {
      "epoch": 1.5730236712818222,
      "grad_norm": 0.37671202421188354,
      "learning_rate": 2.2984738471197077e-05,
      "loss": 1.1793,
      "step": 440
    },
    {
      "epoch": 1.5908887896382313,
      "grad_norm": 0.4020828306674957,
      "learning_rate": 2.2517473111575725e-05,
      "loss": 1.1206,
      "step": 445
    },
    {
      "epoch": 1.6087539079946405,
      "grad_norm": 0.5164027214050293,
      "learning_rate": 2.2051082071228854e-05,
      "loss": 1.1177,
      "step": 450
    },
    {
      "epoch": 1.6266190263510496,
      "grad_norm": 0.3768448829650879,
      "learning_rate": 2.1585729608065595e-05,
      "loss": 1.0802,
      "step": 455
    },
    {
      "epoch": 1.6444841447074587,
      "grad_norm": 0.3956853747367859,
      "learning_rate": 2.1121579614219345e-05,
      "loss": 1.0913,
      "step": 460
    },
    {
      "epoch": 1.6623492630638679,
      "grad_norm": 0.36919939517974854,
      "learning_rate": 2.0658795558326743e-05,
      "loss": 1.0896,
      "step": 465
    },
    {
      "epoch": 1.680214381420277,
      "grad_norm": 0.45556601881980896,
      "learning_rate": 2.0197540427955777e-05,
      "loss": 1.1183,
      "step": 470
    },
    {
      "epoch": 1.6980794997766862,
      "grad_norm": 0.47595128417015076,
      "learning_rate": 1.97379766722033e-05,
      "loss": 1.2037,
      "step": 475
    },
    {
      "epoch": 1.7159446181330953,
      "grad_norm": 0.3943460285663605,
      "learning_rate": 1.928026614448221e-05,
      "loss": 1.0682,
      "step": 480
    },
    {
      "epoch": 1.7338097364895042,
      "grad_norm": 0.40548044443130493,
      "learning_rate": 1.8824570045518423e-05,
      "loss": 1.0664,
      "step": 485
    },
    {
      "epoch": 1.7516748548459133,
      "grad_norm": 0.4159083962440491,
      "learning_rate": 1.8371048866577713e-05,
      "loss": 1.1475,
      "step": 490
    },
    {
      "epoch": 1.7695399732023225,
      "grad_norm": 0.4973352551460266,
      "learning_rate": 1.79198623329424e-05,
      "loss": 1.0675,
      "step": 495
    },
    {
      "epoch": 1.7874050915587316,
      "grad_norm": 0.4413686692714691,
      "learning_rate": 1.747116934765785e-05,
      "loss": 1.0956,
      "step": 500
    },
    {
      "epoch": 1.8052702099151405,
      "grad_norm": 0.4255582094192505,
      "learning_rate": 1.70251279355685e-05,
      "loss": 1.071,
      "step": 505
    },
    {
      "epoch": 1.8231353282715497,
      "grad_norm": 0.7180905938148499,
      "learning_rate": 1.658189518766322e-05,
      "loss": 1.1428,
      "step": 510
    },
    {
      "epoch": 1.8410004466279588,
      "grad_norm": 0.6441544890403748,
      "learning_rate": 1.6141627205749563e-05,
      "loss": 1.1014,
      "step": 515
    },
    {
      "epoch": 1.858865564984368,
      "grad_norm": 0.415781170129776,
      "learning_rate": 1.5704479047476377e-05,
      "loss": 1.1245,
      "step": 520
    },
    {
      "epoch": 1.876730683340777,
      "grad_norm": 0.40880969166755676,
      "learning_rate": 1.5270604671724188e-05,
      "loss": 1.0354,
      "step": 525
    },
    {
      "epoch": 1.8945958016971862,
      "grad_norm": 0.5105963349342346,
      "learning_rate": 1.4840156884382491e-05,
      "loss": 1.0717,
      "step": 530
    },
    {
      "epoch": 1.9124609200535954,
      "grad_norm": 0.6085048317909241,
      "learning_rate": 1.4413287284533228e-05,
      "loss": 1.1494,
      "step": 535
    },
    {
      "epoch": 1.9303260384100045,
      "grad_norm": 0.6034563183784485,
      "learning_rate": 1.399014621105914e-05,
      "loss": 1.1347,
      "step": 540
    },
    {
      "epoch": 1.9481911567664136,
      "grad_norm": 0.5377976298332214,
      "learning_rate": 1.3570882689696127e-05,
      "loss": 1.1074,
      "step": 545
    },
    {
      "epoch": 1.9660562751228228,
      "grad_norm": 0.6293488144874573,
      "learning_rate": 1.3155644380547877e-05,
      "loss": 1.1264,
      "step": 550
    },
    {
      "epoch": 1.983921393479232,
      "grad_norm": 0.45377108454704285,
      "learning_rate": 1.2744577526081666e-05,
      "loss": 1.0915,
      "step": 555
    },
    {
      "epoch": 2.0026797677534613,
      "grad_norm": 0.38156697154045105,
      "learning_rate": 1.2337826899623265e-05,
      "loss": 1.1034,
      "step": 560
    },
    {
      "epoch": 2.0205448861098705,
      "grad_norm": 0.5213829278945923,
      "learning_rate": 1.193553575436935e-05,
      "loss": 1.0915,
      "step": 565
    },
    {
      "epoch": 2.0384100044662796,
      "grad_norm": 0.46200066804885864,
      "learning_rate": 1.1537845772935279e-05,
      "loss": 1.0797,
      "step": 570
    },
    {
      "epoch": 2.0562751228226888,
      "grad_norm": 0.6238696575164795,
      "learning_rate": 1.1144897017456e-05,
      "loss": 1.086,
      "step": 575
    },
    {
      "epoch": 2.074140241179098,
      "grad_norm": 0.45493337512016296,
      "learning_rate": 1.0756827880257661e-05,
      "loss": 1.1068,
      "step": 580
    },
    {
      "epoch": 2.092005359535507,
      "grad_norm": 0.535967230796814,
      "learning_rate": 1.0373775035117305e-05,
      "loss": 1.0574,
      "step": 585
    },
    {
      "epoch": 2.109870477891916,
      "grad_norm": 0.42889833450317383,
      "learning_rate": 9.995873389127895e-06,
      "loss": 1.1409,
      "step": 590
    },
    {
      "epoch": 2.1277355962483253,
      "grad_norm": 0.3753083646297455,
      "learning_rate": 9.623256035185382e-06,
      "loss": 1.1461,
      "step": 595
    },
    {
      "epoch": 2.1456007146047344,
      "grad_norm": 0.508271336555481,
      "learning_rate": 9.256054205114939e-06,
      "loss": 1.1039,
      "step": 600
    },
    {
      "epoch": 2.163465832961143,
      "grad_norm": 0.45634639263153076,
      "learning_rate": 8.894397223452453e-06,
      "loss": 1.0832,
      "step": 605
    },
    {
      "epoch": 2.1813309513175523,
      "grad_norm": 0.557949423789978,
      "learning_rate": 8.538412461897807e-06,
      "loss": 1.0621,
      "step": 610
    },
    {
      "epoch": 2.1991960696739614,
      "grad_norm": 0.5253410935401917,
      "learning_rate": 8.188225294455992e-06,
      "loss": 1.1078,
      "step": 615
    },
    {
      "epoch": 2.2170611880303706,
      "grad_norm": 0.4432696998119354,
      "learning_rate": 7.843959053281663e-06,
      "loss": 1.114,
      "step": 620
    },
    {
      "epoch": 2.2349263063867797,
      "grad_norm": 0.4199691712856293,
      "learning_rate": 7.505734985242927e-06,
      "loss": 1.0363,
      "step": 625
    },
    {
      "epoch": 2.252791424743189,
      "grad_norm": 0.4136619567871094,
      "learning_rate": 7.173672209219495e-06,
      "loss": 1.0577,
      "step": 630
    },
    {
      "epoch": 2.270656543099598,
      "grad_norm": 0.589055061340332,
      "learning_rate": 6.847887674150266e-06,
      "loss": 1.1141,
      "step": 635
    },
    {
      "epoch": 2.288521661456007,
      "grad_norm": 0.42672625184059143,
      "learning_rate": 6.528496117845215e-06,
      "loss": 1.0775,
      "step": 640
    },
    {
      "epoch": 2.3063867798124162,
      "grad_norm": 0.6587265133857727,
      "learning_rate": 6.215610026575916e-06,
      "loss": 1.1016,
      "step": 645
    },
    {
      "epoch": 2.3242518981688254,
      "grad_norm": 0.4289712607860565,
      "learning_rate": 5.909339595459123e-06,
      "loss": 1.0545,
      "step": 650
    },
    {
      "epoch": 2.3421170165252345,
      "grad_norm": 0.4478652775287628,
      "learning_rate": 5.609792689647222e-06,
      "loss": 1.1381,
      "step": 655
    },
    {
      "epoch": 2.3599821348816437,
      "grad_norm": 0.529575765132904,
      "learning_rate": 5.317074806339295e-06,
      "loss": 1.0688,
      "step": 660
    },
    {
      "epoch": 2.377847253238053,
      "grad_norm": 0.5171645879745483,
      "learning_rate": 5.0312890376261415e-06,
      "loss": 1.0419,
      "step": 665
    },
    {
      "epoch": 2.395712371594462,
      "grad_norm": 0.5134549140930176,
      "learning_rate": 4.752536034182312e-06,
      "loss": 1.0742,
      "step": 670
    },
    {
      "epoch": 2.413577489950871,
      "grad_norm": 0.36803004145622253,
      "learning_rate": 4.480913969818098e-06,
      "loss": 1.0799,
      "step": 675
    },
    {
      "epoch": 2.43144260830728,
      "grad_norm": 0.5440959334373474,
      "learning_rate": 4.216518506903683e-06,
      "loss": 1.0485,
      "step": 680
    },
    {
      "epoch": 2.4493077266636893,
      "grad_norm": 0.5819982290267944,
      "learning_rate": 3.959442762677967e-06,
      "loss": 1.0833,
      "step": 685
    },
    {
      "epoch": 2.467172845020098,
      "grad_norm": 0.4522368609905243,
      "learning_rate": 3.70977727645363e-06,
      "loss": 1.0997,
      "step": 690
    },
    {
      "epoch": 2.485037963376507,
      "grad_norm": 0.4565070569515228,
      "learning_rate": 3.467609977730155e-06,
      "loss": 1.0652,
      "step": 695
    },
    {
      "epoch": 2.5029030817329163,
      "grad_norm": 0.46055707335472107,
      "learning_rate": 3.233026155226046e-06,
      "loss": 1.0679,
      "step": 700
    },
    {
      "epoch": 2.5207682000893255,
      "grad_norm": 0.811050534248352,
      "learning_rate": 3.0061084268410006e-06,
      "loss": 1.1097,
      "step": 705
    },
    {
      "epoch": 2.5386333184457346,
      "grad_norm": 0.4883212447166443,
      "learning_rate": 2.786936710558821e-06,
      "loss": 1.1183,
      "step": 710
    },
    {
      "epoch": 2.5564984368021437,
      "grad_norm": 0.6575244069099426,
      "learning_rate": 2.575588196301146e-06,
      "loss": 1.0509,
      "step": 715
    },
    {
      "epoch": 2.574363555158553,
      "grad_norm": 0.4288465082645416,
      "learning_rate": 2.372137318741968e-06,
      "loss": 1.0662,
      "step": 720
    },
    {
      "epoch": 2.592228673514962,
      "grad_norm": 0.628170907497406,
      "learning_rate": 2.1766557310925916e-06,
      "loss": 1.0352,
      "step": 725
    },
    {
      "epoch": 2.610093791871371,
      "grad_norm": 0.4450254440307617,
      "learning_rate": 1.989212279866079e-06,
      "loss": 1.0813,
      "step": 730
    },
    {
      "epoch": 2.6279589102277803,
      "grad_norm": 0.5844019651412964,
      "learning_rate": 1.8098729806303116e-06,
      "loss": 1.1297,
      "step": 735
    },
    {
      "epoch": 2.6458240285841894,
      "grad_norm": 0.6221369504928589,
      "learning_rate": 1.638700994757955e-06,
      "loss": 1.0704,
      "step": 740
    },
    {
      "epoch": 2.6636891469405986,
      "grad_norm": 0.49307432770729065,
      "learning_rate": 1.4757566071817607e-06,
      "loss": 1.1033,
      "step": 745
    },
    {
      "epoch": 2.6815542652970077,
      "grad_norm": 0.6794072389602661,
      "learning_rate": 1.3210972051628328e-06,
      "loss": 1.0211,
      "step": 750
    },
    {
      "epoch": 2.699419383653417,
      "grad_norm": 0.4681510329246521,
      "learning_rate": 1.1747772580794348e-06,
      "loss": 1.0676,
      "step": 755
    },
    {
      "epoch": 2.717284502009826,
      "grad_norm": 0.4018392264842987,
      "learning_rate": 1.0368482982435063e-06,
      "loss": 1.0683,
      "step": 760
    },
    {
      "epoch": 2.7351496203662347,
      "grad_norm": 0.6274233460426331,
      "learning_rate": 9.073589027514789e-07,
      "loss": 1.0712,
      "step": 765
    },
    {
      "epoch": 2.7530147387226442,
      "grad_norm": 0.4873206615447998,
      "learning_rate": 7.863546763760055e-07,
      "loss": 1.0447,
      "step": 770
    },
    {
      "epoch": 2.770879857079053,
      "grad_norm": 0.6785792708396912,
      "learning_rate": 6.738782355044049e-07,
      "loss": 1.0881,
      "step": 775
    },
    {
      "epoch": 2.7887449754354625,
      "grad_norm": 0.6314323544502258,
      "learning_rate": 5.699691931296463e-07,
      "loss": 1.0517,
      "step": 780
    },
    {
      "epoch": 2.806610093791871,
      "grad_norm": 0.5061508417129517,
      "learning_rate": 4.746641448990785e-07,
      "loss": 1.0891,
      "step": 785
    },
    {
      "epoch": 2.8244752121482803,
      "grad_norm": 0.38295778632164,
      "learning_rate": 3.879966562258364e-07,
      "loss": 1.1203,
      "step": 790
    },
    {
      "epoch": 2.8423403305046895,
      "grad_norm": 0.7023996710777283,
      "learning_rate": 3.0999725046745866e-07,
      "loss": 1.0624,
      "step": 795
    },
    {
      "epoch": 2.8602054488610986,
      "grad_norm": 0.41944465041160583,
      "learning_rate": 2.406933981758952e-07,
      "loss": 1.0512,
      "step": 800
    },
    {
      "epoch": 2.8780705672175078,
      "grad_norm": 0.6278116703033447,
      "learning_rate": 1.801095074226683e-07,
      "loss": 1.0506,
      "step": 805
    },
    {
      "epoch": 2.895935685573917,
      "grad_norm": 0.523618757724762,
      "learning_rate": 1.2826691520262114e-07,
      "loss": 1.084,
      "step": 810
    },
    {
      "epoch": 2.913800803930326,
      "grad_norm": 0.552522599697113,
      "learning_rate": 8.518387991924837e-08,
      "loss": 1.1039,
      "step": 815
    },
    {
      "epoch": 2.931665922286735,
      "grad_norm": 0.5657618045806885,
      "learning_rate": 5.0875574954287186e-08,
      "loss": 1.0787,
      "step": 820
    },
    {
      "epoch": 2.9495310406431443,
      "grad_norm": 0.4286334216594696,
      "learning_rate": 2.535408332381417e-08,
      "loss": 1.0904,
      "step": 825
    },
    {
      "epoch": 2.9673961589995534,
      "grad_norm": 0.5537897348403931,
      "learning_rate": 8.62839342274102e-09,
      "loss": 1.0673,
      "step": 830
    },
    {
      "epoch": 2.9852612773559626,
      "grad_norm": 0.4285455644130707,
      "learning_rate": 7.043958591912425e-10,
      "loss": 1.0993,
      "step": 835
    }
  ],
  "logging_steps": 5,
  "max_steps": 837,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3.422989809751163e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}