{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "global_step": 724,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 8.181818181818181e-06,
      "loss": 4.0277,
      "step": 3
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.6363636363636363e-05,
      "loss": 3.4357,
      "step": 6
    },
    {
      "epoch": 0.02,
      "learning_rate": 2.454545454545455e-05,
      "loss": 2.1961,
      "step": 9
    },
    {
      "epoch": 0.03,
      "learning_rate": 3.2727272727272725e-05,
      "loss": 1.9322,
      "step": 12
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.090909090909091e-05,
      "loss": 1.7744,
      "step": 15
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.90909090909091e-05,
      "loss": 1.5335,
      "step": 18
    },
    {
      "epoch": 0.06,
      "learning_rate": 5.7272727272727274e-05,
      "loss": 1.4407,
      "step": 21
    },
    {
      "epoch": 0.07,
      "learning_rate": 5.9998798361395565e-05,
      "loss": 1.5584,
      "step": 24
    },
    {
      "epoch": 0.07,
      "learning_rate": 5.9992490021938124e-05,
      "loss": 1.3994,
      "step": 27
    },
    {
      "epoch": 0.08,
      "learning_rate": 5.998077570751456e-05,
      "loss": 1.5376,
      "step": 30
    },
    {
      "epoch": 0.09,
      "learning_rate": 5.996365752956315e-05,
      "loss": 1.4137,
      "step": 33
    },
    {
      "epoch": 0.1,
      "learning_rate": 5.9941138573537655e-05,
      "loss": 1.2603,
      "step": 36
    },
    {
      "epoch": 0.11,
      "learning_rate": 5.991322289835123e-05,
      "loss": 1.247,
      "step": 39
    },
    {
      "epoch": 0.12,
      "learning_rate": 5.987991553564485e-05,
      "loss": 1.3289,
      "step": 42
    },
    {
      "epoch": 0.12,
      "learning_rate": 5.984122248888033e-05,
      "loss": 1.3926,
      "step": 45
    },
    {
      "epoch": 0.13,
      "learning_rate": 5.979715073225829e-05,
      "loss": 1.2806,
      "step": 48
    },
    {
      "epoch": 0.14,
      "learning_rate": 5.974770820946105e-05,
      "loss": 1.2114,
      "step": 51
    },
    {
      "epoch": 0.15,
      "learning_rate": 5.969290383222086e-05,
      "loss": 1.2045,
      "step": 54
    },
    {
      "epoch": 0.16,
      "learning_rate": 5.96327474787136e-05,
      "loss": 1.3095,
      "step": 57
    },
    {
      "epoch": 0.17,
      "learning_rate": 5.956724999177828e-05,
      "loss": 1.2824,
      "step": 60
    },
    {
      "epoch": 0.17,
      "learning_rate": 5.949642317696271e-05,
      "loss": 1.2205,
      "step": 63
    },
    {
      "epoch": 0.18,
      "learning_rate": 5.94202798003956e-05,
      "loss": 1.2169,
      "step": 66
    },
    {
      "epoch": 0.19,
      "learning_rate": 5.933883358648552e-05,
      "loss": 1.2232,
      "step": 69
    },
    {
      "epoch": 0.2,
      "learning_rate": 5.9252099215447206e-05,
      "loss": 1.2376,
      "step": 72
    },
    {
      "epoch": 0.21,
      "learning_rate": 5.916009232065549e-05,
      "loss": 1.1796,
      "step": 75
    },
    {
      "epoch": 0.22,
      "learning_rate": 5.906282948582746e-05,
      "loss": 1.2465,
      "step": 78
    },
    {
      "epoch": 0.22,
      "learning_rate": 5.8960328242033405e-05,
      "loss": 1.2715,
      "step": 81
    },
    {
      "epoch": 0.23,
      "learning_rate": 5.885260706453688e-05,
      "loss": 1.1044,
      "step": 84
    },
    {
      "epoch": 0.24,
      "learning_rate": 5.873968536946467e-05,
      "loss": 1.1895,
      "step": 87
    },
    {
      "epoch": 0.25,
      "learning_rate": 5.862158351030714e-05,
      "loss": 1.1314,
      "step": 90
    },
    {
      "epoch": 0.26,
      "learning_rate": 5.849832277424963e-05,
      "loss": 1.1302,
      "step": 93
    },
    {
      "epoch": 0.27,
      "learning_rate": 5.8369925378335574e-05,
      "loss": 1.1484,
      "step": 96
    },
    {
      "epoch": 0.27,
      "learning_rate": 5.8236414465462e-05,
      "loss": 1.2325,
      "step": 99
    },
    {
      "epoch": 0.28,
      "learning_rate": 5.809781410020814e-05,
      "loss": 1.1567,
      "step": 102
    },
    {
      "epoch": 0.29,
      "learning_rate": 5.795414926449796e-05,
      "loss": 1.1823,
      "step": 105
    },
    {
      "epoch": 0.3,
      "learning_rate": 5.780544585309725e-05,
      "loss": 1.0974,
      "step": 108
    },
    {
      "epoch": 0.31,
      "learning_rate": 5.7651730668946335e-05,
      "loss": 1.2043,
      "step": 111
    },
    {
      "epoch": 0.31,
      "learning_rate": 5.749303141832889e-05,
      "loss": 1.2751,
      "step": 114
    },
    {
      "epoch": 0.32,
      "learning_rate": 5.732937670587814e-05,
      "loss": 1.0916,
      "step": 117
    },
    {
      "epoch": 0.33,
      "learning_rate": 5.716079602942096e-05,
      "loss": 1.198,
      "step": 120
    },
    {
      "epoch": 0.34,
      "learning_rate": 5.698731977466112e-05,
      "loss": 1.2031,
      "step": 123
    },
    {
      "epoch": 0.35,
      "learning_rate": 5.680897920970237e-05,
      "loss": 1.2287,
      "step": 126
    },
    {
      "epoch": 0.36,
      "learning_rate": 5.662580647941262e-05,
      "loss": 1.0532,
      "step": 129
    },
    {
      "epoch": 0.36,
      "learning_rate": 5.643783459962997e-05,
      "loss": 1.1101,
      "step": 132
    },
    {
      "epoch": 0.37,
      "learning_rate": 5.6245097451211754e-05,
      "loss": 1.1635,
      "step": 135
    },
    {
      "epoch": 0.38,
      "learning_rate": 5.604762977392781e-05,
      "loss": 1.1305,
      "step": 138
    },
    {
      "epoch": 0.39,
      "learning_rate": 5.584546716019874e-05,
      "loss": 1.1696,
      "step": 141
    },
    {
      "epoch": 0.4,
      "learning_rate": 5.563864604868061e-05,
      "loss": 1.1294,
      "step": 144
    },
    {
      "epoch": 0.41,
      "learning_rate": 5.5427203717697134e-05,
      "loss": 1.1355,
      "step": 147
    },
    {
      "epoch": 0.41,
      "learning_rate": 5.521117827852039e-05,
      "loss": 1.0984,
      "step": 150
    },
    {
      "epoch": 0.42,
      "learning_rate": 5.499060866850155e-05,
      "loss": 1.1907,
      "step": 153
    },
    {
      "epoch": 0.43,
      "learning_rate": 5.4765534644052603e-05,
      "loss": 1.2739,
      "step": 156
    },
    {
      "epoch": 0.44,
      "learning_rate": 5.453599677348051e-05,
      "loss": 1.0823,
      "step": 159
    },
    {
      "epoch": 0.45,
      "learning_rate": 5.4302036429675e-05,
      "loss": 1.0524,
      "step": 162
    },
    {
      "epoch": 0.46,
      "learning_rate": 5.4063695782651316e-05,
      "loss": 1.0793,
      "step": 165
    },
    {
      "epoch": 0.46,
      "learning_rate": 5.3821017791949336e-05,
      "loss": 1.0743,
      "step": 168
    },
    {
      "epoch": 0.47,
      "learning_rate": 5.3574046198890354e-05,
      "loss": 1.0322,
      "step": 171
    },
    {
      "epoch": 0.48,
      "learning_rate": 5.3322825518692984e-05,
      "loss": 1.0071,
      "step": 174
    },
    {
      "epoch": 0.49,
      "learning_rate": 5.306740103244947e-05,
      "loss": 1.0521,
      "step": 177
    },
    {
      "epoch": 0.5,
      "learning_rate": 5.280781877896411e-05,
      "loss": 1.1142,
      "step": 180
    },
    {
      "epoch": 0.51,
      "learning_rate": 5.2544125546455004e-05,
      "loss": 1.1505,
      "step": 183
    },
    {
      "epoch": 0.51,
      "learning_rate": 5.22763688641207e-05,
      "loss": 1.1247,
      "step": 186
    },
    {
      "epoch": 0.52,
      "learning_rate": 5.200459699357337e-05,
      "loss": 1.0351,
      "step": 189
    },
    {
      "epoch": 0.53,
      "learning_rate": 5.1728858920139945e-05,
      "loss": 1.0335,
      "step": 192
    },
    {
      "epoch": 0.54,
      "learning_rate": 5.144920434403274e-05,
      "loss": 1.0042,
      "step": 195
    },
    {
      "epoch": 0.55,
      "learning_rate": 5.1165683671391305e-05,
      "loss": 1.1124,
      "step": 198
    },
    {
      "epoch": 0.56,
      "learning_rate": 5.087834800519701e-05,
      "loss": 1.0262,
      "step": 201
    },
    {
      "epoch": 0.56,
      "learning_rate": 5.0587249136062016e-05,
      "loss": 1.0767,
      "step": 204
    },
    {
      "epoch": 0.57,
      "learning_rate": 5.0292439532894285e-05,
      "loss": 1.0406,
      "step": 207
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.9993972333440435e-05,
      "loss": 1.0607,
      "step": 210
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.969190133470789e-05,
      "loss": 1.0906,
      "step": 213
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.9386280983268294e-05,
      "loss": 1.089,
      "step": 216
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.9077166365443846e-05,
      "loss": 1.0372,
      "step": 219
    },
    {
      "epoch": 0.61,
      "learning_rate": 4.876461319737833e-05,
      "loss": 1.0891,
      "step": 222
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.84486778149945e-05,
      "loss": 1.064,
      "step": 225
    },
    {
      "epoch": 0.63,
      "learning_rate": 4.812941716383998e-05,
      "loss": 1.0618,
      "step": 228
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.780688878882304e-05,
      "loss": 1.0255,
      "step": 231
    },
    {
      "epoch": 0.65,
      "learning_rate": 4.748115082384054e-05,
      "loss": 1.0841,
      "step": 234
    },
    {
      "epoch": 0.65,
      "learning_rate": 4.7152261981299595e-05,
      "loss": 1.0404,
      "step": 237
    },
    {
      "epoch": 0.66,
      "learning_rate": 4.682028154153497e-05,
      "loss": 1.106,
      "step": 240
    },
    {
      "epoch": 0.67,
      "learning_rate": 4.648526934212418e-05,
      "loss": 1.0418,
      "step": 243
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.614728576710212e-05,
      "loss": 1.0362,
      "step": 246
    },
    {
      "epoch": 0.69,
      "learning_rate": 4.5806391736077104e-05,
      "loss": 1.0673,
      "step": 249
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.5462648693250564e-05,
      "loss": 1.1141,
      "step": 252
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.5116118596342016e-05,
      "loss": 1.0397,
      "step": 255
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.476686390542155e-05,
      "loss": 1.0353,
      "step": 258
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.4414947571651744e-05,
      "loss": 1.124,
      "step": 261
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.406043302594111e-05,
      "loss": 0.9815,
      "step": 264
    },
    {
      "epoch": 0.74,
      "learning_rate": 4.370338416751103e-05,
      "loss": 1.0096,
      "step": 267
    },
    {
      "epoch": 0.75,
      "learning_rate": 4.3343865352378236e-05,
      "loss": 1.0525,
      "step": 270
    },
    {
      "epoch": 0.75,
      "learning_rate": 4.298194138175509e-05,
      "loss": 1.0293,
      "step": 273
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.261767749036945e-05,
      "loss": 1.067,
      "step": 276
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.2251139334706525e-05,
      "loss": 0.9939,
      "step": 279
    },
    {
      "epoch": 0.78,
      "learning_rate": 4.1882392981174704e-05,
      "loss": 1.1619,
      "step": 282
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.151150489419739e-05,
      "loss": 1.0914,
      "step": 285
    },
    {
      "epoch": 0.8,
      "learning_rate": 4.113854192423321e-05,
      "loss": 1.0996,
      "step": 288
    },
    {
      "epoch": 0.8,
      "learning_rate": 4.07635712957266e-05,
      "loss": 1.0173,
      "step": 291
    },
    {
      "epoch": 0.81,
      "learning_rate": 4.0386660594990984e-05,
      "loss": 0.9807,
      "step": 294
    },
    {
      "epoch": 0.82,
      "learning_rate": 4.0007877758026695e-05,
      "loss": 0.9586,
      "step": 297
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.962729105827595e-05,
      "loss": 1.0866,
      "step": 300
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.9244969094316925e-05,
      "loss": 1.0217,
      "step": 303
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.886098077749924e-05,
      "loss": 1.125,
      "step": 306
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.847539531952311e-05,
      "loss": 1.0447,
      "step": 309
    },
    {
      "epoch": 0.86,
      "learning_rate": 3.808828221996432e-05,
      "loss": 0.9338,
      "step": 312
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.7699711253747304e-05,
      "loss": 0.9848,
      "step": 315
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.730975245856866e-05,
      "loss": 1.035,
      "step": 318
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.691847612227321e-05,
      "loss": 1.046,
      "step": 321
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.652595277018502e-05,
      "loss": 0.9425,
      "step": 324
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.613225315239567e-05,
      "loss": 1.0042,
      "step": 327
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.573744823101187e-05,
      "loss": 1.061,
      "step": 330
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.5341609167365e-05,
      "loss": 0.9957,
      "step": 333
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.49448073091847e-05,
      "loss": 0.9082,
      "step": 336
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.4547114177738776e-05,
      "loss": 1.1182,
      "step": 339
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.4148601454941995e-05,
      "loss": 1.0315,
      "step": 342
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.3749340970435756e-05,
      "loss": 1.0723,
      "step": 345
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.3349404688641236e-05,
      "loss": 1.0899,
      "step": 348
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.2948864695788215e-05,
      "loss": 1.0622,
      "step": 351
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.2547793186921944e-05,
      "loss": 1.0553,
      "step": 354
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.2146262452890414e-05,
      "loss": 1.0151,
      "step": 357
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.174434486731428e-05,
      "loss": 0.9558,
      "step": 360
    },
    {
      "epoch": 1.0,
      "eval_gen_len": 256.0,
      "eval_loss": 1.2119685411453247,
      "eval_rouge1": 0.0,
      "eval_rouge2": 0.0,
      "eval_rougeL": 0.0,
      "eval_rougeLsum": 0.0,
      "eval_runtime": 769.7103,
      "eval_samples_per_second": 3.723,
      "eval_steps_per_second": 0.932,
      "step": 362
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.134211287354203e-05,
      "loss": 0.9757,
      "step": 363
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.093963897159241e-05,
      "loss": 0.741,
      "step": 366
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.053699570508673e-05,
      "loss": 0.7769,
      "step": 369
    },
    {
      "epoch": 1.03,
      "learning_rate": 3.0134255648173302e-05,
      "loss": 0.7333,
      "step": 372
    },
    {
      "epoch": 1.04,
      "learning_rate": 2.9731491392446363e-05,
      "loss": 0.794,
      "step": 375
    },
    {
      "epoch": 1.04,
      "learning_rate": 2.932877553386181e-05,
      "loss": 0.815,
      "step": 378
    },
    {
      "epoch": 1.05,
      "learning_rate": 2.8926180659652284e-05,
      "loss": 0.7966,
      "step": 381
    },
    {
      "epoch": 1.06,
      "learning_rate": 2.8523779335243655e-05,
      "loss": 0.7304,
      "step": 384
    },
    {
      "epoch": 1.07,
      "learning_rate": 2.8121644091175544e-05,
      "loss": 0.8445,
      "step": 387
    },
    {
      "epoch": 1.08,
      "learning_rate": 2.7719847410028125e-05,
      "loss": 0.7582,
      "step": 390
    },
    {
      "epoch": 1.09,
      "learning_rate": 2.731846171335753e-05,
      "loss": 0.8207,
      "step": 393
    },
    {
      "epoch": 1.09,
      "learning_rate": 2.691755934864228e-05,
      "loss": 0.7947,
      "step": 396
    },
    {
      "epoch": 1.1,
      "learning_rate": 2.651721257624309e-05,
      "loss": 0.7675,
      "step": 399
    },
    {
      "epoch": 1.11,
      "learning_rate": 2.6117493556378334e-05,
      "loss": 0.712,
      "step": 402
    },
    {
      "epoch": 1.12,
      "learning_rate": 2.5718474336117575e-05,
      "loss": 0.7401,
      "step": 405
    },
    {
      "epoch": 1.13,
      "learning_rate": 2.5320226836395467e-05,
      "loss": 0.8381,
      "step": 408
    },
    {
      "epoch": 1.14,
      "learning_rate": 2.4922822839048498e-05,
      "loss": 0.792,
      "step": 411
    },
    {
      "epoch": 1.14,
      "learning_rate": 2.4526333973876625e-05,
      "loss": 0.7647,
      "step": 414
    },
    {
      "epoch": 1.15,
      "learning_rate": 2.413083170573249e-05,
      "loss": 0.7284,
      "step": 417
    },
    {
      "epoch": 1.16,
      "learning_rate": 2.373638732164025e-05,
      "loss": 0.7642,
      "step": 420
    },
    {
      "epoch": 1.17,
      "learning_rate": 2.334307191794648e-05,
      "loss": 0.7966,
      "step": 423
    },
    {
      "epoch": 1.18,
      "learning_rate": 2.2950956387505536e-05,
      "loss": 0.7109,
      "step": 426
    },
    {
      "epoch": 1.19,
      "learning_rate": 2.256011140690145e-05,
      "loss": 0.7313,
      "step": 429
    },
    {
      "epoch": 1.19,
      "learning_rate": 2.217060742370889e-05,
      "loss": 0.748,
      "step": 432
    },
    {
      "epoch": 1.2,
      "learning_rate": 2.1782514643795427e-05,
      "loss": 0.8344,
      "step": 435
    },
    {
      "epoch": 1.21,
      "learning_rate": 2.13959030186673e-05,
      "loss": 0.7618,
      "step": 438
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.1010842232861043e-05,
      "loss": 0.7686,
      "step": 441
    },
    {
      "epoch": 1.23,
      "learning_rate": 2.0627401691383272e-05,
      "loss": 0.7588,
      "step": 444
    },
    {
      "epoch": 1.23,
      "learning_rate": 2.0245650507200847e-05,
      "loss": 0.7629,
      "step": 447
    },
    {
      "epoch": 1.24,
      "learning_rate": 1.986565748878359e-05,
      "loss": 0.7407,
      "step": 450
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.9487491127701992e-05,
      "loss": 0.8334,
      "step": 453
    },
    {
      "epoch": 1.26,
      "learning_rate": 1.9111219586282026e-05,
      "loss": 0.7257,
      "step": 456
    },
    {
      "epoch": 1.27,
      "learning_rate": 1.8736910685319207e-05,
      "loss": 0.7533,
      "step": 459
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.8364631891854358e-05,
      "loss": 0.7685,
      "step": 462
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.7994450307012992e-05,
      "loss": 0.8784,
      "step": 465
    },
    {
      "epoch": 1.29,
      "learning_rate": 1.762643265391079e-05,
      "loss": 0.773,
      "step": 468
    },
    {
      "epoch": 1.3,
      "learning_rate": 1.7260645265627054e-05,
      "loss": 0.7191,
      "step": 471
    },
    {
      "epoch": 1.31,
      "learning_rate": 1.689715407324862e-05,
      "loss": 0.7718,
      "step": 474
    },
    {
      "epoch": 1.32,
      "learning_rate": 1.6536024593986135e-05,
      "loss": 0.7144,
      "step": 477
    },
    {
      "epoch": 1.33,
      "learning_rate": 1.6177321919364952e-05,
      "loss": 0.7948,
      "step": 480
    },
    {
      "epoch": 1.33,
      "learning_rate": 1.5821110703492722e-05,
      "loss": 0.7185,
      "step": 483
    },
    {
      "epoch": 1.34,
      "learning_rate": 1.5467455151405927e-05,
      "loss": 0.7758,
      "step": 486
    },
    {
      "epoch": 1.35,
      "learning_rate": 1.511641900749724e-05,
      "loss": 0.8068,
      "step": 489
    },
    {
      "epoch": 1.36,
      "learning_rate": 1.4768065544025973e-05,
      "loss": 0.7279,
      "step": 492
    },
    {
      "epoch": 1.37,
      "learning_rate": 1.442245754971362e-05,
      "loss": 0.7425,
      "step": 495
    },
    {
      "epoch": 1.38,
      "learning_rate": 1.4079657318426557e-05,
      "loss": 0.7319,
      "step": 498
    },
    {
      "epoch": 1.38,
      "learning_rate": 1.3739726637947885e-05,
      "loss": 0.7406,
      "step": 501
    },
    {
      "epoch": 1.39,
      "learning_rate": 1.3402726778840592e-05,
      "loss": 0.7588,
      "step": 504
    },
    {
      "epoch": 1.4,
      "learning_rate": 1.3068718483403856e-05,
      "loss": 0.7623,
      "step": 507
    },
    {
      "epoch": 1.41,
      "learning_rate": 1.2737761954724591e-05,
      "loss": 0.7633,
      "step": 510
    },
    {
      "epoch": 1.42,
      "learning_rate": 1.240991684582619e-05,
      "loss": 0.7353,
      "step": 513
    },
    {
      "epoch": 1.43,
      "learning_rate": 1.2085242248916421e-05,
      "loss": 0.7322,
      "step": 516
    },
    {
      "epoch": 1.43,
      "learning_rate": 1.1763796684736342e-05,
      "loss": 0.7578,
      "step": 519
    },
    {
      "epoch": 1.44,
      "learning_rate": 1.1445638092012354e-05,
      "loss": 0.6986,
      "step": 522
    },
    {
      "epoch": 1.45,
      "learning_rate": 1.1130823817013007e-05,
      "loss": 0.7291,
      "step": 525
    },
    {
      "epoch": 1.46,
      "learning_rate": 1.0819410603212712e-05,
      "loss": 0.6874,
      "step": 528
    },
    {
      "epoch": 1.47,
      "learning_rate": 1.051145458106398e-05,
      "loss": 0.7529,
      "step": 531
    },
    {
      "epoch": 1.48,
      "learning_rate": 1.0207011257880357e-05,
      "loss": 0.7164,
      "step": 534
    },
    {
      "epoch": 1.48,
      "learning_rate": 9.906135507831401e-06,
      "loss": 0.7699,
      "step": 537
    },
    {
      "epoch": 1.49,
      "learning_rate": 9.608881562052026e-06,
      "loss": 0.7755,
      "step": 540
    },
    {
      "epoch": 1.5,
      "learning_rate": 9.315302998867629e-06,
      "loss": 0.7696,
      "step": 543
    },
    {
      "epoch": 1.51,
      "learning_rate": 9.02545273413686e-06,
      "loss": 0.7397,
      "step": 546
    },
    {
      "epoch": 1.52,
      "learning_rate": 8.739383011713901e-06,
      "loss": 0.7269,
      "step": 549
    },
    {
      "epoch": 1.52,
      "learning_rate": 8.457145394031782e-06,
      "loss": 0.7152,
      "step": 552
    },
    {
      "epoch": 1.53,
      "learning_rate": 8.178790752808538e-06,
      "loss": 0.7863,
      "step": 555
    },
    {
      "epoch": 1.54,
      "learning_rate": 7.904369259877887e-06,
      "loss": 0.7739,
      "step": 558
    },
    {
      "epoch": 1.55,
      "learning_rate": 7.633930378146047e-06,
      "loss": 0.7934,
      "step": 561
    },
    {
      "epoch": 1.56,
      "learning_rate": 7.367522852676296e-06,
      "loss": 0.7065,
      "step": 564
    },
    {
      "epoch": 1.57,
      "learning_rate": 7.1051947019030035e-06,
      "loss": 0.746,
      "step": 567
    },
    {
      "epoch": 1.57,
      "learning_rate": 6.846993208976548e-06,
      "loss": 0.7009,
      "step": 570
    },
    {
      "epoch": 1.58,
      "learning_rate": 6.592964913240825e-06,
      "loss": 0.7566,
      "step": 573
    },
    {
      "epoch": 1.59,
      "learning_rate": 6.3431556018447365e-06,
      "loss": 0.708,
      "step": 576
    },
    {
      "epoch": 1.6,
      "learning_rate": 6.097610301489424e-06,
      "loss": 0.7051,
      "step": 579
    },
    {
      "epoch": 1.61,
      "learning_rate": 5.856373270312341e-06,
      "loss": 0.7247,
      "step": 582
    },
    {
      "epoch": 1.62,
      "learning_rate": 5.619487989910071e-06,
      "loss": 0.7098,
      "step": 585
    },
    {
      "epoch": 1.62,
      "learning_rate": 5.38699715750099e-06,
      "loss": 0.7769,
      "step": 588
    },
    {
      "epoch": 1.63,
      "learning_rate": 5.158942678229295e-06,
      "loss": 0.7403,
      "step": 591
    },
    {
      "epoch": 1.64,
      "learning_rate": 4.935365657611912e-06,
      "loss": 0.7143,
      "step": 594
    },
    {
      "epoch": 1.65,
      "learning_rate": 4.716306394129405e-06,
      "loss": 0.7083,
      "step": 597
    },
    {
      "epoch": 1.66,
      "learning_rate": 4.501804371962442e-06,
      "loss": 0.7236,
      "step": 600
    },
    {
      "epoch": 1.67,
      "learning_rate": 4.291898253874972e-06,
      "loss": 0.7682,
      "step": 603
    },
    {
      "epoch": 1.67,
      "learning_rate": 4.086625874245497e-06,
      "loss": 0.7458,
      "step": 606
    },
    {
      "epoch": 1.68,
      "learning_rate": 3.886024232247624e-06,
      "loss": 0.8046,
      "step": 609
    },
    {
      "epoch": 1.69,
      "learning_rate": 3.690129485181201e-06,
      "loss": 0.7225,
      "step": 612
    },
    {
      "epoch": 1.7,
      "learning_rate": 3.4989769419551575e-06,
      "loss": 0.7171,
      "step": 615
    },
    {
      "epoch": 1.71,
      "learning_rate": 3.3126010567232644e-06,
      "loss": 0.7202,
      "step": 618
    },
    {
      "epoch": 1.72,
      "learning_rate": 3.1310354226739957e-06,
      "loss": 0.7513,
      "step": 621
    },
    {
      "epoch": 1.72,
      "learning_rate": 2.9543127659755197e-06,
      "loss": 0.7059,
      "step": 624
    },
    {
      "epoch": 1.73,
      "learning_rate": 2.7824649398770086e-06,
      "loss": 0.7157,
      "step": 627
    },
    {
      "epoch": 1.74,
      "learning_rate": 2.6155229189672757e-06,
      "loss": 0.7729,
      "step": 630
    },
    {
      "epoch": 1.75,
      "learning_rate": 2.4535167935917835e-06,
      "loss": 0.7598,
      "step": 633
    },
    {
      "epoch": 1.76,
      "learning_rate": 2.296475764429007e-06,
      "loss": 0.7582,
      "step": 636
    },
    {
      "epoch": 1.77,
      "learning_rate": 2.144428137227211e-06,
      "loss": 0.7523,
      "step": 639
    },
    {
      "epoch": 1.77,
      "learning_rate": 1.99740131770249e-06,
      "loss": 0.7845,
      "step": 642
    },
    {
      "epoch": 1.78,
      "learning_rate": 1.8554218065990246e-06,
      "loss": 0.7014,
      "step": 645
    },
    {
      "epoch": 1.79,
      "learning_rate": 1.7185151949125088e-06,
      "loss": 0.6826,
      "step": 648
    },
    {
      "epoch": 1.8,
      "learning_rate": 1.5867061592774878e-06,
      "loss": 0.6805,
      "step": 651
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.4600184575195486e-06,
      "loss": 0.7318,
      "step": 654
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.3384749243731109e-06,
      "loss": 0.7342,
      "step": 657
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.2220974673655938e-06,
      "loss": 0.7293,
      "step": 660
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.1109070628687068e-06,
      "loss": 0.6977,
      "step": 663
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.0049237523175813e-06,
      "loss": 0.75,
      "step": 666
    },
    {
      "epoch": 1.85,
      "learning_rate": 9.041666385984171e-07,
      "loss": 0.7534,
      "step": 669
    },
    {
      "epoch": 1.86,
      "learning_rate": 8.086538826052858e-07,
      "loss": 0.7758,
      "step": 672
    },
    {
      "epoch": 1.86,
      "learning_rate": 7.184026999667537e-07,
      "loss": 0.7209,
      "step": 675
    },
    {
      "epoch": 1.87,
      "learning_rate": 6.33429357942843e-07,
      "loss": 0.7222,
      "step": 678
    },
    {
      "epoch": 1.88,
      "learning_rate": 5.537491724929644e-07,
      "loss": 0.7198,
      "step": 681
    },
    {
      "epoch": 1.89,
      "learning_rate": 4.793765055152955e-07,
      "loss": 0.7241,
      "step": 684
    },
    {
      "epoch": 1.9,
      "learning_rate": 4.103247622581441e-07,
      "loss": 0.6799,
      "step": 687
    },
    {
      "epoch": 1.91,
      "learning_rate": 3.466063889036986e-07,
      "loss": 0.7607,
      "step": 690
    },
    {
      "epoch": 1.91,
      "learning_rate": 2.8823287032470835e-07,
      "loss": 0.7895,
      "step": 693
    },
    {
      "epoch": 1.92,
      "learning_rate": 2.352147280143835e-07,
      "loss": 0.7417,
      "step": 696
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.8756151818996148e-07,
      "loss": 0.7425,
      "step": 699
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.452818300702452e-07,
      "loss": 0.6636,
      "step": 702
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.0838328432745792e-07,
      "loss": 0.7328,
      "step": 705
    },
    {
      "epoch": 1.96,
      "learning_rate": 7.687253171365205e-08,
      "loss": 0.7263,
      "step": 708
    },
    {
      "epoch": 1.96,
      "learning_rate": 5.0755251861954734e-08,
      "loss": 0.6789,
      "step": 711
    },
    {
      "epoch": 1.97,
      "learning_rate": 3.0036152262833404e-08,
      "loss": 0.7692,
      "step": 714
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.4718967415617845e-08,
      "loss": 0.729,
      "step": 717
    },
    {
      "epoch": 1.99,
      "learning_rate": 4.806458155358629e-09,
      "loss": 0.7976,
      "step": 720
    },
    {
      "epoch": 2.0,
      "learning_rate": 3.004111552218358e-10,
      "loss": 0.757,
      "step": 723
    },
    {
      "epoch": 2.0,
      "eval_gen_len": 256.0,
      "eval_loss": 1.1889821290969849,
      "eval_rouge1": 0.0,
      "eval_rouge2": 0.0,
      "eval_rougeL": 0.0,
      "eval_rougeLsum": 0.0,
      "eval_runtime": 773.1817,
      "eval_samples_per_second": 3.707,
      "eval_steps_per_second": 0.927,
      "step": 724
    },
    {
      "epoch": 2.0,
      "step": 724,
      "total_flos": 1.004106246258688e+17,
      "train_loss": 0.9686276566916407,
      "train_runtime": 5563.8478,
      "train_samples_per_second": 8.328,
      "train_steps_per_second": 0.13
    }
  ],
  "max_steps": 724,
  "num_train_epochs": 2,
  "total_flos": 1.004106246258688e+17,
  "trial_name": null,
  "trial_params": null
}