|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.0017097670442403, |
|
"eval_steps": 293, |
|
"global_step": 1172, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0008548835221201111, |
|
"grad_norm": 14.855524063110352, |
|
"learning_rate": 8.547008547008549e-08, |
|
"loss": 1.7846, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0008548835221201111, |
|
"eval_loss": 1.7640409469604492, |
|
"eval_runtime": 533.0927, |
|
"eval_samples_per_second": 7.421, |
|
"eval_steps_per_second": 3.71, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0017097670442402222, |
|
"grad_norm": 11.91721248626709, |
|
"learning_rate": 1.7094017094017097e-07, |
|
"loss": 1.6794, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0025646505663603335, |
|
"grad_norm": 15.487826347351074, |
|
"learning_rate": 2.564102564102564e-07, |
|
"loss": 1.7593, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0034195340884804444, |
|
"grad_norm": 11.996491432189941, |
|
"learning_rate": 3.4188034188034194e-07, |
|
"loss": 1.7245, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.004274417610600555, |
|
"grad_norm": 30.804367065429688, |
|
"learning_rate": 4.273504273504274e-07, |
|
"loss": 1.771, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.005129301132720667, |
|
"grad_norm": 19.530527114868164, |
|
"learning_rate": 5.128205128205128e-07, |
|
"loss": 1.7888, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.005984184654840778, |
|
"grad_norm": 31.70332145690918, |
|
"learning_rate": 5.982905982905984e-07, |
|
"loss": 1.5394, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.006839068176960889, |
|
"grad_norm": 11.850404739379883, |
|
"learning_rate": 6.837606837606839e-07, |
|
"loss": 1.7094, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.007693951699081001, |
|
"grad_norm": 24.769758224487305, |
|
"learning_rate": 7.692307692307694e-07, |
|
"loss": 1.5411, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.00854883522120111, |
|
"grad_norm": 15.908349990844727, |
|
"learning_rate": 8.547008547008548e-07, |
|
"loss": 1.8037, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.009403718743321222, |
|
"grad_norm": 6.722436428070068, |
|
"learning_rate": 9.401709401709402e-07, |
|
"loss": 1.7905, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.010258602265441334, |
|
"grad_norm": 4.48321008682251, |
|
"learning_rate": 1.0256410256410257e-06, |
|
"loss": 1.6772, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.011113485787561444, |
|
"grad_norm": 4.574745178222656, |
|
"learning_rate": 1.111111111111111e-06, |
|
"loss": 1.8258, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.011968369309681556, |
|
"grad_norm": 4.0335516929626465, |
|
"learning_rate": 1.1965811965811968e-06, |
|
"loss": 1.8022, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.012823252831801668, |
|
"grad_norm": 39.62892532348633, |
|
"learning_rate": 1.282051282051282e-06, |
|
"loss": 1.643, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.013678136353921778, |
|
"grad_norm": 7.48787784576416, |
|
"learning_rate": 1.3675213675213678e-06, |
|
"loss": 1.7202, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.01453301987604189, |
|
"grad_norm": 4.110783576965332, |
|
"learning_rate": 1.4529914529914531e-06, |
|
"loss": 1.7974, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.015387903398162001, |
|
"grad_norm": 3.7065269947052, |
|
"learning_rate": 1.5384615384615387e-06, |
|
"loss": 1.765, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.016242786920282113, |
|
"grad_norm": 6.430235862731934, |
|
"learning_rate": 1.623931623931624e-06, |
|
"loss": 1.5331, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.01709767044240222, |
|
"grad_norm": 21.641319274902344, |
|
"learning_rate": 1.7094017094017097e-06, |
|
"loss": 1.8761, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.017952553964522333, |
|
"grad_norm": 3.3955209255218506, |
|
"learning_rate": 1.794871794871795e-06, |
|
"loss": 1.7766, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.018807437486642445, |
|
"grad_norm": 3.9773988723754883, |
|
"learning_rate": 1.8803418803418804e-06, |
|
"loss": 1.7743, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.019662321008762557, |
|
"grad_norm": 5.062834739685059, |
|
"learning_rate": 1.9658119658119658e-06, |
|
"loss": 1.769, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.02051720453088267, |
|
"grad_norm": 4.090948104858398, |
|
"learning_rate": 2.0512820512820513e-06, |
|
"loss": 1.6356, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.021372088053002777, |
|
"grad_norm": 4.175969123840332, |
|
"learning_rate": 2.136752136752137e-06, |
|
"loss": 1.4285, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.02222697157512289, |
|
"grad_norm": 2.989772319793701, |
|
"learning_rate": 2.222222222222222e-06, |
|
"loss": 1.5791, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.023081855097243, |
|
"grad_norm": 2.7935218811035156, |
|
"learning_rate": 2.307692307692308e-06, |
|
"loss": 1.7335, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.023936738619363112, |
|
"grad_norm": 2.8618392944335938, |
|
"learning_rate": 2.3931623931623937e-06, |
|
"loss": 1.8338, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.024791622141483224, |
|
"grad_norm": 2.662318229675293, |
|
"learning_rate": 2.478632478632479e-06, |
|
"loss": 1.7555, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.025646505663603335, |
|
"grad_norm": 3.7374916076660156, |
|
"learning_rate": 2.564102564102564e-06, |
|
"loss": 1.4183, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.026501389185723444, |
|
"grad_norm": 4.106424331665039, |
|
"learning_rate": 2.64957264957265e-06, |
|
"loss": 1.7026, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.027356272707843556, |
|
"grad_norm": 2.8097198009490967, |
|
"learning_rate": 2.7350427350427355e-06, |
|
"loss": 1.5934, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.028211156229963667, |
|
"grad_norm": 2.798865795135498, |
|
"learning_rate": 2.8205128205128207e-06, |
|
"loss": 1.8201, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.02906603975208378, |
|
"grad_norm": 2.733314037322998, |
|
"learning_rate": 2.9059829059829063e-06, |
|
"loss": 1.7498, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.02992092327420389, |
|
"grad_norm": 3.037261962890625, |
|
"learning_rate": 2.9914529914529914e-06, |
|
"loss": 1.6768, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.030775806796324003, |
|
"grad_norm": 2.6975414752960205, |
|
"learning_rate": 3.0769230769230774e-06, |
|
"loss": 1.7676, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.031630690318444114, |
|
"grad_norm": 3.185695171356201, |
|
"learning_rate": 3.1623931623931626e-06, |
|
"loss": 1.719, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.032485573840564226, |
|
"grad_norm": 2.5215115547180176, |
|
"learning_rate": 3.247863247863248e-06, |
|
"loss": 1.7738, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.03334045736268433, |
|
"grad_norm": 2.78493332862854, |
|
"learning_rate": 3.3333333333333333e-06, |
|
"loss": 1.6016, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.03419534088480444, |
|
"grad_norm": 2.629739999771118, |
|
"learning_rate": 3.4188034188034193e-06, |
|
"loss": 1.6574, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035050224406924554, |
|
"grad_norm": 3.270066976547241, |
|
"learning_rate": 3.5042735042735045e-06, |
|
"loss": 1.7667, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.035905107929044666, |
|
"grad_norm": 2.714362859725952, |
|
"learning_rate": 3.58974358974359e-06, |
|
"loss": 1.5054, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.03675999145116478, |
|
"grad_norm": 3.978623151779175, |
|
"learning_rate": 3.6752136752136756e-06, |
|
"loss": 1.6212, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.03761487497328489, |
|
"grad_norm": 2.5128977298736572, |
|
"learning_rate": 3.760683760683761e-06, |
|
"loss": 1.4326, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.038469758495405, |
|
"grad_norm": 2.9654622077941895, |
|
"learning_rate": 3.846153846153847e-06, |
|
"loss": 1.7595, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.03932464201752511, |
|
"grad_norm": 3.1219539642333984, |
|
"learning_rate": 3.9316239316239315e-06, |
|
"loss": 1.9805, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.040179525539645225, |
|
"grad_norm": 2.5867958068847656, |
|
"learning_rate": 4.017094017094018e-06, |
|
"loss": 1.8051, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.04103440906176534, |
|
"grad_norm": 2.4655587673187256, |
|
"learning_rate": 4.102564102564103e-06, |
|
"loss": 1.6707, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.04188929258388545, |
|
"grad_norm": 3.301828384399414, |
|
"learning_rate": 4.188034188034188e-06, |
|
"loss": 1.4957, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.04274417610600555, |
|
"grad_norm": 3.0518338680267334, |
|
"learning_rate": 4.273504273504274e-06, |
|
"loss": 1.623, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.043599059628125665, |
|
"grad_norm": 2.4164023399353027, |
|
"learning_rate": 4.358974358974359e-06, |
|
"loss": 1.694, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.04445394315024578, |
|
"grad_norm": 2.7269129753112793, |
|
"learning_rate": 4.444444444444444e-06, |
|
"loss": 1.571, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.04530882667236589, |
|
"grad_norm": 2.5209498405456543, |
|
"learning_rate": 4.5299145299145306e-06, |
|
"loss": 1.7713, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.046163710194486, |
|
"grad_norm": 2.6886680126190186, |
|
"learning_rate": 4.615384615384616e-06, |
|
"loss": 1.6275, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.04701859371660611, |
|
"grad_norm": 2.4499754905700684, |
|
"learning_rate": 4.700854700854701e-06, |
|
"loss": 1.6943, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.047873477238726224, |
|
"grad_norm": 2.6294198036193848, |
|
"learning_rate": 4.786324786324787e-06, |
|
"loss": 1.6578, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.048728360760846336, |
|
"grad_norm": 2.2285401821136475, |
|
"learning_rate": 4.871794871794872e-06, |
|
"loss": 1.5894, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.04958324428296645, |
|
"grad_norm": 2.875617027282715, |
|
"learning_rate": 4.957264957264958e-06, |
|
"loss": 1.57, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.05043812780508656, |
|
"grad_norm": 6.192116737365723, |
|
"learning_rate": 5.042735042735043e-06, |
|
"loss": 1.5363, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.05129301132720667, |
|
"grad_norm": 5.477021217346191, |
|
"learning_rate": 5.128205128205128e-06, |
|
"loss": 1.5987, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.052147894849326776, |
|
"grad_norm": 3.1479716300964355, |
|
"learning_rate": 5.213675213675214e-06, |
|
"loss": 1.578, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.05300277837144689, |
|
"grad_norm": 2.7842800617218018, |
|
"learning_rate": 5.2991452991453e-06, |
|
"loss": 1.6855, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.053857661893567, |
|
"grad_norm": 2.487064838409424, |
|
"learning_rate": 5.384615384615385e-06, |
|
"loss": 1.7421, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.05471254541568711, |
|
"grad_norm": 2.895413875579834, |
|
"learning_rate": 5.470085470085471e-06, |
|
"loss": 1.4335, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.05556742893780722, |
|
"grad_norm": 2.683014154434204, |
|
"learning_rate": 5.555555555555557e-06, |
|
"loss": 1.5071, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.056422312459927335, |
|
"grad_norm": 2.4958949089050293, |
|
"learning_rate": 5.641025641025641e-06, |
|
"loss": 1.594, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.057277195982047446, |
|
"grad_norm": 2.551091432571411, |
|
"learning_rate": 5.726495726495727e-06, |
|
"loss": 1.5684, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.05813207950416756, |
|
"grad_norm": 2.249833106994629, |
|
"learning_rate": 5.8119658119658126e-06, |
|
"loss": 1.5948, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.05898696302628767, |
|
"grad_norm": 2.3720970153808594, |
|
"learning_rate": 5.897435897435898e-06, |
|
"loss": 1.7099, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.05984184654840778, |
|
"grad_norm": 2.520702600479126, |
|
"learning_rate": 5.982905982905983e-06, |
|
"loss": 1.4667, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.06069673007052789, |
|
"grad_norm": 2.6141464710235596, |
|
"learning_rate": 6.0683760683760684e-06, |
|
"loss": 1.4677, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.061551613592648005, |
|
"grad_norm": 2.9519152641296387, |
|
"learning_rate": 6.153846153846155e-06, |
|
"loss": 1.6429, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.06240649711476811, |
|
"grad_norm": 3.462202787399292, |
|
"learning_rate": 6.23931623931624e-06, |
|
"loss": 1.495, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.06326138063688823, |
|
"grad_norm": 2.5748167037963867, |
|
"learning_rate": 6.324786324786325e-06, |
|
"loss": 1.7717, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.06411626415900834, |
|
"grad_norm": 3.142380475997925, |
|
"learning_rate": 6.410256410256412e-06, |
|
"loss": 1.6701, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.06497114768112845, |
|
"grad_norm": 2.6841933727264404, |
|
"learning_rate": 6.495726495726496e-06, |
|
"loss": 1.722, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.06582603120324856, |
|
"grad_norm": 2.4726481437683105, |
|
"learning_rate": 6.581196581196582e-06, |
|
"loss": 1.7228, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.06668091472536866, |
|
"grad_norm": 2.7335448265075684, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 1.5682, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.06753579824748877, |
|
"grad_norm": 2.577003002166748, |
|
"learning_rate": 6.752136752136753e-06, |
|
"loss": 1.7154, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.06839068176960889, |
|
"grad_norm": 2.7609519958496094, |
|
"learning_rate": 6.837606837606839e-06, |
|
"loss": 1.4844, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.069245565291729, |
|
"grad_norm": 3.498229503631592, |
|
"learning_rate": 6.923076923076923e-06, |
|
"loss": 1.4114, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.07010044881384911, |
|
"grad_norm": 2.5216593742370605, |
|
"learning_rate": 7.008547008547009e-06, |
|
"loss": 1.5511, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.07095533233596922, |
|
"grad_norm": 2.2648119926452637, |
|
"learning_rate": 7.0940170940170945e-06, |
|
"loss": 1.8513, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.07181021585808933, |
|
"grad_norm": 2.869992256164551, |
|
"learning_rate": 7.17948717948718e-06, |
|
"loss": 1.4821, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.07266509938020944, |
|
"grad_norm": 4.656053066253662, |
|
"learning_rate": 7.264957264957266e-06, |
|
"loss": 1.621, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.07351998290232956, |
|
"grad_norm": 2.4541072845458984, |
|
"learning_rate": 7.350427350427351e-06, |
|
"loss": 1.5993, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.07437486642444967, |
|
"grad_norm": 2.7510995864868164, |
|
"learning_rate": 7.435897435897437e-06, |
|
"loss": 1.649, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.07522974994656978, |
|
"grad_norm": 2.552407741546631, |
|
"learning_rate": 7.521367521367522e-06, |
|
"loss": 1.605, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.07608463346868989, |
|
"grad_norm": 2.482236862182617, |
|
"learning_rate": 7.606837606837607e-06, |
|
"loss": 1.4649, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.07693951699081, |
|
"grad_norm": 2.5153439044952393, |
|
"learning_rate": 7.692307692307694e-06, |
|
"loss": 1.6409, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.07779440051293011, |
|
"grad_norm": 2.774604082107544, |
|
"learning_rate": 7.77777777777778e-06, |
|
"loss": 1.771, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.07864928403505023, |
|
"grad_norm": 2.6241939067840576, |
|
"learning_rate": 7.863247863247863e-06, |
|
"loss": 1.5099, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.07950416755717034, |
|
"grad_norm": 2.3177523612976074, |
|
"learning_rate": 7.948717948717949e-06, |
|
"loss": 1.6473, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.08035905107929045, |
|
"grad_norm": 3.034937620162964, |
|
"learning_rate": 8.034188034188036e-06, |
|
"loss": 1.7484, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.08121393460141056, |
|
"grad_norm": 2.1960113048553467, |
|
"learning_rate": 8.11965811965812e-06, |
|
"loss": 1.7495, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.08206881812353067, |
|
"grad_norm": 2.538083791732788, |
|
"learning_rate": 8.205128205128205e-06, |
|
"loss": 1.6463, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.08292370164565079, |
|
"grad_norm": 2.5417184829711914, |
|
"learning_rate": 8.290598290598293e-06, |
|
"loss": 1.7242, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.0837785851677709, |
|
"grad_norm": 2.7011282444000244, |
|
"learning_rate": 8.376068376068377e-06, |
|
"loss": 1.4753, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.08463346868989101, |
|
"grad_norm": 2.4932761192321777, |
|
"learning_rate": 8.461538461538462e-06, |
|
"loss": 1.6388, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.0854883522120111, |
|
"grad_norm": 3.0729198455810547, |
|
"learning_rate": 8.547008547008548e-06, |
|
"loss": 1.6646, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.08634323573413122, |
|
"grad_norm": 3.0038278102874756, |
|
"learning_rate": 8.632478632478633e-06, |
|
"loss": 1.5641, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.08719811925625133, |
|
"grad_norm": 2.3090646266937256, |
|
"learning_rate": 8.717948717948719e-06, |
|
"loss": 1.7689, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.08805300277837144, |
|
"grad_norm": 2.393385171890259, |
|
"learning_rate": 8.803418803418804e-06, |
|
"loss": 1.667, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.08890788630049155, |
|
"grad_norm": 2.366182565689087, |
|
"learning_rate": 8.888888888888888e-06, |
|
"loss": 1.6058, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.08976276982261167, |
|
"grad_norm": 2.586094617843628, |
|
"learning_rate": 8.974358974358976e-06, |
|
"loss": 1.4245, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.09061765334473178, |
|
"grad_norm": 2.889852285385132, |
|
"learning_rate": 9.059829059829061e-06, |
|
"loss": 1.4404, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.09147253686685189, |
|
"grad_norm": 2.264047622680664, |
|
"learning_rate": 9.145299145299145e-06, |
|
"loss": 1.6835, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.092327420388972, |
|
"grad_norm": 2.287929058074951, |
|
"learning_rate": 9.230769230769232e-06, |
|
"loss": 1.659, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.09318230391109211, |
|
"grad_norm": 2.4839229583740234, |
|
"learning_rate": 9.316239316239318e-06, |
|
"loss": 1.6764, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.09403718743321222, |
|
"grad_norm": 2.53434419631958, |
|
"learning_rate": 9.401709401709402e-06, |
|
"loss": 1.5596, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.09489207095533234, |
|
"grad_norm": 2.017806053161621, |
|
"learning_rate": 9.487179487179487e-06, |
|
"loss": 1.5227, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.09574695447745245, |
|
"grad_norm": 2.4538073539733887, |
|
"learning_rate": 9.572649572649575e-06, |
|
"loss": 1.6464, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.09660183799957256, |
|
"grad_norm": 2.4926862716674805, |
|
"learning_rate": 9.658119658119659e-06, |
|
"loss": 1.6442, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.09745672152169267, |
|
"grad_norm": 3.169382095336914, |
|
"learning_rate": 9.743589743589744e-06, |
|
"loss": 1.3884, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.09831160504381278, |
|
"grad_norm": 3.1526899337768555, |
|
"learning_rate": 9.82905982905983e-06, |
|
"loss": 1.802, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.0991664885659329, |
|
"grad_norm": 2.323765754699707, |
|
"learning_rate": 9.914529914529915e-06, |
|
"loss": 1.4209, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.100021372088053, |
|
"grad_norm": 2.223529815673828, |
|
"learning_rate": 1e-05, |
|
"loss": 1.6514, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.10087625561017312, |
|
"grad_norm": 2.611987590789795, |
|
"learning_rate": 9.999994998012932e-06, |
|
"loss": 1.4056, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.10173113913229323, |
|
"grad_norm": 2.104203939437866, |
|
"learning_rate": 9.999979992061739e-06, |
|
"loss": 1.5361, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.10258602265441334, |
|
"grad_norm": 2.0273749828338623, |
|
"learning_rate": 9.999954982176439e-06, |
|
"loss": 1.7908, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10344090617653345, |
|
"grad_norm": 2.2206482887268066, |
|
"learning_rate": 9.999919968407077e-06, |
|
"loss": 1.5552, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.10429578969865355, |
|
"grad_norm": 2.1885461807250977, |
|
"learning_rate": 9.999874950823707e-06, |
|
"loss": 1.8227, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.10515067322077366, |
|
"grad_norm": 2.532996892929077, |
|
"learning_rate": 9.999819929516398e-06, |
|
"loss": 1.7544, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.10600555674289378, |
|
"grad_norm": 2.10715913772583, |
|
"learning_rate": 9.999754904595237e-06, |
|
"loss": 1.6595, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.10686044026501389, |
|
"grad_norm": 2.810800790786743, |
|
"learning_rate": 9.99967987619033e-06, |
|
"loss": 1.5268, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.107715323787134, |
|
"grad_norm": 2.2151639461517334, |
|
"learning_rate": 9.999594844451787e-06, |
|
"loss": 1.6595, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.10857020730925411, |
|
"grad_norm": 3.241767168045044, |
|
"learning_rate": 9.999499809549742e-06, |
|
"loss": 1.5238, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.10942509083137422, |
|
"grad_norm": 2.3466122150421143, |
|
"learning_rate": 9.99939477167434e-06, |
|
"loss": 1.7066, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.11027997435349433, |
|
"grad_norm": 2.006361246109009, |
|
"learning_rate": 9.999279731035741e-06, |
|
"loss": 1.7432, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.11113485787561445, |
|
"grad_norm": 2.3450944423675537, |
|
"learning_rate": 9.999154687864118e-06, |
|
"loss": 1.5009, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.11198974139773456, |
|
"grad_norm": 2.390202522277832, |
|
"learning_rate": 9.999019642409654e-06, |
|
"loss": 1.6976, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.11284462491985467, |
|
"grad_norm": 3.701263189315796, |
|
"learning_rate": 9.99887459494255e-06, |
|
"loss": 1.6372, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.11369950844197478, |
|
"grad_norm": 2.3453369140625, |
|
"learning_rate": 9.998719545753015e-06, |
|
"loss": 1.6543, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.11455439196409489, |
|
"grad_norm": 2.018839120864868, |
|
"learning_rate": 9.998554495151272e-06, |
|
"loss": 1.7406, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.115409275486215, |
|
"grad_norm": 2.201392889022827, |
|
"learning_rate": 9.998379443467552e-06, |
|
"loss": 1.6153, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.11626415900833512, |
|
"grad_norm": 2.081862449645996, |
|
"learning_rate": 9.998194391052097e-06, |
|
"loss": 1.6368, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.11711904253045523, |
|
"grad_norm": 2.114025592803955, |
|
"learning_rate": 9.99799933827516e-06, |
|
"loss": 1.5724, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.11797392605257534, |
|
"grad_norm": 2.1307966709136963, |
|
"learning_rate": 9.997794285527002e-06, |
|
"loss": 1.6064, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.11882880957469545, |
|
"grad_norm": 2.265519142150879, |
|
"learning_rate": 9.99757923321789e-06, |
|
"loss": 1.6883, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.11968369309681556, |
|
"grad_norm": 2.3927133083343506, |
|
"learning_rate": 9.997354181778101e-06, |
|
"loss": 1.4476, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.12053857661893567, |
|
"grad_norm": 2.2618932723999023, |
|
"learning_rate": 9.997119131657915e-06, |
|
"loss": 1.3637, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.12139346014105579, |
|
"grad_norm": 2.382002353668213, |
|
"learning_rate": 9.996874083327621e-06, |
|
"loss": 1.4367, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.1222483436631759, |
|
"grad_norm": 2.655059337615967, |
|
"learning_rate": 9.99661903727751e-06, |
|
"loss": 1.7059, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.12310322718529601, |
|
"grad_norm": 1.9572539329528809, |
|
"learning_rate": 9.996353994017876e-06, |
|
"loss": 1.697, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.12395811070741611, |
|
"grad_norm": 2.7208900451660156, |
|
"learning_rate": 9.996078954079017e-06, |
|
"loss": 1.6188, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.12481299422953622, |
|
"grad_norm": 2.2011163234710693, |
|
"learning_rate": 9.995793918011231e-06, |
|
"loss": 1.4557, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.12566787775165633, |
|
"grad_norm": 2.1749141216278076, |
|
"learning_rate": 9.995498886384816e-06, |
|
"loss": 1.701, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.12652276127377646, |
|
"grad_norm": 2.7541191577911377, |
|
"learning_rate": 9.995193859790071e-06, |
|
"loss": 1.7045, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.12737764479589656, |
|
"grad_norm": 2.489243984222412, |
|
"learning_rate": 9.994878838837292e-06, |
|
"loss": 1.5494, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.12823252831801668, |
|
"grad_norm": 2.1933681964874268, |
|
"learning_rate": 9.99455382415677e-06, |
|
"loss": 1.6329, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.12908741184013678, |
|
"grad_norm": 2.036159038543701, |
|
"learning_rate": 9.994218816398794e-06, |
|
"loss": 1.6208, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.1299422953622569, |
|
"grad_norm": 2.751338005065918, |
|
"learning_rate": 9.993873816233642e-06, |
|
"loss": 1.593, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.130797178884377, |
|
"grad_norm": 3.4958271980285645, |
|
"learning_rate": 9.993518824351593e-06, |
|
"loss": 1.4269, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.13165206240649713, |
|
"grad_norm": 3.862670421600342, |
|
"learning_rate": 9.993153841462912e-06, |
|
"loss": 1.5538, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.13250694592861723, |
|
"grad_norm": 3.251422166824341, |
|
"learning_rate": 9.992778868297855e-06, |
|
"loss": 1.5283, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.13336182945073732, |
|
"grad_norm": 2.1368870735168457, |
|
"learning_rate": 9.992393905606665e-06, |
|
"loss": 1.7056, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.13421671297285745, |
|
"grad_norm": 2.3016269207000732, |
|
"learning_rate": 9.991998954159574e-06, |
|
"loss": 1.6743, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.13507159649497755, |
|
"grad_norm": 2.4640464782714844, |
|
"learning_rate": 9.991594014746797e-06, |
|
"loss": 1.6553, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.13592648001709767, |
|
"grad_norm": 2.2740697860717773, |
|
"learning_rate": 9.991179088178538e-06, |
|
"loss": 1.9916, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.13678136353921777, |
|
"grad_norm": 2.08636736869812, |
|
"learning_rate": 9.99075417528498e-06, |
|
"loss": 1.8245, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1376362470613379, |
|
"grad_norm": 2.2464828491210938, |
|
"learning_rate": 9.990319276916282e-06, |
|
"loss": 1.4567, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.138491130583458, |
|
"grad_norm": 1.9842820167541504, |
|
"learning_rate": 9.98987439394259e-06, |
|
"loss": 1.7119, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.13934601410557812, |
|
"grad_norm": 2.2388288974761963, |
|
"learning_rate": 9.989419527254025e-06, |
|
"loss": 1.5563, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.14020089762769822, |
|
"grad_norm": 2.1352884769439697, |
|
"learning_rate": 9.98895467776068e-06, |
|
"loss": 1.7504, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.14105578114981834, |
|
"grad_norm": 2.172581672668457, |
|
"learning_rate": 9.988479846392621e-06, |
|
"loss": 1.6671, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.14191066467193844, |
|
"grad_norm": 2.1291165351867676, |
|
"learning_rate": 9.987995034099891e-06, |
|
"loss": 1.6297, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.14276554819405857, |
|
"grad_norm": 2.3043532371520996, |
|
"learning_rate": 9.987500241852501e-06, |
|
"loss": 1.4487, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.14362043171617866, |
|
"grad_norm": 2.1369128227233887, |
|
"learning_rate": 9.986995470640426e-06, |
|
"loss": 1.536, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.1444753152382988, |
|
"grad_norm": 2.52339768409729, |
|
"learning_rate": 9.98648072147361e-06, |
|
"loss": 1.651, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.1453301987604189, |
|
"grad_norm": 2.230353832244873, |
|
"learning_rate": 9.985955995381961e-06, |
|
"loss": 1.6593, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.14618508228253901, |
|
"grad_norm": 1.9805208444595337, |
|
"learning_rate": 9.985421293415352e-06, |
|
"loss": 1.6041, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.1470399658046591, |
|
"grad_norm": 2.5390121936798096, |
|
"learning_rate": 9.984876616643605e-06, |
|
"loss": 1.6453, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.14789484932677924, |
|
"grad_norm": 2.267672538757324, |
|
"learning_rate": 9.984321966156511e-06, |
|
"loss": 1.4635, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.14874973284889934, |
|
"grad_norm": 2.2240028381347656, |
|
"learning_rate": 9.98375734306381e-06, |
|
"loss": 1.5787, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.14960461637101946, |
|
"grad_norm": 2.0597617626190186, |
|
"learning_rate": 9.9831827484952e-06, |
|
"loss": 1.8143, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.15045949989313956, |
|
"grad_norm": 2.1702632904052734, |
|
"learning_rate": 9.98259818360032e-06, |
|
"loss": 1.3521, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.15131438341525968, |
|
"grad_norm": 2.1068551540374756, |
|
"learning_rate": 9.982003649548773e-06, |
|
"loss": 1.642, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.15216926693737978, |
|
"grad_norm": 2.5010616779327393, |
|
"learning_rate": 9.981399147530095e-06, |
|
"loss": 1.7123, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.15302415045949988, |
|
"grad_norm": 2.073054790496826, |
|
"learning_rate": 9.98078467875377e-06, |
|
"loss": 1.6237, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.15387903398162, |
|
"grad_norm": 2.0609512329101562, |
|
"learning_rate": 9.980160244449225e-06, |
|
"loss": 1.6043, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.1547339175037401, |
|
"grad_norm": 2.184964418411255, |
|
"learning_rate": 9.979525845865826e-06, |
|
"loss": 1.7787, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.15558880102586023, |
|
"grad_norm": 1.9945862293243408, |
|
"learning_rate": 9.978881484272873e-06, |
|
"loss": 1.573, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.15644368454798033, |
|
"grad_norm": 2.4625461101531982, |
|
"learning_rate": 9.978227160959602e-06, |
|
"loss": 1.486, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.15729856807010045, |
|
"grad_norm": 2.747724771499634, |
|
"learning_rate": 9.977562877235178e-06, |
|
"loss": 1.8208, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.15815345159222055, |
|
"grad_norm": 2.442047357559204, |
|
"learning_rate": 9.976888634428699e-06, |
|
"loss": 1.519, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.15900833511434068, |
|
"grad_norm": 2.035658836364746, |
|
"learning_rate": 9.976204433889185e-06, |
|
"loss": 1.7189, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.15986321863646077, |
|
"grad_norm": 1.9837456941604614, |
|
"learning_rate": 9.975510276985581e-06, |
|
"loss": 1.6247, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.1607181021585809, |
|
"grad_norm": 2.1273021697998047, |
|
"learning_rate": 9.974806165106753e-06, |
|
"loss": 1.588, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.161572985680701, |
|
"grad_norm": 2.254690647125244, |
|
"learning_rate": 9.974092099661485e-06, |
|
"loss": 1.4978, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.16242786920282112, |
|
"grad_norm": 2.070093870162964, |
|
"learning_rate": 9.973368082078474e-06, |
|
"loss": 1.5023, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.16328275272494122, |
|
"grad_norm": 2.2594425678253174, |
|
"learning_rate": 9.972634113806332e-06, |
|
"loss": 1.3271, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.16413763624706135, |
|
"grad_norm": 2.137816905975342, |
|
"learning_rate": 9.971890196313578e-06, |
|
"loss": 1.6029, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.16499251976918144, |
|
"grad_norm": 2.150294542312622, |
|
"learning_rate": 9.971136331088638e-06, |
|
"loss": 1.5479, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.16584740329130157, |
|
"grad_norm": 2.0613577365875244, |
|
"learning_rate": 9.970372519639843e-06, |
|
"loss": 1.6085, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.16670228681342167, |
|
"grad_norm": 2.489182233810425, |
|
"learning_rate": 9.969598763495422e-06, |
|
"loss": 1.5447, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.1675571703355418, |
|
"grad_norm": 1.9882975816726685, |
|
"learning_rate": 9.968815064203503e-06, |
|
"loss": 1.6983, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.1684120538576619, |
|
"grad_norm": 2.307633638381958, |
|
"learning_rate": 9.968021423332106e-06, |
|
"loss": 1.3364, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.16926693737978202, |
|
"grad_norm": 2.16860032081604, |
|
"learning_rate": 9.967217842469144e-06, |
|
"loss": 1.4998, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.17012182090190212, |
|
"grad_norm": 2.1140408515930176, |
|
"learning_rate": 9.966404323222419e-06, |
|
"loss": 1.7047, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.1709767044240222, |
|
"grad_norm": 2.0061354637145996, |
|
"learning_rate": 9.965580867219615e-06, |
|
"loss": 1.6829, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.17183158794614234, |
|
"grad_norm": 2.1682636737823486, |
|
"learning_rate": 9.964747476108297e-06, |
|
"loss": 1.599, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.17268647146826244, |
|
"grad_norm": 2.050809144973755, |
|
"learning_rate": 9.963904151555913e-06, |
|
"loss": 1.6234, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.17354135499038256, |
|
"grad_norm": 2.009514331817627, |
|
"learning_rate": 9.963050895249779e-06, |
|
"loss": 1.6771, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.17439623851250266, |
|
"grad_norm": 2.012481689453125, |
|
"learning_rate": 9.962187708897088e-06, |
|
"loss": 1.5752, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.17525112203462279, |
|
"grad_norm": 2.2250049114227295, |
|
"learning_rate": 9.961314594224897e-06, |
|
"loss": 1.6165, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.17610600555674288, |
|
"grad_norm": 2.289846658706665, |
|
"learning_rate": 9.960431552980132e-06, |
|
"loss": 1.5527, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.176960889078863, |
|
"grad_norm": 2.110806703567505, |
|
"learning_rate": 9.959538586929576e-06, |
|
"loss": 1.5558, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.1778157726009831, |
|
"grad_norm": 1.9617878198623657, |
|
"learning_rate": 9.958635697859871e-06, |
|
"loss": 1.6633, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.17867065612310323, |
|
"grad_norm": 2.2823057174682617, |
|
"learning_rate": 9.95772288757751e-06, |
|
"loss": 1.6224, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.17952553964522333, |
|
"grad_norm": 1.9670095443725586, |
|
"learning_rate": 9.956800157908843e-06, |
|
"loss": 1.8749, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.18038042316734346, |
|
"grad_norm": 2.7742271423339844, |
|
"learning_rate": 9.955867510700064e-06, |
|
"loss": 1.486, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.18123530668946355, |
|
"grad_norm": 2.0588271617889404, |
|
"learning_rate": 9.954924947817202e-06, |
|
"loss": 1.5673, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.18209019021158368, |
|
"grad_norm": 1.9455853700637817, |
|
"learning_rate": 9.953972471146139e-06, |
|
"loss": 1.5381, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.18294507373370378, |
|
"grad_norm": 2.869638442993164, |
|
"learning_rate": 9.95301008259258e-06, |
|
"loss": 1.5812, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.1837999572558239, |
|
"grad_norm": 2.755342960357666, |
|
"learning_rate": 9.952037784082069e-06, |
|
"loss": 1.7378, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.184654840777944, |
|
"grad_norm": 2.0044925212860107, |
|
"learning_rate": 9.951055577559978e-06, |
|
"loss": 1.6928, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.18550972430006413, |
|
"grad_norm": 1.9753495454788208, |
|
"learning_rate": 9.950063464991498e-06, |
|
"loss": 1.6793, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.18636460782218423, |
|
"grad_norm": 2.6321005821228027, |
|
"learning_rate": 9.949061448361645e-06, |
|
"loss": 1.8052, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.18721949134430435, |
|
"grad_norm": 1.9599953889846802, |
|
"learning_rate": 9.948049529675246e-06, |
|
"loss": 1.6699, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.18807437486642445, |
|
"grad_norm": 2.058579444885254, |
|
"learning_rate": 9.947027710956944e-06, |
|
"loss": 1.5779, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.18892925838854457, |
|
"grad_norm": 1.9399949312210083, |
|
"learning_rate": 9.945995994251189e-06, |
|
"loss": 1.7076, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.18978414191066467, |
|
"grad_norm": 2.8098912239074707, |
|
"learning_rate": 9.944954381622234e-06, |
|
"loss": 1.5293, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.19063902543278477, |
|
"grad_norm": 2.3304898738861084, |
|
"learning_rate": 9.943902875154133e-06, |
|
"loss": 1.5303, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.1914939089549049, |
|
"grad_norm": 1.800581693649292, |
|
"learning_rate": 9.94284147695073e-06, |
|
"loss": 1.871, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.192348792477025, |
|
"grad_norm": 1.8096035718917847, |
|
"learning_rate": 9.941770189135674e-06, |
|
"loss": 1.6497, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.19320367599914512, |
|
"grad_norm": 2.4061684608459473, |
|
"learning_rate": 9.940689013852385e-06, |
|
"loss": 1.3758, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.19405855952126522, |
|
"grad_norm": 2.075972557067871, |
|
"learning_rate": 9.939597953264075e-06, |
|
"loss": 1.6361, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.19491344304338534, |
|
"grad_norm": 1.9757728576660156, |
|
"learning_rate": 9.938497009553732e-06, |
|
"loss": 1.6045, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.19576832656550544, |
|
"grad_norm": 2.0147316455841064, |
|
"learning_rate": 9.937386184924119e-06, |
|
"loss": 1.718, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.19662321008762557, |
|
"grad_norm": 4.8961334228515625, |
|
"learning_rate": 9.93626548159777e-06, |
|
"loss": 1.8579, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.19747809360974566, |
|
"grad_norm": 3.387730836868286, |
|
"learning_rate": 9.935134901816977e-06, |
|
"loss": 1.4704, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.1983329771318658, |
|
"grad_norm": 3.4209892749786377, |
|
"learning_rate": 9.933994447843804e-06, |
|
"loss": 1.7328, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.1991878606539859, |
|
"grad_norm": 2.224865436553955, |
|
"learning_rate": 9.932844121960062e-06, |
|
"loss": 1.5973, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.200042744176106, |
|
"grad_norm": 2.7523677349090576, |
|
"learning_rate": 9.931683926467318e-06, |
|
"loss": 1.4736, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.2008976276982261, |
|
"grad_norm": 2.1172382831573486, |
|
"learning_rate": 9.930513863686885e-06, |
|
"loss": 1.5492, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.20175251122034624, |
|
"grad_norm": 2.0145204067230225, |
|
"learning_rate": 9.92933393595982e-06, |
|
"loss": 1.7651, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.20260739474246633, |
|
"grad_norm": 1.7892417907714844, |
|
"learning_rate": 9.928144145646915e-06, |
|
"loss": 1.8183, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.20346227826458646, |
|
"grad_norm": 2.0646109580993652, |
|
"learning_rate": 9.926944495128696e-06, |
|
"loss": 1.6102, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.20431716178670656, |
|
"grad_norm": 2.2819433212280273, |
|
"learning_rate": 9.925734986805418e-06, |
|
"loss": 1.7732, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.20517204530882668, |
|
"grad_norm": 1.963572382926941, |
|
"learning_rate": 9.924515623097059e-06, |
|
"loss": 1.786, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.20602692883094678, |
|
"grad_norm": 2.0390403270721436, |
|
"learning_rate": 9.923286406443315e-06, |
|
"loss": 1.6186, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.2068818123530669, |
|
"grad_norm": 2.267930507659912, |
|
"learning_rate": 9.922047339303597e-06, |
|
"loss": 1.5218, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.207736695875187, |
|
"grad_norm": 1.9752867221832275, |
|
"learning_rate": 9.920798424157025e-06, |
|
"loss": 1.874, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.2085915793973071, |
|
"grad_norm": 2.595473289489746, |
|
"learning_rate": 9.91953966350242e-06, |
|
"loss": 1.6422, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.20944646291942723, |
|
"grad_norm": 2.4579105377197266, |
|
"learning_rate": 9.918271059858304e-06, |
|
"loss": 1.539, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.21030134644154733, |
|
"grad_norm": 2.0564873218536377, |
|
"learning_rate": 9.916992615762895e-06, |
|
"loss": 1.5645, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.21115622996366745, |
|
"grad_norm": 2.313728094100952, |
|
"learning_rate": 9.915704333774095e-06, |
|
"loss": 1.5913, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.21201111348578755, |
|
"grad_norm": 2.0686261653900146, |
|
"learning_rate": 9.914406216469493e-06, |
|
"loss": 1.6991, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.21286599700790768, |
|
"grad_norm": 1.7527450323104858, |
|
"learning_rate": 9.913098266446354e-06, |
|
"loss": 1.5427, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.21372088053002777, |
|
"grad_norm": 1.9648592472076416, |
|
"learning_rate": 9.911780486321618e-06, |
|
"loss": 1.5907, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.2145757640521479, |
|
"grad_norm": 2.190805196762085, |
|
"learning_rate": 9.910452878731895e-06, |
|
"loss": 1.5391, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.215430647574268, |
|
"grad_norm": 2.548785924911499, |
|
"learning_rate": 9.909115446333453e-06, |
|
"loss": 1.823, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.21628553109638812, |
|
"grad_norm": 2.668999671936035, |
|
"learning_rate": 9.907768191802221e-06, |
|
"loss": 1.4534, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.21714041461850822, |
|
"grad_norm": 1.8880456686019897, |
|
"learning_rate": 9.906411117833778e-06, |
|
"loss": 1.9095, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.21799529814062835, |
|
"grad_norm": 2.0296988487243652, |
|
"learning_rate": 9.905044227143351e-06, |
|
"loss": 1.5362, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.21885018166274844, |
|
"grad_norm": 1.850406289100647, |
|
"learning_rate": 9.90366752246581e-06, |
|
"loss": 1.5792, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.21970506518486857, |
|
"grad_norm": 2.1268975734710693, |
|
"learning_rate": 9.902281006555654e-06, |
|
"loss": 1.732, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.22055994870698867, |
|
"grad_norm": 2.0648953914642334, |
|
"learning_rate": 9.90088468218702e-06, |
|
"loss": 1.5056, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.2214148322291088, |
|
"grad_norm": 2.415849447250366, |
|
"learning_rate": 9.899478552153665e-06, |
|
"loss": 1.5931, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.2222697157512289, |
|
"grad_norm": 2.0099523067474365, |
|
"learning_rate": 9.89806261926897e-06, |
|
"loss": 1.6716, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.22312459927334902, |
|
"grad_norm": 2.1277308464050293, |
|
"learning_rate": 9.896636886365921e-06, |
|
"loss": 1.5538, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.22397948279546911, |
|
"grad_norm": 1.9735395908355713, |
|
"learning_rate": 9.895201356297122e-06, |
|
"loss": 1.6561, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.22483436631758924, |
|
"grad_norm": 2.20600962638855, |
|
"learning_rate": 9.89375603193477e-06, |
|
"loss": 1.7162, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.22568924983970934, |
|
"grad_norm": 2.5070505142211914, |
|
"learning_rate": 9.892300916170665e-06, |
|
"loss": 1.5809, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.22654413336182946, |
|
"grad_norm": 2.2711572647094727, |
|
"learning_rate": 9.890836011916194e-06, |
|
"loss": 1.5735, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.22739901688394956, |
|
"grad_norm": 2.1966934204101562, |
|
"learning_rate": 9.88936132210233e-06, |
|
"loss": 1.7117, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.22825390040606966, |
|
"grad_norm": 2.4135377407073975, |
|
"learning_rate": 9.887876849679627e-06, |
|
"loss": 1.4651, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.22910878392818979, |
|
"grad_norm": 1.9522583484649658, |
|
"learning_rate": 9.886382597618207e-06, |
|
"loss": 1.6792, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.22996366745030988, |
|
"grad_norm": 2.727830171585083, |
|
"learning_rate": 9.884878568907764e-06, |
|
"loss": 1.5722, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.23081855097243, |
|
"grad_norm": 2.306396245956421, |
|
"learning_rate": 9.883364766557549e-06, |
|
"loss": 1.6057, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.2316734344945501, |
|
"grad_norm": 2.0543172359466553, |
|
"learning_rate": 9.88184119359637e-06, |
|
"loss": 1.5904, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.23252831801667023, |
|
"grad_norm": 1.944240927696228, |
|
"learning_rate": 9.880307853072582e-06, |
|
"loss": 1.5547, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.23338320153879033, |
|
"grad_norm": 1.8696750402450562, |
|
"learning_rate": 9.878764748054089e-06, |
|
"loss": 1.611, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.23423808506091046, |
|
"grad_norm": 1.9814125299453735, |
|
"learning_rate": 9.877211881628325e-06, |
|
"loss": 1.5352, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.23509296858303055, |
|
"grad_norm": 4.211121559143066, |
|
"learning_rate": 9.87564925690226e-06, |
|
"loss": 1.6153, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.23594785210515068, |
|
"grad_norm": 3.5215139389038086, |
|
"learning_rate": 9.874076877002381e-06, |
|
"loss": 1.4883, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.23680273562727078, |
|
"grad_norm": 2.044966459274292, |
|
"learning_rate": 9.872494745074701e-06, |
|
"loss": 1.5476, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.2376576191493909, |
|
"grad_norm": 2.230739116668701, |
|
"learning_rate": 9.87090286428474e-06, |
|
"loss": 1.5347, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.238512502671511, |
|
"grad_norm": 7.256809234619141, |
|
"learning_rate": 9.869301237817524e-06, |
|
"loss": 1.7037, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.23936738619363113, |
|
"grad_norm": 2.069016218185425, |
|
"learning_rate": 9.867689868877582e-06, |
|
"loss": 1.6771, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.24022226971575122, |
|
"grad_norm": 2.5683460235595703, |
|
"learning_rate": 9.866068760688929e-06, |
|
"loss": 1.5833, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.24107715323787135, |
|
"grad_norm": 2.004464864730835, |
|
"learning_rate": 9.864437916495073e-06, |
|
"loss": 1.6213, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.24193203675999145, |
|
"grad_norm": 2.210275411605835, |
|
"learning_rate": 9.862797339558995e-06, |
|
"loss": 1.5354, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.24278692028211157, |
|
"grad_norm": 2.1438090801239014, |
|
"learning_rate": 9.861147033163156e-06, |
|
"loss": 1.6764, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.24364180380423167, |
|
"grad_norm": 1.9565024375915527, |
|
"learning_rate": 9.85948700060948e-06, |
|
"loss": 1.6768, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.2444966873263518, |
|
"grad_norm": 2.793376922607422, |
|
"learning_rate": 9.85781724521935e-06, |
|
"loss": 1.5542, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.2453515708484719, |
|
"grad_norm": 2.833108425140381, |
|
"learning_rate": 9.856137770333606e-06, |
|
"loss": 1.6302, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.24620645437059202, |
|
"grad_norm": 2.1430552005767822, |
|
"learning_rate": 9.85444857931253e-06, |
|
"loss": 1.6496, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.24706133789271212, |
|
"grad_norm": 1.821040153503418, |
|
"learning_rate": 9.85274967553585e-06, |
|
"loss": 1.6246, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.24791622141483222, |
|
"grad_norm": 2.6233530044555664, |
|
"learning_rate": 9.851041062402721e-06, |
|
"loss": 1.4109, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.24877110493695234, |
|
"grad_norm": 1.9718334674835205, |
|
"learning_rate": 9.849322743331731e-06, |
|
"loss": 1.6219, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.24962598845907244, |
|
"grad_norm": 1.8950848579406738, |
|
"learning_rate": 9.847594721760878e-06, |
|
"loss": 1.6511, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.25048087198119257, |
|
"grad_norm": 2.438082218170166, |
|
"learning_rate": 9.845857001147585e-06, |
|
"loss": 1.5432, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.25048087198119257, |
|
"eval_loss": 1.6429485082626343, |
|
"eval_runtime": 535.3503, |
|
"eval_samples_per_second": 7.39, |
|
"eval_steps_per_second": 3.695, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.25133575550331266, |
|
"grad_norm": 2.273573398590088, |
|
"learning_rate": 9.844109584968669e-06, |
|
"loss": 1.5812, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.25219063902543276, |
|
"grad_norm": 1.8384723663330078, |
|
"learning_rate": 9.842352476720354e-06, |
|
"loss": 1.6735, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.2530455225475529, |
|
"grad_norm": 1.9818137884140015, |
|
"learning_rate": 9.840585679918254e-06, |
|
"loss": 1.7463, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.253900406069673, |
|
"grad_norm": 2.039534568786621, |
|
"learning_rate": 9.838809198097365e-06, |
|
"loss": 1.6754, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.2547552895917931, |
|
"grad_norm": 3.1951630115509033, |
|
"learning_rate": 9.837023034812064e-06, |
|
"loss": 1.5875, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.2556101731139132, |
|
"grad_norm": 2.5072474479675293, |
|
"learning_rate": 9.835227193636096e-06, |
|
"loss": 1.4731, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.25646505663603336, |
|
"grad_norm": 1.7682746648788452, |
|
"learning_rate": 9.83342167816257e-06, |
|
"loss": 1.6874, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.25731994015815346, |
|
"grad_norm": 2.0054051876068115, |
|
"learning_rate": 9.831606492003955e-06, |
|
"loss": 1.7077, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.25817482368027356, |
|
"grad_norm": 1.800396203994751, |
|
"learning_rate": 9.829781638792064e-06, |
|
"loss": 1.714, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.25902970720239366, |
|
"grad_norm": 2.194918394088745, |
|
"learning_rate": 9.827947122178055e-06, |
|
"loss": 1.6603, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.2598845907245138, |
|
"grad_norm": 2.0959277153015137, |
|
"learning_rate": 9.826102945832418e-06, |
|
"loss": 1.6439, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.2607394742466339, |
|
"grad_norm": 2.1359193325042725, |
|
"learning_rate": 9.824249113444973e-06, |
|
"loss": 1.6057, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.261594357768754, |
|
"grad_norm": 1.9840806722640991, |
|
"learning_rate": 9.822385628724855e-06, |
|
"loss": 1.5768, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.2624492412908741, |
|
"grad_norm": 1.8947420120239258, |
|
"learning_rate": 9.82051249540052e-06, |
|
"loss": 1.6567, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.26330412481299426, |
|
"grad_norm": 1.949657678604126, |
|
"learning_rate": 9.818629717219719e-06, |
|
"loss": 1.6144, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.26415900833511435, |
|
"grad_norm": 2.000495195388794, |
|
"learning_rate": 9.816737297949506e-06, |
|
"loss": 1.671, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.26501389185723445, |
|
"grad_norm": 2.1426568031311035, |
|
"learning_rate": 9.814835241376223e-06, |
|
"loss": 1.7611, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.26586877537935455, |
|
"grad_norm": 1.849526047706604, |
|
"learning_rate": 9.812923551305497e-06, |
|
"loss": 1.67, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.26672365890147465, |
|
"grad_norm": 1.818123459815979, |
|
"learning_rate": 9.811002231562228e-06, |
|
"loss": 1.5275, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.2675785424235948, |
|
"grad_norm": 2.141369104385376, |
|
"learning_rate": 9.80907128599058e-06, |
|
"loss": 1.4483, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.2684334259457149, |
|
"grad_norm": 1.8215101957321167, |
|
"learning_rate": 9.80713071845398e-06, |
|
"loss": 1.4223, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.269288309467835, |
|
"grad_norm": 1.7793519496917725, |
|
"learning_rate": 9.805180532835104e-06, |
|
"loss": 1.8701, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.2701431929899551, |
|
"grad_norm": 2.3831069469451904, |
|
"learning_rate": 9.803220733035876e-06, |
|
"loss": 1.5475, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.27099807651207525, |
|
"grad_norm": 2.3979852199554443, |
|
"learning_rate": 9.801251322977454e-06, |
|
"loss": 1.7673, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.27185296003419535, |
|
"grad_norm": 2.326536178588867, |
|
"learning_rate": 9.799272306600219e-06, |
|
"loss": 1.776, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.27270784355631544, |
|
"grad_norm": 2.0572667121887207, |
|
"learning_rate": 9.79728368786378e-06, |
|
"loss": 1.597, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.27356272707843554, |
|
"grad_norm": 2.6097846031188965, |
|
"learning_rate": 9.795285470746954e-06, |
|
"loss": 1.5121, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.2744176106005557, |
|
"grad_norm": 2.138073682785034, |
|
"learning_rate": 9.793277659247764e-06, |
|
"loss": 1.5842, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.2752724941226758, |
|
"grad_norm": 1.9335135221481323, |
|
"learning_rate": 9.79126025738343e-06, |
|
"loss": 1.678, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.2761273776447959, |
|
"grad_norm": 1.9786878824234009, |
|
"learning_rate": 9.789233269190357e-06, |
|
"loss": 1.634, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.276982261166916, |
|
"grad_norm": 1.932175636291504, |
|
"learning_rate": 9.787196698724133e-06, |
|
"loss": 1.6037, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.27783714468903614, |
|
"grad_norm": 2.077538013458252, |
|
"learning_rate": 9.785150550059519e-06, |
|
"loss": 1.5051, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.27869202821115624, |
|
"grad_norm": 1.9720157384872437, |
|
"learning_rate": 9.783094827290439e-06, |
|
"loss": 1.5322, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.27954691173327634, |
|
"grad_norm": 1.9299103021621704, |
|
"learning_rate": 9.781029534529968e-06, |
|
"loss": 1.6145, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.28040179525539644, |
|
"grad_norm": 2.12306547164917, |
|
"learning_rate": 9.778954675910337e-06, |
|
"loss": 1.6878, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.2812566787775166, |
|
"grad_norm": 1.9829449653625488, |
|
"learning_rate": 9.776870255582913e-06, |
|
"loss": 1.7578, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.2821115622996367, |
|
"grad_norm": 2.2889063358306885, |
|
"learning_rate": 9.774776277718193e-06, |
|
"loss": 1.4426, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.2829664458217568, |
|
"grad_norm": 2.5552847385406494, |
|
"learning_rate": 9.772672746505794e-06, |
|
"loss": 1.8471, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.2838213293438769, |
|
"grad_norm": 2.1380834579467773, |
|
"learning_rate": 9.770559666154454e-06, |
|
"loss": 1.5864, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.284676212865997, |
|
"grad_norm": 2.1155436038970947, |
|
"learning_rate": 9.76843704089201e-06, |
|
"loss": 1.6244, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.28553109638811713, |
|
"grad_norm": 1.8792701959609985, |
|
"learning_rate": 9.766304874965403e-06, |
|
"loss": 1.7114, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.28638597991023723, |
|
"grad_norm": 2.630060911178589, |
|
"learning_rate": 9.764163172640657e-06, |
|
"loss": 1.704, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.28724086343235733, |
|
"grad_norm": 2.6928205490112305, |
|
"learning_rate": 9.76201193820288e-06, |
|
"loss": 1.4833, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.2880957469544774, |
|
"grad_norm": 1.9738703966140747, |
|
"learning_rate": 9.759851175956252e-06, |
|
"loss": 1.7405, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.2889506304765976, |
|
"grad_norm": 1.8967580795288086, |
|
"learning_rate": 9.757680890224013e-06, |
|
"loss": 1.6763, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.2898055139987177, |
|
"grad_norm": 1.9165318012237549, |
|
"learning_rate": 9.75550108534846e-06, |
|
"loss": 1.6598, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.2906603975208378, |
|
"grad_norm": 1.984978437423706, |
|
"learning_rate": 9.753311765690935e-06, |
|
"loss": 1.5119, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.2915152810429579, |
|
"grad_norm": 1.8836838006973267, |
|
"learning_rate": 9.751112935631816e-06, |
|
"loss": 1.6941, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.29237016456507803, |
|
"grad_norm": 1.778017520904541, |
|
"learning_rate": 9.748904599570517e-06, |
|
"loss": 1.6655, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.2932250480871981, |
|
"grad_norm": 2.1019034385681152, |
|
"learning_rate": 9.746686761925456e-06, |
|
"loss": 1.6087, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.2940799316093182, |
|
"grad_norm": 2.2590184211730957, |
|
"learning_rate": 9.744459427134081e-06, |
|
"loss": 1.3935, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.2949348151314383, |
|
"grad_norm": 1.9555338621139526, |
|
"learning_rate": 9.742222599652824e-06, |
|
"loss": 1.6565, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.2957896986535585, |
|
"grad_norm": 1.791080355644226, |
|
"learning_rate": 9.739976283957122e-06, |
|
"loss": 1.7648, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.2966445821756786, |
|
"grad_norm": 2.701005697250366, |
|
"learning_rate": 9.737720484541389e-06, |
|
"loss": 1.6442, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.29749946569779867, |
|
"grad_norm": 2.1853907108306885, |
|
"learning_rate": 9.735455205919019e-06, |
|
"loss": 1.5882, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.29835434921991877, |
|
"grad_norm": 2.8202414512634277, |
|
"learning_rate": 9.733180452622371e-06, |
|
"loss": 1.6558, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.2992092327420389, |
|
"grad_norm": 1.9966498613357544, |
|
"learning_rate": 9.730896229202756e-06, |
|
"loss": 1.5792, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.300064116264159, |
|
"grad_norm": 1.8057630062103271, |
|
"learning_rate": 9.72860254023044e-06, |
|
"loss": 1.8001, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.3009189997862791, |
|
"grad_norm": 1.988755702972412, |
|
"learning_rate": 9.726299390294621e-06, |
|
"loss": 1.8377, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.3017738833083992, |
|
"grad_norm": 1.7807461023330688, |
|
"learning_rate": 9.72398678400343e-06, |
|
"loss": 1.6874, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.30262876683051937, |
|
"grad_norm": 2.122286081314087, |
|
"learning_rate": 9.72166472598392e-06, |
|
"loss": 1.4228, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.30348365035263947, |
|
"grad_norm": 2.2886228561401367, |
|
"learning_rate": 9.719333220882051e-06, |
|
"loss": 1.7398, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.30433853387475956, |
|
"grad_norm": 1.948338270187378, |
|
"learning_rate": 9.716992273362686e-06, |
|
"loss": 1.5523, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.30519341739687966, |
|
"grad_norm": 1.9473958015441895, |
|
"learning_rate": 9.714641888109582e-06, |
|
"loss": 1.7423, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.30604830091899976, |
|
"grad_norm": 2.157661199569702, |
|
"learning_rate": 9.712282069825375e-06, |
|
"loss": 1.3287, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.3069031844411199, |
|
"grad_norm": 3.0535852909088135, |
|
"learning_rate": 9.70991282323158e-06, |
|
"loss": 1.5311, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.30775806796324, |
|
"grad_norm": 1.8765023946762085, |
|
"learning_rate": 9.707534153068574e-06, |
|
"loss": 1.7105, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.3086129514853601, |
|
"grad_norm": 2.0064175128936768, |
|
"learning_rate": 9.705146064095585e-06, |
|
"loss": 1.6531, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.3094678350074802, |
|
"grad_norm": 1.9886373281478882, |
|
"learning_rate": 9.70274856109069e-06, |
|
"loss": 1.4997, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.31032271852960036, |
|
"grad_norm": 1.990229606628418, |
|
"learning_rate": 9.700341648850802e-06, |
|
"loss": 1.5256, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.31117760205172046, |
|
"grad_norm": 2.106940507888794, |
|
"learning_rate": 9.697925332191656e-06, |
|
"loss": 1.7129, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.31203248557384056, |
|
"grad_norm": 1.7736214399337769, |
|
"learning_rate": 9.695499615947808e-06, |
|
"loss": 1.6478, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.31288736909596065, |
|
"grad_norm": 2.1179401874542236, |
|
"learning_rate": 9.693064504972619e-06, |
|
"loss": 1.5524, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.3137422526180808, |
|
"grad_norm": 1.916581392288208, |
|
"learning_rate": 9.690620004138245e-06, |
|
"loss": 1.6106, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.3145971361402009, |
|
"grad_norm": 1.9544050693511963, |
|
"learning_rate": 9.68816611833563e-06, |
|
"loss": 1.4856, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.315452019662321, |
|
"grad_norm": 1.9140318632125854, |
|
"learning_rate": 9.685702852474499e-06, |
|
"loss": 1.5316, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.3163069031844411, |
|
"grad_norm": 2.6337473392486572, |
|
"learning_rate": 9.683230211483337e-06, |
|
"loss": 1.6554, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.31716178670656126, |
|
"grad_norm": 4.639531135559082, |
|
"learning_rate": 9.680748200309396e-06, |
|
"loss": 1.6353, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.31801667022868135, |
|
"grad_norm": 2.012089967727661, |
|
"learning_rate": 9.678256823918668e-06, |
|
"loss": 1.5873, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.31887155375080145, |
|
"grad_norm": 2.249965190887451, |
|
"learning_rate": 9.67575608729589e-06, |
|
"loss": 1.5251, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.31972643727292155, |
|
"grad_norm": 2.2432241439819336, |
|
"learning_rate": 9.673245995444516e-06, |
|
"loss": 1.5292, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.3205813207950417, |
|
"grad_norm": 1.8957768678665161, |
|
"learning_rate": 9.67072655338673e-06, |
|
"loss": 1.542, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.3214362043171618, |
|
"grad_norm": 2.318333148956299, |
|
"learning_rate": 9.668197766163417e-06, |
|
"loss": 1.6483, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.3222910878392819, |
|
"grad_norm": 1.8972350358963013, |
|
"learning_rate": 9.665659638834162e-06, |
|
"loss": 1.7009, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.323145971361402, |
|
"grad_norm": 1.7293806076049805, |
|
"learning_rate": 9.663112176477238e-06, |
|
"loss": 1.7716, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.3240008548835221, |
|
"grad_norm": 1.9904060363769531, |
|
"learning_rate": 9.660555384189592e-06, |
|
"loss": 1.6335, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.32485573840564225, |
|
"grad_norm": 1.9652825593948364, |
|
"learning_rate": 9.657989267086842e-06, |
|
"loss": 1.5519, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.32571062192776234, |
|
"grad_norm": 2.0220279693603516, |
|
"learning_rate": 9.655413830303264e-06, |
|
"loss": 1.5513, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.32656550544988244, |
|
"grad_norm": 1.9460409879684448, |
|
"learning_rate": 9.652829078991775e-06, |
|
"loss": 1.4417, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.32742038897200254, |
|
"grad_norm": 1.9341466426849365, |
|
"learning_rate": 9.650235018323932e-06, |
|
"loss": 1.5433, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.3282752724941227, |
|
"grad_norm": 1.7564873695373535, |
|
"learning_rate": 9.647631653489922e-06, |
|
"loss": 1.7237, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.3291301560162428, |
|
"grad_norm": 1.9628024101257324, |
|
"learning_rate": 9.645018989698541e-06, |
|
"loss": 1.7277, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.3299850395383629, |
|
"grad_norm": 2.3082334995269775, |
|
"learning_rate": 9.642397032177194e-06, |
|
"loss": 1.5495, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.330839923060483, |
|
"grad_norm": 1.866422414779663, |
|
"learning_rate": 9.63976578617188e-06, |
|
"loss": 1.7534, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.33169480658260314, |
|
"grad_norm": 2.317007541656494, |
|
"learning_rate": 9.637125256947182e-06, |
|
"loss": 1.5052, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.33254969010472324, |
|
"grad_norm": 1.8729028701782227, |
|
"learning_rate": 9.634475449786256e-06, |
|
"loss": 1.8099, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.33340457362684334, |
|
"grad_norm": 2.69675874710083, |
|
"learning_rate": 9.631816369990826e-06, |
|
"loss": 1.7547, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.33425945714896343, |
|
"grad_norm": 1.989051103591919, |
|
"learning_rate": 9.629148022881162e-06, |
|
"loss": 1.3242, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.3351143406710836, |
|
"grad_norm": 1.821259617805481, |
|
"learning_rate": 9.62647041379608e-06, |
|
"loss": 1.7114, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.3359692241932037, |
|
"grad_norm": 2.20082688331604, |
|
"learning_rate": 9.623783548092927e-06, |
|
"loss": 1.6421, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.3368241077153238, |
|
"grad_norm": 2.1007845401763916, |
|
"learning_rate": 9.621087431147571e-06, |
|
"loss": 1.5982, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.3376789912374439, |
|
"grad_norm": 1.973475456237793, |
|
"learning_rate": 9.618382068354385e-06, |
|
"loss": 1.4475, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.33853387475956404, |
|
"grad_norm": 2.067143201828003, |
|
"learning_rate": 9.615667465126249e-06, |
|
"loss": 1.5659, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.33938875828168413, |
|
"grad_norm": 4.515519142150879, |
|
"learning_rate": 9.612943626894524e-06, |
|
"loss": 1.5314, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.34024364180380423, |
|
"grad_norm": 2.152055501937866, |
|
"learning_rate": 9.610210559109053e-06, |
|
"loss": 1.6649, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.34109852532592433, |
|
"grad_norm": 1.7932733297348022, |
|
"learning_rate": 9.607468267238144e-06, |
|
"loss": 1.623, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.3419534088480444, |
|
"grad_norm": 2.0391805171966553, |
|
"learning_rate": 9.60471675676856e-06, |
|
"loss": 1.3268, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.3428082923701646, |
|
"grad_norm": 1.702877163887024, |
|
"learning_rate": 9.60195603320551e-06, |
|
"loss": 1.7459, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.3436631758922847, |
|
"grad_norm": 2.219301700592041, |
|
"learning_rate": 9.599186102072633e-06, |
|
"loss": 1.4155, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.3445180594144048, |
|
"grad_norm": 2.3838083744049072, |
|
"learning_rate": 9.596406968911994e-06, |
|
"loss": 1.6431, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.3453729429365249, |
|
"grad_norm": 1.8397514820098877, |
|
"learning_rate": 9.59361863928407e-06, |
|
"loss": 1.6565, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.346227826458645, |
|
"grad_norm": 1.9715574979782104, |
|
"learning_rate": 9.590821118767735e-06, |
|
"loss": 1.5712, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.3470827099807651, |
|
"grad_norm": 1.8566009998321533, |
|
"learning_rate": 9.588014412960251e-06, |
|
"loss": 1.4365, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.3479375935028852, |
|
"grad_norm": 2.3083534240722656, |
|
"learning_rate": 9.585198527477266e-06, |
|
"loss": 1.6871, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.3487924770250053, |
|
"grad_norm": 2.463879346847534, |
|
"learning_rate": 9.582373467952785e-06, |
|
"loss": 1.347, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.3496473605471255, |
|
"grad_norm": 1.7422118186950684, |
|
"learning_rate": 9.579539240039175e-06, |
|
"loss": 1.6822, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.35050224406924557, |
|
"grad_norm": 3.396388292312622, |
|
"learning_rate": 9.576695849407142e-06, |
|
"loss": 1.4162, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.35135712759136567, |
|
"grad_norm": 1.8444187641143799, |
|
"learning_rate": 9.573843301745729e-06, |
|
"loss": 1.49, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.35221201111348577, |
|
"grad_norm": 1.8690928220748901, |
|
"learning_rate": 9.570981602762298e-06, |
|
"loss": 1.677, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.3530668946356059, |
|
"grad_norm": 1.8715145587921143, |
|
"learning_rate": 9.568110758182521e-06, |
|
"loss": 1.5221, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.353921778157726, |
|
"grad_norm": 2.1251678466796875, |
|
"learning_rate": 9.565230773750369e-06, |
|
"loss": 1.4464, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.3547766616798461, |
|
"grad_norm": 1.9180314540863037, |
|
"learning_rate": 9.562341655228102e-06, |
|
"loss": 1.5082, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.3556315452019662, |
|
"grad_norm": 2.0336544513702393, |
|
"learning_rate": 9.559443408396252e-06, |
|
"loss": 1.6297, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.35648642872408637, |
|
"grad_norm": 1.9119011163711548, |
|
"learning_rate": 9.556536039053614e-06, |
|
"loss": 1.5721, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.35734131224620647, |
|
"grad_norm": 2.0506341457366943, |
|
"learning_rate": 9.553619553017242e-06, |
|
"loss": 1.5863, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.35819619576832656, |
|
"grad_norm": 2.2243218421936035, |
|
"learning_rate": 9.550693956122423e-06, |
|
"loss": 1.6749, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.35905107929044666, |
|
"grad_norm": 2.0671815872192383, |
|
"learning_rate": 9.547759254222677e-06, |
|
"loss": 1.5347, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.3599059628125668, |
|
"grad_norm": 2.6808528900146484, |
|
"learning_rate": 9.544815453189742e-06, |
|
"loss": 1.5961, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.3607608463346869, |
|
"grad_norm": 2.706782102584839, |
|
"learning_rate": 9.541862558913557e-06, |
|
"loss": 1.7542, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.361615729856807, |
|
"grad_norm": 1.9116767644882202, |
|
"learning_rate": 9.538900577302259e-06, |
|
"loss": 1.5888, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.3624706133789271, |
|
"grad_norm": 1.8376832008361816, |
|
"learning_rate": 9.535929514282165e-06, |
|
"loss": 1.7108, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.3633254969010472, |
|
"grad_norm": 2.479327917098999, |
|
"learning_rate": 9.532949375797763e-06, |
|
"loss": 1.6665, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.36418038042316736, |
|
"grad_norm": 2.165562629699707, |
|
"learning_rate": 9.5299601678117e-06, |
|
"loss": 1.6313, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.36503526394528746, |
|
"grad_norm": 2.4955830574035645, |
|
"learning_rate": 9.526961896304764e-06, |
|
"loss": 1.392, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.36589014746740756, |
|
"grad_norm": 2.231637477874756, |
|
"learning_rate": 9.523954567275886e-06, |
|
"loss": 1.6392, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.36674503098952765, |
|
"grad_norm": 1.9232414960861206, |
|
"learning_rate": 9.520938186742111e-06, |
|
"loss": 1.6145, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.3675999145116478, |
|
"grad_norm": 1.8367480039596558, |
|
"learning_rate": 9.5179127607386e-06, |
|
"loss": 1.6133, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.3684547980337679, |
|
"grad_norm": 1.8861103057861328, |
|
"learning_rate": 9.514878295318605e-06, |
|
"loss": 1.5945, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.369309681555888, |
|
"grad_norm": 2.045668601989746, |
|
"learning_rate": 9.511834796553474e-06, |
|
"loss": 1.7027, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.3701645650780081, |
|
"grad_norm": 1.8058552742004395, |
|
"learning_rate": 9.508782270532622e-06, |
|
"loss": 1.5022, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.37101944860012825, |
|
"grad_norm": 2.5390875339508057, |
|
"learning_rate": 9.505720723363526e-06, |
|
"loss": 1.5491, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.37187433212224835, |
|
"grad_norm": 1.892199158668518, |
|
"learning_rate": 9.502650161171714e-06, |
|
"loss": 1.5664, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.37272921564436845, |
|
"grad_norm": 1.891270399093628, |
|
"learning_rate": 9.49957059010075e-06, |
|
"loss": 1.6434, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.37358409916648855, |
|
"grad_norm": 1.884423851966858, |
|
"learning_rate": 9.49648201631223e-06, |
|
"loss": 1.6827, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.3744389826886087, |
|
"grad_norm": 1.9029580354690552, |
|
"learning_rate": 9.493384445985748e-06, |
|
"loss": 1.7887, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.3752938662107288, |
|
"grad_norm": 2.3171463012695312, |
|
"learning_rate": 9.490277885318911e-06, |
|
"loss": 1.6336, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.3761487497328489, |
|
"grad_norm": 1.94197416305542, |
|
"learning_rate": 9.48716234052731e-06, |
|
"loss": 1.5117, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.377003633254969, |
|
"grad_norm": 2.3445913791656494, |
|
"learning_rate": 9.48403781784451e-06, |
|
"loss": 1.6445, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.37785851677708915, |
|
"grad_norm": 1.9795849323272705, |
|
"learning_rate": 9.480904323522039e-06, |
|
"loss": 1.6885, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.37871340029920925, |
|
"grad_norm": 1.7723476886749268, |
|
"learning_rate": 9.477761863829376e-06, |
|
"loss": 1.545, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.37956828382132934, |
|
"grad_norm": 1.9953805208206177, |
|
"learning_rate": 9.474610445053941e-06, |
|
"loss": 1.6542, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.38042316734344944, |
|
"grad_norm": 2.2689387798309326, |
|
"learning_rate": 9.471450073501075e-06, |
|
"loss": 1.6453, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.38127805086556954, |
|
"grad_norm": 2.2516140937805176, |
|
"learning_rate": 9.46828075549403e-06, |
|
"loss": 1.5717, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.3821329343876897, |
|
"grad_norm": 1.8665640354156494, |
|
"learning_rate": 9.465102497373964e-06, |
|
"loss": 1.5351, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.3829878179098098, |
|
"grad_norm": 1.901228904724121, |
|
"learning_rate": 9.46191530549992e-06, |
|
"loss": 1.8935, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.3838427014319299, |
|
"grad_norm": 2.023531436920166, |
|
"learning_rate": 9.458719186248813e-06, |
|
"loss": 1.4649, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.38469758495405, |
|
"grad_norm": 2.481318950653076, |
|
"learning_rate": 9.455514146015425e-06, |
|
"loss": 1.205, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.38555246847617014, |
|
"grad_norm": 2.31809401512146, |
|
"learning_rate": 9.45230019121238e-06, |
|
"loss": 1.6223, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.38640735199829024, |
|
"grad_norm": 2.2454018592834473, |
|
"learning_rate": 9.449077328270144e-06, |
|
"loss": 1.747, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.38726223552041034, |
|
"grad_norm": 2.1338086128234863, |
|
"learning_rate": 9.445845563637004e-06, |
|
"loss": 1.6782, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.38811711904253043, |
|
"grad_norm": 1.8717645406723022, |
|
"learning_rate": 9.442604903779058e-06, |
|
"loss": 1.6059, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.3889720025646506, |
|
"grad_norm": 2.139616012573242, |
|
"learning_rate": 9.439355355180203e-06, |
|
"loss": 1.5604, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.3898268860867707, |
|
"grad_norm": 3.311588764190674, |
|
"learning_rate": 9.436096924342119e-06, |
|
"loss": 1.5347, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.3906817696088908, |
|
"grad_norm": 2.202603578567505, |
|
"learning_rate": 9.432829617784254e-06, |
|
"loss": 1.4768, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.3915366531310109, |
|
"grad_norm": 2.028428554534912, |
|
"learning_rate": 9.429553442043822e-06, |
|
"loss": 1.5481, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.39239153665313103, |
|
"grad_norm": 1.914510726928711, |
|
"learning_rate": 9.426268403675776e-06, |
|
"loss": 1.5443, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.39324642017525113, |
|
"grad_norm": 1.7629053592681885, |
|
"learning_rate": 9.422974509252806e-06, |
|
"loss": 1.7737, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.39410130369737123, |
|
"grad_norm": 1.8821051120758057, |
|
"learning_rate": 9.419671765365317e-06, |
|
"loss": 1.6567, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.39495618721949133, |
|
"grad_norm": 1.9063416719436646, |
|
"learning_rate": 9.416360178621423e-06, |
|
"loss": 1.5636, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.3958110707416115, |
|
"grad_norm": 2.077802896499634, |
|
"learning_rate": 9.413039755646928e-06, |
|
"loss": 1.726, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.3966659542637316, |
|
"grad_norm": 2.192138433456421, |
|
"learning_rate": 9.40971050308532e-06, |
|
"loss": 1.6637, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.3975208377858517, |
|
"grad_norm": 1.9042564630508423, |
|
"learning_rate": 9.406372427597748e-06, |
|
"loss": 1.7897, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.3983757213079718, |
|
"grad_norm": 55.77303695678711, |
|
"learning_rate": 9.403025535863017e-06, |
|
"loss": 1.6107, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.3992306048300919, |
|
"grad_norm": 1.9976872205734253, |
|
"learning_rate": 9.39966983457757e-06, |
|
"loss": 1.4734, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.400085488352212, |
|
"grad_norm": 2.049694299697876, |
|
"learning_rate": 9.396305330455477e-06, |
|
"loss": 1.5076, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.4009403718743321, |
|
"grad_norm": 1.9261916875839233, |
|
"learning_rate": 9.39293203022842e-06, |
|
"loss": 1.7984, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.4017952553964522, |
|
"grad_norm": 1.9258121252059937, |
|
"learning_rate": 9.389549940645681e-06, |
|
"loss": 1.4655, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.4026501389185723, |
|
"grad_norm": 1.7797189950942993, |
|
"learning_rate": 9.38615906847413e-06, |
|
"loss": 1.6192, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.4035050224406925, |
|
"grad_norm": 1.9286493062973022, |
|
"learning_rate": 9.382759420498203e-06, |
|
"loss": 1.7695, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.40435990596281257, |
|
"grad_norm": 2.3398804664611816, |
|
"learning_rate": 9.3793510035199e-06, |
|
"loss": 1.4717, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.40521478948493267, |
|
"grad_norm": 1.9439282417297363, |
|
"learning_rate": 9.375933824358761e-06, |
|
"loss": 1.4579, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.40606967300705277, |
|
"grad_norm": 1.8554619550704956, |
|
"learning_rate": 9.372507889851863e-06, |
|
"loss": 1.6733, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.4069245565291729, |
|
"grad_norm": 2.129397392272949, |
|
"learning_rate": 9.3690732068538e-06, |
|
"loss": 1.7154, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.407779440051293, |
|
"grad_norm": 2.1848104000091553, |
|
"learning_rate": 9.365629782236665e-06, |
|
"loss": 1.7401, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.4086343235734131, |
|
"grad_norm": 2.366959810256958, |
|
"learning_rate": 9.362177622890042e-06, |
|
"loss": 1.4182, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.4094892070955332, |
|
"grad_norm": 2.0443150997161865, |
|
"learning_rate": 9.358716735721001e-06, |
|
"loss": 1.6127, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.41034409061765337, |
|
"grad_norm": 1.9910573959350586, |
|
"learning_rate": 9.355247127654059e-06, |
|
"loss": 1.6025, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.41119897413977347, |
|
"grad_norm": 1.7490131855010986, |
|
"learning_rate": 9.351768805631195e-06, |
|
"loss": 1.636, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.41205385766189356, |
|
"grad_norm": 1.9974701404571533, |
|
"learning_rate": 9.348281776611817e-06, |
|
"loss": 1.7417, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.41290874118401366, |
|
"grad_norm": 2.488692283630371, |
|
"learning_rate": 9.34478604757275e-06, |
|
"loss": 1.4733, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.4137636247061338, |
|
"grad_norm": 2.012645959854126, |
|
"learning_rate": 9.341281625508239e-06, |
|
"loss": 1.6016, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.4146185082282539, |
|
"grad_norm": 1.7562576532363892, |
|
"learning_rate": 9.337768517429906e-06, |
|
"loss": 1.6269, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.415473391750374, |
|
"grad_norm": 3.038557291030884, |
|
"learning_rate": 9.334246730366763e-06, |
|
"loss": 1.7471, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.4163282752724941, |
|
"grad_norm": 1.993278980255127, |
|
"learning_rate": 9.330716271365183e-06, |
|
"loss": 1.5104, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.4171831587946142, |
|
"grad_norm": 2.662400007247925, |
|
"learning_rate": 9.32717714748889e-06, |
|
"loss": 1.5185, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.41803804231673436, |
|
"grad_norm": 2.5048787593841553, |
|
"learning_rate": 9.323629365818945e-06, |
|
"loss": 1.6455, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.41889292583885446, |
|
"grad_norm": 1.7933145761489868, |
|
"learning_rate": 9.32007293345373e-06, |
|
"loss": 1.6, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.41974780936097456, |
|
"grad_norm": 1.6806269884109497, |
|
"learning_rate": 9.316507857508936e-06, |
|
"loss": 1.6584, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.42060269288309465, |
|
"grad_norm": 2.03226375579834, |
|
"learning_rate": 9.312934145117552e-06, |
|
"loss": 1.7171, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.4214575764052148, |
|
"grad_norm": 1.8401916027069092, |
|
"learning_rate": 9.30935180342984e-06, |
|
"loss": 1.5444, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.4223124599273349, |
|
"grad_norm": 1.91862154006958, |
|
"learning_rate": 9.305760839613332e-06, |
|
"loss": 1.5491, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.423167343449455, |
|
"grad_norm": 2.2723190784454346, |
|
"learning_rate": 9.302161260852809e-06, |
|
"loss": 1.7911, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.4240222269715751, |
|
"grad_norm": 2.3843624591827393, |
|
"learning_rate": 9.29855307435029e-06, |
|
"loss": 1.4825, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.42487711049369525, |
|
"grad_norm": 13.173310279846191, |
|
"learning_rate": 9.294936287325016e-06, |
|
"loss": 1.6597, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.42573199401581535, |
|
"grad_norm": 1.7834957838058472, |
|
"learning_rate": 9.291310907013435e-06, |
|
"loss": 1.6817, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.42658687753793545, |
|
"grad_norm": 2.1975371837615967, |
|
"learning_rate": 9.287676940669192e-06, |
|
"loss": 1.6483, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.42744176106005555, |
|
"grad_norm": 1.8264548778533936, |
|
"learning_rate": 9.284034395563105e-06, |
|
"loss": 1.5933, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.4282966445821757, |
|
"grad_norm": 1.8206008672714233, |
|
"learning_rate": 9.280383278983161e-06, |
|
"loss": 1.5615, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.4291515281042958, |
|
"grad_norm": 2.0878608226776123, |
|
"learning_rate": 9.276723598234493e-06, |
|
"loss": 1.5504, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.4300064116264159, |
|
"grad_norm": 1.9497365951538086, |
|
"learning_rate": 9.273055360639374e-06, |
|
"loss": 1.5884, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.430861295148536, |
|
"grad_norm": 2.368556022644043, |
|
"learning_rate": 9.269378573537194e-06, |
|
"loss": 1.3559, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.43171617867065615, |
|
"grad_norm": 1.90982186794281, |
|
"learning_rate": 9.265693244284449e-06, |
|
"loss": 1.6106, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.43257106219277625, |
|
"grad_norm": 2.013579845428467, |
|
"learning_rate": 9.261999380254728e-06, |
|
"loss": 1.5972, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.43342594571489634, |
|
"grad_norm": 1.9895778894424438, |
|
"learning_rate": 9.258296988838692e-06, |
|
"loss": 1.6102, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.43428082923701644, |
|
"grad_norm": 1.9256671667099, |
|
"learning_rate": 9.25458607744407e-06, |
|
"loss": 1.5448, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.4351357127591366, |
|
"grad_norm": 1.9200236797332764, |
|
"learning_rate": 9.250866653495631e-06, |
|
"loss": 1.6368, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.4359905962812567, |
|
"grad_norm": 2.1511929035186768, |
|
"learning_rate": 9.247138724435185e-06, |
|
"loss": 1.6384, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.4368454798033768, |
|
"grad_norm": 2.1603779792785645, |
|
"learning_rate": 9.243402297721546e-06, |
|
"loss": 1.4907, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.4377003633254969, |
|
"grad_norm": 1.9871954917907715, |
|
"learning_rate": 9.239657380830543e-06, |
|
"loss": 1.6423, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.438555246847617, |
|
"grad_norm": 1.997007131576538, |
|
"learning_rate": 9.235903981254981e-06, |
|
"loss": 1.5842, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.43941013036973714, |
|
"grad_norm": 1.8282665014266968, |
|
"learning_rate": 9.232142106504648e-06, |
|
"loss": 1.5855, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.44026501389185724, |
|
"grad_norm": 2.0519628524780273, |
|
"learning_rate": 9.228371764106279e-06, |
|
"loss": 1.7629, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.44111989741397734, |
|
"grad_norm": 1.9096105098724365, |
|
"learning_rate": 9.224592961603558e-06, |
|
"loss": 1.6367, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.44197478093609743, |
|
"grad_norm": 2.1138973236083984, |
|
"learning_rate": 9.220805706557094e-06, |
|
"loss": 1.5607, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.4428296644582176, |
|
"grad_norm": 1.8094953298568726, |
|
"learning_rate": 9.217010006544406e-06, |
|
"loss": 1.6805, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.4436845479803377, |
|
"grad_norm": 1.847633957862854, |
|
"learning_rate": 9.213205869159912e-06, |
|
"loss": 1.6118, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.4445394315024578, |
|
"grad_norm": 2.2239439487457275, |
|
"learning_rate": 9.209393302014908e-06, |
|
"loss": 1.4869, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.4453943150245779, |
|
"grad_norm": 1.8056226968765259, |
|
"learning_rate": 9.205572312737562e-06, |
|
"loss": 1.52, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.44624919854669803, |
|
"grad_norm": 2.1382007598876953, |
|
"learning_rate": 9.201742908972886e-06, |
|
"loss": 1.4253, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.44710408206881813, |
|
"grad_norm": 2.0205068588256836, |
|
"learning_rate": 9.197905098382734e-06, |
|
"loss": 1.577, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.44795896559093823, |
|
"grad_norm": 1.8336679935455322, |
|
"learning_rate": 9.194058888645776e-06, |
|
"loss": 1.5969, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.4488138491130583, |
|
"grad_norm": 1.958166241645813, |
|
"learning_rate": 9.19020428745749e-06, |
|
"loss": 1.5589, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.4496687326351785, |
|
"grad_norm": 1.8487937450408936, |
|
"learning_rate": 9.186341302530142e-06, |
|
"loss": 1.8666, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.4505236161572986, |
|
"grad_norm": 1.940935492515564, |
|
"learning_rate": 9.18246994159277e-06, |
|
"loss": 1.7236, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.4513784996794187, |
|
"grad_norm": 1.7989500761032104, |
|
"learning_rate": 9.178590212391177e-06, |
|
"loss": 1.3875, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.4522333832015388, |
|
"grad_norm": 2.0304832458496094, |
|
"learning_rate": 9.1747021226879e-06, |
|
"loss": 1.7533, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.45308826672365893, |
|
"grad_norm": 1.8041130304336548, |
|
"learning_rate": 9.170805680262212e-06, |
|
"loss": 1.6563, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.453943150245779, |
|
"grad_norm": 1.8609191179275513, |
|
"learning_rate": 9.166900892910095e-06, |
|
"loss": 1.6209, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.4547980337678991, |
|
"grad_norm": 1.9766689538955688, |
|
"learning_rate": 9.162987768444225e-06, |
|
"loss": 1.6673, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.4556529172900192, |
|
"grad_norm": 1.8203067779541016, |
|
"learning_rate": 9.159066314693965e-06, |
|
"loss": 1.5735, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.4565078008121393, |
|
"grad_norm": 1.907524824142456, |
|
"learning_rate": 9.155136539505337e-06, |
|
"loss": 1.7929, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.4573626843342595, |
|
"grad_norm": 2.2627012729644775, |
|
"learning_rate": 9.151198450741013e-06, |
|
"loss": 1.4976, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.45821756785637957, |
|
"grad_norm": 2.3210504055023193, |
|
"learning_rate": 9.147252056280305e-06, |
|
"loss": 1.6316, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.45907245137849967, |
|
"grad_norm": 1.8242363929748535, |
|
"learning_rate": 9.143297364019134e-06, |
|
"loss": 1.5303, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.45992733490061977, |
|
"grad_norm": 2.0939862728118896, |
|
"learning_rate": 9.139334381870032e-06, |
|
"loss": 1.6881, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.4607822184227399, |
|
"grad_norm": 2.001176595687866, |
|
"learning_rate": 9.135363117762111e-06, |
|
"loss": 1.7572, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.46163710194486, |
|
"grad_norm": 1.824602484703064, |
|
"learning_rate": 9.131383579641055e-06, |
|
"loss": 1.6849, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.4624919854669801, |
|
"grad_norm": 1.765763521194458, |
|
"learning_rate": 9.127395775469105e-06, |
|
"loss": 1.6392, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.4633468689891002, |
|
"grad_norm": 2.4184436798095703, |
|
"learning_rate": 9.123399713225039e-06, |
|
"loss": 1.496, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.46420175251122037, |
|
"grad_norm": 1.7558503150939941, |
|
"learning_rate": 9.119395400904154e-06, |
|
"loss": 1.7759, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.46505663603334046, |
|
"grad_norm": 2.245574474334717, |
|
"learning_rate": 9.115382846518263e-06, |
|
"loss": 1.6061, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.46591151955546056, |
|
"grad_norm": 2.0234146118164062, |
|
"learning_rate": 9.11136205809566e-06, |
|
"loss": 1.4992, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.46676640307758066, |
|
"grad_norm": 1.8050514459609985, |
|
"learning_rate": 9.10733304368112e-06, |
|
"loss": 1.6266, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.4676212865997008, |
|
"grad_norm": 1.7917343378067017, |
|
"learning_rate": 9.103295811335873e-06, |
|
"loss": 1.8325, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.4684761701218209, |
|
"grad_norm": 1.9059429168701172, |
|
"learning_rate": 9.099250369137593e-06, |
|
"loss": 1.7297, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.469331053643941, |
|
"grad_norm": 1.9517232179641724, |
|
"learning_rate": 9.095196725180378e-06, |
|
"loss": 1.7357, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.4701859371660611, |
|
"grad_norm": 2.318387985229492, |
|
"learning_rate": 9.09113488757474e-06, |
|
"loss": 1.5877, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.47104082068818126, |
|
"grad_norm": 1.796807885169983, |
|
"learning_rate": 9.087064864447583e-06, |
|
"loss": 1.5986, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.47189570421030136, |
|
"grad_norm": 1.8475953340530396, |
|
"learning_rate": 9.082986663942187e-06, |
|
"loss": 1.6098, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.47275058773242146, |
|
"grad_norm": 1.9878709316253662, |
|
"learning_rate": 9.078900294218196e-06, |
|
"loss": 1.7224, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.47360547125454155, |
|
"grad_norm": 1.9042646884918213, |
|
"learning_rate": 9.074805763451595e-06, |
|
"loss": 1.5801, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.47446035477666165, |
|
"grad_norm": 2.0457966327667236, |
|
"learning_rate": 9.070703079834701e-06, |
|
"loss": 1.4993, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.4753152382987818, |
|
"grad_norm": 1.8046479225158691, |
|
"learning_rate": 9.066592251576143e-06, |
|
"loss": 1.7765, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.4761701218209019, |
|
"grad_norm": 1.9781793355941772, |
|
"learning_rate": 9.062473286900843e-06, |
|
"loss": 1.4311, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.477025005343022, |
|
"grad_norm": 2.416982889175415, |
|
"learning_rate": 9.058346194050007e-06, |
|
"loss": 1.5628, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.4778798888651421, |
|
"grad_norm": 2.044501781463623, |
|
"learning_rate": 9.0542109812811e-06, |
|
"loss": 1.5452, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.47873477238726225, |
|
"grad_norm": 1.9374364614486694, |
|
"learning_rate": 9.050067656867833e-06, |
|
"loss": 1.7013, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.47958965590938235, |
|
"grad_norm": 1.9821714162826538, |
|
"learning_rate": 9.045916229100148e-06, |
|
"loss": 1.5827, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.48044453943150245, |
|
"grad_norm": 1.80966055393219, |
|
"learning_rate": 9.041756706284201e-06, |
|
"loss": 1.5384, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.48129942295362255, |
|
"grad_norm": 1.874300241470337, |
|
"learning_rate": 9.037589096742346e-06, |
|
"loss": 1.9239, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.4821543064757427, |
|
"grad_norm": 2.0358619689941406, |
|
"learning_rate": 9.033413408813113e-06, |
|
"loss": 1.4334, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.4830091899978628, |
|
"grad_norm": 2.362205982208252, |
|
"learning_rate": 9.029229650851192e-06, |
|
"loss": 1.4202, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.4838640735199829, |
|
"grad_norm": 2.374218225479126, |
|
"learning_rate": 9.025037831227433e-06, |
|
"loss": 1.4886, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.484718957042103, |
|
"grad_norm": 2.0200891494750977, |
|
"learning_rate": 9.020837958328799e-06, |
|
"loss": 1.5932, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.48557384056422315, |
|
"grad_norm": 1.9744147062301636, |
|
"learning_rate": 9.01663004055838e-06, |
|
"loss": 1.4954, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.48642872408634324, |
|
"grad_norm": 1.8265093564987183, |
|
"learning_rate": 9.012414086335353e-06, |
|
"loss": 1.7541, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.48728360760846334, |
|
"grad_norm": 2.042421340942383, |
|
"learning_rate": 9.008190104094978e-06, |
|
"loss": 1.5381, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.48813849113058344, |
|
"grad_norm": 1.9857375621795654, |
|
"learning_rate": 9.003958102288575e-06, |
|
"loss": 1.3851, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.4889933746527036, |
|
"grad_norm": 1.9522275924682617, |
|
"learning_rate": 8.999718089383516e-06, |
|
"loss": 1.5013, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.4898482581748237, |
|
"grad_norm": 2.621446371078491, |
|
"learning_rate": 8.995470073863192e-06, |
|
"loss": 1.5064, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.4907031416969438, |
|
"grad_norm": 1.8776718378067017, |
|
"learning_rate": 8.991214064227015e-06, |
|
"loss": 1.4914, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.4915580252190639, |
|
"grad_norm": 2.4339656829833984, |
|
"learning_rate": 8.986950068990382e-06, |
|
"loss": 1.4783, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.49241290874118404, |
|
"grad_norm": 1.8802049160003662, |
|
"learning_rate": 8.982678096684677e-06, |
|
"loss": 1.5207, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.49326779226330414, |
|
"grad_norm": 1.9528294801712036, |
|
"learning_rate": 8.978398155857237e-06, |
|
"loss": 1.4501, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.49412267578542424, |
|
"grad_norm": 4.20005464553833, |
|
"learning_rate": 8.974110255071346e-06, |
|
"loss": 1.6392, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.49497755930754433, |
|
"grad_norm": 2.0071566104888916, |
|
"learning_rate": 8.969814402906217e-06, |
|
"loss": 1.6073, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.49583244282966443, |
|
"grad_norm": 1.8972854614257812, |
|
"learning_rate": 8.965510607956966e-06, |
|
"loss": 1.4545, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.4966873263517846, |
|
"grad_norm": 1.9058289527893066, |
|
"learning_rate": 8.961198878834602e-06, |
|
"loss": 1.4867, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.4975422098739047, |
|
"grad_norm": 1.9958810806274414, |
|
"learning_rate": 8.956879224166014e-06, |
|
"loss": 1.8224, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.4983970933960248, |
|
"grad_norm": 1.8081709146499634, |
|
"learning_rate": 8.952551652593943e-06, |
|
"loss": 1.7404, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.4992519769181449, |
|
"grad_norm": 2.774141550064087, |
|
"learning_rate": 8.948216172776971e-06, |
|
"loss": 1.7241, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.500106860440265, |
|
"grad_norm": 2.1106200218200684, |
|
"learning_rate": 8.943872793389505e-06, |
|
"loss": 1.5724, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.5009617439623851, |
|
"grad_norm": 2.0356194972991943, |
|
"learning_rate": 8.939521523121755e-06, |
|
"loss": 1.6468, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.5009617439623851, |
|
"eval_loss": 1.6289023160934448, |
|
"eval_runtime": 534.394, |
|
"eval_samples_per_second": 7.403, |
|
"eval_steps_per_second": 3.701, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.5018166274845053, |
|
"grad_norm": 1.933261513710022, |
|
"learning_rate": 8.93516237067972e-06, |
|
"loss": 1.657, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.5026715110066253, |
|
"grad_norm": 2.1306746006011963, |
|
"learning_rate": 8.93079534478517e-06, |
|
"loss": 1.6718, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.5035263945287455, |
|
"grad_norm": 1.6752455234527588, |
|
"learning_rate": 8.92642045417563e-06, |
|
"loss": 1.7148, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.5043812780508655, |
|
"grad_norm": 1.8051491975784302, |
|
"learning_rate": 8.922037707604355e-06, |
|
"loss": 1.6413, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.5052361615729857, |
|
"grad_norm": 1.8917031288146973, |
|
"learning_rate": 8.917647113840323e-06, |
|
"loss": 1.5288, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.5060910450951058, |
|
"grad_norm": 2.0400190353393555, |
|
"learning_rate": 8.91324868166821e-06, |
|
"loss": 1.4745, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.5069459286172259, |
|
"grad_norm": 2.0113611221313477, |
|
"learning_rate": 8.908842419888379e-06, |
|
"loss": 1.6355, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.507800812139346, |
|
"grad_norm": 1.8284205198287964, |
|
"learning_rate": 8.904428337316855e-06, |
|
"loss": 1.5299, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.5086556956614662, |
|
"grad_norm": 1.8652138710021973, |
|
"learning_rate": 8.900006442785312e-06, |
|
"loss": 1.7343, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.5095105791835862, |
|
"grad_norm": 1.8095743656158447, |
|
"learning_rate": 8.89557674514105e-06, |
|
"loss": 1.7715, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.5103654627057064, |
|
"grad_norm": 2.018333911895752, |
|
"learning_rate": 8.89113925324699e-06, |
|
"loss": 1.6964, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.5112203462278264, |
|
"grad_norm": 2.0537750720977783, |
|
"learning_rate": 8.88669397598164e-06, |
|
"loss": 1.4207, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.5120752297499466, |
|
"grad_norm": 1.9517449140548706, |
|
"learning_rate": 8.882240922239089e-06, |
|
"loss": 1.6236, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.5129301132720667, |
|
"grad_norm": 1.8960734605789185, |
|
"learning_rate": 8.877780100928984e-06, |
|
"loss": 1.6692, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.5137849967941868, |
|
"grad_norm": 2.1357903480529785, |
|
"learning_rate": 8.87331152097651e-06, |
|
"loss": 1.9005, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.5146398803163069, |
|
"grad_norm": 2.6492884159088135, |
|
"learning_rate": 8.868835191322384e-06, |
|
"loss": 1.5357, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.515494763838427, |
|
"grad_norm": 3.4608476161956787, |
|
"learning_rate": 8.864351120922819e-06, |
|
"loss": 1.6542, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.5163496473605471, |
|
"grad_norm": 2.1841821670532227, |
|
"learning_rate": 8.859859318749522e-06, |
|
"loss": 1.6261, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.5172045308826673, |
|
"grad_norm": 1.922024130821228, |
|
"learning_rate": 8.855359793789667e-06, |
|
"loss": 1.7292, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.5180594144047873, |
|
"grad_norm": 1.988054633140564, |
|
"learning_rate": 8.85085255504588e-06, |
|
"loss": 1.7862, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.5189142979269075, |
|
"grad_norm": 1.9861512184143066, |
|
"learning_rate": 8.84633761153622e-06, |
|
"loss": 1.6567, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.5197691814490276, |
|
"grad_norm": 2.0631234645843506, |
|
"learning_rate": 8.841814972294166e-06, |
|
"loss": 1.3969, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.5206240649711477, |
|
"grad_norm": 1.9361271858215332, |
|
"learning_rate": 8.837284646368586e-06, |
|
"loss": 1.4913, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.5214789484932678, |
|
"grad_norm": 2.061472177505493, |
|
"learning_rate": 8.832746642823738e-06, |
|
"loss": 1.6651, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.5223338320153879, |
|
"grad_norm": 2.5141899585723877, |
|
"learning_rate": 8.82820097073923e-06, |
|
"loss": 1.6464, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.523188715537508, |
|
"grad_norm": 2.2343900203704834, |
|
"learning_rate": 8.823647639210027e-06, |
|
"loss": 1.3821, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.5240435990596282, |
|
"grad_norm": 1.7745505571365356, |
|
"learning_rate": 8.819086657346405e-06, |
|
"loss": 1.562, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.5248984825817482, |
|
"grad_norm": 1.795174241065979, |
|
"learning_rate": 8.814518034273956e-06, |
|
"loss": 1.7203, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.5257533661038684, |
|
"grad_norm": 1.829896330833435, |
|
"learning_rate": 8.809941779133555e-06, |
|
"loss": 1.6731, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.5266082496259885, |
|
"grad_norm": 1.787853479385376, |
|
"learning_rate": 8.805357901081352e-06, |
|
"loss": 1.5737, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.5274631331481086, |
|
"grad_norm": 1.8447085618972778, |
|
"learning_rate": 8.800766409288745e-06, |
|
"loss": 1.7847, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.5283180166702287, |
|
"grad_norm": 2.183621644973755, |
|
"learning_rate": 8.796167312942367e-06, |
|
"loss": 1.3627, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.5291729001923487, |
|
"grad_norm": 1.943145513534546, |
|
"learning_rate": 8.791560621244067e-06, |
|
"loss": 1.6397, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.5300277837144689, |
|
"grad_norm": 3.3538787364959717, |
|
"learning_rate": 8.786946343410889e-06, |
|
"loss": 1.8127, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.5308826672365891, |
|
"grad_norm": 1.7246778011322021, |
|
"learning_rate": 8.782324488675059e-06, |
|
"loss": 1.5005, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.5317375507587091, |
|
"grad_norm": 2.007582187652588, |
|
"learning_rate": 8.777695066283955e-06, |
|
"loss": 1.4858, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.5325924342808293, |
|
"grad_norm": 1.7573378086090088, |
|
"learning_rate": 8.773058085500106e-06, |
|
"loss": 1.5858, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.5334473178029493, |
|
"grad_norm": 2.4984254837036133, |
|
"learning_rate": 8.768413555601156e-06, |
|
"loss": 1.6391, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.5343022013250694, |
|
"grad_norm": 2.0162014961242676, |
|
"learning_rate": 8.763761485879858e-06, |
|
"loss": 1.622, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.5351570848471896, |
|
"grad_norm": 2.1535332202911377, |
|
"learning_rate": 8.75910188564405e-06, |
|
"loss": 1.6092, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.5360119683693096, |
|
"grad_norm": 2.0587708950042725, |
|
"learning_rate": 8.754434764216632e-06, |
|
"loss": 1.6507, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.5368668518914298, |
|
"grad_norm": 2.0657832622528076, |
|
"learning_rate": 8.749760130935564e-06, |
|
"loss": 1.5106, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.53772173541355, |
|
"grad_norm": 3.3556106090545654, |
|
"learning_rate": 8.74507799515382e-06, |
|
"loss": 1.6122, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.53857661893567, |
|
"grad_norm": 1.8298861980438232, |
|
"learning_rate": 8.740388366239397e-06, |
|
"loss": 1.7357, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.5394315024577901, |
|
"grad_norm": 2.4920873641967773, |
|
"learning_rate": 8.735691253575278e-06, |
|
"loss": 1.5525, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.5402863859799102, |
|
"grad_norm": 1.8337547779083252, |
|
"learning_rate": 8.730986666559425e-06, |
|
"loss": 1.6677, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.5411412695020303, |
|
"grad_norm": 2.205390453338623, |
|
"learning_rate": 8.726274614604749e-06, |
|
"loss": 1.7129, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.5419961530241505, |
|
"grad_norm": 1.8543306589126587, |
|
"learning_rate": 8.721555107139102e-06, |
|
"loss": 1.4831, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.5428510365462705, |
|
"grad_norm": 1.7591562271118164, |
|
"learning_rate": 8.716828153605245e-06, |
|
"loss": 1.6421, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.5437059200683907, |
|
"grad_norm": 2.9822521209716797, |
|
"learning_rate": 8.712093763460846e-06, |
|
"loss": 1.4437, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.5445608035905108, |
|
"grad_norm": 1.7901097536087036, |
|
"learning_rate": 8.70735194617845e-06, |
|
"loss": 1.6315, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.5454156871126309, |
|
"grad_norm": 2.0111300945281982, |
|
"learning_rate": 8.702602711245455e-06, |
|
"loss": 1.5924, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.546270570634751, |
|
"grad_norm": 2.0748534202575684, |
|
"learning_rate": 8.697846068164112e-06, |
|
"loss": 1.4713, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.5471254541568711, |
|
"grad_norm": 1.9600063562393188, |
|
"learning_rate": 8.693082026451481e-06, |
|
"loss": 1.7141, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.5479803376789912, |
|
"grad_norm": 2.1216888427734375, |
|
"learning_rate": 8.688310595639437e-06, |
|
"loss": 1.5694, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.5488352212011114, |
|
"grad_norm": 1.7273600101470947, |
|
"learning_rate": 8.683531785274632e-06, |
|
"loss": 1.7303, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.5496901047232314, |
|
"grad_norm": 2.0742030143737793, |
|
"learning_rate": 8.678745604918486e-06, |
|
"loss": 1.6376, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.5505449882453516, |
|
"grad_norm": 1.9716432094573975, |
|
"learning_rate": 8.673952064147166e-06, |
|
"loss": 1.4841, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.5513998717674716, |
|
"grad_norm": 2.9787380695343018, |
|
"learning_rate": 8.669151172551558e-06, |
|
"loss": 1.6401, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.5522547552895918, |
|
"grad_norm": 2.2232494354248047, |
|
"learning_rate": 8.664342939737265e-06, |
|
"loss": 1.4223, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.5531096388117119, |
|
"grad_norm": 2.1428375244140625, |
|
"learning_rate": 8.659527375324574e-06, |
|
"loss": 1.7418, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.553964522333832, |
|
"grad_norm": 1.9772781133651733, |
|
"learning_rate": 8.654704488948442e-06, |
|
"loss": 1.7229, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.5548194058559521, |
|
"grad_norm": 2.106757164001465, |
|
"learning_rate": 8.649874290258473e-06, |
|
"loss": 1.567, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.5556742893780723, |
|
"grad_norm": 2.3366143703460693, |
|
"learning_rate": 8.645036788918904e-06, |
|
"loss": 1.2882, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.5565291729001923, |
|
"grad_norm": 2.5621304512023926, |
|
"learning_rate": 8.640191994608585e-06, |
|
"loss": 1.6807, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.5573840564223125, |
|
"grad_norm": 1.8327507972717285, |
|
"learning_rate": 8.635339917020952e-06, |
|
"loss": 1.5425, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.5582389399444325, |
|
"grad_norm": 1.9729241132736206, |
|
"learning_rate": 8.63048056586402e-06, |
|
"loss": 1.6851, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.5590938234665527, |
|
"grad_norm": 2.0836198329925537, |
|
"learning_rate": 8.625613950860352e-06, |
|
"loss": 1.4055, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.5599487069886728, |
|
"grad_norm": 1.7130488157272339, |
|
"learning_rate": 8.620740081747044e-06, |
|
"loss": 1.7865, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.5608035905107929, |
|
"grad_norm": 2.0575125217437744, |
|
"learning_rate": 8.615858968275712e-06, |
|
"loss": 1.6653, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.561658474032913, |
|
"grad_norm": 1.9657676219940186, |
|
"learning_rate": 8.610970620212461e-06, |
|
"loss": 1.5761, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.5625133575550332, |
|
"grad_norm": 1.7400262355804443, |
|
"learning_rate": 8.606075047337872e-06, |
|
"loss": 1.7633, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.5633682410771532, |
|
"grad_norm": 1.8632696866989136, |
|
"learning_rate": 8.601172259446981e-06, |
|
"loss": 1.5333, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.5642231245992734, |
|
"grad_norm": 2.7947065830230713, |
|
"learning_rate": 8.596262266349265e-06, |
|
"loss": 1.5627, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.5650780081213934, |
|
"grad_norm": 2.0627810955047607, |
|
"learning_rate": 8.591345077868606e-06, |
|
"loss": 1.71, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.5659328916435136, |
|
"grad_norm": 2.519284725189209, |
|
"learning_rate": 8.586420703843297e-06, |
|
"loss": 1.7696, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.5667877751656337, |
|
"grad_norm": 2.0818934440612793, |
|
"learning_rate": 8.581489154125995e-06, |
|
"loss": 1.4629, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.5676426586877538, |
|
"grad_norm": 1.8783445358276367, |
|
"learning_rate": 8.57655043858372e-06, |
|
"loss": 1.3525, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.5684975422098739, |
|
"grad_norm": 2.1661863327026367, |
|
"learning_rate": 8.571604567097827e-06, |
|
"loss": 1.6119, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.569352425731994, |
|
"grad_norm": 1.9182928800582886, |
|
"learning_rate": 8.566651549563993e-06, |
|
"loss": 1.7566, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.5702073092541141, |
|
"grad_norm": 1.9936716556549072, |
|
"learning_rate": 8.561691395892188e-06, |
|
"loss": 1.4571, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.5710621927762343, |
|
"grad_norm": 1.8677414655685425, |
|
"learning_rate": 8.556724116006664e-06, |
|
"loss": 1.5713, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.5719170762983543, |
|
"grad_norm": 1.9004077911376953, |
|
"learning_rate": 8.551749719845928e-06, |
|
"loss": 1.6697, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.5727719598204745, |
|
"grad_norm": 2.03224515914917, |
|
"learning_rate": 8.546768217362724e-06, |
|
"loss": 1.5405, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.5736268433425946, |
|
"grad_norm": 1.912906527519226, |
|
"learning_rate": 8.541779618524018e-06, |
|
"loss": 1.5431, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.5744817268647147, |
|
"grad_norm": 1.7738916873931885, |
|
"learning_rate": 8.536783933310972e-06, |
|
"loss": 1.7385, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.5753366103868348, |
|
"grad_norm": 1.7471853494644165, |
|
"learning_rate": 8.53178117171893e-06, |
|
"loss": 1.7176, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.5761914939089549, |
|
"grad_norm": 2.7117345333099365, |
|
"learning_rate": 8.526771343757387e-06, |
|
"loss": 1.6298, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.577046377431075, |
|
"grad_norm": 1.8789851665496826, |
|
"learning_rate": 8.521754459449984e-06, |
|
"loss": 1.6023, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.5779012609531952, |
|
"grad_norm": 1.9868645668029785, |
|
"learning_rate": 8.516730528834476e-06, |
|
"loss": 1.7187, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.5787561444753152, |
|
"grad_norm": 1.7088147401809692, |
|
"learning_rate": 8.511699561962719e-06, |
|
"loss": 1.5902, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.5796110279974354, |
|
"grad_norm": 1.722632884979248, |
|
"learning_rate": 8.506661568900644e-06, |
|
"loss": 1.6701, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.5804659115195555, |
|
"grad_norm": 1.8633525371551514, |
|
"learning_rate": 8.50161655972824e-06, |
|
"loss": 1.6822, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.5813207950416756, |
|
"grad_norm": 1.9083114862442017, |
|
"learning_rate": 8.496564544539539e-06, |
|
"loss": 1.5115, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.5821756785637957, |
|
"grad_norm": 1.9144874811172485, |
|
"learning_rate": 8.491505533442584e-06, |
|
"loss": 1.6048, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.5830305620859157, |
|
"grad_norm": 1.8423601388931274, |
|
"learning_rate": 8.48643953655942e-06, |
|
"loss": 1.627, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.5838854456080359, |
|
"grad_norm": 1.9638283252716064, |
|
"learning_rate": 8.481366564026065e-06, |
|
"loss": 1.6544, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.5847403291301561, |
|
"grad_norm": 2.422207832336426, |
|
"learning_rate": 8.476286625992499e-06, |
|
"loss": 1.7425, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.5855952126522761, |
|
"grad_norm": 3.2921395301818848, |
|
"learning_rate": 8.471199732622633e-06, |
|
"loss": 1.598, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.5864500961743963, |
|
"grad_norm": 1.8565130233764648, |
|
"learning_rate": 8.466105894094298e-06, |
|
"loss": 1.4855, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.5873049796965164, |
|
"grad_norm": 2.0620574951171875, |
|
"learning_rate": 8.461005120599222e-06, |
|
"loss": 1.5109, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.5881598632186364, |
|
"grad_norm": 1.9637583494186401, |
|
"learning_rate": 8.455897422343003e-06, |
|
"loss": 1.6503, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.5890147467407566, |
|
"grad_norm": 7.110237121582031, |
|
"learning_rate": 8.450782809545096e-06, |
|
"loss": 1.6213, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.5898696302628766, |
|
"grad_norm": 2.607851505279541, |
|
"learning_rate": 8.445661292438799e-06, |
|
"loss": 1.7967, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.5907245137849968, |
|
"grad_norm": 1.8419544696807861, |
|
"learning_rate": 8.440532881271209e-06, |
|
"loss": 1.6102, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.591579397307117, |
|
"grad_norm": 1.9284480810165405, |
|
"learning_rate": 8.435397586303229e-06, |
|
"loss": 1.5749, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.592434280829237, |
|
"grad_norm": 2.60127854347229, |
|
"learning_rate": 8.43025541780953e-06, |
|
"loss": 1.6948, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.5932891643513571, |
|
"grad_norm": 1.9099665880203247, |
|
"learning_rate": 8.425106386078535e-06, |
|
"loss": 1.614, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.5941440478734772, |
|
"grad_norm": 3.712435245513916, |
|
"learning_rate": 8.419950501412402e-06, |
|
"loss": 1.7601, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.5949989313955973, |
|
"grad_norm": 2.350602149963379, |
|
"learning_rate": 8.414787774126994e-06, |
|
"loss": 1.4524, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.5958538149177175, |
|
"grad_norm": 1.8215548992156982, |
|
"learning_rate": 8.409618214551874e-06, |
|
"loss": 1.7657, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.5967086984398375, |
|
"grad_norm": 1.7211189270019531, |
|
"learning_rate": 8.404441833030269e-06, |
|
"loss": 1.5622, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.5975635819619577, |
|
"grad_norm": 1.90372896194458, |
|
"learning_rate": 8.399258639919053e-06, |
|
"loss": 1.6278, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.5984184654840778, |
|
"grad_norm": 2.5654449462890625, |
|
"learning_rate": 8.394068645588735e-06, |
|
"loss": 1.3127, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.5992733490061979, |
|
"grad_norm": 1.8627386093139648, |
|
"learning_rate": 8.38887186042343e-06, |
|
"loss": 1.6181, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.600128232528318, |
|
"grad_norm": 2.154141426086426, |
|
"learning_rate": 8.383668294820835e-06, |
|
"loss": 1.6831, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.6009831160504381, |
|
"grad_norm": 2.0848171710968018, |
|
"learning_rate": 8.37845795919222e-06, |
|
"loss": 1.513, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.6018379995725582, |
|
"grad_norm": 2.3841915130615234, |
|
"learning_rate": 8.373240863962397e-06, |
|
"loss": 1.7102, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.6026928830946784, |
|
"grad_norm": 1.8190158605575562, |
|
"learning_rate": 8.368017019569704e-06, |
|
"loss": 1.7375, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.6035477666167984, |
|
"grad_norm": 1.9176008701324463, |
|
"learning_rate": 8.36278643646598e-06, |
|
"loss": 1.6402, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.6044026501389186, |
|
"grad_norm": 1.7704726457595825, |
|
"learning_rate": 8.357549125116548e-06, |
|
"loss": 1.7303, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.6052575336610387, |
|
"grad_norm": 2.164355993270874, |
|
"learning_rate": 8.352305096000197e-06, |
|
"loss": 1.6992, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.6061124171831588, |
|
"grad_norm": 1.8429981470108032, |
|
"learning_rate": 8.34705435960915e-06, |
|
"loss": 1.492, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.6069673007052789, |
|
"grad_norm": 2.5694799423217773, |
|
"learning_rate": 8.341796926449053e-06, |
|
"loss": 1.4145, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.607822184227399, |
|
"grad_norm": 1.7566078901290894, |
|
"learning_rate": 8.336532807038955e-06, |
|
"loss": 1.5889, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.6086770677495191, |
|
"grad_norm": 1.7982147932052612, |
|
"learning_rate": 8.331262011911276e-06, |
|
"loss": 1.4725, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.6095319512716393, |
|
"grad_norm": 2.6575653553009033, |
|
"learning_rate": 8.325984551611795e-06, |
|
"loss": 1.3907, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.6103868347937593, |
|
"grad_norm": 1.905247688293457, |
|
"learning_rate": 8.32070043669963e-06, |
|
"loss": 1.4379, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.6112417183158795, |
|
"grad_norm": 2.1292531490325928, |
|
"learning_rate": 8.315409677747207e-06, |
|
"loss": 1.5796, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.6120966018379995, |
|
"grad_norm": 2.0380868911743164, |
|
"learning_rate": 8.310112285340253e-06, |
|
"loss": 1.5329, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.6129514853601197, |
|
"grad_norm": 2.1652815341949463, |
|
"learning_rate": 8.30480827007776e-06, |
|
"loss": 1.5661, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.6138063688822398, |
|
"grad_norm": 1.880020260810852, |
|
"learning_rate": 8.299497642571976e-06, |
|
"loss": 1.4771, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.6146612524043599, |
|
"grad_norm": 2.5005481243133545, |
|
"learning_rate": 8.294180413448375e-06, |
|
"loss": 1.559, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.61551613592648, |
|
"grad_norm": 2.4894957542419434, |
|
"learning_rate": 8.288856593345647e-06, |
|
"loss": 1.8233, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.6163710194486002, |
|
"grad_norm": 2.2092738151550293, |
|
"learning_rate": 8.28352619291566e-06, |
|
"loss": 1.5552, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.6172259029707202, |
|
"grad_norm": 1.7766340970993042, |
|
"learning_rate": 8.27818922282345e-06, |
|
"loss": 1.5893, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.6180807864928404, |
|
"grad_norm": 2.231019973754883, |
|
"learning_rate": 8.2728456937472e-06, |
|
"loss": 1.4948, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.6189356700149604, |
|
"grad_norm": 1.745932936668396, |
|
"learning_rate": 8.267495616378218e-06, |
|
"loss": 1.5268, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.6197905535370806, |
|
"grad_norm": 2.0638906955718994, |
|
"learning_rate": 8.262139001420908e-06, |
|
"loss": 1.6511, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.6206454370592007, |
|
"grad_norm": 2.589869976043701, |
|
"learning_rate": 8.25677585959276e-06, |
|
"loss": 1.4582, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.6215003205813208, |
|
"grad_norm": 1.792433500289917, |
|
"learning_rate": 8.25140620162432e-06, |
|
"loss": 1.8182, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.6223552041034409, |
|
"grad_norm": 2.4035418033599854, |
|
"learning_rate": 8.246030038259168e-06, |
|
"loss": 1.6119, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.6232100876255611, |
|
"grad_norm": 1.8025459051132202, |
|
"learning_rate": 8.24064738025391e-06, |
|
"loss": 1.6602, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.6240649711476811, |
|
"grad_norm": 2.1396055221557617, |
|
"learning_rate": 8.235258238378137e-06, |
|
"loss": 1.7282, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.6249198546698013, |
|
"grad_norm": 2.219912052154541, |
|
"learning_rate": 8.229862623414414e-06, |
|
"loss": 1.4117, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.6257747381919213, |
|
"grad_norm": 2.3299050331115723, |
|
"learning_rate": 8.224460546158263e-06, |
|
"loss": 1.8685, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.6266296217140415, |
|
"grad_norm": 1.8506113290786743, |
|
"learning_rate": 8.219052017418132e-06, |
|
"loss": 1.6644, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.6274845052361616, |
|
"grad_norm": 1.7105852365493774, |
|
"learning_rate": 8.213637048015377e-06, |
|
"loss": 1.5937, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.6283393887582817, |
|
"grad_norm": 2.0613906383514404, |
|
"learning_rate": 8.208215648784239e-06, |
|
"loss": 1.4237, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.6291942722804018, |
|
"grad_norm": 1.8658533096313477, |
|
"learning_rate": 8.202787830571827e-06, |
|
"loss": 1.5703, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.6300491558025219, |
|
"grad_norm": 1.8278007507324219, |
|
"learning_rate": 8.197353604238092e-06, |
|
"loss": 1.6099, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.630904039324642, |
|
"grad_norm": 1.771121621131897, |
|
"learning_rate": 8.191912980655805e-06, |
|
"loss": 1.6359, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.6317589228467622, |
|
"grad_norm": 1.960795283317566, |
|
"learning_rate": 8.186465970710538e-06, |
|
"loss": 1.4287, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.6326138063688822, |
|
"grad_norm": 2.8525185585021973, |
|
"learning_rate": 8.18101258530064e-06, |
|
"loss": 1.6827, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.6334686898910024, |
|
"grad_norm": 2.644296646118164, |
|
"learning_rate": 8.175552835337217e-06, |
|
"loss": 1.6391, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.6343235734131225, |
|
"grad_norm": 1.9095778465270996, |
|
"learning_rate": 8.170086731744108e-06, |
|
"loss": 1.4497, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.6351784569352426, |
|
"grad_norm": 2.616870403289795, |
|
"learning_rate": 8.164614285457865e-06, |
|
"loss": 1.603, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.6360333404573627, |
|
"grad_norm": 2.0195326805114746, |
|
"learning_rate": 8.159135507427728e-06, |
|
"loss": 1.7662, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.6368882239794827, |
|
"grad_norm": 1.9145680665969849, |
|
"learning_rate": 8.153650408615612e-06, |
|
"loss": 1.6994, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.6377431075016029, |
|
"grad_norm": 1.7627846002578735, |
|
"learning_rate": 8.148158999996072e-06, |
|
"loss": 1.6093, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.638597991023723, |
|
"grad_norm": 2.2810473442077637, |
|
"learning_rate": 8.142661292556289e-06, |
|
"loss": 1.5305, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.6394528745458431, |
|
"grad_norm": 1.902381420135498, |
|
"learning_rate": 8.13715729729605e-06, |
|
"loss": 1.5481, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.6403077580679633, |
|
"grad_norm": 2.120807647705078, |
|
"learning_rate": 8.131647025227718e-06, |
|
"loss": 1.4634, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.6411626415900834, |
|
"grad_norm": 1.7931264638900757, |
|
"learning_rate": 8.126130487376217e-06, |
|
"loss": 1.5927, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.6420175251122034, |
|
"grad_norm": 2.521458864212036, |
|
"learning_rate": 8.12060769477901e-06, |
|
"loss": 1.494, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.6428724086343236, |
|
"grad_norm": 1.7701094150543213, |
|
"learning_rate": 8.115078658486069e-06, |
|
"loss": 1.8399, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.6437272921564436, |
|
"grad_norm": 1.8709700107574463, |
|
"learning_rate": 8.109543389559861e-06, |
|
"loss": 1.6305, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.6445821756785638, |
|
"grad_norm": 1.9467986822128296, |
|
"learning_rate": 8.104001899075326e-06, |
|
"loss": 1.4801, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.645437059200684, |
|
"grad_norm": 1.808092713356018, |
|
"learning_rate": 8.098454198119847e-06, |
|
"loss": 1.5535, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.646291942722804, |
|
"grad_norm": 2.0982744693756104, |
|
"learning_rate": 8.092900297793238e-06, |
|
"loss": 1.3704, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.6471468262449241, |
|
"grad_norm": 1.8450520038604736, |
|
"learning_rate": 8.087340209207713e-06, |
|
"loss": 1.5287, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.6480017097670442, |
|
"grad_norm": 1.7327725887298584, |
|
"learning_rate": 8.081773943487868e-06, |
|
"loss": 1.7342, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.6488565932891643, |
|
"grad_norm": 1.7313075065612793, |
|
"learning_rate": 8.076201511770656e-06, |
|
"loss": 1.4301, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.6497114768112845, |
|
"grad_norm": 1.9029062986373901, |
|
"learning_rate": 8.070622925205373e-06, |
|
"loss": 1.7459, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.6505663603334045, |
|
"grad_norm": 1.8386952877044678, |
|
"learning_rate": 8.065038194953629e-06, |
|
"loss": 1.596, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.6514212438555247, |
|
"grad_norm": 1.8669521808624268, |
|
"learning_rate": 8.059447332189317e-06, |
|
"loss": 1.5973, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.6522761273776448, |
|
"grad_norm": 1.802152395248413, |
|
"learning_rate": 8.053850348098609e-06, |
|
"loss": 1.7115, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.6531310108997649, |
|
"grad_norm": 1.7956647872924805, |
|
"learning_rate": 8.048247253879922e-06, |
|
"loss": 1.73, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.653985894421885, |
|
"grad_norm": 1.870807409286499, |
|
"learning_rate": 8.042638060743898e-06, |
|
"loss": 1.4755, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.6548407779440051, |
|
"grad_norm": 1.993764042854309, |
|
"learning_rate": 8.037022779913382e-06, |
|
"loss": 1.2867, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.6556956614661252, |
|
"grad_norm": 1.7236435413360596, |
|
"learning_rate": 8.031401422623398e-06, |
|
"loss": 1.8303, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.6565505449882454, |
|
"grad_norm": 1.9870823621749878, |
|
"learning_rate": 8.02577400012113e-06, |
|
"loss": 1.6378, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.6574054285103654, |
|
"grad_norm": 1.9288864135742188, |
|
"learning_rate": 8.020140523665891e-06, |
|
"loss": 1.5775, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.6582603120324856, |
|
"grad_norm": 1.7029926776885986, |
|
"learning_rate": 8.014501004529118e-06, |
|
"loss": 1.53, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.6591151955546057, |
|
"grad_norm": 1.9869303703308105, |
|
"learning_rate": 8.00885545399433e-06, |
|
"loss": 1.4813, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.6599700790767258, |
|
"grad_norm": 1.790546178817749, |
|
"learning_rate": 8.003203883357113e-06, |
|
"loss": 1.7271, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.6608249625988459, |
|
"grad_norm": 1.8250300884246826, |
|
"learning_rate": 7.9975463039251e-06, |
|
"loss": 1.682, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.661679846120966, |
|
"grad_norm": 1.9827724695205688, |
|
"learning_rate": 7.99188272701795e-06, |
|
"loss": 1.6004, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.6625347296430861, |
|
"grad_norm": 2.0887629985809326, |
|
"learning_rate": 7.986213163967319e-06, |
|
"loss": 1.4962, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.6633896131652063, |
|
"grad_norm": 1.8804274797439575, |
|
"learning_rate": 7.980537626116833e-06, |
|
"loss": 1.6097, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.6642444966873263, |
|
"grad_norm": 1.977057695388794, |
|
"learning_rate": 7.974856124822084e-06, |
|
"loss": 1.7107, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.6650993802094465, |
|
"grad_norm": 1.8675955533981323, |
|
"learning_rate": 7.969168671450589e-06, |
|
"loss": 1.5859, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.6659542637315665, |
|
"grad_norm": 1.7858625650405884, |
|
"learning_rate": 7.963475277381776e-06, |
|
"loss": 1.6198, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.6668091472536867, |
|
"grad_norm": 1.965219259262085, |
|
"learning_rate": 7.957775954006958e-06, |
|
"loss": 1.7028, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.6676640307758068, |
|
"grad_norm": 2.2767865657806396, |
|
"learning_rate": 7.952070712729312e-06, |
|
"loss": 1.6028, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.6685189142979269, |
|
"grad_norm": 2.385225772857666, |
|
"learning_rate": 7.946359564963852e-06, |
|
"loss": 1.4741, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.669373797820047, |
|
"grad_norm": 1.8306634426116943, |
|
"learning_rate": 7.94064252213742e-06, |
|
"loss": 1.5779, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.6702286813421672, |
|
"grad_norm": 1.881840705871582, |
|
"learning_rate": 7.934919595688639e-06, |
|
"loss": 1.5447, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.6710835648642872, |
|
"grad_norm": 2.4943602085113525, |
|
"learning_rate": 7.929190797067913e-06, |
|
"loss": 1.5807, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.6719384483864074, |
|
"grad_norm": 1.7710949182510376, |
|
"learning_rate": 7.923456137737396e-06, |
|
"loss": 1.5805, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.6727933319085274, |
|
"grad_norm": 1.6917473077774048, |
|
"learning_rate": 7.917715629170957e-06, |
|
"loss": 1.5093, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.6736482154306476, |
|
"grad_norm": 2.0921974182128906, |
|
"learning_rate": 7.911969282854184e-06, |
|
"loss": 1.6146, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.6745030989527677, |
|
"grad_norm": 2.2254960536956787, |
|
"learning_rate": 7.90621711028433e-06, |
|
"loss": 1.7173, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.6753579824748878, |
|
"grad_norm": 1.8926316499710083, |
|
"learning_rate": 7.900459122970318e-06, |
|
"loss": 1.6318, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.6762128659970079, |
|
"grad_norm": 2.05529522895813, |
|
"learning_rate": 7.894695332432695e-06, |
|
"loss": 1.5956, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.6770677495191281, |
|
"grad_norm": 1.8146142959594727, |
|
"learning_rate": 7.888925750203627e-06, |
|
"loss": 1.6321, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.6779226330412481, |
|
"grad_norm": 2.046912431716919, |
|
"learning_rate": 7.88315038782686e-06, |
|
"loss": 1.429, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.6787775165633683, |
|
"grad_norm": 1.7644606828689575, |
|
"learning_rate": 7.877369256857712e-06, |
|
"loss": 1.6365, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.6796324000854883, |
|
"grad_norm": 2.004549264907837, |
|
"learning_rate": 7.87158236886304e-06, |
|
"loss": 1.4164, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.6804872836076085, |
|
"grad_norm": 1.8180807828903198, |
|
"learning_rate": 7.865789735421218e-06, |
|
"loss": 1.7249, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.6813421671297286, |
|
"grad_norm": 1.9642592668533325, |
|
"learning_rate": 7.85999136812212e-06, |
|
"loss": 1.5871, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.6821970506518487, |
|
"grad_norm": 1.7459617853164673, |
|
"learning_rate": 7.854187278567084e-06, |
|
"loss": 1.6564, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.6830519341739688, |
|
"grad_norm": 1.795413851737976, |
|
"learning_rate": 7.848377478368907e-06, |
|
"loss": 1.5304, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.6839068176960889, |
|
"grad_norm": 2.375990629196167, |
|
"learning_rate": 7.842561979151806e-06, |
|
"loss": 1.4079, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.684761701218209, |
|
"grad_norm": 2.3698506355285645, |
|
"learning_rate": 7.836740792551402e-06, |
|
"loss": 1.7428, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.6856165847403292, |
|
"grad_norm": 2.1372928619384766, |
|
"learning_rate": 7.830913930214693e-06, |
|
"loss": 1.5636, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.6864714682624492, |
|
"grad_norm": 1.936754584312439, |
|
"learning_rate": 7.825081403800039e-06, |
|
"loss": 1.6307, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.6873263517845694, |
|
"grad_norm": 1.8657773733139038, |
|
"learning_rate": 7.819243224977125e-06, |
|
"loss": 1.6783, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.6881812353066895, |
|
"grad_norm": 2.2650933265686035, |
|
"learning_rate": 7.813399405426951e-06, |
|
"loss": 1.4729, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.6890361188288096, |
|
"grad_norm": 1.953281044960022, |
|
"learning_rate": 7.807549956841799e-06, |
|
"loss": 1.544, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.6898910023509297, |
|
"grad_norm": 2.140019416809082, |
|
"learning_rate": 7.801694890925218e-06, |
|
"loss": 1.411, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.6907458858730497, |
|
"grad_norm": 2.071019411087036, |
|
"learning_rate": 7.795834219391991e-06, |
|
"loss": 1.5566, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.6916007693951699, |
|
"grad_norm": 2.667649269104004, |
|
"learning_rate": 7.78996795396812e-06, |
|
"loss": 1.5058, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.69245565291729, |
|
"grad_norm": 1.9877266883850098, |
|
"learning_rate": 7.7840961063908e-06, |
|
"loss": 1.6074, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.6933105364394101, |
|
"grad_norm": 2.031243324279785, |
|
"learning_rate": 7.778218688408391e-06, |
|
"loss": 1.5835, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.6941654199615303, |
|
"grad_norm": 1.9558823108673096, |
|
"learning_rate": 7.772335711780404e-06, |
|
"loss": 1.4158, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.6950203034836504, |
|
"grad_norm": 1.8911411762237549, |
|
"learning_rate": 7.766447188277465e-06, |
|
"loss": 1.6014, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.6958751870057704, |
|
"grad_norm": 2.080173969268799, |
|
"learning_rate": 7.760553129681303e-06, |
|
"loss": 1.7275, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.6967300705278906, |
|
"grad_norm": 2.0678954124450684, |
|
"learning_rate": 7.754653547784718e-06, |
|
"loss": 1.5159, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.6975849540500106, |
|
"grad_norm": 3.0976686477661133, |
|
"learning_rate": 7.748748454391565e-06, |
|
"loss": 1.6922, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.6984398375721308, |
|
"grad_norm": 1.6842857599258423, |
|
"learning_rate": 7.742837861316722e-06, |
|
"loss": 1.7449, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.699294721094251, |
|
"grad_norm": 1.756474494934082, |
|
"learning_rate": 7.736921780386077e-06, |
|
"loss": 1.7359, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.700149604616371, |
|
"grad_norm": 1.9712620973587036, |
|
"learning_rate": 7.731000223436491e-06, |
|
"loss": 1.5419, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.7010044881384911, |
|
"grad_norm": 2.3988850116729736, |
|
"learning_rate": 7.725073202315786e-06, |
|
"loss": 1.6395, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.7018593716606112, |
|
"grad_norm": 1.8440942764282227, |
|
"learning_rate": 7.719140728882713e-06, |
|
"loss": 1.7084, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.7027142551827313, |
|
"grad_norm": 1.777246356010437, |
|
"learning_rate": 7.713202815006938e-06, |
|
"loss": 1.7737, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.7035691387048515, |
|
"grad_norm": 1.8486806154251099, |
|
"learning_rate": 7.707259472569001e-06, |
|
"loss": 1.6888, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.7044240222269715, |
|
"grad_norm": 1.8133748769760132, |
|
"learning_rate": 7.70131071346032e-06, |
|
"loss": 1.7211, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.7052789057490917, |
|
"grad_norm": 1.8560314178466797, |
|
"learning_rate": 7.695356549583135e-06, |
|
"loss": 1.5751, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.7061337892712118, |
|
"grad_norm": 1.7498633861541748, |
|
"learning_rate": 7.689396992850511e-06, |
|
"loss": 1.6105, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.7069886727933319, |
|
"grad_norm": 2.0831539630889893, |
|
"learning_rate": 7.683432055186293e-06, |
|
"loss": 1.4881, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.707843556315452, |
|
"grad_norm": 1.8828967809677124, |
|
"learning_rate": 7.677461748525103e-06, |
|
"loss": 1.5878, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.7086984398375721, |
|
"grad_norm": 2.1139395236968994, |
|
"learning_rate": 7.671486084812297e-06, |
|
"loss": 1.5399, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.7095533233596922, |
|
"grad_norm": 1.7683391571044922, |
|
"learning_rate": 7.66550507600395e-06, |
|
"loss": 1.629, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.7104082068818124, |
|
"grad_norm": 2.1067309379577637, |
|
"learning_rate": 7.659518734066836e-06, |
|
"loss": 1.6089, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.7112630904039324, |
|
"grad_norm": 2.715237855911255, |
|
"learning_rate": 7.653527070978396e-06, |
|
"loss": 1.7623, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.7121179739260526, |
|
"grad_norm": 2.029066562652588, |
|
"learning_rate": 7.64753009872672e-06, |
|
"loss": 1.4605, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.7129728574481727, |
|
"grad_norm": 2.040937662124634, |
|
"learning_rate": 7.641527829310516e-06, |
|
"loss": 1.6259, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.7138277409702928, |
|
"grad_norm": 1.8791722059249878, |
|
"learning_rate": 7.635520274739097e-06, |
|
"loss": 1.5471, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.7146826244924129, |
|
"grad_norm": 1.9070911407470703, |
|
"learning_rate": 7.629507447032346e-06, |
|
"loss": 1.6921, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.715537508014533, |
|
"grad_norm": 1.9708362817764282, |
|
"learning_rate": 7.623489358220696e-06, |
|
"loss": 1.6241, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.7163923915366531, |
|
"grad_norm": 2.182068347930908, |
|
"learning_rate": 7.61746602034511e-06, |
|
"loss": 1.7642, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.7172472750587733, |
|
"grad_norm": 1.9108189344406128, |
|
"learning_rate": 7.61143744545705e-06, |
|
"loss": 1.5918, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.7181021585808933, |
|
"grad_norm": 2.457728624343872, |
|
"learning_rate": 7.605403645618459e-06, |
|
"loss": 1.5499, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.7189570421030135, |
|
"grad_norm": 2.1395840644836426, |
|
"learning_rate": 7.599364632901731e-06, |
|
"loss": 1.7547, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.7198119256251336, |
|
"grad_norm": 2.3625380992889404, |
|
"learning_rate": 7.593320419389691e-06, |
|
"loss": 1.4591, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.7206668091472537, |
|
"grad_norm": 1.7292894124984741, |
|
"learning_rate": 7.5872710171755725e-06, |
|
"loss": 1.6768, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.7215216926693738, |
|
"grad_norm": 2.7999610900878906, |
|
"learning_rate": 7.581216438362986e-06, |
|
"loss": 1.645, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.7223765761914939, |
|
"grad_norm": 2.0571372509002686, |
|
"learning_rate": 7.575156695065902e-06, |
|
"loss": 1.5302, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.723231459713614, |
|
"grad_norm": 2.070894956588745, |
|
"learning_rate": 7.569091799408624e-06, |
|
"loss": 1.8008, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.7240863432357342, |
|
"grad_norm": 2.6990160942077637, |
|
"learning_rate": 7.5630217635257615e-06, |
|
"loss": 1.4371, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.7249412267578542, |
|
"grad_norm": 1.9898473024368286, |
|
"learning_rate": 7.556946599562216e-06, |
|
"loss": 1.3214, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.7257961102799744, |
|
"grad_norm": 2.095768451690674, |
|
"learning_rate": 7.550866319673139e-06, |
|
"loss": 1.5759, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.7266509938020944, |
|
"grad_norm": 1.862705945968628, |
|
"learning_rate": 7.544780936023926e-06, |
|
"loss": 1.7743, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.7275058773242146, |
|
"grad_norm": 1.8628978729248047, |
|
"learning_rate": 7.538690460790179e-06, |
|
"loss": 1.4388, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.7283607608463347, |
|
"grad_norm": 1.8021178245544434, |
|
"learning_rate": 7.532594906157692e-06, |
|
"loss": 1.857, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.7292156443684548, |
|
"grad_norm": 2.5704119205474854, |
|
"learning_rate": 7.526494284322416e-06, |
|
"loss": 1.6302, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.7300705278905749, |
|
"grad_norm": 1.6791504621505737, |
|
"learning_rate": 7.520388607490447e-06, |
|
"loss": 1.7683, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.7309254114126951, |
|
"grad_norm": 3.8832380771636963, |
|
"learning_rate": 7.51427788787799e-06, |
|
"loss": 1.5744, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.7317802949348151, |
|
"grad_norm": 3.7836060523986816, |
|
"learning_rate": 7.508162137711341e-06, |
|
"loss": 1.739, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.7326351784569353, |
|
"grad_norm": 3.189974308013916, |
|
"learning_rate": 7.502041369226862e-06, |
|
"loss": 1.7167, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.7334900619790553, |
|
"grad_norm": 2.0712730884552, |
|
"learning_rate": 7.495915594670953e-06, |
|
"loss": 1.5468, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.7343449455011755, |
|
"grad_norm": 1.7717161178588867, |
|
"learning_rate": 7.489784826300033e-06, |
|
"loss": 1.7483, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.7351998290232956, |
|
"grad_norm": 2.4180703163146973, |
|
"learning_rate": 7.483649076380512e-06, |
|
"loss": 1.6412, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.7360547125454157, |
|
"grad_norm": 2.175175189971924, |
|
"learning_rate": 7.477508357188769e-06, |
|
"loss": 1.5923, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.7369095960675358, |
|
"grad_norm": 1.7840341329574585, |
|
"learning_rate": 7.47136268101112e-06, |
|
"loss": 1.7442, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.737764479589656, |
|
"grad_norm": 2.045222520828247, |
|
"learning_rate": 7.465212060143802e-06, |
|
"loss": 1.5055, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.738619363111776, |
|
"grad_norm": 1.7603466510772705, |
|
"learning_rate": 7.459056506892946e-06, |
|
"loss": 1.5209, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.7394742466338962, |
|
"grad_norm": 1.823033094406128, |
|
"learning_rate": 7.452896033574552e-06, |
|
"loss": 1.5858, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.7403291301560162, |
|
"grad_norm": 1.8318334817886353, |
|
"learning_rate": 7.446730652514464e-06, |
|
"loss": 1.5982, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.7411840136781364, |
|
"grad_norm": 1.8965398073196411, |
|
"learning_rate": 7.4405603760483425e-06, |
|
"loss": 1.4889, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.7420388972002565, |
|
"grad_norm": 1.8694148063659668, |
|
"learning_rate": 7.434385216521645e-06, |
|
"loss": 1.5792, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.7428937807223766, |
|
"grad_norm": 1.8063355684280396, |
|
"learning_rate": 7.428205186289601e-06, |
|
"loss": 1.617, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.7437486642444967, |
|
"grad_norm": 2.0516574382781982, |
|
"learning_rate": 7.42202029771718e-06, |
|
"loss": 1.8309, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.7446035477666167, |
|
"grad_norm": 2.1008880138397217, |
|
"learning_rate": 7.415830563179077e-06, |
|
"loss": 1.6068, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.7454584312887369, |
|
"grad_norm": 1.826690912246704, |
|
"learning_rate": 7.409635995059679e-06, |
|
"loss": 1.4517, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.746313314810857, |
|
"grad_norm": 2.040496826171875, |
|
"learning_rate": 7.403436605753047e-06, |
|
"loss": 1.5598, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.7471681983329771, |
|
"grad_norm": 1.6369898319244385, |
|
"learning_rate": 7.3972324076628876e-06, |
|
"loss": 1.6301, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.7480230818550972, |
|
"grad_norm": 2.6623289585113525, |
|
"learning_rate": 7.391023413202528e-06, |
|
"loss": 1.7157, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.7488779653772174, |
|
"grad_norm": 1.9073917865753174, |
|
"learning_rate": 7.384809634794891e-06, |
|
"loss": 1.6098, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.7497328488993374, |
|
"grad_norm": 1.9829678535461426, |
|
"learning_rate": 7.378591084872474e-06, |
|
"loss": 1.6042, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.7505877324214576, |
|
"grad_norm": 2.025413990020752, |
|
"learning_rate": 7.372367775877318e-06, |
|
"loss": 1.4811, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.7514426159435776, |
|
"grad_norm": 1.8276697397232056, |
|
"learning_rate": 7.366139720260988e-06, |
|
"loss": 1.4816, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.7514426159435776, |
|
"eval_loss": 1.6207854747772217, |
|
"eval_runtime": 535.4228, |
|
"eval_samples_per_second": 7.389, |
|
"eval_steps_per_second": 3.694, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.7522974994656978, |
|
"grad_norm": 1.7088907957077026, |
|
"learning_rate": 7.359906930484546e-06, |
|
"loss": 1.667, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.753152382987818, |
|
"grad_norm": 1.966194748878479, |
|
"learning_rate": 7.353669419018525e-06, |
|
"loss": 1.5384, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.754007266509938, |
|
"grad_norm": 1.6948115825653076, |
|
"learning_rate": 7.347427198342907e-06, |
|
"loss": 1.6488, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.7548621500320581, |
|
"grad_norm": 1.7349025011062622, |
|
"learning_rate": 7.3411802809470935e-06, |
|
"loss": 1.3846, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.7557170335541783, |
|
"grad_norm": 2.215358257293701, |
|
"learning_rate": 7.334928679329882e-06, |
|
"loss": 1.419, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.7565719170762983, |
|
"grad_norm": 2.094088315963745, |
|
"learning_rate": 7.328672405999451e-06, |
|
"loss": 1.6216, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.7574268005984185, |
|
"grad_norm": 1.7907602787017822, |
|
"learning_rate": 7.3224114734733135e-06, |
|
"loss": 1.5837, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.7582816841205385, |
|
"grad_norm": 1.775529384613037, |
|
"learning_rate": 7.316145894278315e-06, |
|
"loss": 1.7301, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.7591365676426587, |
|
"grad_norm": 1.867698311805725, |
|
"learning_rate": 7.309875680950591e-06, |
|
"loss": 1.6867, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.7599914511647788, |
|
"grad_norm": 1.9435536861419678, |
|
"learning_rate": 7.303600846035556e-06, |
|
"loss": 1.5512, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.7608463346868989, |
|
"grad_norm": 1.9748835563659668, |
|
"learning_rate": 7.297321402087861e-06, |
|
"loss": 1.4949, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.761701218209019, |
|
"grad_norm": 2.0852553844451904, |
|
"learning_rate": 7.291037361671392e-06, |
|
"loss": 1.6525, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.7625561017311391, |
|
"grad_norm": 1.7555006742477417, |
|
"learning_rate": 7.28474873735922e-06, |
|
"loss": 1.8248, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.7634109852532592, |
|
"grad_norm": 2.189694881439209, |
|
"learning_rate": 7.2784555417335935e-06, |
|
"loss": 1.6947, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.7642658687753794, |
|
"grad_norm": 1.8032170534133911, |
|
"learning_rate": 7.272157787385904e-06, |
|
"loss": 1.804, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.7651207522974994, |
|
"grad_norm": 2.116745710372925, |
|
"learning_rate": 7.265855486916668e-06, |
|
"loss": 1.6142, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.7659756358196196, |
|
"grad_norm": 1.9036099910736084, |
|
"learning_rate": 7.259548652935495e-06, |
|
"loss": 1.4026, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.7668305193417397, |
|
"grad_norm": 1.8711966276168823, |
|
"learning_rate": 7.253237298061066e-06, |
|
"loss": 1.6825, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.7676854028638598, |
|
"grad_norm": 1.8057702779769897, |
|
"learning_rate": 7.246921434921106e-06, |
|
"loss": 1.5902, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.7685402863859799, |
|
"grad_norm": 1.9403468370437622, |
|
"learning_rate": 7.2406010761523624e-06, |
|
"loss": 1.5219, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.7693951699081, |
|
"grad_norm": 1.751898169517517, |
|
"learning_rate": 7.234276234400577e-06, |
|
"loss": 1.7528, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.7702500534302201, |
|
"grad_norm": 1.8883317708969116, |
|
"learning_rate": 7.22794692232046e-06, |
|
"loss": 1.517, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.7711049369523403, |
|
"grad_norm": 2.5952086448669434, |
|
"learning_rate": 7.221613152575664e-06, |
|
"loss": 1.6828, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.7719598204744603, |
|
"grad_norm": 2.416489362716675, |
|
"learning_rate": 7.215274937838768e-06, |
|
"loss": 1.7045, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.7728147039965805, |
|
"grad_norm": 1.994223952293396, |
|
"learning_rate": 7.208932290791232e-06, |
|
"loss": 1.577, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.7736695875187006, |
|
"grad_norm": 2.458573341369629, |
|
"learning_rate": 7.2025852241233975e-06, |
|
"loss": 1.8009, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.7745244710408207, |
|
"grad_norm": 1.8616794347763062, |
|
"learning_rate": 7.1962337505344385e-06, |
|
"loss": 1.5317, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.7753793545629408, |
|
"grad_norm": 2.1416566371917725, |
|
"learning_rate": 7.1898778827323544e-06, |
|
"loss": 1.9288, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.7762342380850609, |
|
"grad_norm": 2.3308708667755127, |
|
"learning_rate": 7.18351763343393e-06, |
|
"loss": 1.4747, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.777089121607181, |
|
"grad_norm": 1.7730233669281006, |
|
"learning_rate": 7.17715301536472e-06, |
|
"loss": 1.7181, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.7779440051293012, |
|
"grad_norm": 2.1546826362609863, |
|
"learning_rate": 7.170784041259018e-06, |
|
"loss": 1.6143, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.7787988886514212, |
|
"grad_norm": 1.9400556087493896, |
|
"learning_rate": 7.164410723859837e-06, |
|
"loss": 1.6772, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.7796537721735414, |
|
"grad_norm": 1.798427700996399, |
|
"learning_rate": 7.1580330759188755e-06, |
|
"loss": 1.5083, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.7805086556956614, |
|
"grad_norm": 1.7825255393981934, |
|
"learning_rate": 7.151651110196499e-06, |
|
"loss": 1.6398, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.7813635392177816, |
|
"grad_norm": 2.0335419178009033, |
|
"learning_rate": 7.145264839461712e-06, |
|
"loss": 1.4382, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.7822184227399017, |
|
"grad_norm": 2.093400239944458, |
|
"learning_rate": 7.13887427649213e-06, |
|
"loss": 1.5794, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.7830733062620218, |
|
"grad_norm": 2.1397252082824707, |
|
"learning_rate": 7.132479434073961e-06, |
|
"loss": 1.6183, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.7839281897841419, |
|
"grad_norm": 2.8461811542510986, |
|
"learning_rate": 7.126080325001972e-06, |
|
"loss": 1.531, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.7847830733062621, |
|
"grad_norm": 3.3926584720611572, |
|
"learning_rate": 7.119676962079467e-06, |
|
"loss": 1.341, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.7856379568283821, |
|
"grad_norm": 2.23746395111084, |
|
"learning_rate": 7.113269358118261e-06, |
|
"loss": 1.4222, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.7864928403505023, |
|
"grad_norm": 3.6582369804382324, |
|
"learning_rate": 7.106857525938653e-06, |
|
"loss": 1.5119, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.7873477238726223, |
|
"grad_norm": 2.5623018741607666, |
|
"learning_rate": 7.1004414783694086e-06, |
|
"loss": 1.485, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.7882026073947425, |
|
"grad_norm": 2.459796667098999, |
|
"learning_rate": 7.094021228247719e-06, |
|
"loss": 1.3955, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.7890574909168626, |
|
"grad_norm": 1.6993489265441895, |
|
"learning_rate": 7.087596788419188e-06, |
|
"loss": 1.607, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.7899123744389827, |
|
"grad_norm": 1.889883279800415, |
|
"learning_rate": 7.0811681717378e-06, |
|
"loss": 1.6804, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 0.7907672579611028, |
|
"grad_norm": 2.538088321685791, |
|
"learning_rate": 7.074735391065902e-06, |
|
"loss": 1.3981, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.791622141483223, |
|
"grad_norm": 1.839068055152893, |
|
"learning_rate": 7.068298459274164e-06, |
|
"loss": 1.7209, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 0.792477025005343, |
|
"grad_norm": 1.996674656867981, |
|
"learning_rate": 7.061857389241571e-06, |
|
"loss": 1.6024, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 0.7933319085274632, |
|
"grad_norm": 1.8009898662567139, |
|
"learning_rate": 7.055412193855378e-06, |
|
"loss": 1.6207, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 0.7941867920495832, |
|
"grad_norm": 2.152193307876587, |
|
"learning_rate": 7.0489628860110995e-06, |
|
"loss": 1.5904, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 0.7950416755717034, |
|
"grad_norm": 1.833527684211731, |
|
"learning_rate": 7.042509478612478e-06, |
|
"loss": 1.6231, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.7958965590938235, |
|
"grad_norm": 1.7650495767593384, |
|
"learning_rate": 7.036051984571457e-06, |
|
"loss": 1.7468, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 0.7967514426159436, |
|
"grad_norm": 1.8895305395126343, |
|
"learning_rate": 7.029590416808159e-06, |
|
"loss": 1.6374, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 0.7976063261380637, |
|
"grad_norm": 2.057009696960449, |
|
"learning_rate": 7.0231247882508525e-06, |
|
"loss": 1.8619, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 0.7984612096601837, |
|
"grad_norm": 2.100050687789917, |
|
"learning_rate": 7.016655111835936e-06, |
|
"loss": 1.8066, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 0.7993160931823039, |
|
"grad_norm": 2.233248233795166, |
|
"learning_rate": 7.010181400507903e-06, |
|
"loss": 1.344, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.800170976704424, |
|
"grad_norm": 3.189713954925537, |
|
"learning_rate": 7.003703667219323e-06, |
|
"loss": 1.6922, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 0.8010258602265441, |
|
"grad_norm": 2.6081111431121826, |
|
"learning_rate": 6.997221924930811e-06, |
|
"loss": 1.4681, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 0.8018807437486642, |
|
"grad_norm": 2.012056350708008, |
|
"learning_rate": 6.9907361866110026e-06, |
|
"loss": 1.6555, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 0.8027356272707844, |
|
"grad_norm": 1.9229539632797241, |
|
"learning_rate": 6.984246465236532e-06, |
|
"loss": 1.6648, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 0.8035905107929044, |
|
"grad_norm": 2.1851089000701904, |
|
"learning_rate": 6.9777527737919945e-06, |
|
"loss": 1.5953, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.8044453943150246, |
|
"grad_norm": 1.743334412574768, |
|
"learning_rate": 6.971255125269941e-06, |
|
"loss": 1.591, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 0.8053002778371446, |
|
"grad_norm": 2.002300977706909, |
|
"learning_rate": 6.9647535326708274e-06, |
|
"loss": 1.6162, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 0.8061551613592648, |
|
"grad_norm": 2.2001571655273438, |
|
"learning_rate": 6.958248009003013e-06, |
|
"loss": 1.6559, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 0.807010044881385, |
|
"grad_norm": 1.9847776889801025, |
|
"learning_rate": 6.951738567282709e-06, |
|
"loss": 1.5787, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 0.807864928403505, |
|
"grad_norm": 2.376133680343628, |
|
"learning_rate": 6.945225220533977e-06, |
|
"loss": 1.5572, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.8087198119256251, |
|
"grad_norm": 2.4176290035247803, |
|
"learning_rate": 6.938707981788685e-06, |
|
"loss": 1.4635, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 0.8095746954477453, |
|
"grad_norm": 1.9553080797195435, |
|
"learning_rate": 6.932186864086493e-06, |
|
"loss": 1.5046, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 0.8104295789698653, |
|
"grad_norm": 1.8396267890930176, |
|
"learning_rate": 6.925661880474818e-06, |
|
"loss": 1.5988, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 0.8112844624919855, |
|
"grad_norm": 2.076810836791992, |
|
"learning_rate": 6.919133044008815e-06, |
|
"loss": 1.5742, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 0.8121393460141055, |
|
"grad_norm": 2.0050506591796875, |
|
"learning_rate": 6.9126003677513435e-06, |
|
"loss": 1.701, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.8129942295362257, |
|
"grad_norm": 1.8712939023971558, |
|
"learning_rate": 6.906063864772951e-06, |
|
"loss": 1.7932, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 0.8138491130583458, |
|
"grad_norm": 1.9611555337905884, |
|
"learning_rate": 6.899523548151837e-06, |
|
"loss": 1.6026, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 0.8147039965804659, |
|
"grad_norm": 2.152019500732422, |
|
"learning_rate": 6.892979430973834e-06, |
|
"loss": 1.7248, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 0.815558880102586, |
|
"grad_norm": 1.851959228515625, |
|
"learning_rate": 6.8864315263323775e-06, |
|
"loss": 1.5555, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 0.8164137636247061, |
|
"grad_norm": 2.021026611328125, |
|
"learning_rate": 6.879879847328483e-06, |
|
"loss": 1.548, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.8172686471468262, |
|
"grad_norm": 1.8437577486038208, |
|
"learning_rate": 6.873324407070714e-06, |
|
"loss": 1.5043, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 0.8181235306689464, |
|
"grad_norm": 1.8212240934371948, |
|
"learning_rate": 6.866765218675162e-06, |
|
"loss": 1.5855, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 0.8189784141910664, |
|
"grad_norm": 1.8274058103561401, |
|
"learning_rate": 6.8602022952654164e-06, |
|
"loss": 1.5723, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 0.8198332977131866, |
|
"grad_norm": 1.9869805574417114, |
|
"learning_rate": 6.853635649972542e-06, |
|
"loss": 1.5586, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 0.8206881812353067, |
|
"grad_norm": 1.8559073209762573, |
|
"learning_rate": 6.847065295935047e-06, |
|
"loss": 1.6032, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.8215430647574268, |
|
"grad_norm": 1.9623775482177734, |
|
"learning_rate": 6.8404912462988635e-06, |
|
"loss": 1.5828, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.8223979482795469, |
|
"grad_norm": 1.7365514039993286, |
|
"learning_rate": 6.833913514217314e-06, |
|
"loss": 1.7433, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 0.823252831801667, |
|
"grad_norm": 1.9759365320205688, |
|
"learning_rate": 6.827332112851093e-06, |
|
"loss": 1.616, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 0.8241077153237871, |
|
"grad_norm": 1.8827824592590332, |
|
"learning_rate": 6.820747055368233e-06, |
|
"loss": 1.5392, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 0.8249625988459073, |
|
"grad_norm": 2.1110143661499023, |
|
"learning_rate": 6.814158354944084e-06, |
|
"loss": 1.5172, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.8258174823680273, |
|
"grad_norm": 1.9254096746444702, |
|
"learning_rate": 6.807566024761282e-06, |
|
"loss": 1.4199, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 0.8266723658901475, |
|
"grad_norm": 3.650379180908203, |
|
"learning_rate": 6.800970078009728e-06, |
|
"loss": 1.6676, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 0.8275272494122676, |
|
"grad_norm": 1.826330542564392, |
|
"learning_rate": 6.794370527886558e-06, |
|
"loss": 1.764, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 0.8283821329343877, |
|
"grad_norm": 1.7885780334472656, |
|
"learning_rate": 6.787767387596118e-06, |
|
"loss": 1.4865, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 0.8292370164565078, |
|
"grad_norm": 1.9366446733474731, |
|
"learning_rate": 6.781160670349939e-06, |
|
"loss": 1.7136, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.8300918999786279, |
|
"grad_norm": 1.978267788887024, |
|
"learning_rate": 6.7745503893667016e-06, |
|
"loss": 1.4599, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 0.830946783500748, |
|
"grad_norm": 2.08886456489563, |
|
"learning_rate": 6.7679365578722275e-06, |
|
"loss": 1.506, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 0.8318016670228682, |
|
"grad_norm": 1.8797976970672607, |
|
"learning_rate": 6.761319189099432e-06, |
|
"loss": 1.7517, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 0.8326565505449882, |
|
"grad_norm": 1.885097861289978, |
|
"learning_rate": 6.754698296288315e-06, |
|
"loss": 1.5816, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 0.8335114340671084, |
|
"grad_norm": 2.1740825176239014, |
|
"learning_rate": 6.7480738926859234e-06, |
|
"loss": 1.5838, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.8343663175892284, |
|
"grad_norm": 1.6993968486785889, |
|
"learning_rate": 6.74144599154633e-06, |
|
"loss": 1.6185, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 0.8352212011113486, |
|
"grad_norm": 1.847494125366211, |
|
"learning_rate": 6.734814606130605e-06, |
|
"loss": 1.6438, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 0.8360760846334687, |
|
"grad_norm": 1.6532142162322998, |
|
"learning_rate": 6.728179749706792e-06, |
|
"loss": 1.7054, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 0.8369309681555888, |
|
"grad_norm": 1.8226687908172607, |
|
"learning_rate": 6.721541435549872e-06, |
|
"loss": 1.4887, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 0.8377858516777089, |
|
"grad_norm": 3.3552751541137695, |
|
"learning_rate": 6.714899676941755e-06, |
|
"loss": 1.5384, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.8386407351998291, |
|
"grad_norm": 2.1226840019226074, |
|
"learning_rate": 6.7082544871712366e-06, |
|
"loss": 1.6166, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 0.8394956187219491, |
|
"grad_norm": 1.9226521253585815, |
|
"learning_rate": 6.7016058795339765e-06, |
|
"loss": 1.6108, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 0.8403505022440693, |
|
"grad_norm": 2.033696174621582, |
|
"learning_rate": 6.694953867332473e-06, |
|
"loss": 1.547, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 0.8412053857661893, |
|
"grad_norm": 2.0900063514709473, |
|
"learning_rate": 6.688298463876042e-06, |
|
"loss": 1.7125, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 0.8420602692883095, |
|
"grad_norm": 1.6653790473937988, |
|
"learning_rate": 6.681639682480776e-06, |
|
"loss": 1.823, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.8429151528104296, |
|
"grad_norm": 1.7410255670547485, |
|
"learning_rate": 6.6749775364695335e-06, |
|
"loss": 1.5678, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 0.8437700363325497, |
|
"grad_norm": 1.8242837190628052, |
|
"learning_rate": 6.668312039171901e-06, |
|
"loss": 1.7127, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 0.8446249198546698, |
|
"grad_norm": 1.8510127067565918, |
|
"learning_rate": 6.66164320392417e-06, |
|
"loss": 1.7295, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 0.84547980337679, |
|
"grad_norm": 1.6781837940216064, |
|
"learning_rate": 6.65497104406931e-06, |
|
"loss": 1.6511, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 0.84633468689891, |
|
"grad_norm": 1.7391432523727417, |
|
"learning_rate": 6.64829557295695e-06, |
|
"loss": 1.7455, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.8471895704210302, |
|
"grad_norm": 1.8074300289154053, |
|
"learning_rate": 6.641616803943331e-06, |
|
"loss": 1.6085, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 0.8480444539431502, |
|
"grad_norm": 1.731606364250183, |
|
"learning_rate": 6.634934750391305e-06, |
|
"loss": 1.7347, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.8488993374652704, |
|
"grad_norm": 2.0392024517059326, |
|
"learning_rate": 6.628249425670286e-06, |
|
"loss": 1.7722, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 0.8497542209873905, |
|
"grad_norm": 2.1152353286743164, |
|
"learning_rate": 6.621560843156241e-06, |
|
"loss": 1.6308, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 0.8506091045095105, |
|
"grad_norm": 2.0195107460021973, |
|
"learning_rate": 6.614869016231648e-06, |
|
"loss": 1.4781, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.8514639880316307, |
|
"grad_norm": 1.8734263181686401, |
|
"learning_rate": 6.60817395828548e-06, |
|
"loss": 1.3614, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 0.8523188715537509, |
|
"grad_norm": 2.6763031482696533, |
|
"learning_rate": 6.601475682713176e-06, |
|
"loss": 1.654, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 0.8531737550758709, |
|
"grad_norm": 1.8202966451644897, |
|
"learning_rate": 6.594774202916612e-06, |
|
"loss": 1.7154, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 0.854028638597991, |
|
"grad_norm": 2.3950650691986084, |
|
"learning_rate": 6.58806953230407e-06, |
|
"loss": 1.6501, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 0.8548835221201111, |
|
"grad_norm": 1.8744430541992188, |
|
"learning_rate": 6.581361684290225e-06, |
|
"loss": 1.7473, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.8557384056422312, |
|
"grad_norm": 1.8787147998809814, |
|
"learning_rate": 6.5746506722961e-06, |
|
"loss": 1.6708, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 0.8565932891643514, |
|
"grad_norm": 1.665242314338684, |
|
"learning_rate": 6.567936509749058e-06, |
|
"loss": 1.695, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 0.8574481726864714, |
|
"grad_norm": 1.9135111570358276, |
|
"learning_rate": 6.561219210082755e-06, |
|
"loss": 1.5917, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 0.8583030562085916, |
|
"grad_norm": 1.9543577432632446, |
|
"learning_rate": 6.554498786737136e-06, |
|
"loss": 1.6085, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 0.8591579397307116, |
|
"grad_norm": 1.9068609476089478, |
|
"learning_rate": 6.547775253158383e-06, |
|
"loss": 1.7286, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.8600128232528318, |
|
"grad_norm": 1.743898630142212, |
|
"learning_rate": 6.541048622798912e-06, |
|
"loss": 1.6818, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 0.860867706774952, |
|
"grad_norm": 2.116328239440918, |
|
"learning_rate": 6.534318909117326e-06, |
|
"loss": 1.6031, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 0.861722590297072, |
|
"grad_norm": 1.876155138015747, |
|
"learning_rate": 6.527586125578407e-06, |
|
"loss": 1.4219, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 0.8625774738191921, |
|
"grad_norm": 2.2201461791992188, |
|
"learning_rate": 6.520850285653067e-06, |
|
"loss": 1.5063, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 0.8634323573413123, |
|
"grad_norm": 2.414156675338745, |
|
"learning_rate": 6.514111402818345e-06, |
|
"loss": 1.6914, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.8642872408634323, |
|
"grad_norm": 1.7859731912612915, |
|
"learning_rate": 6.507369490557359e-06, |
|
"loss": 1.7863, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 0.8651421243855525, |
|
"grad_norm": 1.9573917388916016, |
|
"learning_rate": 6.5006245623592945e-06, |
|
"loss": 1.484, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 0.8659970079076725, |
|
"grad_norm": 2.2137091159820557, |
|
"learning_rate": 6.493876631719368e-06, |
|
"loss": 1.3974, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 0.8668518914297927, |
|
"grad_norm": 1.8472415208816528, |
|
"learning_rate": 6.487125712138804e-06, |
|
"loss": 1.4936, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 0.8677067749519128, |
|
"grad_norm": 1.7957123517990112, |
|
"learning_rate": 6.480371817124809e-06, |
|
"loss": 1.4753, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.8685616584740329, |
|
"grad_norm": 1.9084787368774414, |
|
"learning_rate": 6.4736149601905394e-06, |
|
"loss": 1.4901, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 0.869416541996153, |
|
"grad_norm": 2.067575216293335, |
|
"learning_rate": 6.466855154855081e-06, |
|
"loss": 1.6117, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 0.8702714255182732, |
|
"grad_norm": 1.971130132675171, |
|
"learning_rate": 6.460092414643417e-06, |
|
"loss": 1.5208, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 0.8711263090403932, |
|
"grad_norm": 2.3874399662017822, |
|
"learning_rate": 6.453326753086402e-06, |
|
"loss": 1.5572, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 0.8719811925625134, |
|
"grad_norm": 2.069075584411621, |
|
"learning_rate": 6.446558183720738e-06, |
|
"loss": 1.4789, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.8728360760846334, |
|
"grad_norm": 2.126783847808838, |
|
"learning_rate": 6.439786720088942e-06, |
|
"loss": 1.5286, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 0.8736909596067536, |
|
"grad_norm": 2.263805389404297, |
|
"learning_rate": 6.433012375739326e-06, |
|
"loss": 1.4588, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 0.8745458431288737, |
|
"grad_norm": 2.54403018951416, |
|
"learning_rate": 6.426235164225959e-06, |
|
"loss": 1.6166, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 0.8754007266509938, |
|
"grad_norm": 2.790661334991455, |
|
"learning_rate": 6.419455099108656e-06, |
|
"loss": 1.8078, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 0.8762556101731139, |
|
"grad_norm": 1.7536730766296387, |
|
"learning_rate": 6.412672193952931e-06, |
|
"loss": 1.6686, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.877110493695234, |
|
"grad_norm": 1.9725817441940308, |
|
"learning_rate": 6.40588646232999e-06, |
|
"loss": 1.6277, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 0.8779653772173541, |
|
"grad_norm": 2.001371383666992, |
|
"learning_rate": 6.3990979178166865e-06, |
|
"loss": 1.7229, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 0.8788202607394743, |
|
"grad_norm": 2.239229679107666, |
|
"learning_rate": 6.3923065739955074e-06, |
|
"loss": 1.4867, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 0.8796751442615943, |
|
"grad_norm": 2.3648579120635986, |
|
"learning_rate": 6.385512444454536e-06, |
|
"loss": 1.6231, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 0.8805300277837145, |
|
"grad_norm": 1.9137940406799316, |
|
"learning_rate": 6.378715542787435e-06, |
|
"loss": 1.5501, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.8813849113058346, |
|
"grad_norm": 3.1135456562042236, |
|
"learning_rate": 6.371915882593406e-06, |
|
"loss": 1.521, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 0.8822397948279547, |
|
"grad_norm": 1.8897255659103394, |
|
"learning_rate": 6.365113477477176e-06, |
|
"loss": 1.7424, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 0.8830946783500748, |
|
"grad_norm": 1.892958402633667, |
|
"learning_rate": 6.358308341048963e-06, |
|
"loss": 1.6762, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 0.8839495618721949, |
|
"grad_norm": 1.7024919986724854, |
|
"learning_rate": 6.351500486924447e-06, |
|
"loss": 1.6999, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 0.884804445394315, |
|
"grad_norm": 1.8556098937988281, |
|
"learning_rate": 6.344689928724749e-06, |
|
"loss": 1.5675, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.8856593289164352, |
|
"grad_norm": 1.7438766956329346, |
|
"learning_rate": 6.337876680076398e-06, |
|
"loss": 1.5188, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 0.8865142124385552, |
|
"grad_norm": 1.9027405977249146, |
|
"learning_rate": 6.331060754611304e-06, |
|
"loss": 1.6641, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 0.8873690959606754, |
|
"grad_norm": 1.7773973941802979, |
|
"learning_rate": 6.32424216596674e-06, |
|
"loss": 1.6512, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 0.8882239794827955, |
|
"grad_norm": 1.840724229812622, |
|
"learning_rate": 6.317420927785298e-06, |
|
"loss": 1.6899, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 0.8890788630049156, |
|
"grad_norm": 1.785825490951538, |
|
"learning_rate": 6.310597053714881e-06, |
|
"loss": 1.4954, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.8899337465270357, |
|
"grad_norm": 1.8115184307098389, |
|
"learning_rate": 6.303770557408657e-06, |
|
"loss": 1.6515, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 0.8907886300491558, |
|
"grad_norm": 2.2055771350860596, |
|
"learning_rate": 6.296941452525048e-06, |
|
"loss": 1.6381, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 0.8916435135712759, |
|
"grad_norm": 1.7307052612304688, |
|
"learning_rate": 6.290109752727687e-06, |
|
"loss": 1.5122, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 0.8924983970933961, |
|
"grad_norm": 1.7290875911712646, |
|
"learning_rate": 6.283275471685408e-06, |
|
"loss": 1.7985, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 0.8933532806155161, |
|
"grad_norm": 2.136174440383911, |
|
"learning_rate": 6.276438623072204e-06, |
|
"loss": 1.5584, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.8942081641376363, |
|
"grad_norm": 1.942376732826233, |
|
"learning_rate": 6.269599220567205e-06, |
|
"loss": 1.6992, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 0.8950630476597563, |
|
"grad_norm": 1.8792293071746826, |
|
"learning_rate": 6.262757277854654e-06, |
|
"loss": 1.4932, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 0.8959179311818765, |
|
"grad_norm": 2.496633768081665, |
|
"learning_rate": 6.2559128086238715e-06, |
|
"loss": 1.8053, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 0.8967728147039966, |
|
"grad_norm": 2.0031940937042236, |
|
"learning_rate": 6.24906582656924e-06, |
|
"loss": 1.6923, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 0.8976276982261167, |
|
"grad_norm": 1.8933215141296387, |
|
"learning_rate": 6.242216345390164e-06, |
|
"loss": 1.4336, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.8984825817482368, |
|
"grad_norm": 2.275301456451416, |
|
"learning_rate": 6.23536437879105e-06, |
|
"loss": 1.6945, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 0.899337465270357, |
|
"grad_norm": 2.035141944885254, |
|
"learning_rate": 6.228509940481278e-06, |
|
"loss": 1.5628, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 0.900192348792477, |
|
"grad_norm": 2.141824960708618, |
|
"learning_rate": 6.221653044175171e-06, |
|
"loss": 1.5929, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 0.9010472323145972, |
|
"grad_norm": 2.2444586753845215, |
|
"learning_rate": 6.2147937035919734e-06, |
|
"loss": 1.5017, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 0.9019021158367172, |
|
"grad_norm": 1.9875540733337402, |
|
"learning_rate": 6.207931932455818e-06, |
|
"loss": 1.5285, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.9027569993588374, |
|
"grad_norm": 2.033687114715576, |
|
"learning_rate": 6.2010677444957e-06, |
|
"loss": 1.5152, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 0.9036118828809575, |
|
"grad_norm": 2.0552401542663574, |
|
"learning_rate": 6.194201153445451e-06, |
|
"loss": 1.6341, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 0.9044667664030775, |
|
"grad_norm": 2.3873775005340576, |
|
"learning_rate": 6.187332173043714e-06, |
|
"loss": 1.4909, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 0.9053216499251977, |
|
"grad_norm": 2.0585362911224365, |
|
"learning_rate": 6.180460817033905e-06, |
|
"loss": 1.7219, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 0.9061765334473179, |
|
"grad_norm": 2.2529118061065674, |
|
"learning_rate": 6.173587099164201e-06, |
|
"loss": 1.5878, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.9070314169694379, |
|
"grad_norm": 1.8679263591766357, |
|
"learning_rate": 6.1667110331875e-06, |
|
"loss": 1.6209, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 0.907886300491558, |
|
"grad_norm": 1.7941343784332275, |
|
"learning_rate": 6.159832632861399e-06, |
|
"loss": 1.5526, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 0.9087411840136781, |
|
"grad_norm": 1.9600245952606201, |
|
"learning_rate": 6.152951911948165e-06, |
|
"loss": 1.3754, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 0.9095960675357982, |
|
"grad_norm": 1.9597649574279785, |
|
"learning_rate": 6.14606888421471e-06, |
|
"loss": 1.5851, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 0.9104509510579184, |
|
"grad_norm": 2.3222103118896484, |
|
"learning_rate": 6.1391835634325605e-06, |
|
"loss": 1.7639, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.9113058345800384, |
|
"grad_norm": 1.9706284999847412, |
|
"learning_rate": 6.1322959633778305e-06, |
|
"loss": 1.5823, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 0.9121607181021586, |
|
"grad_norm": 1.8868184089660645, |
|
"learning_rate": 6.125406097831194e-06, |
|
"loss": 1.6515, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 0.9130156016242786, |
|
"grad_norm": 1.833493709564209, |
|
"learning_rate": 6.118513980577858e-06, |
|
"loss": 1.55, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 0.9138704851463988, |
|
"grad_norm": 1.917563557624817, |
|
"learning_rate": 6.111619625407536e-06, |
|
"loss": 1.6528, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 0.914725368668519, |
|
"grad_norm": 1.8444433212280273, |
|
"learning_rate": 6.104723046114418e-06, |
|
"loss": 1.7479, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.915580252190639, |
|
"grad_norm": 2.0832955837249756, |
|
"learning_rate": 6.097824256497145e-06, |
|
"loss": 1.6926, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 0.9164351357127591, |
|
"grad_norm": 1.9241522550582886, |
|
"learning_rate": 6.090923270358777e-06, |
|
"loss": 1.4728, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 0.9172900192348793, |
|
"grad_norm": 1.7187578678131104, |
|
"learning_rate": 6.0840201015067735e-06, |
|
"loss": 1.743, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 0.9181449027569993, |
|
"grad_norm": 2.5005061626434326, |
|
"learning_rate": 6.077114763752958e-06, |
|
"loss": 1.4749, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 0.9189997862791195, |
|
"grad_norm": 2.1376984119415283, |
|
"learning_rate": 6.070207270913495e-06, |
|
"loss": 1.7223, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.9198546698012395, |
|
"grad_norm": 2.73537278175354, |
|
"learning_rate": 6.063297636808862e-06, |
|
"loss": 1.4193, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 0.9207095533233597, |
|
"grad_norm": 1.7628402709960938, |
|
"learning_rate": 6.056385875263816e-06, |
|
"loss": 1.7398, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 0.9215644368454798, |
|
"grad_norm": 1.917742133140564, |
|
"learning_rate": 6.049472000107376e-06, |
|
"loss": 1.6026, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 0.9224193203675999, |
|
"grad_norm": 1.6675467491149902, |
|
"learning_rate": 6.042556025172787e-06, |
|
"loss": 1.6473, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 0.92327420388972, |
|
"grad_norm": 1.7674387693405151, |
|
"learning_rate": 6.035637964297496e-06, |
|
"loss": 1.5639, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.9241290874118402, |
|
"grad_norm": 1.7522715330123901, |
|
"learning_rate": 6.028717831323123e-06, |
|
"loss": 1.7759, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 0.9249839709339602, |
|
"grad_norm": 2.054579496383667, |
|
"learning_rate": 6.021795640095434e-06, |
|
"loss": 1.849, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 0.9258388544560804, |
|
"grad_norm": 2.0761284828186035, |
|
"learning_rate": 6.014871404464316e-06, |
|
"loss": 1.8344, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 0.9266937379782004, |
|
"grad_norm": 1.839158058166504, |
|
"learning_rate": 6.0079451382837405e-06, |
|
"loss": 1.7051, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 0.9275486215003206, |
|
"grad_norm": 2.337629795074463, |
|
"learning_rate": 6.0010168554117474e-06, |
|
"loss": 1.5102, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.9284035050224407, |
|
"grad_norm": 2.650710105895996, |
|
"learning_rate": 5.9940865697104055e-06, |
|
"loss": 1.5373, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 0.9292583885445608, |
|
"grad_norm": 3.5047664642333984, |
|
"learning_rate": 5.987154295045801e-06, |
|
"loss": 1.6132, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 0.9301132720666809, |
|
"grad_norm": 1.9641162157058716, |
|
"learning_rate": 5.980220045287986e-06, |
|
"loss": 1.5715, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 0.930968155588801, |
|
"grad_norm": 2.2403674125671387, |
|
"learning_rate": 5.973283834310978e-06, |
|
"loss": 1.6544, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 0.9318230391109211, |
|
"grad_norm": 1.8391705751419067, |
|
"learning_rate": 5.966345675992707e-06, |
|
"loss": 1.6286, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.9326779226330413, |
|
"grad_norm": 1.8932671546936035, |
|
"learning_rate": 5.959405584215007e-06, |
|
"loss": 1.5434, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 0.9335328061551613, |
|
"grad_norm": 2.0733065605163574, |
|
"learning_rate": 5.952463572863577e-06, |
|
"loss": 1.4035, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 0.9343876896772815, |
|
"grad_norm": 1.7531025409698486, |
|
"learning_rate": 5.945519655827957e-06, |
|
"loss": 1.6864, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 0.9352425731994016, |
|
"grad_norm": 1.679069995880127, |
|
"learning_rate": 5.938573847001502e-06, |
|
"loss": 1.7121, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 0.9360974567215217, |
|
"grad_norm": 1.8652360439300537, |
|
"learning_rate": 5.931626160281347e-06, |
|
"loss": 1.6491, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.9369523402436418, |
|
"grad_norm": 1.819766640663147, |
|
"learning_rate": 5.924676609568392e-06, |
|
"loss": 1.5042, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 0.9378072237657619, |
|
"grad_norm": 1.9549850225448608, |
|
"learning_rate": 5.917725208767259e-06, |
|
"loss": 1.7763, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 0.938662107287882, |
|
"grad_norm": 2.392908811569214, |
|
"learning_rate": 5.9107719717862756e-06, |
|
"loss": 1.6243, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 0.9395169908100022, |
|
"grad_norm": 1.7446740865707397, |
|
"learning_rate": 5.903816912537444e-06, |
|
"loss": 1.7163, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 0.9403718743321222, |
|
"grad_norm": 1.9705148935317993, |
|
"learning_rate": 5.896860044936406e-06, |
|
"loss": 1.6139, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.9412267578542424, |
|
"grad_norm": 1.883777379989624, |
|
"learning_rate": 5.889901382902432e-06, |
|
"loss": 1.6398, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 0.9420816413763625, |
|
"grad_norm": 2.556436777114868, |
|
"learning_rate": 5.882940940358373e-06, |
|
"loss": 1.4316, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 0.9429365248984826, |
|
"grad_norm": 1.8999799489974976, |
|
"learning_rate": 5.875978731230648e-06, |
|
"loss": 1.6339, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 0.9437914084206027, |
|
"grad_norm": 2.2275121212005615, |
|
"learning_rate": 5.869014769449208e-06, |
|
"loss": 1.3929, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 0.9446462919427228, |
|
"grad_norm": 1.9651514291763306, |
|
"learning_rate": 5.862049068947516e-06, |
|
"loss": 1.5237, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.9455011754648429, |
|
"grad_norm": 1.848996877670288, |
|
"learning_rate": 5.855081643662504e-06, |
|
"loss": 1.5521, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 0.9463560589869631, |
|
"grad_norm": 1.80025053024292, |
|
"learning_rate": 5.848112507534564e-06, |
|
"loss": 1.5609, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 0.9472109425090831, |
|
"grad_norm": 2.1637113094329834, |
|
"learning_rate": 5.841141674507504e-06, |
|
"loss": 1.5524, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 0.9480658260312033, |
|
"grad_norm": 2.1026487350463867, |
|
"learning_rate": 5.834169158528534e-06, |
|
"loss": 1.398, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 0.9489207095533233, |
|
"grad_norm": 2.1661503314971924, |
|
"learning_rate": 5.827194973548227e-06, |
|
"loss": 1.5773, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.9497755930754435, |
|
"grad_norm": 1.7545636892318726, |
|
"learning_rate": 5.820219133520495e-06, |
|
"loss": 1.691, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 0.9506304765975636, |
|
"grad_norm": 2.0873961448669434, |
|
"learning_rate": 5.813241652402564e-06, |
|
"loss": 1.4826, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 0.9514853601196837, |
|
"grad_norm": 1.7167503833770752, |
|
"learning_rate": 5.806262544154941e-06, |
|
"loss": 1.7625, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 0.9523402436418038, |
|
"grad_norm": 2.3546254634857178, |
|
"learning_rate": 5.799281822741392e-06, |
|
"loss": 1.3234, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 0.953195127163924, |
|
"grad_norm": 1.9529823064804077, |
|
"learning_rate": 5.792299502128906e-06, |
|
"loss": 1.5489, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.954050010686044, |
|
"grad_norm": 1.9048004150390625, |
|
"learning_rate": 5.785315596287675e-06, |
|
"loss": 1.6327, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 0.9549048942081642, |
|
"grad_norm": 2.040055751800537, |
|
"learning_rate": 5.778330119191063e-06, |
|
"loss": 1.6707, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 0.9557597777302842, |
|
"grad_norm": 1.817842960357666, |
|
"learning_rate": 5.771343084815571e-06, |
|
"loss": 1.4015, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 0.9566146612524044, |
|
"grad_norm": 1.8240433931350708, |
|
"learning_rate": 5.7643545071408265e-06, |
|
"loss": 1.6692, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 0.9574695447745245, |
|
"grad_norm": 1.931836485862732, |
|
"learning_rate": 5.7573644001495375e-06, |
|
"loss": 1.5152, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.9583244282966445, |
|
"grad_norm": 2.3037421703338623, |
|
"learning_rate": 5.7503727778274765e-06, |
|
"loss": 1.6528, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 0.9591793118187647, |
|
"grad_norm": 1.6994976997375488, |
|
"learning_rate": 5.743379654163441e-06, |
|
"loss": 1.7431, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 0.9600341953408849, |
|
"grad_norm": 2.0386931896209717, |
|
"learning_rate": 5.736385043149242e-06, |
|
"loss": 1.543, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 0.9608890788630049, |
|
"grad_norm": 1.81898832321167, |
|
"learning_rate": 5.729388958779654e-06, |
|
"loss": 1.7186, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 0.961743962385125, |
|
"grad_norm": 1.7468756437301636, |
|
"learning_rate": 5.722391415052413e-06, |
|
"loss": 1.7129, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.9625988459072451, |
|
"grad_norm": 2.1387693881988525, |
|
"learning_rate": 5.715392425968165e-06, |
|
"loss": 1.7084, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 0.9634537294293652, |
|
"grad_norm": 1.9089514017105103, |
|
"learning_rate": 5.708392005530452e-06, |
|
"loss": 1.3784, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 0.9643086129514854, |
|
"grad_norm": 1.9345386028289795, |
|
"learning_rate": 5.7013901677456784e-06, |
|
"loss": 1.4172, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 0.9651634964736054, |
|
"grad_norm": 1.9413198232650757, |
|
"learning_rate": 5.694386926623085e-06, |
|
"loss": 1.7237, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 0.9660183799957256, |
|
"grad_norm": 1.9111895561218262, |
|
"learning_rate": 5.687382296174722e-06, |
|
"loss": 1.5112, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.9668732635178456, |
|
"grad_norm": 1.9282513856887817, |
|
"learning_rate": 5.6803762904154155e-06, |
|
"loss": 1.6268, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 0.9677281470399658, |
|
"grad_norm": 1.959633469581604, |
|
"learning_rate": 5.6733689233627476e-06, |
|
"loss": 1.5489, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 0.968583030562086, |
|
"grad_norm": 1.8871335983276367, |
|
"learning_rate": 5.666360209037021e-06, |
|
"loss": 1.623, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 0.969437914084206, |
|
"grad_norm": 1.9780254364013672, |
|
"learning_rate": 5.659350161461234e-06, |
|
"loss": 1.5818, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 0.9702927976063261, |
|
"grad_norm": 4.364112377166748, |
|
"learning_rate": 5.6523387946610575e-06, |
|
"loss": 1.7556, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.9711476811284463, |
|
"grad_norm": 1.9129712581634521, |
|
"learning_rate": 5.645326122664793e-06, |
|
"loss": 1.6293, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 0.9720025646505663, |
|
"grad_norm": 3.172373056411743, |
|
"learning_rate": 5.638312159503361e-06, |
|
"loss": 1.6633, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 0.9728574481726865, |
|
"grad_norm": 1.857598066329956, |
|
"learning_rate": 5.631296919210261e-06, |
|
"loss": 1.4858, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 0.9737123316948065, |
|
"grad_norm": 1.9570964574813843, |
|
"learning_rate": 5.624280415821553e-06, |
|
"loss": 1.6073, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 0.9745672152169267, |
|
"grad_norm": 2.047363758087158, |
|
"learning_rate": 5.617262663375815e-06, |
|
"loss": 1.4474, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.9754220987390468, |
|
"grad_norm": 1.8771976232528687, |
|
"learning_rate": 5.610243675914138e-06, |
|
"loss": 1.3981, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 0.9762769822611669, |
|
"grad_norm": 1.9068803787231445, |
|
"learning_rate": 5.603223467480067e-06, |
|
"loss": 1.6599, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 0.977131865783287, |
|
"grad_norm": 2.1292192935943604, |
|
"learning_rate": 5.5962020521196046e-06, |
|
"loss": 1.5937, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 0.9779867493054072, |
|
"grad_norm": 2.0215260982513428, |
|
"learning_rate": 5.58917944388116e-06, |
|
"loss": 1.4024, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 0.9788416328275272, |
|
"grad_norm": 1.7946442365646362, |
|
"learning_rate": 5.582155656815531e-06, |
|
"loss": 1.5434, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.9796965163496474, |
|
"grad_norm": 2.0026910305023193, |
|
"learning_rate": 5.575130704975877e-06, |
|
"loss": 1.5286, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 0.9805513998717674, |
|
"grad_norm": 1.9159256219863892, |
|
"learning_rate": 5.568104602417682e-06, |
|
"loss": 1.565, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 0.9814062833938876, |
|
"grad_norm": 1.912210464477539, |
|
"learning_rate": 5.561077363198738e-06, |
|
"loss": 1.4228, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 0.9822611669160077, |
|
"grad_norm": 1.8582370281219482, |
|
"learning_rate": 5.554049001379108e-06, |
|
"loss": 1.529, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 0.9831160504381278, |
|
"grad_norm": 1.9566824436187744, |
|
"learning_rate": 5.5470195310211015e-06, |
|
"loss": 1.6577, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.9839709339602479, |
|
"grad_norm": 2.0170326232910156, |
|
"learning_rate": 5.539988966189248e-06, |
|
"loss": 1.6307, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 0.9848258174823681, |
|
"grad_norm": 1.9601041078567505, |
|
"learning_rate": 5.532957320950264e-06, |
|
"loss": 1.6092, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 0.9856807010044881, |
|
"grad_norm": 1.9974137544631958, |
|
"learning_rate": 5.5259246093730305e-06, |
|
"loss": 1.6367, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 0.9865355845266083, |
|
"grad_norm": 2.220414161682129, |
|
"learning_rate": 5.5188908455285565e-06, |
|
"loss": 1.5377, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 0.9873904680487283, |
|
"grad_norm": 2.020720958709717, |
|
"learning_rate": 5.511856043489965e-06, |
|
"loss": 1.6013, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.9882453515708485, |
|
"grad_norm": 1.8889501094818115, |
|
"learning_rate": 5.504820217332447e-06, |
|
"loss": 1.689, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 0.9891002350929686, |
|
"grad_norm": 1.811374306678772, |
|
"learning_rate": 5.4977833811332525e-06, |
|
"loss": 1.4317, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 0.9899551186150887, |
|
"grad_norm": 2.801724910736084, |
|
"learning_rate": 5.490745548971644e-06, |
|
"loss": 1.7369, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 0.9908100021372088, |
|
"grad_norm": 2.1599671840667725, |
|
"learning_rate": 5.483706734928877e-06, |
|
"loss": 1.5232, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 0.9916648856593289, |
|
"grad_norm": 2.0417938232421875, |
|
"learning_rate": 5.476666953088179e-06, |
|
"loss": 1.505, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.992519769181449, |
|
"grad_norm": 2.6418049335479736, |
|
"learning_rate": 5.469626217534707e-06, |
|
"loss": 1.6385, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 0.9933746527035692, |
|
"grad_norm": 1.836726427078247, |
|
"learning_rate": 5.462584542355528e-06, |
|
"loss": 1.6968, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 0.9942295362256892, |
|
"grad_norm": 4.359468936920166, |
|
"learning_rate": 5.45554194163959e-06, |
|
"loss": 1.4797, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 0.9950844197478094, |
|
"grad_norm": 2.071335554122925, |
|
"learning_rate": 5.44849842947769e-06, |
|
"loss": 1.4227, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 0.9959393032699295, |
|
"grad_norm": 1.8535865545272827, |
|
"learning_rate": 5.4414540199624536e-06, |
|
"loss": 1.4956, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.9967941867920496, |
|
"grad_norm": 2.3737199306488037, |
|
"learning_rate": 5.434408727188297e-06, |
|
"loss": 1.5244, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 0.9976490703141697, |
|
"grad_norm": 1.9199711084365845, |
|
"learning_rate": 5.427362565251407e-06, |
|
"loss": 1.5126, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 0.9985039538362898, |
|
"grad_norm": 2.217866897583008, |
|
"learning_rate": 5.4203155482497075e-06, |
|
"loss": 1.6553, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 0.9993588373584099, |
|
"grad_norm": 2.2763781547546387, |
|
"learning_rate": 5.413267690282832e-06, |
|
"loss": 1.5742, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 1.6807893514633179, |
|
"learning_rate": 5.4062190054521e-06, |
|
"loss": 1.1283, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 1.00085488352212, |
|
"grad_norm": 1.7158795595169067, |
|
"learning_rate": 5.399169507860484e-06, |
|
"loss": 1.5498, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 1.0017097670442403, |
|
"grad_norm": 2.0779738426208496, |
|
"learning_rate": 5.392119211612582e-06, |
|
"loss": 1.3149, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 1.0017097670442403, |
|
"eval_loss": 1.6148672103881836, |
|
"eval_runtime": 534.1841, |
|
"eval_samples_per_second": 7.406, |
|
"eval_steps_per_second": 3.703, |
|
"step": 1172 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 2338, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 293, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2.1784816724150845e+19, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|