{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9796616612811253,
  "eval_steps": 500,
  "global_step": 1314,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.001520623455616803,
      "grad_norm": 3.80404000128578,
      "learning_rate": 9.033333333333334e-07,
      "loss": 1.3234,
      "step": 1
    },
    {
      "epoch": 0.003041246911233606,
      "grad_norm": 3.7593589398056593,
      "learning_rate": 1.8066666666666668e-06,
      "loss": 1.2935,
      "step": 2
    },
    {
      "epoch": 0.0045618703668504085,
      "grad_norm": 3.5532664758635017,
      "learning_rate": 2.7100000000000003e-06,
      "loss": 1.2481,
      "step": 3
    },
    {
      "epoch": 0.006082493822467212,
      "grad_norm": 3.48298105333948,
      "learning_rate": 3.6133333333333336e-06,
      "loss": 1.2786,
      "step": 4
    },
    {
      "epoch": 0.0076031172780840145,
      "grad_norm": 2.7761816462967768,
      "learning_rate": 4.516666666666667e-06,
      "loss": 1.233,
      "step": 5
    },
    {
      "epoch": 0.009123740733700817,
      "grad_norm": 1.7543427633433306,
      "learning_rate": 5.420000000000001e-06,
      "loss": 1.2459,
      "step": 6
    },
    {
      "epoch": 0.01064436418931762,
      "grad_norm": 1.6976304839039194,
      "learning_rate": 6.3233333333333335e-06,
      "loss": 1.1905,
      "step": 7
    },
    {
      "epoch": 0.012164987644934424,
      "grad_norm": 3.06752449258415,
      "learning_rate": 7.226666666666667e-06,
      "loss": 1.1417,
      "step": 8
    },
    {
      "epoch": 0.013685611100551226,
      "grad_norm": 3.445950798936361,
      "learning_rate": 8.13e-06,
      "loss": 1.1746,
      "step": 9
    },
    {
      "epoch": 0.015206234556168029,
      "grad_norm": 3.088218157818855,
      "learning_rate": 9.033333333333334e-06,
      "loss": 1.1209,
      "step": 10
    },
    {
      "epoch": 0.01672685801178483,
      "grad_norm": 3.3669650804349973,
      "learning_rate": 9.936666666666666e-06,
      "loss": 1.138,
      "step": 11
    },
    {
      "epoch": 0.018247481467401634,
      "grad_norm": 3.1041727231133174,
      "learning_rate": 1.0840000000000001e-05,
      "loss": 1.1188,
      "step": 12
    },
    {
      "epoch": 0.019768104923018438,
      "grad_norm": 2.21205722327462,
      "learning_rate": 1.1743333333333335e-05,
      "loss": 1.0873,
      "step": 13
    },
    {
      "epoch": 0.02128872837863524,
      "grad_norm": 1.32813912161683,
      "learning_rate": 1.2646666666666667e-05,
      "loss": 1.1163,
      "step": 14
    },
    {
      "epoch": 0.022809351834252044,
      "grad_norm": 2.3535108175852097,
      "learning_rate": 1.355e-05,
      "loss": 1.1189,
      "step": 15
    },
    {
      "epoch": 0.024329975289868848,
      "grad_norm": 2.8032523790378274,
      "learning_rate": 1.4453333333333334e-05,
      "loss": 1.0621,
      "step": 16
    },
    {
      "epoch": 0.025850598745485648,
      "grad_norm": 1.8636564282922823,
      "learning_rate": 1.5356666666666668e-05,
      "loss": 1.0059,
      "step": 17
    },
    {
      "epoch": 0.02737122220110245,
      "grad_norm": 1.198430471743954,
      "learning_rate": 1.626e-05,
      "loss": 1.0572,
      "step": 18
    },
    {
      "epoch": 0.028891845656719255,
      "grad_norm": 1.3874306913597059,
      "learning_rate": 1.7163333333333332e-05,
      "loss": 1.024,
      "step": 19
    },
    {
      "epoch": 0.030412469112336058,
      "grad_norm": 1.1392396226185426,
      "learning_rate": 1.8066666666666668e-05,
      "loss": 1.0426,
      "step": 20
    },
    {
      "epoch": 0.03193309256795286,
      "grad_norm": 0.7881813145609246,
      "learning_rate": 1.897e-05,
      "loss": 1.0382,
      "step": 21
    },
    {
      "epoch": 0.03345371602356966,
      "grad_norm": 1.0470837153138448,
      "learning_rate": 1.987333333333333e-05,
      "loss": 0.9916,
      "step": 22
    },
    {
      "epoch": 0.03497433947918647,
      "grad_norm": 0.9265643204523712,
      "learning_rate": 2.077666666666667e-05,
      "loss": 1.0348,
      "step": 23
    },
    {
      "epoch": 0.03649496293480327,
      "grad_norm": 0.8620032399568974,
      "learning_rate": 2.1680000000000002e-05,
      "loss": 0.9893,
      "step": 24
    },
    {
      "epoch": 0.038015586390420075,
      "grad_norm": 0.963710426135712,
      "learning_rate": 2.2583333333333335e-05,
      "loss": 0.9931,
      "step": 25
    },
    {
      "epoch": 0.039536209846036875,
      "grad_norm": 0.7104087725888792,
      "learning_rate": 2.348666666666667e-05,
      "loss": 0.9723,
      "step": 26
    },
    {
      "epoch": 0.041056833301653675,
      "grad_norm": 0.8206318828097199,
      "learning_rate": 2.4390000000000002e-05,
      "loss": 1.0048,
      "step": 27
    },
    {
      "epoch": 0.04257745675727048,
      "grad_norm": 0.88460359130528,
      "learning_rate": 2.5293333333333334e-05,
      "loss": 0.9686,
      "step": 28
    },
    {
      "epoch": 0.04409808021288728,
      "grad_norm": 0.6842646410238233,
      "learning_rate": 2.619666666666667e-05,
      "loss": 0.9942,
      "step": 29
    },
    {
      "epoch": 0.04561870366850409,
      "grad_norm": 0.9420397284384021,
      "learning_rate": 2.71e-05,
      "loss": 0.9619,
      "step": 30
    },
    {
      "epoch": 0.04713932712412089,
      "grad_norm": 0.7311688429092981,
      "learning_rate": 2.7099959441789884e-05,
      "loss": 0.9722,
      "step": 31
    },
    {
      "epoch": 0.048659950579737696,
      "grad_norm": 0.8897905291941917,
      "learning_rate": 2.7099837767402332e-05,
      "loss": 0.9767,
      "step": 32
    },
    {
      "epoch": 0.050180574035354496,
      "grad_norm": 0.7198270553694668,
      "learning_rate": 2.709963497756574e-05,
      "loss": 0.9209,
      "step": 33
    },
    {
      "epoch": 0.051701197490971296,
      "grad_norm": 0.6437197076975016,
      "learning_rate": 2.7099351073494104e-05,
      "loss": 0.954,
      "step": 34
    },
    {
      "epoch": 0.0532218209465881,
      "grad_norm": 0.6539693647049317,
      "learning_rate": 2.7098986056886998e-05,
      "loss": 0.9281,
      "step": 35
    },
    {
      "epoch": 0.0547424444022049,
      "grad_norm": 0.7438647990016182,
      "learning_rate": 2.7098539929929583e-05,
      "loss": 0.9198,
      "step": 36
    },
    {
      "epoch": 0.05626306785782171,
      "grad_norm": 0.6710026252513194,
      "learning_rate": 2.7098012695292568e-05,
      "loss": 0.9535,
      "step": 37
    },
    {
      "epoch": 0.05778369131343851,
      "grad_norm": 0.7336846915639782,
      "learning_rate": 2.709740435613222e-05,
      "loss": 0.9188,
      "step": 38
    },
    {
      "epoch": 0.05930431476905531,
      "grad_norm": 0.7107571686807039,
      "learning_rate": 2.709671491609034e-05,
      "loss": 0.9464,
      "step": 39
    },
    {
      "epoch": 0.060824938224672116,
      "grad_norm": 0.6815590773111158,
      "learning_rate": 2.7095944379294215e-05,
      "loss": 0.9652,
      "step": 40
    },
    {
      "epoch": 0.062345561680288916,
      "grad_norm": 0.7639644922873411,
      "learning_rate": 2.709509275035663e-05,
      "loss": 0.9305,
      "step": 41
    },
    {
      "epoch": 0.06386618513590572,
      "grad_norm": 0.6137196824616831,
      "learning_rate": 2.709416003437583e-05,
      "loss": 0.8947,
      "step": 42
    },
    {
      "epoch": 0.06538680859152253,
      "grad_norm": 0.730883182696482,
      "learning_rate": 2.709314623693546e-05,
      "loss": 0.9284,
      "step": 43
    },
    {
      "epoch": 0.06690743204713932,
      "grad_norm": 0.6378775889130142,
      "learning_rate": 2.7092051364104584e-05,
      "loss": 0.9319,
      "step": 44
    },
    {
      "epoch": 0.06842805550275613,
      "grad_norm": 0.6881392029525518,
      "learning_rate": 2.709087542243759e-05,
      "loss": 0.9561,
      "step": 45
    },
    {
      "epoch": 0.06994867895837294,
      "grad_norm": 0.7653476020607418,
      "learning_rate": 2.708961841897421e-05,
      "loss": 0.9147,
      "step": 46
    },
    {
      "epoch": 0.07146930241398973,
      "grad_norm": 0.6329992439002645,
      "learning_rate": 2.7088280361239425e-05,
      "loss": 0.9097,
      "step": 47
    },
    {
      "epoch": 0.07298992586960654,
      "grad_norm": 0.7176283996505511,
      "learning_rate": 2.7086861257243455e-05,
      "loss": 0.923,
      "step": 48
    },
    {
      "epoch": 0.07451054932522334,
      "grad_norm": 0.5868244553340308,
      "learning_rate": 2.7085361115481697e-05,
      "loss": 0.9099,
      "step": 49
    },
    {
      "epoch": 0.07603117278084015,
      "grad_norm": 0.6598413586849197,
      "learning_rate": 2.7083779944934685e-05,
      "loss": 0.9198,
      "step": 50
    },
    {
      "epoch": 0.07755179623645694,
      "grad_norm": 0.5578106414522218,
      "learning_rate": 2.7082117755068008e-05,
      "loss": 0.8798,
      "step": 51
    },
    {
      "epoch": 0.07907241969207375,
      "grad_norm": 0.5382560015493899,
      "learning_rate": 2.708037455583229e-05,
      "loss": 0.9423,
      "step": 52
    },
    {
      "epoch": 0.08059304314769056,
      "grad_norm": 0.7104523737465288,
      "learning_rate": 2.7078550357663116e-05,
      "loss": 0.8525,
      "step": 53
    },
    {
      "epoch": 0.08211366660330735,
      "grad_norm": 0.6283073524726329,
      "learning_rate": 2.7076645171480954e-05,
      "loss": 0.8691,
      "step": 54
    },
    {
      "epoch": 0.08363429005892416,
      "grad_norm": 0.6650416502799579,
      "learning_rate": 2.7074659008691105e-05,
      "loss": 0.867,
      "step": 55
    },
    {
      "epoch": 0.08515491351454096,
      "grad_norm": 0.7744652466782505,
      "learning_rate": 2.707259188118364e-05,
      "loss": 0.9067,
      "step": 56
    },
    {
      "epoch": 0.08667553697015777,
      "grad_norm": 0.8615866504032589,
      "learning_rate": 2.7070443801333323e-05,
      "loss": 0.888,
      "step": 57
    },
    {
      "epoch": 0.08819616042577456,
      "grad_norm": 0.7552923225662669,
      "learning_rate": 2.706821478199952e-05,
      "loss": 0.9394,
      "step": 58
    },
    {
      "epoch": 0.08971678388139137,
      "grad_norm": 0.8827458802779125,
      "learning_rate": 2.706590483652616e-05,
      "loss": 0.9333,
      "step": 59
    },
    {
      "epoch": 0.09123740733700818,
      "grad_norm": 0.5735989763886916,
      "learning_rate": 2.7063513978741612e-05,
      "loss": 0.9045,
      "step": 60
    },
    {
      "epoch": 0.09275803079262497,
      "grad_norm": 0.935081852925912,
      "learning_rate": 2.706104222295863e-05,
      "loss": 0.9103,
      "step": 61
    },
    {
      "epoch": 0.09427865424824178,
      "grad_norm": 0.974887379533721,
      "learning_rate": 2.7058489583974263e-05,
      "loss": 0.8588,
      "step": 62
    },
    {
      "epoch": 0.09579927770385858,
      "grad_norm": 0.8802808107178965,
      "learning_rate": 2.7055856077069762e-05,
      "loss": 0.9196,
      "step": 63
    },
    {
      "epoch": 0.09731990115947539,
      "grad_norm": 0.7021620798712934,
      "learning_rate": 2.7053141718010486e-05,
      "loss": 0.891,
      "step": 64
    },
    {
      "epoch": 0.09884052461509218,
      "grad_norm": 1.088284062839966,
      "learning_rate": 2.7050346523045816e-05,
      "loss": 0.8944,
      "step": 65
    },
    {
      "epoch": 0.10036114807070899,
      "grad_norm": 0.6749292391517889,
      "learning_rate": 2.7047470508909053e-05,
      "loss": 0.8815,
      "step": 66
    },
    {
      "epoch": 0.1018817715263258,
      "grad_norm": 0.8723921836119483,
      "learning_rate": 2.704451369281731e-05,
      "loss": 0.859,
      "step": 67
    },
    {
      "epoch": 0.10340239498194259,
      "grad_norm": 0.7760246702094213,
      "learning_rate": 2.7041476092471437e-05,
      "loss": 0.9184,
      "step": 68
    },
    {
      "epoch": 0.1049230184375594,
      "grad_norm": 0.7139493353356939,
      "learning_rate": 2.7038357726055864e-05,
      "loss": 0.8692,
      "step": 69
    },
    {
      "epoch": 0.1064436418931762,
      "grad_norm": 0.6739017082503604,
      "learning_rate": 2.7035158612238555e-05,
      "loss": 0.8609,
      "step": 70
    },
    {
      "epoch": 0.10796426534879301,
      "grad_norm": 0.7433509963897333,
      "learning_rate": 2.7031878770170844e-05,
      "loss": 0.9063,
      "step": 71
    },
    {
      "epoch": 0.1094848888044098,
      "grad_norm": 0.6089113038384066,
      "learning_rate": 2.7028518219487355e-05,
      "loss": 0.8703,
      "step": 72
    },
    {
      "epoch": 0.11100551226002661,
      "grad_norm": 0.7618672475143956,
      "learning_rate": 2.7025076980305847e-05,
      "loss": 0.882,
      "step": 73
    },
    {
      "epoch": 0.11252613571564342,
      "grad_norm": 0.5112910612081544,
      "learning_rate": 2.7021555073227146e-05,
      "loss": 0.8726,
      "step": 74
    },
    {
      "epoch": 0.11404675917126021,
      "grad_norm": 0.6949396608818647,
      "learning_rate": 2.701795251933497e-05,
      "loss": 0.8671,
      "step": 75
    },
    {
      "epoch": 0.11556738262687702,
      "grad_norm": 0.741408465947176,
      "learning_rate": 2.7014269340195837e-05,
      "loss": 0.8816,
      "step": 76
    },
    {
      "epoch": 0.11708800608249383,
      "grad_norm": 0.6798649172316688,
      "learning_rate": 2.7010505557858927e-05,
      "loss": 0.87,
      "step": 77
    },
    {
      "epoch": 0.11860862953811062,
      "grad_norm": 0.7927523181373072,
      "learning_rate": 2.7006661194855928e-05,
      "loss": 0.8749,
      "step": 78
    },
    {
      "epoch": 0.12012925299372743,
      "grad_norm": 0.5635726177412718,
      "learning_rate": 2.7002736274200943e-05,
      "loss": 0.8472,
      "step": 79
    },
    {
      "epoch": 0.12164987644934423,
      "grad_norm": 0.6854198399728353,
      "learning_rate": 2.699873081939032e-05,
      "loss": 0.8673,
      "step": 80
    },
    {
      "epoch": 0.12317049990496104,
      "grad_norm": 0.692082771269964,
      "learning_rate": 2.6994644854402514e-05,
      "loss": 0.887,
      "step": 81
    },
    {
      "epoch": 0.12469112336057783,
      "grad_norm": 0.5839628354594621,
      "learning_rate": 2.6990478403697964e-05,
      "loss": 0.8566,
      "step": 82
    },
    {
      "epoch": 0.12621174681619465,
      "grad_norm": 0.5365274637626306,
      "learning_rate": 2.698623149221892e-05,
      "loss": 0.8246,
      "step": 83
    },
    {
      "epoch": 0.12773237027181145,
      "grad_norm": 0.597903169885847,
      "learning_rate": 2.6981904145389317e-05,
      "loss": 0.8331,
      "step": 84
    },
    {
      "epoch": 0.12925299372742824,
      "grad_norm": 0.5825252240476892,
      "learning_rate": 2.697749638911461e-05,
      "loss": 0.8496,
      "step": 85
    },
    {
      "epoch": 0.13077361718304506,
      "grad_norm": 0.7057959507531225,
      "learning_rate": 2.697300824978161e-05,
      "loss": 0.9079,
      "step": 86
    },
    {
      "epoch": 0.13229424063866185,
      "grad_norm": 0.5509650257453511,
      "learning_rate": 2.6968439754258348e-05,
      "loss": 0.8447,
      "step": 87
    },
    {
      "epoch": 0.13381486409427865,
      "grad_norm": 0.6392433098525124,
      "learning_rate": 2.6963790929893908e-05,
      "loss": 0.8563,
      "step": 88
    },
    {
      "epoch": 0.13533548754989547,
      "grad_norm": 0.6150296701472108,
      "learning_rate": 2.695906180451825e-05,
      "loss": 0.8746,
      "step": 89
    },
    {
      "epoch": 0.13685611100551226,
      "grad_norm": 0.5243071440543693,
      "learning_rate": 2.6954252406442054e-05,
      "loss": 0.8462,
      "step": 90
    },
    {
      "epoch": 0.13837673446112905,
      "grad_norm": 0.5930951445222234,
      "learning_rate": 2.6949362764456548e-05,
      "loss": 0.8579,
      "step": 91
    },
    {
      "epoch": 0.13989735791674587,
      "grad_norm": 0.5384200127653291,
      "learning_rate": 2.694439290783334e-05,
      "loss": 0.8442,
      "step": 92
    },
    {
      "epoch": 0.14141798137236267,
      "grad_norm": 0.4526509726759922,
      "learning_rate": 2.693934286632423e-05,
      "loss": 0.84,
      "step": 93
    },
    {
      "epoch": 0.14293860482797946,
      "grad_norm": 0.5941168775185707,
      "learning_rate": 2.6934212670161057e-05,
      "loss": 0.8715,
      "step": 94
    },
    {
      "epoch": 0.14445922828359628,
      "grad_norm": 0.5044642393405714,
      "learning_rate": 2.6929002350055486e-05,
      "loss": 0.8815,
      "step": 95
    },
    {
      "epoch": 0.14597985173921307,
      "grad_norm": 0.5805569476986123,
      "learning_rate": 2.6923711937198847e-05,
      "loss": 0.8678,
      "step": 96
    },
    {
      "epoch": 0.14750047519482987,
      "grad_norm": 0.4790647294772535,
      "learning_rate": 2.6918341463261945e-05,
      "loss": 0.8557,
      "step": 97
    },
    {
      "epoch": 0.1490210986504467,
      "grad_norm": 0.5484601209066985,
      "learning_rate": 2.691289096039486e-05,
      "loss": 0.8565,
      "step": 98
    },
    {
      "epoch": 0.15054172210606348,
      "grad_norm": 0.5095445371601812,
      "learning_rate": 2.6907360461226763e-05,
      "loss": 0.8785,
      "step": 99
    },
    {
      "epoch": 0.1520623455616803,
      "grad_norm": 0.5604825268976987,
      "learning_rate": 2.6901749998865718e-05,
      "loss": 0.891,
      "step": 100
    },
    {
      "epoch": 0.1535829690172971,
      "grad_norm": 0.49509934406592176,
      "learning_rate": 2.6896059606898493e-05,
      "loss": 0.8456,
      "step": 101
    },
    {
      "epoch": 0.1551035924729139,
      "grad_norm": 0.6476921749987207,
      "learning_rate": 2.6890289319390343e-05,
      "loss": 0.8217,
      "step": 102
    },
    {
      "epoch": 0.1566242159285307,
      "grad_norm": 0.4780896251811207,
      "learning_rate": 2.688443917088481e-05,
      "loss": 0.8711,
      "step": 103
    },
    {
      "epoch": 0.1581448393841475,
      "grad_norm": 0.6545666658477076,
      "learning_rate": 2.687850919640353e-05,
      "loss": 0.8444,
      "step": 104
    },
    {
      "epoch": 0.1596654628397643,
      "grad_norm": 0.61000177257977,
      "learning_rate": 2.687249943144601e-05,
      "loss": 0.8433,
      "step": 105
    },
    {
      "epoch": 0.16118608629538111,
      "grad_norm": 0.5126707787512214,
      "learning_rate": 2.6866409911989412e-05,
      "loss": 0.839,
      "step": 106
    },
    {
      "epoch": 0.1627067097509979,
      "grad_norm": 0.4882732798086082,
      "learning_rate": 2.6860240674488355e-05,
      "loss": 0.8098,
      "step": 107
    },
    {
      "epoch": 0.1642273332066147,
      "grad_norm": 0.5554067699143529,
      "learning_rate": 2.685399175587468e-05,
      "loss": 0.8406,
      "step": 108
    },
    {
      "epoch": 0.16574795666223152,
      "grad_norm": 0.530718125213625,
      "learning_rate": 2.6847663193557236e-05,
      "loss": 0.8281,
      "step": 109
    },
    {
      "epoch": 0.16726858011784831,
      "grad_norm": 0.4949823541094598,
      "learning_rate": 2.684125502542165e-05,
      "loss": 0.8457,
      "step": 110
    },
    {
      "epoch": 0.1687892035734651,
      "grad_norm": 0.556274418077457,
      "learning_rate": 2.683476728983012e-05,
      "loss": 0.8543,
      "step": 111
    },
    {
      "epoch": 0.17030982702908193,
      "grad_norm": 0.45222659620728906,
      "learning_rate": 2.682820002562116e-05,
      "loss": 0.8348,
      "step": 112
    },
    {
      "epoch": 0.17183045048469872,
      "grad_norm": 0.4911581618988163,
      "learning_rate": 2.6821553272109377e-05,
      "loss": 0.8276,
      "step": 113
    },
    {
      "epoch": 0.17335107394031554,
      "grad_norm": 0.5259633420118487,
      "learning_rate": 2.6814827069085237e-05,
      "loss": 0.8319,
      "step": 114
    },
    {
      "epoch": 0.17487169739593234,
      "grad_norm": 0.49901119248436054,
      "learning_rate": 2.6808021456814832e-05,
      "loss": 0.8608,
      "step": 115
    },
    {
      "epoch": 0.17639232085154913,
      "grad_norm": 0.44652937902498396,
      "learning_rate": 2.6801136476039637e-05,
      "loss": 0.8363,
      "step": 116
    },
    {
      "epoch": 0.17791294430716595,
      "grad_norm": 0.4814399942165516,
      "learning_rate": 2.6794172167976247e-05,
      "loss": 0.8291,
      "step": 117
    },
    {
      "epoch": 0.17943356776278274,
      "grad_norm": 0.4877382938090122,
      "learning_rate": 2.6787128574316158e-05,
      "loss": 0.8547,
      "step": 118
    },
    {
      "epoch": 0.18095419121839953,
      "grad_norm": 0.6191110354751792,
      "learning_rate": 2.6780005737225512e-05,
      "loss": 0.81,
      "step": 119
    },
    {
      "epoch": 0.18247481467401636,
      "grad_norm": 0.5437167197085974,
      "learning_rate": 2.677280369934482e-05,
      "loss": 0.8393,
      "step": 120
    },
    {
      "epoch": 0.18399543812963315,
      "grad_norm": 0.5297828079815319,
      "learning_rate": 2.676552250378873e-05,
      "loss": 0.7954,
      "step": 121
    },
    {
      "epoch": 0.18551606158524994,
      "grad_norm": 0.5514706250971693,
      "learning_rate": 2.6758162194145783e-05,
      "loss": 0.815,
      "step": 122
    },
    {
      "epoch": 0.18703668504086676,
      "grad_norm": 0.5843944412164658,
      "learning_rate": 2.6750722814478098e-05,
      "loss": 0.8276,
      "step": 123
    },
    {
      "epoch": 0.18855730849648356,
      "grad_norm": 0.7942439470942368,
      "learning_rate": 2.6743204409321177e-05,
      "loss": 0.8254,
      "step": 124
    },
    {
      "epoch": 0.19007793195210035,
      "grad_norm": 0.6174070370639455,
      "learning_rate": 2.673560702368358e-05,
      "loss": 0.8549,
      "step": 125
    },
    {
      "epoch": 0.19159855540771717,
      "grad_norm": 0.49426597944430567,
      "learning_rate": 2.6727930703046695e-05,
      "loss": 0.8447,
      "step": 126
    },
    {
      "epoch": 0.19311917886333396,
      "grad_norm": 0.4989733325059912,
      "learning_rate": 2.6720175493364437e-05,
      "loss": 0.8499,
      "step": 127
    },
    {
      "epoch": 0.19463980231895078,
      "grad_norm": 0.5741663400348602,
      "learning_rate": 2.6712341441063006e-05,
      "loss": 0.8675,
      "step": 128
    },
    {
      "epoch": 0.19616042577456758,
      "grad_norm": 0.532503832446581,
      "learning_rate": 2.6704428593040568e-05,
      "loss": 0.8053,
      "step": 129
    },
    {
      "epoch": 0.19768104923018437,
      "grad_norm": 0.47559928839451404,
      "learning_rate": 2.6696436996667005e-05,
      "loss": 0.8075,
      "step": 130
    },
    {
      "epoch": 0.1992016726858012,
      "grad_norm": 0.5372739975864717,
      "learning_rate": 2.6688366699783625e-05,
      "loss": 0.8565,
      "step": 131
    },
    {
      "epoch": 0.20072229614141798,
      "grad_norm": 0.5575627816877122,
      "learning_rate": 2.6680217750702874e-05,
      "loss": 0.8082,
      "step": 132
    },
    {
      "epoch": 0.20224291959703478,
      "grad_norm": 0.5868207344499796,
      "learning_rate": 2.6671990198208038e-05,
      "loss": 0.781,
      "step": 133
    },
    {
      "epoch": 0.2037635430526516,
      "grad_norm": 0.4362747143153947,
      "learning_rate": 2.6663684091552962e-05,
      "loss": 0.817,
      "step": 134
    },
    {
      "epoch": 0.2052841665082684,
      "grad_norm": 0.5793319639019973,
      "learning_rate": 2.6655299480461753e-05,
      "loss": 0.8414,
      "step": 135
    },
    {
      "epoch": 0.20680478996388518,
      "grad_norm": 0.5002998693635706,
      "learning_rate": 2.6646836415128478e-05,
      "loss": 0.8399,
      "step": 136
    },
    {
      "epoch": 0.208325413419502,
      "grad_norm": 0.5183039332160528,
      "learning_rate": 2.6638294946216876e-05,
      "loss": 0.8388,
      "step": 137
    },
    {
      "epoch": 0.2098460368751188,
      "grad_norm": 0.5333906191333849,
      "learning_rate": 2.6629675124860034e-05,
      "loss": 0.8579,
      "step": 138
    },
    {
      "epoch": 0.2113666603307356,
      "grad_norm": 0.5678231978891924,
      "learning_rate": 2.662097700266009e-05,
      "loss": 0.7973,
      "step": 139
    },
    {
      "epoch": 0.2128872837863524,
      "grad_norm": 0.6779338312838604,
      "learning_rate": 2.6612200631687935e-05,
      "loss": 0.7991,
      "step": 140
    },
    {
      "epoch": 0.2144079072419692,
      "grad_norm": 0.4549445363589815,
      "learning_rate": 2.6603346064482896e-05,
      "loss": 0.8342,
      "step": 141
    },
    {
      "epoch": 0.21592853069758602,
      "grad_norm": 0.5534663745815431,
      "learning_rate": 2.6594413354052406e-05,
      "loss": 0.8486,
      "step": 142
    },
    {
      "epoch": 0.21744915415320282,
      "grad_norm": 0.5263202593856873,
      "learning_rate": 2.6585402553871707e-05,
      "loss": 0.8162,
      "step": 143
    },
    {
      "epoch": 0.2189697776088196,
      "grad_norm": 0.5005224606123282,
      "learning_rate": 2.6576313717883517e-05,
      "loss": 0.8157,
      "step": 144
    },
    {
      "epoch": 0.22049040106443643,
      "grad_norm": 0.4408661056131368,
      "learning_rate": 2.6567146900497715e-05,
      "loss": 0.8073,
      "step": 145
    },
    {
      "epoch": 0.22201102452005322,
      "grad_norm": 0.5697511997765187,
      "learning_rate": 2.655790215659101e-05,
      "loss": 0.814,
      "step": 146
    },
    {
      "epoch": 0.22353164797567002,
      "grad_norm": 0.5227769257193289,
      "learning_rate": 2.654857954150661e-05,
      "loss": 0.8437,
      "step": 147
    },
    {
      "epoch": 0.22505227143128684,
      "grad_norm": 0.5335084066857467,
      "learning_rate": 2.6539179111053904e-05,
      "loss": 0.8122,
      "step": 148
    },
    {
      "epoch": 0.22657289488690363,
      "grad_norm": 0.6845924526755846,
      "learning_rate": 2.6529700921508117e-05,
      "loss": 0.8109,
      "step": 149
    },
    {
      "epoch": 0.22809351834252042,
      "grad_norm": 0.5497202515145223,
      "learning_rate": 2.652014502960997e-05,
      "loss": 0.8482,
      "step": 150
    },
    {
      "epoch": 0.22961414179813724,
      "grad_norm": 0.5925506572004898,
      "learning_rate": 2.651051149256535e-05,
      "loss": 0.8182,
      "step": 151
    },
    {
      "epoch": 0.23113476525375404,
      "grad_norm": 0.4748040759396778,
      "learning_rate": 2.6500800368044956e-05,
      "loss": 0.8236,
      "step": 152
    },
    {
      "epoch": 0.23265538870937083,
      "grad_norm": 0.4949711715480288,
      "learning_rate": 2.6491011714183972e-05,
      "loss": 0.8044,
      "step": 153
    },
    {
      "epoch": 0.23417601216498765,
      "grad_norm": 0.4942784715771247,
      "learning_rate": 2.6481145589581697e-05,
      "loss": 0.8288,
      "step": 154
    },
    {
      "epoch": 0.23569663562060444,
      "grad_norm": 0.5323652293681927,
      "learning_rate": 2.647120205330121e-05,
      "loss": 0.7997,
      "step": 155
    },
    {
      "epoch": 0.23721725907622124,
      "grad_norm": 0.6476575452897515,
      "learning_rate": 2.646118116486901e-05,
      "loss": 0.8077,
      "step": 156
    },
    {
      "epoch": 0.23873788253183806,
      "grad_norm": 0.47184034820772325,
      "learning_rate": 2.6451082984274666e-05,
      "loss": 0.8292,
      "step": 157
    },
    {
      "epoch": 0.24025850598745485,
      "grad_norm": 0.6205069464172376,
      "learning_rate": 2.6440907571970438e-05,
      "loss": 0.8393,
      "step": 158
    },
    {
      "epoch": 0.24177912944307167,
      "grad_norm": 0.5209484434921768,
      "learning_rate": 2.6430654988870954e-05,
      "loss": 0.8245,
      "step": 159
    },
    {
      "epoch": 0.24329975289868846,
      "grad_norm": 0.5933368833938595,
      "learning_rate": 2.6420325296352796e-05,
      "loss": 0.8197,
      "step": 160
    },
    {
      "epoch": 0.24482037635430526,
      "grad_norm": 0.4788998696836293,
      "learning_rate": 2.6409918556254172e-05,
      "loss": 0.7937,
      "step": 161
    },
    {
      "epoch": 0.24634099980992208,
      "grad_norm": 0.567878399230218,
      "learning_rate": 2.639943483087453e-05,
      "loss": 0.8109,
      "step": 162
    },
    {
      "epoch": 0.24786162326553887,
      "grad_norm": 0.507261963638979,
      "learning_rate": 2.6388874182974187e-05,
      "loss": 0.8218,
      "step": 163
    },
    {
      "epoch": 0.24938224672115566,
      "grad_norm": 0.5864902720090787,
      "learning_rate": 2.637823667577395e-05,
      "loss": 0.8176,
      "step": 164
    },
    {
      "epoch": 0.2509028701767725,
      "grad_norm": 0.5918504624886162,
      "learning_rate": 2.636752237295474e-05,
      "loss": 0.83,
      "step": 165
    },
    {
      "epoch": 0.2524234936323893,
      "grad_norm": 0.43858309995443784,
      "learning_rate": 2.6356731338657212e-05,
      "loss": 0.8123,
      "step": 166
    },
    {
      "epoch": 0.25394411708800607,
      "grad_norm": 0.49034964682512816,
      "learning_rate": 2.6345863637481374e-05,
      "loss": 0.8174,
      "step": 167
    },
    {
      "epoch": 0.2554647405436229,
      "grad_norm": 0.5837987779382616,
      "learning_rate": 2.6334919334486195e-05,
      "loss": 0.8298,
      "step": 168
    },
    {
      "epoch": 0.2569853639992397,
      "grad_norm": 0.5281077280878048,
      "learning_rate": 2.6323898495189215e-05,
      "loss": 0.8175,
      "step": 169
    },
    {
      "epoch": 0.2585059874548565,
      "grad_norm": 0.5014520173664685,
      "learning_rate": 2.6312801185566158e-05,
      "loss": 0.799,
      "step": 170
    },
    {
      "epoch": 0.2600266109104733,
      "grad_norm": 0.6803320146493841,
      "learning_rate": 2.6301627472050526e-05,
      "loss": 0.7996,
      "step": 171
    },
    {
      "epoch": 0.2615472343660901,
      "grad_norm": 0.7395949415978587,
      "learning_rate": 2.629037742153322e-05,
      "loss": 0.8107,
      "step": 172
    },
    {
      "epoch": 0.2630678578217069,
      "grad_norm": 0.4513389629082933,
      "learning_rate": 2.6279051101362122e-05,
      "loss": 0.7952,
      "step": 173
    },
    {
      "epoch": 0.2645884812773237,
      "grad_norm": 0.6486009072322451,
      "learning_rate": 2.62676485793417e-05,
      "loss": 0.7874,
      "step": 174
    },
    {
      "epoch": 0.2661091047329405,
      "grad_norm": 0.5502319884395829,
      "learning_rate": 2.62561699237326e-05,
      "loss": 0.7963,
      "step": 175
    },
    {
      "epoch": 0.2676297281885573,
      "grad_norm": 0.5997728345326634,
      "learning_rate": 2.624461520325124e-05,
      "loss": 0.7972,
      "step": 176
    },
    {
      "epoch": 0.2691503516441741,
      "grad_norm": 0.7905728427904212,
      "learning_rate": 2.62329844870694e-05,
      "loss": 0.8029,
      "step": 177
    },
    {
      "epoch": 0.27067097509979093,
      "grad_norm": 0.6068096518092964,
      "learning_rate": 2.62212778448138e-05,
      "loss": 0.8434,
      "step": 178
    },
    {
      "epoch": 0.2721915985554077,
      "grad_norm": 0.8084200216220121,
      "learning_rate": 2.620949534656568e-05,
      "loss": 0.8005,
      "step": 179
    },
    {
      "epoch": 0.2737122220110245,
      "grad_norm": 0.4950225377554484,
      "learning_rate": 2.61976370628604e-05,
      "loss": 0.8261,
      "step": 180
    },
    {
      "epoch": 0.27523284546664134,
      "grad_norm": 0.5184301676475981,
      "learning_rate": 2.618570306468701e-05,
      "loss": 0.8239,
      "step": 181
    },
    {
      "epoch": 0.2767534689222581,
      "grad_norm": 0.601710109353413,
      "learning_rate": 2.61736934234878e-05,
      "loss": 0.7888,
      "step": 182
    },
    {
      "epoch": 0.2782740923778749,
      "grad_norm": 0.5736633456413043,
      "learning_rate": 2.616160821115792e-05,
      "loss": 0.8098,
      "step": 183
    },
    {
      "epoch": 0.27979471583349175,
      "grad_norm": 0.5690928538172545,
      "learning_rate": 2.6149447500044904e-05,
      "loss": 0.8294,
      "step": 184
    },
    {
      "epoch": 0.2813153392891085,
      "grad_norm": 0.6275365250577597,
      "learning_rate": 2.6137211362948256e-05,
      "loss": 0.8386,
      "step": 185
    },
    {
      "epoch": 0.28283596274472533,
      "grad_norm": 0.6701221466806971,
      "learning_rate": 2.612489987311903e-05,
      "loss": 0.791,
      "step": 186
    },
    {
      "epoch": 0.28435658620034215,
      "grad_norm": 0.7648465329508422,
      "learning_rate": 2.6112513104259354e-05,
      "loss": 0.8125,
      "step": 187
    },
    {
      "epoch": 0.2858772096559589,
      "grad_norm": 0.44806704127100555,
      "learning_rate": 2.6100051130522028e-05,
      "loss": 0.7849,
      "step": 188
    },
    {
      "epoch": 0.28739783311157574,
      "grad_norm": 0.7123885835323628,
      "learning_rate": 2.608751402651006e-05,
      "loss": 0.7607,
      "step": 189
    },
    {
      "epoch": 0.28891845656719256,
      "grad_norm": 0.4934771024884215,
      "learning_rate": 2.6074901867276204e-05,
      "loss": 0.8035,
      "step": 190
    },
    {
      "epoch": 0.2904390800228093,
      "grad_norm": 0.8095025862539972,
      "learning_rate": 2.6062214728322555e-05,
      "loss": 0.7829,
      "step": 191
    },
    {
      "epoch": 0.29195970347842615,
      "grad_norm": 0.5050611453741563,
      "learning_rate": 2.6049452685600052e-05,
      "loss": 0.7977,
      "step": 192
    },
    {
      "epoch": 0.29348032693404297,
      "grad_norm": 0.7210248910137731,
      "learning_rate": 2.6036615815508043e-05,
      "loss": 0.7956,
      "step": 193
    },
    {
      "epoch": 0.29500095038965973,
      "grad_norm": 0.5038427749270625,
      "learning_rate": 2.6023704194893835e-05,
      "loss": 0.7938,
      "step": 194
    },
    {
      "epoch": 0.29652157384527655,
      "grad_norm": 0.6126219303001782,
      "learning_rate": 2.6010717901052224e-05,
      "loss": 0.7922,
      "step": 195
    },
    {
      "epoch": 0.2980421973008934,
      "grad_norm": 0.5362006682961364,
      "learning_rate": 2.5997657011725022e-05,
      "loss": 0.8688,
      "step": 196
    },
    {
      "epoch": 0.2995628207565102,
      "grad_norm": 0.5017573346892746,
      "learning_rate": 2.598452160510062e-05,
      "loss": 0.7833,
      "step": 197
    },
    {
      "epoch": 0.30108344421212696,
      "grad_norm": 0.5086171325358533,
      "learning_rate": 2.597131175981349e-05,
      "loss": 0.8173,
      "step": 198
    },
    {
      "epoch": 0.3026040676677438,
      "grad_norm": 0.46242832959563607,
      "learning_rate": 2.5958027554943737e-05,
      "loss": 0.8228,
      "step": 199
    },
    {
      "epoch": 0.3041246911233606,
      "grad_norm": 0.5365620033072362,
      "learning_rate": 2.594466907001661e-05,
      "loss": 0.8468,
      "step": 200
    },
    {
      "epoch": 0.30564531457897737,
      "grad_norm": 0.4536900282460799,
      "learning_rate": 2.5931236385002023e-05,
      "loss": 0.7659,
      "step": 201
    },
    {
      "epoch": 0.3071659380345942,
      "grad_norm": 0.5349495426115096,
      "learning_rate": 2.591772958031411e-05,
      "loss": 0.8031,
      "step": 202
    },
    {
      "epoch": 0.308686561490211,
      "grad_norm": 0.5171572447957907,
      "learning_rate": 2.5904148736810704e-05,
      "loss": 0.8192,
      "step": 203
    },
    {
      "epoch": 0.3102071849458278,
      "grad_norm": 0.543619906524382,
      "learning_rate": 2.5890493935792855e-05,
      "loss": 0.8088,
      "step": 204
    },
    {
      "epoch": 0.3117278084014446,
      "grad_norm": 0.5471779246235068,
      "learning_rate": 2.5876765259004386e-05,
      "loss": 0.775,
      "step": 205
    },
    {
      "epoch": 0.3132484318570614,
      "grad_norm": 0.5530162050934202,
      "learning_rate": 2.5862962788631346e-05,
      "loss": 0.7886,
      "step": 206
    },
    {
      "epoch": 0.3147690553126782,
      "grad_norm": 0.5564374353682834,
      "learning_rate": 2.5849086607301564e-05,
      "loss": 0.7822,
      "step": 207
    },
    {
      "epoch": 0.316289678768295,
      "grad_norm": 0.50158890267917,
      "learning_rate": 2.583513679808412e-05,
      "loss": 0.7857,
      "step": 208
    },
    {
      "epoch": 0.3178103022239118,
      "grad_norm": 0.5514584077868472,
      "learning_rate": 2.582111344448888e-05,
      "loss": 0.7834,
      "step": 209
    },
    {
      "epoch": 0.3193309256795286,
      "grad_norm": 0.4984807668723581,
      "learning_rate": 2.580701663046597e-05,
      "loss": 0.7979,
      "step": 210
    },
    {
      "epoch": 0.3208515491351454,
      "grad_norm": 0.5779043543552743,
      "learning_rate": 2.579284644040529e-05,
      "loss": 0.8365,
      "step": 211
    },
    {
      "epoch": 0.32237217259076223,
      "grad_norm": 0.4914952548115715,
      "learning_rate": 2.5778602959135996e-05,
      "loss": 0.8082,
      "step": 212
    },
    {
      "epoch": 0.323892796046379,
      "grad_norm": 0.45463188187952797,
      "learning_rate": 2.5764286271925997e-05,
      "loss": 0.8002,
      "step": 213
    },
    {
      "epoch": 0.3254134195019958,
      "grad_norm": 0.5405224256712096,
      "learning_rate": 2.5749896464481448e-05,
      "loss": 0.8177,
      "step": 214
    },
    {
      "epoch": 0.32693404295761264,
      "grad_norm": 0.4945710149798116,
      "learning_rate": 2.5735433622946242e-05,
      "loss": 0.8202,
      "step": 215
    },
    {
      "epoch": 0.3284546664132294,
      "grad_norm": 0.5193387642723397,
      "learning_rate": 2.572089783390148e-05,
      "loss": 0.7878,
      "step": 216
    },
    {
      "epoch": 0.3299752898688462,
      "grad_norm": 0.43644483818152885,
      "learning_rate": 2.5706289184364962e-05,
      "loss": 0.8465,
      "step": 217
    },
    {
      "epoch": 0.33149591332446304,
      "grad_norm": 0.6429429755905944,
      "learning_rate": 2.569160776179065e-05,
      "loss": 0.793,
      "step": 218
    },
    {
      "epoch": 0.3330165367800798,
      "grad_norm": 0.4881784207711263,
      "learning_rate": 2.567685365406819e-05,
      "loss": 0.7787,
      "step": 219
    },
    {
      "epoch": 0.33453716023569663,
      "grad_norm": 0.5903797222010695,
      "learning_rate": 2.566202694952232e-05,
      "loss": 0.8071,
      "step": 220
    },
    {
      "epoch": 0.33605778369131345,
      "grad_norm": 0.4935742650802179,
      "learning_rate": 2.5647127736912397e-05,
      "loss": 0.8128,
      "step": 221
    },
    {
      "epoch": 0.3375784071469302,
      "grad_norm": 0.6206075855689506,
      "learning_rate": 2.5632156105431836e-05,
      "loss": 0.8134,
      "step": 222
    },
    {
      "epoch": 0.33909903060254704,
      "grad_norm": 0.4693335100552506,
      "learning_rate": 2.561711214470759e-05,
      "loss": 0.7877,
      "step": 223
    },
    {
      "epoch": 0.34061965405816386,
      "grad_norm": 0.5541352157938085,
      "learning_rate": 2.560199594479959e-05,
      "loss": 0.793,
      "step": 224
    },
    {
      "epoch": 0.3421402775137807,
      "grad_norm": 0.5119311192678664,
      "learning_rate": 2.5586807596200247e-05,
      "loss": 0.8076,
      "step": 225
    },
    {
      "epoch": 0.34366090096939744,
      "grad_norm": 0.44821567228976084,
      "learning_rate": 2.5571547189833876e-05,
      "loss": 0.7676,
      "step": 226
    },
    {
      "epoch": 0.34518152442501426,
      "grad_norm": 0.5108021271320223,
      "learning_rate": 2.555621481705616e-05,
      "loss": 0.7761,
      "step": 227
    },
    {
      "epoch": 0.3467021478806311,
      "grad_norm": 0.520419930384087,
      "learning_rate": 2.5540810569653606e-05,
      "loss": 0.8113,
      "step": 228
    },
    {
      "epoch": 0.34822277133624785,
      "grad_norm": 0.49172520245402473,
      "learning_rate": 2.5525334539843e-05,
      "loss": 0.8049,
      "step": 229
    },
    {
      "epoch": 0.34974339479186467,
      "grad_norm": 0.5350433416575631,
      "learning_rate": 2.550978682027084e-05,
      "loss": 0.7918,
      "step": 230
    },
    {
      "epoch": 0.3512640182474815,
      "grad_norm": 0.5521639416583181,
      "learning_rate": 2.5494167504012807e-05,
      "loss": 0.7705,
      "step": 231
    },
    {
      "epoch": 0.35278464170309826,
      "grad_norm": 0.4924879256498397,
      "learning_rate": 2.547847668457318e-05,
      "loss": 0.8116,
      "step": 232
    },
    {
      "epoch": 0.3543052651587151,
      "grad_norm": 0.5466037905256877,
      "learning_rate": 2.5462714455884287e-05,
      "loss": 0.7808,
      "step": 233
    },
    {
      "epoch": 0.3558258886143319,
      "grad_norm": 0.5030091896084465,
      "learning_rate": 2.5446880912305954e-05,
      "loss": 0.7968,
      "step": 234
    },
    {
      "epoch": 0.35734651206994866,
      "grad_norm": 0.47695683680901585,
      "learning_rate": 2.543097614862492e-05,
      "loss": 0.7777,
      "step": 235
    },
    {
      "epoch": 0.3588671355255655,
      "grad_norm": 0.49323354270252745,
      "learning_rate": 2.5415000260054287e-05,
      "loss": 0.8114,
      "step": 236
    },
    {
      "epoch": 0.3603877589811823,
      "grad_norm": 0.5547918130480031,
      "learning_rate": 2.5398953342232942e-05,
      "loss": 0.7959,
      "step": 237
    },
    {
      "epoch": 0.36190838243679907,
      "grad_norm": 0.42592099953025825,
      "learning_rate": 2.5382835491224977e-05,
      "loss": 0.8067,
      "step": 238
    },
    {
      "epoch": 0.3634290058924159,
      "grad_norm": 0.5473160997053151,
      "learning_rate": 2.536664680351913e-05,
      "loss": 0.8149,
      "step": 239
    },
    {
      "epoch": 0.3649496293480327,
      "grad_norm": 0.5161750461424985,
      "learning_rate": 2.5350387376028207e-05,
      "loss": 0.7456,
      "step": 240
    },
    {
      "epoch": 0.3664702528036495,
      "grad_norm": 0.562540015047346,
      "learning_rate": 2.5334057306088472e-05,
      "loss": 0.8161,
      "step": 241
    },
    {
      "epoch": 0.3679908762592663,
      "grad_norm": 0.5331918164095757,
      "learning_rate": 2.5317656691459103e-05,
      "loss": 0.7834,
      "step": 242
    },
    {
      "epoch": 0.3695114997148831,
      "grad_norm": 0.5704962131477223,
      "learning_rate": 2.530118563032159e-05,
      "loss": 0.7881,
      "step": 243
    },
    {
      "epoch": 0.3710321231704999,
      "grad_norm": 0.5208634783184293,
      "learning_rate": 2.5284644221279147e-05,
      "loss": 0.8138,
      "step": 244
    },
    {
      "epoch": 0.3725527466261167,
      "grad_norm": 0.5211185784219639,
      "learning_rate": 2.526803256335611e-05,
      "loss": 0.831,
      "step": 245
    },
    {
      "epoch": 0.3740733700817335,
      "grad_norm": 0.5045487224701021,
      "learning_rate": 2.525135075599738e-05,
      "loss": 0.8156,
      "step": 246
    },
    {
      "epoch": 0.3755939935373503,
      "grad_norm": 0.5069878820838861,
      "learning_rate": 2.523459889906778e-05,
      "loss": 0.7955,
      "step": 247
    },
    {
      "epoch": 0.3771146169929671,
      "grad_norm": 0.4949818081031886,
      "learning_rate": 2.5217777092851496e-05,
      "loss": 0.7941,
      "step": 248
    },
    {
      "epoch": 0.37863524044858393,
      "grad_norm": 0.47385420044302173,
      "learning_rate": 2.5200885438051454e-05,
      "loss": 0.7897,
      "step": 249
    },
    {
      "epoch": 0.3801558639042007,
      "grad_norm": 0.4584812154097583,
      "learning_rate": 2.5183924035788738e-05,
      "loss": 0.8149,
      "step": 250
    },
    {
      "epoch": 0.3816764873598175,
      "grad_norm": 0.40766128893251813,
      "learning_rate": 2.5166892987601954e-05,
      "loss": 0.7965,
      "step": 251
    },
    {
      "epoch": 0.38319711081543434,
      "grad_norm": 0.4464504741734316,
      "learning_rate": 2.5149792395446655e-05,
      "loss": 0.7793,
      "step": 252
    },
    {
      "epoch": 0.3847177342710511,
      "grad_norm": 0.42738515223990925,
      "learning_rate": 2.5132622361694704e-05,
      "loss": 0.7957,
      "step": 253
    },
    {
      "epoch": 0.3862383577266679,
      "grad_norm": 0.4980456405593658,
      "learning_rate": 2.5115382989133686e-05,
      "loss": 0.8032,
      "step": 254
    },
    {
      "epoch": 0.38775898118228475,
      "grad_norm": 0.5114356446133503,
      "learning_rate": 2.5098074380966263e-05,
      "loss": 0.781,
      "step": 255
    },
    {
      "epoch": 0.38927960463790157,
      "grad_norm": 0.4141552486101403,
      "learning_rate": 2.508069664080959e-05,
      "loss": 0.7593,
      "step": 256
    },
    {
      "epoch": 0.39080022809351833,
      "grad_norm": 0.43458143129055027,
      "learning_rate": 2.5063249872694662e-05,
      "loss": 0.7879,
      "step": 257
    },
    {
      "epoch": 0.39232085154913515,
      "grad_norm": 0.45941178211274825,
      "learning_rate": 2.504573418106572e-05,
      "loss": 0.7947,
      "step": 258
    },
    {
      "epoch": 0.393841475004752,
      "grad_norm": 0.49731812944358156,
      "learning_rate": 2.5028149670779597e-05,
      "loss": 0.7796,
      "step": 259
    },
    {
      "epoch": 0.39536209846036874,
      "grad_norm": 0.4021875079520183,
      "learning_rate": 2.5010496447105118e-05,
      "loss": 0.7844,
      "step": 260
    },
    {
      "epoch": 0.39688272191598556,
      "grad_norm": 0.5734524869531662,
      "learning_rate": 2.4992774615722457e-05,
      "loss": 0.7739,
      "step": 261
    },
    {
      "epoch": 0.3984033453716024,
      "grad_norm": 0.4767545528986454,
      "learning_rate": 2.4974984282722498e-05,
      "loss": 0.7787,
      "step": 262
    },
    {
      "epoch": 0.39992396882721915,
      "grad_norm": 0.45569173851803857,
      "learning_rate": 2.4957125554606207e-05,
      "loss": 0.7957,
      "step": 263
    },
    {
      "epoch": 0.40144459228283597,
      "grad_norm": 0.4583616284261655,
      "learning_rate": 2.4939198538284008e-05,
      "loss": 0.7903,
      "step": 264
    },
    {
      "epoch": 0.4029652157384528,
      "grad_norm": 0.44137521284735437,
      "learning_rate": 2.4921203341075102e-05,
      "loss": 0.8021,
      "step": 265
    },
    {
      "epoch": 0.40448583919406955,
      "grad_norm": 0.4648974770143655,
      "learning_rate": 2.4903140070706876e-05,
      "loss": 0.7739,
      "step": 266
    },
    {
      "epoch": 0.4060064626496864,
      "grad_norm": 0.521001369014178,
      "learning_rate": 2.4885008835314228e-05,
      "loss": 0.7837,
      "step": 267
    },
    {
      "epoch": 0.4075270861053032,
      "grad_norm": 0.4601570641740828,
      "learning_rate": 2.4866809743438915e-05,
      "loss": 0.7891,
      "step": 268
    },
    {
      "epoch": 0.40904770956091996,
      "grad_norm": 0.42883010607831595,
      "learning_rate": 2.484854290402893e-05,
      "loss": 0.8002,
      "step": 269
    },
    {
      "epoch": 0.4105683330165368,
      "grad_norm": 0.5048746687096498,
      "learning_rate": 2.4830208426437816e-05,
      "loss": 0.8067,
      "step": 270
    },
    {
      "epoch": 0.4120889564721536,
      "grad_norm": 0.4373843602878167,
      "learning_rate": 2.4811806420424043e-05,
      "loss": 0.7661,
      "step": 271
    },
    {
      "epoch": 0.41360957992777037,
      "grad_norm": 0.4924768994796526,
      "learning_rate": 2.479333699615033e-05,
      "loss": 0.8032,
      "step": 272
    },
    {
      "epoch": 0.4151302033833872,
      "grad_norm": 0.5495367191198085,
      "learning_rate": 2.477480026418299e-05,
      "loss": 0.7823,
      "step": 273
    },
    {
      "epoch": 0.416650826839004,
      "grad_norm": 0.4439519855217945,
      "learning_rate": 2.4756196335491273e-05,
      "loss": 0.7655,
      "step": 274
    },
    {
      "epoch": 0.4181714502946208,
      "grad_norm": 0.5010590403262122,
      "learning_rate": 2.47375253214467e-05,
      "loss": 0.7547,
      "step": 275
    },
    {
      "epoch": 0.4196920737502376,
      "grad_norm": 0.4482088714543724,
      "learning_rate": 2.4718787333822395e-05,
      "loss": 0.7649,
      "step": 276
    },
    {
      "epoch": 0.4212126972058544,
      "grad_norm": 0.40410880639654234,
      "learning_rate": 2.469998248479242e-05,
      "loss": 0.7669,
      "step": 277
    },
    {
      "epoch": 0.4227333206614712,
      "grad_norm": 0.4569274893231673,
      "learning_rate": 2.468111088693109e-05,
      "loss": 0.7526,
      "step": 278
    },
    {
      "epoch": 0.424253944117088,
      "grad_norm": 0.42296277869189824,
      "learning_rate": 2.4662172653212313e-05,
      "loss": 0.7727,
      "step": 279
    },
    {
      "epoch": 0.4257745675727048,
      "grad_norm": 0.4835330143189326,
      "learning_rate": 2.464316789700891e-05,
      "loss": 0.7633,
      "step": 280
    },
    {
      "epoch": 0.4272951910283216,
      "grad_norm": 0.4287243627654868,
      "learning_rate": 2.462409673209194e-05,
      "loss": 0.7836,
      "step": 281
    },
    {
      "epoch": 0.4288158144839384,
      "grad_norm": 0.5472776086861091,
      "learning_rate": 2.4604959272630002e-05,
      "loss": 0.778,
      "step": 282
    },
    {
      "epoch": 0.4303364379395552,
      "grad_norm": 0.43700006561477783,
      "learning_rate": 2.4585755633188585e-05,
      "loss": 0.7657,
      "step": 283
    },
    {
      "epoch": 0.43185706139517205,
      "grad_norm": 0.5233246056259679,
      "learning_rate": 2.4566485928729338e-05,
      "loss": 0.7875,
      "step": 284
    },
    {
      "epoch": 0.4333776848507888,
      "grad_norm": 0.4739436376744406,
      "learning_rate": 2.454715027460942e-05,
      "loss": 0.7769,
      "step": 285
    },
    {
      "epoch": 0.43489830830640563,
      "grad_norm": 0.466587612255068,
      "learning_rate": 2.4527748786580785e-05,
      "loss": 0.7763,
      "step": 286
    },
    {
      "epoch": 0.43641893176202246,
      "grad_norm": 0.5917227864064121,
      "learning_rate": 2.4508281580789508e-05,
      "loss": 0.7643,
      "step": 287
    },
    {
      "epoch": 0.4379395552176392,
      "grad_norm": 0.47768374233206695,
      "learning_rate": 2.448874877377508e-05,
      "loss": 0.7776,
      "step": 288
    },
    {
      "epoch": 0.43946017867325604,
      "grad_norm": 0.527002493093892,
      "learning_rate": 2.4469150482469695e-05,
      "loss": 0.7796,
      "step": 289
    },
    {
      "epoch": 0.44098080212887286,
      "grad_norm": 0.4662349412142309,
      "learning_rate": 2.4449486824197585e-05,
      "loss": 0.7385,
      "step": 290
    },
    {
      "epoch": 0.4425014255844896,
      "grad_norm": 0.4732365292282727,
      "learning_rate": 2.4429757916674292e-05,
      "loss": 0.7967,
      "step": 291
    },
    {
      "epoch": 0.44402204904010645,
      "grad_norm": 0.47222270479478673,
      "learning_rate": 2.440996387800596e-05,
      "loss": 0.7682,
      "step": 292
    },
    {
      "epoch": 0.44554267249572327,
      "grad_norm": 0.47686692962034305,
      "learning_rate": 2.439010482668865e-05,
      "loss": 0.7611,
      "step": 293
    },
    {
      "epoch": 0.44706329595134003,
      "grad_norm": 0.4008859391749161,
      "learning_rate": 2.4370180881607614e-05,
      "loss": 0.7566,
      "step": 294
    },
    {
      "epoch": 0.44858391940695685,
      "grad_norm": 0.48140611283685464,
      "learning_rate": 2.4350192162036584e-05,
      "loss": 0.8217,
      "step": 295
    },
    {
      "epoch": 0.4501045428625737,
      "grad_norm": 0.4560651975700714,
      "learning_rate": 2.4330138787637062e-05,
      "loss": 0.7976,
      "step": 296
    },
    {
      "epoch": 0.45162516631819044,
      "grad_norm": 0.4034722549460792,
      "learning_rate": 2.431002087845761e-05,
      "loss": 0.7679,
      "step": 297
    },
    {
      "epoch": 0.45314578977380726,
      "grad_norm": 0.4807251922703316,
      "learning_rate": 2.4289838554933113e-05,
      "loss": 0.7984,
      "step": 298
    },
    {
      "epoch": 0.4546664132294241,
      "grad_norm": 0.4746736626277042,
      "learning_rate": 2.4269591937884083e-05,
      "loss": 0.7794,
      "step": 299
    },
    {
      "epoch": 0.45618703668504085,
      "grad_norm": 0.47543886824696496,
      "learning_rate": 2.4249281148515904e-05,
      "loss": 0.7865,
      "step": 300
    },
    {
      "epoch": 0.45770766014065767,
      "grad_norm": 0.4982520457747873,
      "learning_rate": 2.4228906308418148e-05,
      "loss": 0.7653,
      "step": 301
    },
    {
      "epoch": 0.4592282835962745,
      "grad_norm": 0.4541992280820245,
      "learning_rate": 2.42084675395638e-05,
      "loss": 0.8036,
      "step": 302
    },
    {
      "epoch": 0.46074890705189125,
      "grad_norm": 0.4933234210123189,
      "learning_rate": 2.4187964964308566e-05,
      "loss": 0.7764,
      "step": 303
    },
    {
      "epoch": 0.4622695305075081,
      "grad_norm": 0.46059558913855275,
      "learning_rate": 2.4167398705390122e-05,
      "loss": 0.7736,
      "step": 304
    },
    {
      "epoch": 0.4637901539631249,
      "grad_norm": 0.5168855999894718,
      "learning_rate": 2.414676888592739e-05,
      "loss": 0.8157,
      "step": 305
    },
    {
      "epoch": 0.46531077741874166,
      "grad_norm": 0.46112787242313547,
      "learning_rate": 2.4126075629419772e-05,
      "loss": 0.767,
      "step": 306
    },
    {
      "epoch": 0.4668314008743585,
      "grad_norm": 0.4302292629253772,
      "learning_rate": 2.4105319059746465e-05,
      "loss": 0.7775,
      "step": 307
    },
    {
      "epoch": 0.4683520243299753,
      "grad_norm": 0.4152046813472176,
      "learning_rate": 2.4084499301165655e-05,
      "loss": 0.8137,
      "step": 308
    },
    {
      "epoch": 0.46987264778559207,
      "grad_norm": 0.4876150328408346,
      "learning_rate": 2.4063616478313835e-05,
      "loss": 0.7809,
      "step": 309
    },
    {
      "epoch": 0.4713932712412089,
      "grad_norm": 0.47894882147037643,
      "learning_rate": 2.4042670716205003e-05,
      "loss": 0.8071,
      "step": 310
    },
    {
      "epoch": 0.4729138946968257,
      "grad_norm": 0.45314893137296913,
      "learning_rate": 2.4021662140229957e-05,
      "loss": 0.7734,
      "step": 311
    },
    {
      "epoch": 0.4744345181524425,
      "grad_norm": 0.4693692990401656,
      "learning_rate": 2.4000590876155523e-05,
      "loss": 0.8049,
      "step": 312
    },
    {
      "epoch": 0.4759551416080593,
      "grad_norm": 0.48467692671175333,
      "learning_rate": 2.3979457050123804e-05,
      "loss": 0.7605,
      "step": 313
    },
    {
      "epoch": 0.4774757650636761,
      "grad_norm": 0.4222969005662164,
      "learning_rate": 2.3958260788651426e-05,
      "loss": 0.7788,
      "step": 314
    },
    {
      "epoch": 0.47899638851929294,
      "grad_norm": 0.5542291365632019,
      "learning_rate": 2.3937002218628792e-05,
      "loss": 0.7928,
      "step": 315
    },
    {
      "epoch": 0.4805170119749097,
      "grad_norm": 0.392436349975769,
      "learning_rate": 2.3915681467319292e-05,
      "loss": 0.7607,
      "step": 316
    },
    {
      "epoch": 0.4820376354305265,
      "grad_norm": 0.49739152408387094,
      "learning_rate": 2.3894298662358583e-05,
      "loss": 0.7614,
      "step": 317
    },
    {
      "epoch": 0.48355825888614334,
      "grad_norm": 0.433372030563949,
      "learning_rate": 2.3872853931753787e-05,
      "loss": 0.7652,
      "step": 318
    },
    {
      "epoch": 0.4850788823417601,
      "grad_norm": 0.46102099141447744,
      "learning_rate": 2.385134740388275e-05,
      "loss": 0.774,
      "step": 319
    },
    {
      "epoch": 0.48659950579737693,
      "grad_norm": 0.4638491872936722,
      "learning_rate": 2.382977920749327e-05,
      "loss": 0.7739,
      "step": 320
    },
    {
      "epoch": 0.48812012925299375,
      "grad_norm": 0.5020784828944407,
      "learning_rate": 2.3808149471702295e-05,
      "loss": 0.7631,
      "step": 321
    },
    {
      "epoch": 0.4896407527086105,
      "grad_norm": 0.42204949679328635,
      "learning_rate": 2.3786458325995214e-05,
      "loss": 0.7823,
      "step": 322
    },
    {
      "epoch": 0.49116137616422734,
      "grad_norm": 0.5055396085814678,
      "learning_rate": 2.3764705900225004e-05,
      "loss": 0.7578,
      "step": 323
    },
    {
      "epoch": 0.49268199961984416,
      "grad_norm": 0.47833233111711587,
      "learning_rate": 2.374289232461152e-05,
      "loss": 0.7545,
      "step": 324
    },
    {
      "epoch": 0.4942026230754609,
      "grad_norm": 0.3848734118646103,
      "learning_rate": 2.372101772974068e-05,
      "loss": 0.7141,
      "step": 325
    },
    {
      "epoch": 0.49572324653107774,
      "grad_norm": 0.5367848465213906,
      "learning_rate": 2.3699082246563687e-05,
      "loss": 0.7789,
      "step": 326
    },
    {
      "epoch": 0.49724386998669456,
      "grad_norm": 0.5461740930935474,
      "learning_rate": 2.367708600639625e-05,
      "loss": 0.7457,
      "step": 327
    },
    {
      "epoch": 0.49876449344231133,
      "grad_norm": 0.4247642096240754,
      "learning_rate": 2.3655029140917794e-05,
      "loss": 0.76,
      "step": 328
    },
    {
      "epoch": 0.5002851168979282,
      "grad_norm": 0.44998842461109745,
      "learning_rate": 2.3632911782170683e-05,
      "loss": 0.7793,
      "step": 329
    },
    {
      "epoch": 0.501805740353545,
      "grad_norm": 0.47053242929737366,
      "learning_rate": 2.3610734062559418e-05,
      "loss": 0.7858,
      "step": 330
    },
    {
      "epoch": 0.5033263638091617,
      "grad_norm": 0.43924849365849383,
      "learning_rate": 2.3588496114849843e-05,
      "loss": 0.8032,
      "step": 331
    },
    {
      "epoch": 0.5048469872647786,
      "grad_norm": 0.4355576929843276,
      "learning_rate": 2.3566198072168355e-05,
      "loss": 0.7691,
      "step": 332
    },
    {
      "epoch": 0.5063676107203954,
      "grad_norm": 0.4498593288508342,
      "learning_rate": 2.354384006800112e-05,
      "loss": 0.7538,
      "step": 333
    },
    {
      "epoch": 0.5078882341760121,
      "grad_norm": 0.42469702355418737,
      "learning_rate": 2.352142223619323e-05,
      "loss": 0.7594,
      "step": 334
    },
    {
      "epoch": 0.509408857631629,
      "grad_norm": 0.5132890157597293,
      "learning_rate": 2.3498944710947973e-05,
      "loss": 0.7693,
      "step": 335
    },
    {
      "epoch": 0.5109294810872458,
      "grad_norm": 0.43927719097563,
      "learning_rate": 2.3476407626825953e-05,
      "loss": 0.7783,
      "step": 336
    },
    {
      "epoch": 0.5124501045428626,
      "grad_norm": 0.4416642559264683,
      "learning_rate": 2.3453811118744345e-05,
      "loss": 0.7683,
      "step": 337
    },
    {
      "epoch": 0.5139707279984794,
      "grad_norm": 0.44247651582101677,
      "learning_rate": 2.3431155321976045e-05,
      "loss": 0.746,
      "step": 338
    },
    {
      "epoch": 0.5154913514540962,
      "grad_norm": 0.4243771777807823,
      "learning_rate": 2.340844037214889e-05,
      "loss": 0.7735,
      "step": 339
    },
    {
      "epoch": 0.517011974909713,
      "grad_norm": 0.45520920364804773,
      "learning_rate": 2.3385666405244823e-05,
      "loss": 0.7605,
      "step": 340
    },
    {
      "epoch": 0.5185325983653298,
      "grad_norm": 0.41908969886233616,
      "learning_rate": 2.3362833557599105e-05,
      "loss": 0.7767,
      "step": 341
    },
    {
      "epoch": 0.5200532218209466,
      "grad_norm": 0.41422212342426945,
      "learning_rate": 2.333994196589947e-05,
      "loss": 0.7858,
      "step": 342
    },
    {
      "epoch": 0.5215738452765634,
      "grad_norm": 0.45155134128630714,
      "learning_rate": 2.331699176718533e-05,
      "loss": 0.7436,
      "step": 343
    },
    {
      "epoch": 0.5230944687321802,
      "grad_norm": 0.4034165723189431,
      "learning_rate": 2.3293983098846937e-05,
      "loss": 0.7615,
      "step": 344
    },
    {
      "epoch": 0.524615092187797,
      "grad_norm": 0.41147189894163816,
      "learning_rate": 2.3270916098624567e-05,
      "loss": 0.7498,
      "step": 345
    },
    {
      "epoch": 0.5261357156434138,
      "grad_norm": 0.4234882343930261,
      "learning_rate": 2.3247790904607715e-05,
      "loss": 0.7078,
      "step": 346
    },
    {
      "epoch": 0.5276563390990306,
      "grad_norm": 0.4238992343706461,
      "learning_rate": 2.322460765523423e-05,
      "loss": 0.769,
      "step": 347
    },
    {
      "epoch": 0.5291769625546474,
      "grad_norm": 0.4491430678713249,
      "learning_rate": 2.3201366489289512e-05,
      "loss": 0.7742,
      "step": 348
    },
    {
      "epoch": 0.5306975860102642,
      "grad_norm": 0.4445057795157137,
      "learning_rate": 2.317806754590568e-05,
      "loss": 0.7798,
      "step": 349
    },
    {
      "epoch": 0.532218209465881,
      "grad_norm": 0.45149251584132893,
      "learning_rate": 2.3154710964560735e-05,
      "loss": 0.7705,
      "step": 350
    },
    {
      "epoch": 0.5337388329214978,
      "grad_norm": 0.4379914224804398,
      "learning_rate": 2.313129688507772e-05,
      "loss": 0.7465,
      "step": 351
    },
    {
      "epoch": 0.5352594563771146,
      "grad_norm": 0.5240644813586705,
      "learning_rate": 2.3107825447623893e-05,
      "loss": 0.7792,
      "step": 352
    },
    {
      "epoch": 0.5367800798327315,
      "grad_norm": 0.43614968464292103,
      "learning_rate": 2.3084296792709876e-05,
      "loss": 0.8012,
      "step": 353
    },
    {
      "epoch": 0.5383007032883482,
      "grad_norm": 0.5238927538180428,
      "learning_rate": 2.3060711061188828e-05,
      "loss": 0.7121,
      "step": 354
    },
    {
      "epoch": 0.539821326743965,
      "grad_norm": 0.44343055771039847,
      "learning_rate": 2.303706839425559e-05,
      "loss": 0.7597,
      "step": 355
    },
    {
      "epoch": 0.5413419501995819,
      "grad_norm": 0.4760425163011581,
      "learning_rate": 2.301336893344584e-05,
      "loss": 0.7686,
      "step": 356
    },
    {
      "epoch": 0.5428625736551986,
      "grad_norm": 0.3864387482026591,
      "learning_rate": 2.2989612820635268e-05,
      "loss": 0.7634,
      "step": 357
    },
    {
      "epoch": 0.5443831971108154,
      "grad_norm": 0.4454248548842092,
      "learning_rate": 2.2965800198038684e-05,
      "loss": 0.7462,
      "step": 358
    },
    {
      "epoch": 0.5459038205664323,
      "grad_norm": 0.4045116377861445,
      "learning_rate": 2.2941931208209206e-05,
      "loss": 0.7538,
      "step": 359
    },
    {
      "epoch": 0.547424444022049,
      "grad_norm": 0.45148513549461083,
      "learning_rate": 2.291800599403739e-05,
      "loss": 0.7576,
      "step": 360
    },
    {
      "epoch": 0.5489450674776658,
      "grad_norm": 0.4144212534792813,
      "learning_rate": 2.289402469875037e-05,
      "loss": 0.7679,
      "step": 361
    },
    {
      "epoch": 0.5504656909332827,
      "grad_norm": 0.3949666631774562,
      "learning_rate": 2.2869987465911015e-05,
      "loss": 0.7334,
      "step": 362
    },
    {
      "epoch": 0.5519863143888994,
      "grad_norm": 0.592549458633791,
      "learning_rate": 2.284589443941706e-05,
      "loss": 0.766,
      "step": 363
    },
    {
      "epoch": 0.5535069378445162,
      "grad_norm": 0.3625351248308574,
      "learning_rate": 2.2821745763500237e-05,
      "loss": 0.7502,
      "step": 364
    },
    {
      "epoch": 0.5550275613001331,
      "grad_norm": 0.55689868686449,
      "learning_rate": 2.279754158272543e-05,
      "loss": 0.741,
      "step": 365
    },
    {
      "epoch": 0.5565481847557499,
      "grad_norm": 0.4232933895876237,
      "learning_rate": 2.2773282041989803e-05,
      "loss": 0.7203,
      "step": 366
    },
    {
      "epoch": 0.5580688082113666,
      "grad_norm": 0.47439658622550684,
      "learning_rate": 2.274896728652192e-05,
      "loss": 0.7413,
      "step": 367
    },
|
{ |
|
"epoch": 0.5595894316669835, |
|
"grad_norm": 0.4229854780360866, |
|
"learning_rate": 2.2724597461880887e-05, |
|
"loss": 0.7919, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.5611100551226003, |
|
"grad_norm": 0.449535321441458, |
|
"learning_rate": 2.2700172713955486e-05, |
|
"loss": 0.7871, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.562630678578217, |
|
"grad_norm": 0.42277942967988674, |
|
"learning_rate": 2.267569318896329e-05, |
|
"loss": 0.7492, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.5641513020338339, |
|
"grad_norm": 0.4008266318587021, |
|
"learning_rate": 2.2651159033449787e-05, |
|
"loss": 0.7599, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.5656719254894507, |
|
"grad_norm": 0.46433746216794397, |
|
"learning_rate": 2.2626570394287524e-05, |
|
"loss": 0.769, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.5671925489450674, |
|
"grad_norm": 0.4052331130445469, |
|
"learning_rate": 2.260192741867519e-05, |
|
"loss": 0.7584, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.5687131724006843, |
|
"grad_norm": 0.4069059419780127, |
|
"learning_rate": 2.2577230254136776e-05, |
|
"loss": 0.7605, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.5702337958563011, |
|
"grad_norm": 0.45966108042874787, |
|
"learning_rate": 2.2552479048520657e-05, |
|
"loss": 0.771, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.5717544193119178, |
|
"grad_norm": 0.4230352997839189, |
|
"learning_rate": 2.2527673949998737e-05, |
|
"loss": 0.7859, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.5732750427675347, |
|
"grad_norm": 0.42063368235914506, |
|
"learning_rate": 2.250281510706553e-05, |
|
"loss": 0.7962, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.5747956662231515, |
|
"grad_norm": 0.41245979398339844, |
|
"learning_rate": 2.2477902668537307e-05, |
|
"loss": 0.7692, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.5763162896787682, |
|
"grad_norm": 0.42587671502291896, |
|
"learning_rate": 2.2452936783551174e-05, |
|
"loss": 0.7659, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.5778369131343851, |
|
"grad_norm": 0.39899767427234584, |
|
"learning_rate": 2.2427917601564194e-05, |
|
"loss": 0.7576, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.5793575365900019, |
|
"grad_norm": 0.4504820473523934, |
|
"learning_rate": 2.2402845272352495e-05, |
|
"loss": 0.7657, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.5808781600456187, |
|
"grad_norm": 0.41018097177091434, |
|
"learning_rate": 2.237771994601036e-05, |
|
"loss": 0.7477, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.5823987835012355, |
|
"grad_norm": 0.38305001213351025, |
|
"learning_rate": 2.235254177294934e-05, |
|
"loss": 0.7232, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.5839194069568523, |
|
"grad_norm": 0.47095376013594553, |
|
"learning_rate": 2.232731090389736e-05, |
|
"loss": 0.7741, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.5854400304124691, |
|
"grad_norm": 0.4636124674958436, |
|
"learning_rate": 2.2302027489897786e-05, |
|
"loss": 0.7517, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.5869606538680859, |
|
"grad_norm": 0.44034211588944, |
|
"learning_rate": 2.2276691682308565e-05, |
|
"loss": 0.7418, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.5884812773237027, |
|
"grad_norm": 0.4181155110302704, |
|
"learning_rate": 2.2251303632801276e-05, |
|
"loss": 0.7565, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.5900019007793195, |
|
"grad_norm": 0.44045436172545194, |
|
"learning_rate": 2.2225863493360255e-05, |
|
"loss": 0.743, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.5915225242349363, |
|
"grad_norm": 0.3847833966152017, |
|
"learning_rate": 2.220037141628167e-05, |
|
"loss": 0.7338, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.5930431476905531, |
|
"grad_norm": 0.42280183460806886, |
|
"learning_rate": 2.2174827554172604e-05, |
|
"loss": 0.7704, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.59456377114617, |
|
"grad_norm": 0.4636797659898542, |
|
"learning_rate": 2.2149232059950155e-05, |
|
"loss": 0.8102, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.5960843946017867, |
|
"grad_norm": 0.41898988989505653, |
|
"learning_rate": 2.2123585086840513e-05, |
|
"loss": 0.7743, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.5976050180574035, |
|
"grad_norm": 0.4669187287489928, |
|
"learning_rate": 2.2097886788378042e-05, |
|
"loss": 0.7809, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.5991256415130204, |
|
"grad_norm": 0.3896079438766509, |
|
"learning_rate": 2.2072137318404363e-05, |
|
"loss": 0.7482, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.6006462649686372, |
|
"grad_norm": 0.49028722719405726, |
|
"learning_rate": 2.2046336831067438e-05, |
|
"loss": 0.7412, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.6021668884242539, |
|
"grad_norm": 0.40246044806361353, |
|
"learning_rate": 2.2020485480820634e-05, |
|
"loss": 0.736, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.6036875118798708, |
|
"grad_norm": 0.39677182146473855, |
|
"learning_rate": 2.199458342242181e-05, |
|
"loss": 0.7436, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.6052081353354876, |
|
"grad_norm": 0.42836586963405954, |
|
"learning_rate": 2.196863081093239e-05, |
|
"loss": 0.7649, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.6067287587911043, |
|
"grad_norm": 0.4469293995373356, |
|
"learning_rate": 2.1942627801716423e-05, |
|
"loss": 0.7442, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.6082493822467212, |
|
"grad_norm": 0.4004274410012105, |
|
"learning_rate": 2.1916574550439677e-05, |
|
"loss": 0.7694, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.609770005702338, |
|
"grad_norm": 0.4128648841658959, |
|
"learning_rate": 2.189047121306867e-05, |
|
"loss": 0.7628, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.6112906291579547, |
|
"grad_norm": 0.397595260719658, |
|
"learning_rate": 2.186431794586978e-05, |
|
"loss": 0.7335, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.6128112526135716, |
|
"grad_norm": 0.39655951051855476, |
|
"learning_rate": 2.1838114905408268e-05, |
|
"loss": 0.7488, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.6143318760691884, |
|
"grad_norm": 0.3954068854911475, |
|
"learning_rate": 2.1811862248547378e-05, |
|
"loss": 0.7732, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.6158524995248051, |
|
"grad_norm": 0.4029750154489033, |
|
"learning_rate": 2.1785560132447365e-05, |
|
"loss": 0.7635, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.617373122980422, |
|
"grad_norm": 0.4988589982990038, |
|
"learning_rate": 2.1759208714564575e-05, |
|
"loss": 0.7482, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.6188937464360388, |
|
"grad_norm": 0.3830796517420403, |
|
"learning_rate": 2.1732808152650507e-05, |
|
"loss": 0.7059, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.6204143698916555, |
|
"grad_norm": 0.4928324719853511, |
|
"learning_rate": 2.1706358604750843e-05, |
|
"loss": 0.75, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.6219349933472724, |
|
"grad_norm": 0.41872118978417633, |
|
"learning_rate": 2.1679860229204526e-05, |
|
"loss": 0.7799, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.6234556168028892, |
|
"grad_norm": 0.44112153018658945, |
|
"learning_rate": 2.1653313184642798e-05, |
|
"loss": 0.7554, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.624976240258506, |
|
"grad_norm": 0.44207424430007414, |
|
"learning_rate": 2.162671762998826e-05, |
|
"loss": 0.7303, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.6264968637141228, |
|
"grad_norm": 0.4287487064749124, |
|
"learning_rate": 2.1600073724453913e-05, |
|
"loss": 0.7311, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.6280174871697396, |
|
"grad_norm": 0.4795008086486292, |
|
"learning_rate": 2.157338162754221e-05, |
|
"loss": 0.7538, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.6295381106253564, |
|
"grad_norm": 0.4116045721062874, |
|
"learning_rate": 2.1546641499044105e-05, |
|
"loss": 0.7526, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.6310587340809732, |
|
"grad_norm": 0.4601608034979367, |
|
"learning_rate": 2.1519853499038075e-05, |
|
"loss": 0.7891, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.63257935753659, |
|
"grad_norm": 0.43000044965092804, |
|
"learning_rate": 2.14930177878892e-05, |
|
"loss": 0.7347, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.6340999809922068, |
|
"grad_norm": 0.43136823831520454, |
|
"learning_rate": 2.1466134526248154e-05, |
|
"loss": 0.778, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.6356206044478236, |
|
"grad_norm": 0.40082789389914825, |
|
"learning_rate": 2.1439203875050295e-05, |
|
"loss": 0.7435, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.6371412279034404, |
|
"grad_norm": 0.46281961581743375, |
|
"learning_rate": 2.1412225995514658e-05, |
|
"loss": 0.7644, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.6386618513590572, |
|
"grad_norm": 0.39180679623310466, |
|
"learning_rate": 2.1385201049143025e-05, |
|
"loss": 0.7401, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.640182474814674, |
|
"grad_norm": 0.44410256733578407, |
|
"learning_rate": 2.135812919771892e-05, |
|
"loss": 0.749, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.6417030982702908, |
|
"grad_norm": 0.4332510457584124, |
|
"learning_rate": 2.133101060330668e-05, |
|
"loss": 0.724, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.6432237217259076, |
|
"grad_norm": 0.434836178784241, |
|
"learning_rate": 2.1303845428250453e-05, |
|
"loss": 0.7905, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.6447443451815245, |
|
"grad_norm": 0.5203348960832226, |
|
"learning_rate": 2.127663383517326e-05, |
|
"loss": 0.7775, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.6462649686371412, |
|
"grad_norm": 0.4239061363149275, |
|
"learning_rate": 2.1249375986975975e-05, |
|
"loss": 0.7547, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.647785592092758, |
|
"grad_norm": 0.5531850395502146, |
|
"learning_rate": 2.1222072046836397e-05, |
|
"loss": 0.7562, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.6493062155483749, |
|
"grad_norm": 0.44407831554002236, |
|
"learning_rate": 2.119472217820824e-05, |
|
"loss": 0.7563, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.6508268390039916, |
|
"grad_norm": 0.46682309596014615, |
|
"learning_rate": 2.1167326544820175e-05, |
|
"loss": 0.757, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.6523474624596084, |
|
"grad_norm": 0.4356058035250766, |
|
"learning_rate": 2.1139885310674832e-05, |
|
"loss": 0.7598, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.6538680859152253, |
|
"grad_norm": 0.4201883171891972, |
|
"learning_rate": 2.1112398640047842e-05, |
|
"loss": 0.7347, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.655388709370842, |
|
"grad_norm": 0.42395223035483165, |
|
"learning_rate": 2.1084866697486813e-05, |
|
"loss": 0.7414, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.6569093328264588, |
|
"grad_norm": 0.39048036764414096, |
|
"learning_rate": 2.1057289647810407e-05, |
|
"loss": 0.7338, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.6584299562820757, |
|
"grad_norm": 0.44299976473368746, |
|
"learning_rate": 2.1029667656107295e-05, |
|
"loss": 0.7791, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.6599505797376924, |
|
"grad_norm": 0.500893872138014, |
|
"learning_rate": 2.100200088773519e-05, |
|
"loss": 0.7543, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.6614712031933092, |
|
"grad_norm": 0.49372215416444076, |
|
"learning_rate": 2.0974289508319867e-05, |
|
"loss": 0.7676, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.6629918266489261, |
|
"grad_norm": 0.46705701804279404, |
|
"learning_rate": 2.094653368375416e-05, |
|
"loss": 0.7631, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.6645124501045429, |
|
"grad_norm": 0.5152291637276656, |
|
"learning_rate": 2.0918733580196976e-05, |
|
"loss": 0.778, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.6660330735601596, |
|
"grad_norm": 0.4597895523784997, |
|
"learning_rate": 2.089088936407228e-05, |
|
"loss": 0.7411, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.6675536970157765, |
|
"grad_norm": 0.43780405947061785, |
|
"learning_rate": 2.0863001202068135e-05, |
|
"loss": 0.7234, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.6690743204713933, |
|
"grad_norm": 0.48252738543172174, |
|
"learning_rate": 2.083506926113568e-05, |
|
"loss": 0.7537, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.67059494392701, |
|
"grad_norm": 0.386483976594262, |
|
"learning_rate": 2.080709370848812e-05, |
|
"loss": 0.7385, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.6721155673826269, |
|
"grad_norm": 0.4545211695758658, |
|
"learning_rate": 2.077907471159975e-05, |
|
"loss": 0.7669, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.6736361908382437, |
|
"grad_norm": 0.4105864152211242, |
|
"learning_rate": 2.075101243820494e-05, |
|
"loss": 0.748, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.6751568142938604, |
|
"grad_norm": 0.4504033111976225, |
|
"learning_rate": 2.0722907056297144e-05, |
|
"loss": 0.7364, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.6766774377494773, |
|
"grad_norm": 0.43800556294426285, |
|
"learning_rate": 2.0694758734127862e-05, |
|
"loss": 0.7816, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.6781980612050941, |
|
"grad_norm": 0.4525088971872918, |
|
"learning_rate": 2.066656764020567e-05, |
|
"loss": 0.7795, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.6797186846607108, |
|
"grad_norm": 0.48236560996254046, |
|
"learning_rate": 2.0638333943295188e-05, |
|
"loss": 0.8176, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.6812393081163277, |
|
"grad_norm": 0.41583212690425186, |
|
"learning_rate": 2.0610057812416076e-05, |
|
"loss": 0.7233, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.6827599315719445, |
|
"grad_norm": 0.45116571513530795, |
|
"learning_rate": 2.0581739416842023e-05, |
|
"loss": 0.7537, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.6842805550275614, |
|
"grad_norm": 0.3988713943698696, |
|
"learning_rate": 2.0553378926099744e-05, |
|
"loss": 0.7815, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.6858011784831781, |
|
"grad_norm": 0.4119407206884566, |
|
"learning_rate": 2.0524976509967945e-05, |
|
"loss": 0.7884, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.6873218019387949, |
|
"grad_norm": 0.4826174722158836, |
|
"learning_rate": 2.0496532338476314e-05, |
|
"loss": 0.7514, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.6888424253944118, |
|
"grad_norm": 0.4442915093907275, |
|
"learning_rate": 2.046804658190451e-05, |
|
"loss": 0.7724, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.6903630488500285, |
|
"grad_norm": 0.4078747197054875, |
|
"learning_rate": 2.0439519410781135e-05, |
|
"loss": 0.7422, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.6918836723056453, |
|
"grad_norm": 0.40352123173925075, |
|
"learning_rate": 2.0410950995882723e-05, |
|
"loss": 0.7473, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.6934042957612622, |
|
"grad_norm": 0.37505514036015986, |
|
"learning_rate": 2.0382341508232713e-05, |
|
"loss": 0.7315, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.6949249192168789, |
|
"grad_norm": 0.4001378862906619, |
|
"learning_rate": 2.0353691119100415e-05, |
|
"loss": 0.7503, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.6964455426724957, |
|
"grad_norm": 0.4227756951676847, |
|
"learning_rate": 2.0325e-05, |
|
"loss": 0.7604, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.6979661661281126, |
|
"grad_norm": 0.3771305192233914, |
|
"learning_rate": 2.029626832268947e-05, |
|
"loss": 0.7336, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.6994867895837293, |
|
"grad_norm": 0.36251000512548065, |
|
"learning_rate": 2.0267496259169623e-05, |
|
"loss": 0.7139, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.7010074130393461, |
|
"grad_norm": 0.3633369727991047, |
|
"learning_rate": 2.023868398168302e-05, |
|
"loss": 0.7254, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.702528036494963, |
|
"grad_norm": 0.3722032410661908, |
|
"learning_rate": 2.0209831662712973e-05, |
|
"loss": 0.7291, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.7040486599505797, |
|
"grad_norm": 0.39237895415677215, |
|
"learning_rate": 2.0180939474982496e-05, |
|
"loss": 0.7807, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.7055692834061965, |
|
"grad_norm": 0.38128737748687397, |
|
"learning_rate": 2.015200759145327e-05, |
|
"loss": 0.7525, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.7070899068618134, |
|
"grad_norm": 0.39388751320764215, |
|
"learning_rate": 2.0123036185324624e-05, |
|
"loss": 0.765, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.7086105303174302, |
|
"grad_norm": 0.4161404920966044, |
|
"learning_rate": 2.0094025430032477e-05, |
|
"loss": 0.7746, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.7101311537730469, |
|
"grad_norm": 0.38567602060145806, |
|
"learning_rate": 2.006497549924831e-05, |
|
"loss": 0.7525, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.7116517772286638, |
|
"grad_norm": 0.3878678822202346, |
|
"learning_rate": 2.0035886566878142e-05, |
|
"loss": 0.7363, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.7131724006842806, |
|
"grad_norm": 0.4189479849973627, |
|
"learning_rate": 2.0006758807061447e-05, |
|
"loss": 0.7202, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.7146930241398973, |
|
"grad_norm": 0.35897122699509787, |
|
"learning_rate": 1.997759239417016e-05, |
|
"loss": 0.711, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.7162136475955142, |
|
"grad_norm": 0.3719817122737225, |
|
"learning_rate": 1.9948387502807602e-05, |
|
"loss": 0.7329, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.717734271051131, |
|
"grad_norm": 0.38587499281174487, |
|
"learning_rate": 1.991914430780744e-05, |
|
"loss": 0.7414, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.7192548945067477, |
|
"grad_norm": 0.40031974274913, |
|
"learning_rate": 1.988986298423265e-05, |
|
"loss": 0.7578, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.7207755179623646, |
|
"grad_norm": 0.46518346734697724, |
|
"learning_rate": 1.986054370737446e-05, |
|
"loss": 0.7399, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.7222961414179814, |
|
"grad_norm": 0.34848519411774526, |
|
"learning_rate": 1.983118665275131e-05, |
|
"loss": 0.7102, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.7238167648735981, |
|
"grad_norm": 0.3891532280462324, |
|
"learning_rate": 1.9801791996107777e-05, |
|
"loss": 0.7307, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.725337388329215, |
|
"grad_norm": 0.4460610364235613, |
|
"learning_rate": 1.9772359913413563e-05, |
|
"loss": 0.7933, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.7268580117848318, |
|
"grad_norm": 0.3931330019630469, |
|
"learning_rate": 1.9742890580862405e-05, |
|
"loss": 0.7626, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.7283786352404485, |
|
"grad_norm": 0.3869696043446492, |
|
"learning_rate": 1.9713384174871036e-05, |
|
"loss": 0.7652, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.7298992586960654, |
|
"grad_norm": 0.4142673906264668, |
|
"learning_rate": 1.9683840872078135e-05, |
|
"loss": 0.7683, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.7314198821516822, |
|
"grad_norm": 0.3758360055485875, |
|
"learning_rate": 1.965426084934325e-05, |
|
"loss": 0.7352, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.732940505607299, |
|
"grad_norm": 0.3957928284148903, |
|
"learning_rate": 1.9624644283745763e-05, |
|
"loss": 0.7709, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.7344611290629158, |
|
"grad_norm": 0.4166452839891987, |
|
"learning_rate": 1.959499135258381e-05, |
|
"loss": 0.7428, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.7359817525185326, |
|
"grad_norm": 0.3930427010522597, |
|
"learning_rate": 1.9565302233373233e-05, |
|
"loss": 0.7239, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.7375023759741494, |
|
"grad_norm": 0.3601178498038616, |
|
"learning_rate": 1.9535577103846507e-05, |
|
"loss": 0.7099, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.7390229994297662, |
|
"grad_norm": 0.4753582216812286, |
|
"learning_rate": 1.950581614195169e-05, |
|
"loss": 0.749, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.740543622885383, |
|
"grad_norm": 0.3946913425771556, |
|
"learning_rate": 1.9476019525851328e-05, |
|
"loss": 0.7461, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.7420642463409998, |
|
"grad_norm": 0.408093102930642, |
|
"learning_rate": 1.9446187433921438e-05, |
|
"loss": 0.7433, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.7435848697966166, |
|
"grad_norm": 0.4409320643561924, |
|
"learning_rate": 1.941632004475039e-05, |
|
"loss": 0.7419, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.7451054932522334, |
|
"grad_norm": 0.4107498993063142, |
|
"learning_rate": 1.938641753713787e-05, |
|
"loss": 0.7663, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.7466261167078502, |
|
"grad_norm": 0.3992120160222559, |
|
"learning_rate": 1.9356480090093787e-05, |
|
"loss": 0.7549, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.748146740163467, |
|
"grad_norm": 0.4323109678553807, |
|
"learning_rate": 1.9326507882837228e-05, |
|
"loss": 0.7578, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.7496673636190838, |
|
"grad_norm": 0.4188967295796775, |
|
"learning_rate": 1.929650109479536e-05, |
|
"loss": 0.7623, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.7511879870747006, |
|
"grad_norm": 0.4016307344915274, |
|
"learning_rate": 1.9266459905602376e-05, |
|
"loss": 0.7504, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.7527086105303175, |
|
"grad_norm": 0.45431926716757953, |
|
"learning_rate": 1.9236384495098395e-05, |
|
"loss": 0.7569, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.7542292339859342, |
|
"grad_norm": 0.37461201818259915, |
|
"learning_rate": 1.9206275043328413e-05, |
|
"loss": 0.7228, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.755749857441551, |
|
"grad_norm": 0.44795894634744293, |
|
"learning_rate": 1.9176131730541206e-05, |
|
"loss": 0.7204, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.7572704808971679, |
|
"grad_norm": 0.3739926769832927, |
|
"learning_rate": 1.9145954737188265e-05, |
|
"loss": 0.7123, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.7587911043527846, |
|
"grad_norm": 0.39711092135289616, |
|
"learning_rate": 1.911574424392269e-05, |
|
"loss": 0.7532, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.7603117278084014, |
|
"grad_norm": 0.3754721725582843, |
|
"learning_rate": 1.9085500431598156e-05, |
|
"loss": 0.6882, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.7618323512640183, |
|
"grad_norm": 0.39959794535350796, |
|
"learning_rate": 1.9055223481267776e-05, |
|
"loss": 0.7479, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.763352974719635, |
|
"grad_norm": 0.35365524692767664, |
|
"learning_rate": 1.9024913574183046e-05, |
|
"loss": 0.7487, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.7648735981752518, |
|
"grad_norm": 0.4166202806824158, |
|
"learning_rate": 1.899457089179276e-05, |
|
"loss": 0.7574, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.7663942216308687, |
|
"grad_norm": 0.3915634338240503, |
|
"learning_rate": 1.896419561574193e-05, |
|
"loss": 0.7325, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.7679148450864854, |
|
"grad_norm": 0.48602884460303647, |
|
"learning_rate": 1.8933787927870668e-05, |
|
"loss": 0.7286, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.7694354685421022, |
|
"grad_norm": 0.36368946637812705, |
|
"learning_rate": 1.890334801021313e-05, |
|
"loss": 0.7562, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.7709560919977191, |
|
"grad_norm": 0.45475514908964265, |
|
"learning_rate": 1.8872876044996417e-05, |
|
"loss": 0.7414, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.7724767154533358, |
|
"grad_norm": 0.4086589491103268, |
|
"learning_rate": 1.8842372214639475e-05, |
|
"loss": 0.7661, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.7739973389089527, |
|
"grad_norm": 0.4038130105419086, |
|
"learning_rate": 1.8811836701752004e-05, |
|
"loss": 0.733, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.7755179623645695, |
|
"grad_norm": 0.4234636977161692, |
|
"learning_rate": 1.8781269689133386e-05, |
|
"loss": 0.8073, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.7770385858201863, |
|
"grad_norm": 0.395363248052582, |
|
"learning_rate": 1.8750671359771563e-05, |
|
"loss": 0.7298, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.7785592092758031, |
|
"grad_norm": 0.49129541089929685, |
|
"learning_rate": 1.8720041896841943e-05, |
|
"loss": 0.7118, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.7800798327314199, |
|
"grad_norm": 0.41442318450837484, |
|
"learning_rate": 1.868938148370634e-05, |
|
"loss": 0.7451, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.7816004561870367, |
|
"grad_norm": 0.4332314377021711, |
|
"learning_rate": 1.8658690303911834e-05, |
|
"loss": 0.7456, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.7831210796426535, |
|
"grad_norm": 0.49756006836526884, |
|
"learning_rate": 1.8627968541189676e-05, |
|
"loss": 0.7449, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.7846417030982703, |
|
"grad_norm": 0.39053028376926463, |
|
"learning_rate": 1.8597216379454236e-05, |
|
"loss": 0.7353, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.7861623265538871, |
|
"grad_norm": 0.42683269902895027, |
|
"learning_rate": 1.8566434002801828e-05, |
|
"loss": 0.73, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.787682950009504, |
|
"grad_norm": 0.4414935000692991, |
|
"learning_rate": 1.8535621595509665e-05, |
|
"loss": 0.7222, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.7892035734651207, |
|
"grad_norm": 0.36090118649581926, |
|
"learning_rate": 1.8504779342034735e-05, |
|
"loss": 0.7206, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.7907241969207375, |
|
"grad_norm": 0.44956102255887004, |
|
"learning_rate": 1.8473907427012702e-05, |
|
"loss": 0.7809, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.7922448203763544, |
|
"grad_norm": 0.400049613455352, |
|
"learning_rate": 1.8443006035256794e-05, |
|
"loss": 0.7165, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.7937654438319711, |
|
"grad_norm": 0.4096379673462462, |
|
"learning_rate": 1.8412075351756694e-05, |
|
"loss": 0.7188, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.7952860672875879, |
|
"grad_norm": 0.3813329658819688, |
|
"learning_rate": 1.8381115561677448e-05, |
|
"loss": 0.7434, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.7968066907432048, |
|
"grad_norm": 0.3883504678303308, |
|
"learning_rate": 1.835012685035834e-05, |
|
"loss": 0.7538, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.7983273141988215, |
|
"grad_norm": 0.4810224536969676, |
|
"learning_rate": 1.831910940331179e-05, |
|
"loss": 0.732, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.7998479376544383, |
|
"grad_norm": 0.4274875955939722, |
|
"learning_rate": 1.828806340622226e-05, |
|
"loss": 0.7654, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.8013685611100552, |
|
"grad_norm": 0.6167789867453826, |
|
"learning_rate": 1.8256989044945094e-05, |
|
"loss": 0.723, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.8028891845656719, |
|
"grad_norm": 0.38651635512015386, |
|
"learning_rate": 1.8225886505505468e-05, |
|
"loss": 0.7301, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.8044098080212887, |
|
"grad_norm": 0.5105848984644641, |
|
"learning_rate": 1.8194755974097218e-05, |
|
"loss": 0.7379, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.8059304314769056, |
|
"grad_norm": 0.4337444604279403, |
|
"learning_rate": 1.8163597637081783e-05, |
|
"loss": 0.7438, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.8074510549325223, |
|
"grad_norm": 0.38311547081568653, |
|
"learning_rate": 1.8132411680987025e-05, |
|
"loss": 0.7166, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.8089716783881391, |
|
"grad_norm": 0.40940875061446746, |
|
"learning_rate": 1.8101198292506176e-05, |
|
"loss": 0.7377, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.810492301843756, |
|
"grad_norm": 0.39069303091376745, |
|
"learning_rate": 1.8069957658496668e-05, |
|
"loss": 0.7362, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.8120129252993727, |
|
"grad_norm": 0.4034863997233365, |
|
"learning_rate": 1.8038689965979057e-05, |
|
"loss": 0.7845, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.8135335487549895, |
|
"grad_norm": 0.3743182233611382, |
|
"learning_rate": 1.8007395402135864e-05, |
|
"loss": 0.7249, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.8150541722106064, |
|
"grad_norm": 0.4151612507277004, |
|
"learning_rate": 1.7976074154310488e-05, |
|
"loss": 0.7501, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.8165747956662232, |
|
"grad_norm": 0.4052560608313417, |
|
"learning_rate": 1.7944726410006063e-05, |
|
"loss": 0.7397, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.8180954191218399, |
|
"grad_norm": 0.4204385753908119, |
|
"learning_rate": 1.7913352356884348e-05, |
|
"loss": 0.7269, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.8196160425774568, |
|
"grad_norm": 0.36912065852511794, |
|
"learning_rate": 1.7881952182764595e-05, |
|
"loss": 0.7216, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.8211366660330736, |
|
"grad_norm": 0.4091528689771071, |
|
"learning_rate": 1.785052607562243e-05, |
|
"loss": 0.741, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.8226572894886903, |
|
"grad_norm": 0.3712296997315177, |
|
"learning_rate": 1.7819074223588718e-05, |
|
"loss": 0.7361, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.8241779129443072, |
|
"grad_norm": 0.41163387712501454, |
|
"learning_rate": 1.7787596814948465e-05, |
|
"loss": 0.7153, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.825698536399924, |
|
"grad_norm": 0.3881722683934304, |
|
"learning_rate": 1.7756094038139645e-05, |
|
"loss": 0.7354, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.8272191598555407, |
|
"grad_norm": 0.37426692455200733, |
|
"learning_rate": 1.7724566081752113e-05, |
|
"loss": 0.7132, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.8287397833111576, |
|
"grad_norm": 0.36537784686372354, |
|
"learning_rate": 1.7693013134526457e-05, |
|
"loss": 0.6791, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.8302604067667744, |
|
"grad_norm": 0.3789388855291124, |
|
"learning_rate": 1.7661435385352875e-05, |
|
"loss": 0.7158, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.8317810302223911, |
|
"grad_norm": 0.41030137404819805, |
|
"learning_rate": 1.762983302327003e-05, |
|
"loss": 0.6648, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.833301653678008, |
|
"grad_norm": 0.3866957065876247, |
|
"learning_rate": 1.7598206237463935e-05, |
|
"loss": 0.7383, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.8348222771336248, |
|
"grad_norm": 0.42152702106156975, |
|
"learning_rate": 1.7566555217266816e-05, |
|
"loss": 0.7486, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.8363429005892415, |
|
"grad_norm": 0.3837180537567676, |
|
"learning_rate": 1.753488015215598e-05, |
|
"loss": 0.7139, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.8378635240448584, |
|
"grad_norm": 0.45359893466752715, |
|
"learning_rate": 1.750318123175267e-05, |
|
"loss": 0.7415, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.8393841475004752, |
|
"grad_norm": 0.37167124021677417, |
|
"learning_rate": 1.7471458645820944e-05, |
|
"loss": 0.7341, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.840904770956092, |
|
"grad_norm": 0.386261837626807, |
|
"learning_rate": 1.7439712584266523e-05, |
|
"loss": 0.7753, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.8424253944117088, |
|
"grad_norm": 0.3607815357285951, |
|
"learning_rate": 1.740794323713568e-05, |
|
"loss": 0.7295, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.8439460178673256, |
|
"grad_norm": 0.3805870265080064, |
|
"learning_rate": 1.7376150794614067e-05, |
|
"loss": 0.742, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.8454666413229424, |
|
"grad_norm": 0.3634857699838318, |
|
"learning_rate": 1.7344335447025617e-05, |
|
"loss": 0.7405, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.8469872647785592, |
|
"grad_norm": 0.3830792827233582, |
|
"learning_rate": 1.731249738483136e-05, |
|
"loss": 0.7593, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.848507888234176, |
|
"grad_norm": 0.37680323864124654, |
|
"learning_rate": 1.728063679862832e-05, |
|
"loss": 0.7374, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.8500285116897928, |
|
"grad_norm": 0.3812378150946191, |
|
"learning_rate": 1.724875387914836e-05, |
|
"loss": 0.7317, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.8515491351454096, |
|
"grad_norm": 0.3955303602601571, |
|
"learning_rate": 1.7216848817257034e-05, |
|
"loss": 0.7558, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.8530697586010264, |
|
"grad_norm": 0.36089368326825794, |
|
"learning_rate": 1.718492180395245e-05, |
|
"loss": 0.7124, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.8545903820566432, |
|
"grad_norm": 0.39264324951073387, |
|
"learning_rate": 1.715297303036414e-05, |
|
"loss": 0.6976, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.85611100551226, |
|
"grad_norm": 0.3583167479634998, |
|
"learning_rate": 1.712100268775188e-05, |
|
"loss": 0.7459, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.8576316289678768, |
|
"grad_norm": 0.3652537723370093, |
|
"learning_rate": 1.708901096750459e-05, |
|
"loss": 0.752, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.8591522524234936, |
|
"grad_norm": 0.35829414089305417, |
|
"learning_rate": 1.7056998061139157e-05, |
|
"loss": 0.7293, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.8606728758791105, |
|
"grad_norm": 0.41471406367802255, |
|
"learning_rate": 1.70249641602993e-05, |
|
"loss": 0.7134, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.8621934993347272, |
|
"grad_norm": 0.40497506621492396, |
|
"learning_rate": 1.6992909456754408e-05, |
|
"loss": 0.7266, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.8637141227903441, |
|
"grad_norm": 0.5118453814476593, |
|
"learning_rate": 1.6960834142398424e-05, |
|
"loss": 0.7524, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.8652347462459609, |
|
"grad_norm": 0.4027525684201806, |
|
"learning_rate": 1.6928738409248668e-05, |
|
"loss": 0.7209, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.8667553697015776, |
|
"grad_norm": 0.39420637097855954, |
|
"learning_rate": 1.689662244944469e-05, |
|
"loss": 0.7445, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.8682759931571945, |
|
"grad_norm": 0.4106148870965668, |
|
"learning_rate": 1.686448645524714e-05, |
|
"loss": 0.7655, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.8697966166128113, |
|
"grad_norm": 0.3579948523808814, |
|
"learning_rate": 1.6832330619036584e-05, |
|
"loss": 0.7165, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.871317240068428, |
|
"grad_norm": 0.3751959854555261, |
|
"learning_rate": 1.6800155133312387e-05, |
|
"loss": 0.7708, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.8728378635240449, |
|
"grad_norm": 0.39336551188405533, |
|
"learning_rate": 1.6767960190691538e-05, |
|
"loss": 0.7161, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.8743584869796617, |
|
"grad_norm": 0.38785678050667954, |
|
"learning_rate": 1.6735745983907504e-05, |
|
"loss": 0.719, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.8758791104352784, |
|
"grad_norm": 0.3696861272192392, |
|
"learning_rate": 1.6703512705809085e-05, |
|
"loss": 0.7434, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.8773997338908953, |
|
"grad_norm": 0.3807058540190109, |
|
"learning_rate": 1.667126054935923e-05, |
|
"loss": 0.7336, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.8789203573465121, |
|
"grad_norm": 0.3907235180627425, |
|
"learning_rate": 1.6638989707633927e-05, |
|
"loss": 0.723, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.8804409808021288, |
|
"grad_norm": 0.39300541368827063, |
|
"learning_rate": 1.6606700373821005e-05, |
|
"loss": 0.7313, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.8819616042577457, |
|
"grad_norm": 0.3994037757594598, |
|
"learning_rate": 1.6574392741219003e-05, |
|
"loss": 0.7366, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.8834822277133625, |
|
"grad_norm": 0.35949981685128496, |
|
"learning_rate": 1.6542067003236e-05, |
|
"loss": 0.7288, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.8850028511689793, |
|
"grad_norm": 0.3987872705670089, |
|
"learning_rate": 1.6509723353388463e-05, |
|
"loss": 0.6909, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.8865234746245961, |
|
"grad_norm": 0.3829862590465747, |
|
"learning_rate": 1.6477361985300094e-05, |
|
"loss": 0.6989, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.8880440980802129, |
|
"grad_norm": 0.39080192305445455, |
|
"learning_rate": 1.6444983092700657e-05, |
|
"loss": 0.7357, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.8895647215358297, |
|
"grad_norm": 0.39244835712820425, |
|
"learning_rate": 1.6412586869424823e-05, |
|
"loss": 0.6906, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.8910853449914465, |
|
"grad_norm": 0.38987649746021225, |
|
"learning_rate": 1.6380173509411024e-05, |
|
"loss": 0.7655, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.8926059684470633, |
|
"grad_norm": 0.3792483881544404, |
|
"learning_rate": 1.634774320670027e-05, |
|
"loss": 0.7414, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.8941265919026801, |
|
"grad_norm": 0.37786695217123767, |
|
"learning_rate": 1.6315296155435003e-05, |
|
"loss": 0.7523, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.8956472153582969, |
|
"grad_norm": 0.40039846555450576, |
|
"learning_rate": 1.6282832549857926e-05, |
|
"loss": 0.7358, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.8971678388139137, |
|
"grad_norm": 0.3928470737436745, |
|
"learning_rate": 1.6250352584310847e-05, |
|
"loss": 0.755, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.8986884622695305, |
|
"grad_norm": 0.3903264232496443, |
|
"learning_rate": 1.6217856453233505e-05, |
|
"loss": 0.7334, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.9002090857251474, |
|
"grad_norm": 0.3685085038233517, |
|
"learning_rate": 1.6185344351162427e-05, |
|
"loss": 0.7137, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.9017297091807641, |
|
"grad_norm": 0.39206546198749437, |
|
"learning_rate": 1.6152816472729734e-05, |
|
"loss": 0.7451, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.9032503326363809, |
|
"grad_norm": 0.379717976951851, |
|
"learning_rate": 1.6120273012662e-05, |
|
"loss": 0.7153, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.9047709560919978, |
|
"grad_norm": 0.38621564008551273, |
|
"learning_rate": 1.608771416577908e-05, |
|
"loss": 0.7559, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.9062915795476145, |
|
"grad_norm": 0.4060366023992974, |
|
"learning_rate": 1.6055140126992933e-05, |
|
"loss": 0.7386, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.9078122030032313, |
|
"grad_norm": 0.35024041522069665, |
|
"learning_rate": 1.6022551091306466e-05, |
|
"loss": 0.7248, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.9093328264588482, |
|
"grad_norm": 0.4429630612789365, |
|
"learning_rate": 1.5989947253812372e-05, |
|
"loss": 0.7344, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.9108534499144649, |
|
"grad_norm": 0.35160112786541303, |
|
"learning_rate": 1.5957328809691947e-05, |
|
"loss": 0.7136, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.9123740733700817, |
|
"grad_norm": 0.3607594118044688, |
|
"learning_rate": 1.592469595421393e-05, |
|
"loss": 0.7152, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.9138946968256986, |
|
"grad_norm": 0.3888604747890697, |
|
"learning_rate": 1.589204888273333e-05, |
|
"loss": 0.7214, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.9154153202813153, |
|
"grad_norm": 0.36939791749863715, |
|
"learning_rate": 1.585938779069027e-05, |
|
"loss": 0.724, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.9169359437369321, |
|
"grad_norm": 0.3492211353535467, |
|
"learning_rate": 1.5826712873608794e-05, |
|
"loss": 0.7059, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.918456567192549, |
|
"grad_norm": 0.3485676743224654, |
|
"learning_rate": 1.5794024327095722e-05, |
|
"loss": 0.6882, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.9199771906481657, |
|
"grad_norm": 0.34637043053576727, |
|
"learning_rate": 1.5761322346839444e-05, |
|
"loss": 0.7329, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.9214978141037825, |
|
"grad_norm": 0.39153636395112856, |
|
"learning_rate": 1.5728607128608797e-05, |
|
"loss": 0.7039, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.9230184375593994, |
|
"grad_norm": 0.36173214969406103, |
|
"learning_rate": 1.5695878868251847e-05, |
|
"loss": 0.7238, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.9245390610150162, |
|
"grad_norm": 0.3761817399411723, |
|
"learning_rate": 1.5663137761694748e-05, |
|
"loss": 0.7287, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.9260596844706329, |
|
"grad_norm": 0.35432511798173993, |
|
"learning_rate": 1.5630384004940545e-05, |
|
"loss": 0.7169, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.9275803079262498, |
|
"grad_norm": 0.37885488266265444, |
|
"learning_rate": 1.559761779406803e-05, |
|
"loss": 0.7456, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.9291009313818666, |
|
"grad_norm": 0.3834340488104107, |
|
"learning_rate": 1.5564839325230532e-05, |
|
"loss": 0.7145, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.9306215548374833, |
|
"grad_norm": 0.36202133079807264, |
|
"learning_rate": 1.5532048794654782e-05, |
|
"loss": 0.7408, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.9321421782931002, |
|
"grad_norm": 0.39335695813338395, |
|
"learning_rate": 1.549924639863969e-05, |
|
"loss": 0.7403, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.933662801748717, |
|
"grad_norm": 0.3933661187801479, |
|
"learning_rate": 1.546643233355523e-05, |
|
"loss": 0.7082, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.9351834252043337, |
|
"grad_norm": 0.37654144207241497, |
|
"learning_rate": 1.5433606795841207e-05, |
|
"loss": 0.7435, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.9367040486599506, |
|
"grad_norm": 0.44483184731571473, |
|
"learning_rate": 1.5400769982006128e-05, |
|
"loss": 0.7156, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.9382246721155674, |
|
"grad_norm": 0.42682697285522647, |
|
"learning_rate": 1.536792208862598e-05, |
|
"loss": 0.696, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.9397452955711841, |
|
"grad_norm": 0.40979396640326593, |
|
"learning_rate": 1.5335063312343095e-05, |
|
"loss": 0.727, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.941265919026801, |
|
"grad_norm": 0.3960793962289382, |
|
"learning_rate": 1.530219384986495e-05, |
|
"loss": 0.7032, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.9427865424824178, |
|
"grad_norm": 0.3857008168238225, |
|
"learning_rate": 1.5269313897962994e-05, |
|
"loss": 0.7502, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.9443071659380345, |
|
"grad_norm": 0.3982600098061033, |
|
"learning_rate": 1.5236423653471463e-05, |
|
"loss": 0.7666, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.9458277893936514, |
|
"grad_norm": 0.41685987364222565, |
|
"learning_rate": 1.5203523313286227e-05, |
|
"loss": 0.7036, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.9473484128492682, |
|
"grad_norm": 0.3959161081756824, |
|
"learning_rate": 1.5170613074363573e-05, |
|
"loss": 0.7311, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.948869036304885, |
|
"grad_norm": 0.36920330872863855, |
|
"learning_rate": 1.5137693133719065e-05, |
|
"loss": 0.7039, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.9503896597605018, |
|
"grad_norm": 0.39113501294384384, |
|
"learning_rate": 1.5104763688426331e-05, |
|
"loss": 0.7506, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.9519102832161186, |
|
"grad_norm": 0.39144353205492227, |
|
"learning_rate": 1.5071824935615908e-05, |
|
"loss": 0.6886, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.9534309066717354, |
|
"grad_norm": 0.36594301273514485, |
|
"learning_rate": 1.5038877072474041e-05, |
|
"loss": 0.7444, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.9549515301273522, |
|
"grad_norm": 0.3954444253170849, |
|
"learning_rate": 1.500592029624153e-05, |
|
"loss": 0.7147, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.956472153582969, |
|
"grad_norm": 0.3773327611421358, |
|
"learning_rate": 1.4972954804212516e-05, |
|
"loss": 0.7453, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.9579927770385859, |
|
"grad_norm": 0.41652271596518264, |
|
"learning_rate": 1.4939980793733329e-05, |
|
"loss": 0.7437, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.9595134004942026, |
|
"grad_norm": 0.4094773012771971, |
|
"learning_rate": 1.4906998462201288e-05, |
|
"loss": 0.7654, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.9610340239498194, |
|
"grad_norm": 0.3944195978868427, |
|
"learning_rate": 1.4874008007063523e-05, |
|
"loss": 0.7452, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.9625546474054363, |
|
"grad_norm": 0.3707638628881551, |
|
"learning_rate": 1.48410096258158e-05, |
|
"loss": 0.7042, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.964075270861053, |
|
"grad_norm": 0.3755039123645331, |
|
"learning_rate": 1.4808003516001344e-05, |
|
"loss": 0.7177, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.9655958943166698, |
|
"grad_norm": 0.3723560524603749, |
|
"learning_rate": 1.4774989875209624e-05, |
|
"loss": 0.7553, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.9671165177722867, |
|
"grad_norm": 0.3651623249941728, |
|
"learning_rate": 1.474196890107521e-05, |
|
"loss": 0.7404, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.9686371412279035, |
|
"grad_norm": 0.37966360420328005, |
|
"learning_rate": 1.4708940791276568e-05, |
|
"loss": 0.7159, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.9701577646835202, |
|
"grad_norm": 0.36168885071485135, |
|
"learning_rate": 1.4675905743534884e-05, |
|
"loss": 0.7541, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.9716783881391371, |
|
"grad_norm": 0.36095700915855466, |
|
"learning_rate": 1.464286395561287e-05, |
|
"loss": 0.7092, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.9731990115947539, |
|
"grad_norm": 0.43805492394151224, |
|
"learning_rate": 1.46098156253136e-05, |
|
"loss": 0.7362, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.9747196350503706, |
|
"grad_norm": 0.36203353195893573, |
|
"learning_rate": 1.4576760950479295e-05, |
|
"loss": 0.7106, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.9762402585059875, |
|
"grad_norm": 0.40877067220155616, |
|
"learning_rate": 1.454370012899018e-05, |
|
"loss": 0.7026, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.9777608819616043, |
|
"grad_norm": 0.40374065798245223, |
|
"learning_rate": 1.4510633358763258e-05, |
|
"loss": 0.7182, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.979281505417221, |
|
"grad_norm": 0.3659907217224348, |
|
"learning_rate": 1.4477560837751156e-05, |
|
"loss": 0.713, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.9808021288728379, |
|
"grad_norm": 0.3622601843846982, |
|
"learning_rate": 1.4444482763940919e-05, |
|
"loss": 0.7327, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.9823227523284547, |
|
"grad_norm": 0.3885583144262136, |
|
"learning_rate": 1.441139933535284e-05, |
|
"loss": 0.6978, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.9838433757840714, |
|
"grad_norm": 0.37410385831599374, |
|
"learning_rate": 1.437831075003926e-05, |
|
"loss": 0.724, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.9853639992396883, |
|
"grad_norm": 0.3696274971269831, |
|
"learning_rate": 1.4345217206083407e-05, |
|
"loss": 0.6965, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.9868846226953051, |
|
"grad_norm": 0.41889852029503616, |
|
"learning_rate": 1.4312118901598172e-05, |
|
"loss": 0.7678, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.9884052461509218, |
|
"grad_norm": 0.3429687194640586, |
|
"learning_rate": 1.4279016034724961e-05, |
|
"loss": 0.7047, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.9899258696065387, |
|
"grad_norm": 0.3736752188164251, |
|
"learning_rate": 1.4245908803632486e-05, |
|
"loss": 0.7362, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.9914464930621555, |
|
"grad_norm": 0.38265917497289204, |
|
"learning_rate": 1.4212797406515585e-05, |
|
"loss": 0.7225, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.9929671165177723, |
|
"grad_norm": 0.37332104277853684, |
|
"learning_rate": 1.417968204159404e-05, |
|
"loss": 0.7312, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.9944877399733891, |
|
"grad_norm": 0.42232449088131074, |
|
"learning_rate": 1.4146562907111377e-05, |
|
"loss": 0.7095, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.9960083634290059, |
|
"grad_norm": 0.3868095376144605, |
|
"learning_rate": 1.4113440201333701e-05, |
|
"loss": 0.6992, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.9975289868846227, |
|
"grad_norm": 0.42385907348580026, |
|
"learning_rate": 1.4080314122548488e-05, |
|
"loss": 0.7347, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.9990496103402395, |
|
"grad_norm": 0.41348925205041664, |
|
"learning_rate": 1.4047184869063401e-05, |
|
"loss": 0.7441, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.0005702337958564, |
|
"grad_norm": 0.35648966659894604, |
|
"learning_rate": 1.4014052639205127e-05, |
|
"loss": 0.6707, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.002090857251473, |
|
"grad_norm": 0.34244915908174534, |
|
"learning_rate": 1.3980917631318145e-05, |
|
"loss": 0.7027, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.00361148070709, |
|
"grad_norm": 0.3670346028136332, |
|
"learning_rate": 1.394778004376359e-05, |
|
"loss": 0.7363, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.0051321041627068, |
|
"grad_norm": 0.391603629750881, |
|
"learning_rate": 1.3914640074918023e-05, |
|
"loss": 0.7351, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.0066527276183235, |
|
"grad_norm": 0.3716653098931945, |
|
"learning_rate": 1.3881497923172269e-05, |
|
"loss": 0.7441, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.0081733510739403, |
|
"grad_norm": 0.3755372180582686, |
|
"learning_rate": 1.3848353786930213e-05, |
|
"loss": 0.6965, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.0096939745295572, |
|
"grad_norm": 0.381844726209517, |
|
"learning_rate": 1.3815207864607634e-05, |
|
"loss": 0.7302, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.0112145979851739, |
|
"grad_norm": 0.36415027113970666, |
|
"learning_rate": 1.3782060354630988e-05, |
|
"loss": 0.6935, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.0127352214407908, |
|
"grad_norm": 0.3814827703352279, |
|
"learning_rate": 1.3748911455436241e-05, |
|
"loss": 0.7288, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.0142558448964076, |
|
"grad_norm": 0.3695679755631074, |
|
"learning_rate": 1.371576136546768e-05, |
|
"loss": 0.7069, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.0157764683520243, |
|
"grad_norm": 0.3692625124863184, |
|
"learning_rate": 1.3682610283176713e-05, |
|
"loss": 0.7308, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.0172970918076412, |
|
"grad_norm": 0.3647179500902266, |
|
"learning_rate": 1.3649458407020692e-05, |
|
"loss": 0.724, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.0003801558639043, |
|
"grad_norm": 0.39033436102991464, |
|
"learning_rate": 1.3616305935461726e-05, |
|
"loss": 0.6831, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.001900779319521, |
|
"grad_norm": 0.6228269584548538, |
|
"learning_rate": 1.358315306696548e-05, |
|
"loss": 0.5697, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.0034214027751378, |
|
"grad_norm": 0.4482616727348143, |
|
"learning_rate": 1.355e-05, |
|
"loss": 0.5192, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.0049420262307547, |
|
"grad_norm": 0.5196584857248853, |
|
"learning_rate": 1.3516846933034525e-05, |
|
"loss": 0.5425, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.0064626496863713, |
|
"grad_norm": 0.5838517226989333, |
|
"learning_rate": 1.3483694064538279e-05, |
|
"loss": 0.5638, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.0079832731419882, |
|
"grad_norm": 0.4333646030218863, |
|
"learning_rate": 1.3450541592979308e-05, |
|
"loss": 0.5361, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.009503896597605, |
|
"grad_norm": 0.46529778581028425, |
|
"learning_rate": 1.341738971682329e-05, |
|
"loss": 0.5338, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.0110245200532217, |
|
"grad_norm": 0.50417271156346, |
|
"learning_rate": 1.3384238634532324e-05, |
|
"loss": 0.5341, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.0125451435088386, |
|
"grad_norm": 0.4846246525284431, |
|
"learning_rate": 1.3351088544563762e-05, |
|
"loss": 0.5454, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.0140657669644555, |
|
"grad_norm": 0.43151729793082855, |
|
"learning_rate": 1.3317939645369014e-05, |
|
"loss": 0.5149, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.0155863904200721, |
|
"grad_norm": 0.45346621189906927, |
|
"learning_rate": 1.3284792135392369e-05, |
|
"loss": 0.5241, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.017107013875689, |
|
"grad_norm": 0.4529580634485115, |
|
"learning_rate": 1.3251646213069789e-05, |
|
"loss": 0.5252, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.018627637331306, |
|
"grad_norm": 0.4309687856541669, |
|
"learning_rate": 1.3218502076827738e-05, |
|
"loss": 0.5041, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.0201482607869226, |
|
"grad_norm": 0.40932019500719957, |
|
"learning_rate": 1.3185359925081977e-05, |
|
"loss": 0.5131, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.0216688842425394, |
|
"grad_norm": 0.43569657302929155, |
|
"learning_rate": 1.3152219956236414e-05, |
|
"loss": 0.5058, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.0231895076981563, |
|
"grad_norm": 0.41224009605600054, |
|
"learning_rate": 1.3119082368681858e-05, |
|
"loss": 0.5102, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.024710131153773, |
|
"grad_norm": 0.4633671152656241, |
|
"learning_rate": 1.3085947360794881e-05, |
|
"loss": 0.5538, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.0262307546093898, |
|
"grad_norm": 0.3981390104088507, |
|
"learning_rate": 1.3052815130936598e-05, |
|
"loss": 0.5053, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.0277513780650067, |
|
"grad_norm": 0.42462762773917034, |
|
"learning_rate": 1.3019685877451516e-05, |
|
"loss": 0.5308, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.0292720015206234, |
|
"grad_norm": 0.3928387548640261, |
|
"learning_rate": 1.29865597986663e-05, |
|
"loss": 0.4768, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.0307926249762402, |
|
"grad_norm": 0.4320276733077232, |
|
"learning_rate": 1.2953437092888625e-05, |
|
"loss": 0.5221, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.0323132484318571, |
|
"grad_norm": 0.4334542028887193, |
|
"learning_rate": 1.2920317958405962e-05, |
|
"loss": 0.5129, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.0338338718874738, |
|
"grad_norm": 0.40146788598973115, |
|
"learning_rate": 1.2887202593484417e-05, |
|
"loss": 0.5052, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.0353544953430907, |
|
"grad_norm": 0.41287405977658365, |
|
"learning_rate": 1.2854091196367518e-05, |
|
"loss": 0.5425, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.0368751187987075, |
|
"grad_norm": 0.3708390977190173, |
|
"learning_rate": 1.2820983965275043e-05, |
|
"loss": 0.5238, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.0383957422543242, |
|
"grad_norm": 0.35512557723314947, |
|
"learning_rate": 1.278788109840183e-05, |
|
"loss": 0.5096, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.039916365709941, |
|
"grad_norm": 0.41469287495730844, |
|
"learning_rate": 1.2754782793916598e-05, |
|
"loss": 0.5159, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.041436989165558, |
|
"grad_norm": 0.39113759999764336, |
|
"learning_rate": 1.2721689249960743e-05, |
|
"loss": 0.5184, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.0429576126211746, |
|
"grad_norm": 0.41576137745186253, |
|
"learning_rate": 1.2688600664647168e-05, |
|
"loss": 0.5251, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.0444782360767915, |
|
"grad_norm": 0.37659361938427355, |
|
"learning_rate": 1.2655517236059083e-05, |
|
"loss": 0.511, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.0459988595324083, |
|
"grad_norm": 0.36191609676818887, |
|
"learning_rate": 1.2622439162248846e-05, |
|
"loss": 0.5142, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.047519482988025, |
|
"grad_norm": 0.3501769260644456, |
|
"learning_rate": 1.2589366641236745e-05, |
|
"loss": 0.5086, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.0490401064436419, |
|
"grad_norm": 0.374193201836417, |
|
"learning_rate": 1.2556299871009825e-05, |
|
"loss": 0.5251, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.0505607298992587, |
|
"grad_norm": 0.40667780389565045, |
|
"learning_rate": 1.2523239049520706e-05, |
|
"loss": 0.5133, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.0520813533548754, |
|
"grad_norm": 0.3484682044458917, |
|
"learning_rate": 1.2490184374686406e-05, |
|
"loss": 0.5078, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.0536019768104923, |
|
"grad_norm": 0.3406587720902772, |
|
"learning_rate": 1.2457136044387133e-05, |
|
"loss": 0.5107, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.0551226002661092, |
|
"grad_norm": 0.35983619595762045, |
|
"learning_rate": 1.242409425646512e-05, |
|
"loss": 0.5349, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.0566432237217258, |
|
"grad_norm": 0.3722826497059545, |
|
"learning_rate": 1.2391059208723433e-05, |
|
"loss": 0.5118, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.0581638471773427, |
|
"grad_norm": 0.367794414326388, |
|
"learning_rate": 1.2358031098924792e-05, |
|
"loss": 0.5012, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.0596844706329596, |
|
"grad_norm": 0.379706737726683, |
|
"learning_rate": 1.232501012479038e-05, |
|
"loss": 0.5234, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 1.0612050940885762, |
|
"grad_norm": 0.3808569060578708, |
|
"learning_rate": 1.2291996483998664e-05, |
|
"loss": 0.4976, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.062725717544193, |
|
"grad_norm": 0.4052638293016256, |
|
"learning_rate": 1.2258990374184197e-05, |
|
"loss": 0.5115, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 1.06424634099981, |
|
"grad_norm": 0.3884487585023922, |
|
"learning_rate": 1.222599199293648e-05, |
|
"loss": 0.5469, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.0657669644554266, |
|
"grad_norm": 0.3523224705049633, |
|
"learning_rate": 1.2193001537798717e-05, |
|
"loss": 0.5031, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 1.0672875879110435, |
|
"grad_norm": 0.4035860752243172, |
|
"learning_rate": 1.2160019206266676e-05, |
|
"loss": 0.5036, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.0688082113666604, |
|
"grad_norm": 0.35467473723128, |
|
"learning_rate": 1.2127045195787486e-05, |
|
"loss": 0.5182, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.070328834822277, |
|
"grad_norm": 0.41329760884730493, |
|
"learning_rate": 1.2094079703758474e-05, |
|
"loss": 0.5212, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.071849458277894, |
|
"grad_norm": 0.36806047789304813, |
|
"learning_rate": 1.2061122927525962e-05, |
|
"loss": 0.5148, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 1.0733700817335108, |
|
"grad_norm": 0.4473641673792849, |
|
"learning_rate": 1.20281750643841e-05, |
|
"loss": 0.5217, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.0748907051891274, |
|
"grad_norm": 0.35449349576414274, |
|
"learning_rate": 1.1995236311573668e-05, |
|
"loss": 0.5057, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 1.0764113286447443, |
|
"grad_norm": 0.37802608508785335, |
|
"learning_rate": 1.1962306866280938e-05, |
|
"loss": 0.5127, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.0779319521003612, |
|
"grad_norm": 0.38606300562371665, |
|
"learning_rate": 1.192938692563643e-05, |
|
"loss": 0.5225, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 1.0794525755559778, |
|
"grad_norm": 0.3548827181233496, |
|
"learning_rate": 1.1896476686713781e-05, |
|
"loss": 0.5097, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.0809731990115947, |
|
"grad_norm": 0.38704197291443254, |
|
"learning_rate": 1.1863576346528536e-05, |
|
"loss": 0.5147, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 1.0824938224672116, |
|
"grad_norm": 0.3788637749591371, |
|
"learning_rate": 1.1830686102037011e-05, |
|
"loss": 0.526, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.0840144459228283, |
|
"grad_norm": 0.36699155176089776, |
|
"learning_rate": 1.1797806150135052e-05, |
|
"loss": 0.5074, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.0855350693784451, |
|
"grad_norm": 0.398207632535489, |
|
"learning_rate": 1.1764936687656908e-05, |
|
"loss": 0.5263, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 1.087055692834062, |
|
"grad_norm": 0.34386679094163725, |
|
"learning_rate": 1.1732077911374022e-05, |
|
"loss": 0.5292, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 1.0885763162896787, |
|
"grad_norm": 0.3898697838147865, |
|
"learning_rate": 1.1699230017993875e-05, |
|
"loss": 0.5157, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 1.0900969397452955, |
|
"grad_norm": 0.35036102017636284, |
|
"learning_rate": 1.1666393204158793e-05, |
|
"loss": 0.5103, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 1.0916175632009124, |
|
"grad_norm": 0.3766316653297432, |
|
"learning_rate": 1.1633567666444775e-05, |
|
"loss": 0.5355, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.093138186656529, |
|
"grad_norm": 0.3395395584528322, |
|
"learning_rate": 1.1600753601360308e-05, |
|
"loss": 0.4922, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 1.094658810112146, |
|
"grad_norm": 0.36396997728904185, |
|
"learning_rate": 1.1567951205345224e-05, |
|
"loss": 0.5219, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 1.0961794335677628, |
|
"grad_norm": 0.3565103395663914, |
|
"learning_rate": 1.153516067476947e-05, |
|
"loss": 0.5089, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 1.0977000570233795, |
|
"grad_norm": 0.3671854484970251, |
|
"learning_rate": 1.1502382205931974e-05, |
|
"loss": 0.5259, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 1.0992206804789963, |
|
"grad_norm": 0.35699046104294285, |
|
"learning_rate": 1.1469615995059455e-05, |
|
"loss": 0.5199, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.1007413039346132, |
|
"grad_norm": 0.35814749552055475, |
|
"learning_rate": 1.1436862238305256e-05, |
|
"loss": 0.4951, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 1.1022619273902299, |
|
"grad_norm": 0.37766411014866846, |
|
"learning_rate": 1.1404121131748156e-05, |
|
"loss": 0.5133, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 1.1037825508458468, |
|
"grad_norm": 0.38876585742040404, |
|
"learning_rate": 1.1371392871391207e-05, |
|
"loss": 0.5358, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.1053031743014636, |
|
"grad_norm": 0.4211502488863111, |
|
"learning_rate": 1.1338677653160557e-05, |
|
"loss": 0.5083, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 1.1068237977570803, |
|
"grad_norm": 0.3864747484032165, |
|
"learning_rate": 1.1305975672904284e-05, |
|
"loss": 0.5258, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.1083444212126972, |
|
"grad_norm": 0.39083288027419083, |
|
"learning_rate": 1.1273287126391206e-05, |
|
"loss": 0.4843, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 1.109865044668314, |
|
"grad_norm": 0.3910337272002525, |
|
"learning_rate": 1.1240612209309734e-05, |
|
"loss": 0.5479, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 1.111385668123931, |
|
"grad_norm": 0.37980182130941675, |
|
"learning_rate": 1.1207951117266668e-05, |
|
"loss": 0.4885, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 1.1129062915795476, |
|
"grad_norm": 0.3697135253054006, |
|
"learning_rate": 1.1175304045786073e-05, |
|
"loss": 0.523, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 1.1144269150351644, |
|
"grad_norm": 0.3772904905798372, |
|
"learning_rate": 1.1142671190308056e-05, |
|
"loss": 0.5237, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.1159475384907813, |
|
"grad_norm": 0.4056017049635124, |
|
"learning_rate": 1.1110052746187631e-05, |
|
"loss": 0.5137, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 1.117468161946398, |
|
"grad_norm": 0.35112819485311203, |
|
"learning_rate": 1.1077448908693534e-05, |
|
"loss": 0.5038, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 1.1189887854020149, |
|
"grad_norm": 0.38291495750095184, |
|
"learning_rate": 1.104485987300707e-05, |
|
"loss": 0.5084, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.1205094088576317, |
|
"grad_norm": 0.3922969549725083, |
|
"learning_rate": 1.1012285834220924e-05, |
|
"loss": 0.5092, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 1.1220300323132484, |
|
"grad_norm": 0.40549020760259696, |
|
"learning_rate": 1.0979726987338002e-05, |
|
"loss": 0.5204, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.1235506557688653, |
|
"grad_norm": 0.40395038468147576, |
|
"learning_rate": 1.0947183527270267e-05, |
|
"loss": 0.5227, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 1.1250712792244821, |
|
"grad_norm": 0.41236903215126236, |
|
"learning_rate": 1.0914655648837576e-05, |
|
"loss": 0.5319, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 1.1265919026800988, |
|
"grad_norm": 0.3677643398357805, |
|
"learning_rate": 1.0882143546766496e-05, |
|
"loss": 0.5038, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 1.1281125261357157, |
|
"grad_norm": 0.3828445757904712, |
|
"learning_rate": 1.084964741568916e-05, |
|
"loss": 0.5194, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 1.1296331495913325, |
|
"grad_norm": 0.39209056252387725, |
|
"learning_rate": 1.0817167450142074e-05, |
|
"loss": 0.5013, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.1311537730469492, |
|
"grad_norm": 0.3680446404406516, |
|
"learning_rate": 1.0784703844564998e-05, |
|
"loss": 0.5231, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 1.132674396502566, |
|
"grad_norm": 0.3703842374744353, |
|
"learning_rate": 1.0752256793299732e-05, |
|
"loss": 0.5153, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 1.134195019958183, |
|
"grad_norm": 0.37037413568566663, |
|
"learning_rate": 1.071982649058898e-05, |
|
"loss": 0.5124, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 1.1357156434137996, |
|
"grad_norm": 0.34295639916690035, |
|
"learning_rate": 1.0687413130575176e-05, |
|
"loss": 0.5132, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 1.1372362668694165, |
|
"grad_norm": 0.39357330789283446, |
|
"learning_rate": 1.0655016907299346e-05, |
|
"loss": 0.5288, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.1387568903250334, |
|
"grad_norm": 0.3690059684924214, |
|
"learning_rate": 1.0622638014699909e-05, |
|
"loss": 0.4947, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 1.14027751378065, |
|
"grad_norm": 0.3797018296519779, |
|
"learning_rate": 1.0590276646611539e-05, |
|
"loss": 0.5021, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 1.1417981372362669, |
|
"grad_norm": 0.37238784723531165, |
|
"learning_rate": 1.0557932996764002e-05, |
|
"loss": 0.5111, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 1.1433187606918838, |
|
"grad_norm": 0.34262491030815395, |
|
"learning_rate": 1.0525607258781e-05, |
|
"loss": 0.5095, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 1.1448393841475004, |
|
"grad_norm": 0.3516626249819126, |
|
"learning_rate": 1.0493299626178997e-05, |
|
"loss": 0.5256, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.1463600076031173, |
|
"grad_norm": 0.36336127776946636, |
|
"learning_rate": 1.0461010292366076e-05, |
|
"loss": 0.5163, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 1.1478806310587342, |
|
"grad_norm": 0.3660245007970849, |
|
"learning_rate": 1.0428739450640768e-05, |
|
"loss": 0.5025, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 1.1494012545143508, |
|
"grad_norm": 0.36947456530920786, |
|
"learning_rate": 1.039648729419092e-05, |
|
"loss": 0.5331, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 1.1509218779699677, |
|
"grad_norm": 0.35068301401351004, |
|
"learning_rate": 1.0364254016092499e-05, |
|
"loss": 0.4979, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 1.1524425014255846, |
|
"grad_norm": 0.3711143416445139, |
|
"learning_rate": 1.033203980930847e-05, |
|
"loss": 0.5349, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.1539631248812012, |
|
"grad_norm": 0.3739635788047117, |
|
"learning_rate": 1.0299844866687616e-05, |
|
"loss": 0.4975, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 1.155483748336818, |
|
"grad_norm": 0.33767131159596325, |
|
"learning_rate": 1.0267669380963419e-05, |
|
"loss": 0.4986, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 1.157004371792435, |
|
"grad_norm": 0.4087303964655199, |
|
"learning_rate": 1.0235513544752866e-05, |
|
"loss": 0.4995, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 1.1585249952480516, |
|
"grad_norm": 0.3669384792321335, |
|
"learning_rate": 1.0203377550555312e-05, |
|
"loss": 0.5033, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 1.1600456187036685, |
|
"grad_norm": 0.3582614117799248, |
|
"learning_rate": 1.0171261590751334e-05, |
|
"loss": 0.5068, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.1615662421592854, |
|
"grad_norm": 0.3548960213582146, |
|
"learning_rate": 1.0139165857601578e-05, |
|
"loss": 0.5219, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 1.163086865614902, |
|
"grad_norm": 0.35769530792522997, |
|
"learning_rate": 1.0107090543245597e-05, |
|
"loss": 0.4991, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 1.164607489070519, |
|
"grad_norm": 0.3762621019947577, |
|
"learning_rate": 1.0075035839700708e-05, |
|
"loss": 0.5318, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 1.1661281125261358, |
|
"grad_norm": 0.3428064272464147, |
|
"learning_rate": 1.0043001938860842e-05, |
|
"loss": 0.5168, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 1.1676487359817525, |
|
"grad_norm": 0.3740132674321784, |
|
"learning_rate": 1.001098903249541e-05, |
|
"loss": 0.512, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.1691693594373693, |
|
"grad_norm": 0.37086418652390696, |
|
"learning_rate": 9.978997312248124e-06, |
|
"loss": 0.5046, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 1.1706899828929862, |
|
"grad_norm": 0.34389709834427135, |
|
"learning_rate": 9.947026969635868e-06, |
|
"loss": 0.499, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 1.1722106063486029, |
|
"grad_norm": 0.36925894599740416, |
|
"learning_rate": 9.915078196047548e-06, |
|
"loss": 0.5269, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 1.1737312298042197, |
|
"grad_norm": 0.34734014615226194, |
|
"learning_rate": 9.883151182742967e-06, |
|
"loss": 0.5159, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 1.1752518532598366, |
|
"grad_norm": 0.38858923268368156, |
|
"learning_rate": 9.851246120851641e-06, |
|
"loss": 0.5599, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.1767724767154533, |
|
"grad_norm": 0.36352194480464795, |
|
"learning_rate": 9.819363201371682e-06, |
|
"loss": 0.5077, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 1.1782931001710701, |
|
"grad_norm": 0.38508119384888934, |
|
"learning_rate": 9.78750261516864e-06, |
|
"loss": 0.5455, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 1.179813723626687, |
|
"grad_norm": 0.35527440995181414, |
|
"learning_rate": 9.755664552974386e-06, |
|
"loss": 0.5128, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.1813343470823037, |
|
"grad_norm": 0.3524825961134292, |
|
"learning_rate": 9.723849205385932e-06, |
|
"loss": 0.5062, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.1828549705379205, |
|
"grad_norm": 0.4044926970167433, |
|
"learning_rate": 9.692056762864325e-06, |
|
"loss": 0.5412, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.1843755939935374, |
|
"grad_norm": 0.35710970899973166, |
|
"learning_rate": 9.660287415733477e-06, |
|
"loss": 0.52, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 1.185896217449154, |
|
"grad_norm": 0.37803033804258834, |
|
"learning_rate": 9.62854135417906e-06, |
|
"loss": 0.5052, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.187416840904771, |
|
"grad_norm": 0.39768355225535407, |
|
"learning_rate": 9.596818768247333e-06, |
|
"loss": 0.5092, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 1.1889374643603878, |
|
"grad_norm": 0.348711884248085, |
|
"learning_rate": 9.565119847844027e-06, |
|
"loss": 0.5267, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 1.1904580878160045, |
|
"grad_norm": 0.3834819895765528, |
|
"learning_rate": 9.533444782733183e-06, |
|
"loss": 0.5037, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.1919787112716214, |
|
"grad_norm": 0.3715822681968465, |
|
"learning_rate": 9.501793762536068e-06, |
|
"loss": 0.5308, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 1.1934993347272382, |
|
"grad_norm": 0.3652977625843937, |
|
"learning_rate": 9.470166976729976e-06, |
|
"loss": 0.5372, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 1.195019958182855, |
|
"grad_norm": 0.3364456352273384, |
|
"learning_rate": 9.43856461464713e-06, |
|
"loss": 0.4963, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.1965405816384718, |
|
"grad_norm": 0.3472569441190444, |
|
"learning_rate": 9.40698686547354e-06, |
|
"loss": 0.5284, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 1.1980612050940886, |
|
"grad_norm": 0.34440523348480445, |
|
"learning_rate": 9.375433918247888e-06, |
|
"loss": 0.4989, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.1995818285497053, |
|
"grad_norm": 0.348206283208153, |
|
"learning_rate": 9.343905961860358e-06, |
|
"loss": 0.5226, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 1.2011024520053222, |
|
"grad_norm": 0.3505525203415447, |
|
"learning_rate": 9.31240318505154e-06, |
|
"loss": 0.5047, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 1.202623075460939, |
|
"grad_norm": 0.3515111190341964, |
|
"learning_rate": 9.280925776411278e-06, |
|
"loss": 0.5127, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 1.2041436989165557, |
|
"grad_norm": 0.3620692993419406, |
|
"learning_rate": 9.249473924377573e-06, |
|
"loss": 0.5044, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 1.2056643223721726, |
|
"grad_norm": 0.3659564097222614, |
|
"learning_rate": 9.218047817235408e-06, |
|
"loss": 0.5319, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.2071849458277895, |
|
"grad_norm": 0.33548451693694653, |
|
"learning_rate": 9.186647643115659e-06, |
|
"loss": 0.4965, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 1.2087055692834061, |
|
"grad_norm": 0.37688953100329553, |
|
"learning_rate": 9.15527358999394e-06, |
|
"loss": 0.5453, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 1.210226192739023, |
|
"grad_norm": 0.3428883181534561, |
|
"learning_rate": 9.123925845689515e-06, |
|
"loss": 0.5392, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 1.2117468161946399, |
|
"grad_norm": 0.3431087734831433, |
|
"learning_rate": 9.092604597864141e-06, |
|
"loss": 0.5118, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 1.2132674396502565, |
|
"grad_norm": 0.35251065259476605, |
|
"learning_rate": 9.06131003402095e-06, |
|
"loss": 0.5054, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.2147880631058734, |
|
"grad_norm": 0.3496035139299239, |
|
"learning_rate": 9.030042341503332e-06, |
|
"loss": 0.5084, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 1.2163086865614903, |
|
"grad_norm": 0.33786656336389453, |
|
"learning_rate": 8.998801707493827e-06, |
|
"loss": 0.5199, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 1.217829310017107, |
|
"grad_norm": 0.3668696340788812, |
|
"learning_rate": 8.967588319012977e-06, |
|
"loss": 0.4874, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 1.2193499334727238, |
|
"grad_norm": 0.34867802829711625, |
|
"learning_rate": 8.936402362918224e-06, |
|
"loss": 0.4939, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 1.2208705569283407, |
|
"grad_norm": 0.33881823564824526, |
|
"learning_rate": 8.90524402590278e-06, |
|
"loss": 0.5354, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 1.2223911803839573, |
|
"grad_norm": 0.3395916132176579, |
|
"learning_rate": 8.874113494494537e-06, |
|
"loss": 0.5017, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 1.2239118038395742, |
|
"grad_norm": 0.33632837523359915, |
|
"learning_rate": 8.843010955054909e-06, |
|
"loss": 0.5006, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 1.225432427295191, |
|
"grad_norm": 0.33436403561258093, |
|
"learning_rate": 8.811936593777748e-06, |
|
"loss": 0.5052, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 1.2269530507508077, |
|
"grad_norm": 0.3224896536759886, |
|
"learning_rate": 8.780890596688211e-06, |
|
"loss": 0.4981, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 1.2284736742064246, |
|
"grad_norm": 0.34938066443230276, |
|
"learning_rate": 8.749873149641665e-06, |
|
"loss": 0.5203, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.2299942976620415, |
|
"grad_norm": 0.345563582040282, |
|
"learning_rate": 8.718884438322557e-06, |
|
"loss": 0.5162, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 1.2315149211176581, |
|
"grad_norm": 0.3428900758147224, |
|
"learning_rate": 8.687924648243311e-06, |
|
"loss": 0.4962, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 1.233035544573275, |
|
"grad_norm": 0.34750322921749416, |
|
"learning_rate": 8.65699396474321e-06, |
|
"loss": 0.5186, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 1.234556168028892, |
|
"grad_norm": 0.3589082109646499, |
|
"learning_rate": 8.626092572987297e-06, |
|
"loss": 0.5078, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 1.2360767914845086, |
|
"grad_norm": 0.3506370697057621, |
|
"learning_rate": 8.595220657965268e-06, |
|
"loss": 0.5105, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 1.2375974149401254, |
|
"grad_norm": 0.36011717525565096, |
|
"learning_rate": 8.564378404490342e-06, |
|
"loss": 0.5181, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 1.2391180383957423, |
|
"grad_norm": 0.3402637505055028, |
|
"learning_rate": 8.533565997198175e-06, |
|
"loss": 0.5186, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 1.2406386618513592, |
|
"grad_norm": 0.3475154384318057, |
|
"learning_rate": 8.50278362054577e-06, |
|
"loss": 0.4946, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 1.2421592853069758, |
|
"grad_norm": 0.3590598605292272, |
|
"learning_rate": 8.472031458810325e-06, |
|
"loss": 0.5011, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 1.2436799087625927, |
|
"grad_norm": 0.3483002221345324, |
|
"learning_rate": 8.441309696088174e-06, |
|
"loss": 0.5231, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.2452005322182096, |
|
"grad_norm": 0.3475177777900519, |
|
"learning_rate": 8.410618516293662e-06, |
|
"loss": 0.5165, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 1.2467211556738262, |
|
"grad_norm": 0.3633656751272662, |
|
"learning_rate": 8.37995810315806e-06, |
|
"loss": 0.5441, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 1.2482417791294431, |
|
"grad_norm": 0.3447799135446017, |
|
"learning_rate": 8.349328640228443e-06, |
|
"loss": 0.4933, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 1.24976240258506, |
|
"grad_norm": 0.34981751009747264, |
|
"learning_rate": 8.318730310866615e-06, |
|
"loss": 0.5008, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 1.2512830260406766, |
|
"grad_norm": 0.361234032357548, |
|
"learning_rate": 8.288163298247994e-06, |
|
"loss": 0.4979, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 1.2528036494962935, |
|
"grad_norm": 0.34834574638448507, |
|
"learning_rate": 8.257627785360528e-06, |
|
"loss": 0.5041, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 1.2543242729519104, |
|
"grad_norm": 0.3466954777402415, |
|
"learning_rate": 8.227123955003586e-06, |
|
"loss": 0.519, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 1.255844896407527, |
|
"grad_norm": 0.3555313535631245, |
|
"learning_rate": 8.196651989786872e-06, |
|
"loss": 0.513, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 1.257365519863144, |
|
"grad_norm": 0.36059188542064946, |
|
"learning_rate": 8.166212072129334e-06, |
|
"loss": 0.4975, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 1.2588861433187608, |
|
"grad_norm": 0.33893355712900364, |
|
"learning_rate": 8.135804384258074e-06, |
|
"loss": 0.4952, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.2604067667743775, |
|
"grad_norm": 0.33361223601614015, |
|
"learning_rate": 8.10542910820724e-06, |
|
"loss": 0.5061, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 1.2619273902299943, |
|
"grad_norm": 0.36477092577701276, |
|
"learning_rate": 8.07508642581696e-06, |
|
"loss": 0.5305, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 1.2634480136856112, |
|
"grad_norm": 0.3563029319847921, |
|
"learning_rate": 8.044776518732229e-06, |
|
"loss": 0.4953, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 1.2649686371412279, |
|
"grad_norm": 0.3480130680776454, |
|
"learning_rate": 8.014499568401845e-06, |
|
"loss": 0.5053, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 1.2664892605968447, |
|
"grad_norm": 0.33347701875752966, |
|
"learning_rate": 7.98425575607731e-06, |
|
"loss": 0.4956, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 1.2680098840524616, |
|
"grad_norm": 0.33599833733050055, |
|
"learning_rate": 7.95404526281174e-06, |
|
"loss": 0.5015, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 1.2695305075080783, |
|
"grad_norm": 0.3384957381349461, |
|
"learning_rate": 7.923868269458795e-06, |
|
"loss": 0.5062, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 1.2710511309636952, |
|
"grad_norm": 0.3252174947797439, |
|
"learning_rate": 7.893724956671587e-06, |
|
"loss": 0.4884, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 1.272571754419312, |
|
"grad_norm": 0.34540716209835276, |
|
"learning_rate": 7.86361550490161e-06, |
|
"loss": 0.5045, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 1.2740923778749287, |
|
"grad_norm": 0.3498563998796463, |
|
"learning_rate": 7.833540094397626e-06, |
|
"loss": 0.5118, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.2756130013305456, |
|
"grad_norm": 0.3647153038013536, |
|
"learning_rate": 7.803498905204638e-06, |
|
"loss": 0.5064, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 1.2771336247861624, |
|
"grad_norm": 0.3508552754714093, |
|
"learning_rate": 7.773492117162775e-06, |
|
"loss": 0.5035, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 1.278654248241779, |
|
"grad_norm": 0.33239766666827464, |
|
"learning_rate": 7.743519909906217e-06, |
|
"loss": 0.4986, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 1.280174871697396, |
|
"grad_norm": 0.4315070616641703, |
|
"learning_rate": 7.713582462862138e-06, |
|
"loss": 0.5331, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 1.2816954951530128, |
|
"grad_norm": 0.36173927943521234, |
|
"learning_rate": 7.683679955249609e-06, |
|
"loss": 0.5064, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 1.2832161186086295, |
|
"grad_norm": 0.3487948964838659, |
|
"learning_rate": 7.653812566078565e-06, |
|
"loss": 0.5061, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 1.2847367420642464, |
|
"grad_norm": 0.3484239609890833, |
|
"learning_rate": 7.623980474148672e-06, |
|
"loss": 0.5023, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 1.2862573655198632, |
|
"grad_norm": 0.36052769608217394, |
|
"learning_rate": 7.59418385804832e-06, |
|
"loss": 0.5267, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 1.28777798897548, |
|
"grad_norm": 0.37188400515013914, |
|
"learning_rate": 7.564422896153491e-06, |
|
"loss": 0.5455, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 1.2892986124310968, |
|
"grad_norm": 0.34411324307178126, |
|
"learning_rate": 7.534697766626769e-06, |
|
"loss": 0.5137, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.2908192358867137, |
|
"grad_norm": 0.3364768988197693, |
|
"learning_rate": 7.50500864741619e-06, |
|
"loss": 0.5209, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 1.2923398593423303, |
|
"grad_norm": 0.3670085290524646, |
|
"learning_rate": 7.475355716254242e-06, |
|
"loss": 0.5402, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 1.2938604827979472, |
|
"grad_norm": 0.33251162694155534, |
|
"learning_rate": 7.445739150656753e-06, |
|
"loss": 0.4987, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 1.295381106253564, |
|
"grad_norm": 0.35070274634255805, |
|
"learning_rate": 7.416159127921868e-06, |
|
"loss": 0.5071, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 1.2969017297091807, |
|
"grad_norm": 0.3795469724864985, |
|
"learning_rate": 7.38661582512897e-06, |
|
"loss": 0.5399, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 1.2984223531647976, |
|
"grad_norm": 0.3396586485316285, |
|
"learning_rate": 7.3571094191375995e-06, |
|
"loss": 0.5218, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 1.2999429766204145, |
|
"grad_norm": 0.3360542873886429, |
|
"learning_rate": 7.327640086586438e-06, |
|
"loss": 0.5034, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 1.3014636000760311, |
|
"grad_norm": 0.3295788458214383, |
|
"learning_rate": 7.298208003892223e-06, |
|
"loss": 0.5096, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 1.302984223531648, |
|
"grad_norm": 0.35244946035448127, |
|
"learning_rate": 7.268813347248696e-06, |
|
"loss": 0.5085, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 1.3045048469872649, |
|
"grad_norm": 0.3451919708597768, |
|
"learning_rate": 7.239456292625539e-06, |
|
"loss": 0.5017, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.3060254704428815, |
|
"grad_norm": 0.3208050987232861, |
|
"learning_rate": 7.210137015767349e-06, |
|
"loss": 0.5024, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 1.3075460938984984, |
|
"grad_norm": 0.35131393844950287, |
|
"learning_rate": 7.1808556921925585e-06, |
|
"loss": 0.529, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 1.3090667173541153, |
|
"grad_norm": 0.35801843724419535, |
|
"learning_rate": 7.1516124971924e-06, |
|
"loss": 0.5297, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 1.310587340809732, |
|
"grad_norm": 0.3401955783986041, |
|
"learning_rate": 7.122407605829839e-06, |
|
"loss": 0.5035, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 1.3121079642653488, |
|
"grad_norm": 0.3371522526619987, |
|
"learning_rate": 7.093241192938552e-06, |
|
"loss": 0.4876, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 1.3136285877209657, |
|
"grad_norm": 0.35482672035164964, |
|
"learning_rate": 7.064113433121862e-06, |
|
"loss": 0.5243, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 1.3151492111765823, |
|
"grad_norm": 0.3535329850445862, |
|
"learning_rate": 7.03502450075169e-06, |
|
"loss": 0.5081, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 1.3166698346321992, |
|
"grad_norm": 0.3423103799188622, |
|
"learning_rate": 7.00597456996753e-06, |
|
"loss": 0.5121, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 1.318190458087816, |
|
"grad_norm": 0.35140677064268294, |
|
"learning_rate": 6.976963814675376e-06, |
|
"loss": 0.5171, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 1.3197110815434328, |
|
"grad_norm": 0.352748609509177, |
|
"learning_rate": 6.947992408546731e-06, |
|
"loss": 0.535, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.3212317049990496, |
|
"grad_norm": 0.3317879618506482, |
|
"learning_rate": 6.919060525017507e-06, |
|
"loss": 0.4942, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 1.3227523284546665, |
|
"grad_norm": 0.3547220499965655, |
|
"learning_rate": 6.890168337287031e-06, |
|
"loss": 0.5091, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 1.3242729519102832, |
|
"grad_norm": 0.32077453688103713, |
|
"learning_rate": 6.861316018316979e-06, |
|
"loss": 0.515, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 1.3257935753659, |
|
"grad_norm": 0.3399152548809487, |
|
"learning_rate": 6.832503740830382e-06, |
|
"loss": 0.507, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 1.327314198821517, |
|
"grad_norm": 0.3318950288383037, |
|
"learning_rate": 6.803731677310529e-06, |
|
"loss": 0.4966, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 1.3288348222771336, |
|
"grad_norm": 0.3464596127850353, |
|
"learning_rate": 6.775000000000004e-06, |
|
"loss": 0.5115, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 1.3303554457327504, |
|
"grad_norm": 0.34400466984436623, |
|
"learning_rate": 6.746308880899589e-06, |
|
"loss": 0.5082, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 1.3318760691883673, |
|
"grad_norm": 0.3700416848449434, |
|
"learning_rate": 6.71765849176729e-06, |
|
"loss": 0.5302, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 1.333396692643984, |
|
"grad_norm": 0.333378413090238, |
|
"learning_rate": 6.689049004117282e-06, |
|
"loss": 0.5021, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 1.3349173160996008, |
|
"grad_norm": 0.3384873522425988, |
|
"learning_rate": 6.660480589218871e-06, |
|
"loss": 0.4805, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.3364379395552177, |
|
"grad_norm": 0.34806352457541345, |
|
"learning_rate": 6.631953418095496e-06, |
|
"loss": 0.4912, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 1.3379585630108344, |
|
"grad_norm": 0.34876873046591195, |
|
"learning_rate": 6.603467661523689e-06, |
|
"loss": 0.5082, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 1.3394791864664513, |
|
"grad_norm": 0.31774849749377526, |
|
"learning_rate": 6.5750234900320585e-06, |
|
"loss": 0.4818, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 1.3409998099220681, |
|
"grad_norm": 0.3324374020356446, |
|
"learning_rate": 6.546621073900255e-06, |
|
"loss": 0.4998, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 1.3425204333776848, |
|
"grad_norm": 0.3353452020425694, |
|
"learning_rate": 6.518260583157976e-06, |
|
"loss": 0.4921, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 1.3440410568333017, |
|
"grad_norm": 0.3493478007336028, |
|
"learning_rate": 6.489942187583926e-06, |
|
"loss": 0.5116, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 1.3455616802889185, |
|
"grad_norm": 0.3284929233347785, |
|
"learning_rate": 6.461666056704817e-06, |
|
"loss": 0.5009, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 1.3470823037445352, |
|
"grad_norm": 0.34202330071520587, |
|
"learning_rate": 6.433432359794333e-06, |
|
"loss": 0.5034, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 1.348602927200152, |
|
"grad_norm": 0.33111021835858967, |
|
"learning_rate": 6.405241265872139e-06, |
|
"loss": 0.5044, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 1.350123550655769, |
|
"grad_norm": 0.3354271189663323, |
|
"learning_rate": 6.37709294370286e-06, |
|
"loss": 0.4976, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.3516441741113856, |
|
"grad_norm": 0.33711612171640787, |
|
"learning_rate": 6.3489875617950605e-06, |
|
"loss": 0.5018, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 1.3531647975670025, |
|
"grad_norm": 0.33268125847154223, |
|
"learning_rate": 6.320925288400259e-06, |
|
"loss": 0.5061, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 1.3546854210226194, |
|
"grad_norm": 0.3297595687007157, |
|
"learning_rate": 6.292906291511883e-06, |
|
"loss": 0.5008, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 1.356206044478236, |
|
"grad_norm": 0.33711090972493357, |
|
"learning_rate": 6.2649307388643245e-06, |
|
"loss": 0.4899, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 1.3577266679338529, |
|
"grad_norm": 0.35714761250864835, |
|
"learning_rate": 6.236998797931864e-06, |
|
"loss": 0.5336, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 1.3592472913894698, |
|
"grad_norm": 0.33101330668166734, |
|
"learning_rate": 6.209110635927724e-06, |
|
"loss": 0.5188, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 1.3607679148450864, |
|
"grad_norm": 0.33231897329769716, |
|
"learning_rate": 6.181266419803025e-06, |
|
"loss": 0.4992, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 1.3622885383007033, |
|
"grad_norm": 0.3391798280349641, |
|
"learning_rate": 6.153466316245841e-06, |
|
"loss": 0.5145, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 1.3638091617563202, |
|
"grad_norm": 0.3397695996188909, |
|
"learning_rate": 6.125710491680132e-06, |
|
"loss": 0.5148, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 1.3653297852119368, |
|
"grad_norm": 0.33259300354324434, |
|
"learning_rate": 6.097999112264814e-06, |
|
"loss": 0.4983, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.3668504086675537, |
|
"grad_norm": 0.34841156749227775, |
|
"learning_rate": 6.070332343892708e-06, |
|
"loss": 0.5197, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 1.3683710321231706, |
|
"grad_norm": 0.3406141056782546, |
|
"learning_rate": 6.042710352189592e-06, |
|
"loss": 0.49, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 1.3698916555787872, |
|
"grad_norm": 0.3479407700510544, |
|
"learning_rate": 6.01513330251319e-06, |
|
"loss": 0.5054, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 1.371412279034404, |
|
"grad_norm": 0.34636977510682354, |
|
"learning_rate": 5.9876013599521654e-06, |
|
"loss": 0.5273, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 1.372932902490021, |
|
"grad_norm": 0.32587606375019723, |
|
"learning_rate": 5.9601146893251685e-06, |
|
"loss": 0.5032, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 1.3744535259456376, |
|
"grad_norm": 0.34209096131732425, |
|
"learning_rate": 5.932673455179826e-06, |
|
"loss": 0.5004, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 1.3759741494012545, |
|
"grad_norm": 0.3462622805308902, |
|
"learning_rate": 5.9052778217917614e-06, |
|
"loss": 0.5121, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 1.3774947728568714, |
|
"grad_norm": 0.3685686058332204, |
|
"learning_rate": 5.8779279531636046e-06, |
|
"loss": 0.5314, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 1.379015396312488, |
|
"grad_norm": 0.3515509029896568, |
|
"learning_rate": 5.850624013024024e-06, |
|
"loss": 0.5215, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 1.380536019768105, |
|
"grad_norm": 0.32046588423851496, |
|
"learning_rate": 5.82336616482674e-06, |
|
"loss": 0.4974, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.3820566432237218, |
|
"grad_norm": 0.3808130154691177, |
|
"learning_rate": 5.796154571749547e-06, |
|
"loss": 0.525, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 1.3835772666793384, |
|
"grad_norm": 0.3454746636565212, |
|
"learning_rate": 5.768989396693324e-06, |
|
"loss": 0.4923, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 1.3850978901349553, |
|
"grad_norm": 0.3321076118691578, |
|
"learning_rate": 5.741870802281081e-06, |
|
"loss": 0.4797, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 1.3866185135905722, |
|
"grad_norm": 0.3420626306833587, |
|
"learning_rate": 5.714798950856981e-06, |
|
"loss": 0.4923, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 1.3881391370461889, |
|
"grad_norm": 0.3353059399098719, |
|
"learning_rate": 5.687774004485342e-06, |
|
"loss": 0.5073, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 1.3896597605018057, |
|
"grad_norm": 0.3257554102906788, |
|
"learning_rate": 5.660796124949711e-06, |
|
"loss": 0.4942, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 1.3911803839574226, |
|
"grad_norm": 0.3413851694093131, |
|
"learning_rate": 5.633865473751846e-06, |
|
"loss": 0.5345, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 1.3927010074130393, |
|
"grad_norm": 0.33413542250569817, |
|
"learning_rate": 5.606982212110806e-06, |
|
"loss": 0.5205, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 1.3942216308686561, |
|
"grad_norm": 0.3413782795261156, |
|
"learning_rate": 5.580146500961927e-06, |
|
"loss": 0.4914, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 1.395742254324273, |
|
"grad_norm": 0.34562101413317636, |
|
"learning_rate": 5.553358500955903e-06, |
|
"loss": 0.5191, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.3972628777798897, |
|
"grad_norm": 0.36463942955800077, |
|
"learning_rate": 5.526618372457788e-06, |
|
"loss": 0.5031, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 1.3987835012355065, |
|
"grad_norm": 0.3490946304863948, |
|
"learning_rate": 5.49992627554609e-06, |
|
"loss": 0.5117, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 1.4003041246911234, |
|
"grad_norm": 0.35279790273339234, |
|
"learning_rate": 5.473282370011742e-06, |
|
"loss": 0.5274, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 1.40182474814674, |
|
"grad_norm": 0.349655925155731, |
|
"learning_rate": 5.446686815357206e-06, |
|
"loss": 0.4965, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 1.403345371602357, |
|
"grad_norm": 0.334218280540011, |
|
"learning_rate": 5.420139770795477e-06, |
|
"loss": 0.5216, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 1.4048659950579738, |
|
"grad_norm": 0.3630958366505651, |
|
"learning_rate": 5.393641395249157e-06, |
|
"loss": 0.5125, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 1.4063866185135905, |
|
"grad_norm": 0.3665354616192356, |
|
"learning_rate": 5.367191847349496e-06, |
|
"loss": 0.5171, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 1.4079072419692074, |
|
"grad_norm": 0.3388173032700996, |
|
"learning_rate": 5.340791285435425e-06, |
|
"loss": 0.5107, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 1.4094278654248242, |
|
"grad_norm": 0.3408412945651779, |
|
"learning_rate": 5.314439867552638e-06, |
|
"loss": 0.5092, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 1.4109484888804409, |
|
"grad_norm": 0.32178831689172704, |
|
"learning_rate": 5.288137751452625e-06, |
|
"loss": 0.5052, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.4124691123360578, |
|
"grad_norm": 0.3404604279948066, |
|
"learning_rate": 5.261885094591735e-06, |
|
"loss": 0.4989, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 1.4139897357916746, |
|
"grad_norm": 0.3528505749791748, |
|
"learning_rate": 5.235682054130224e-06, |
|
"loss": 0.4883, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 1.4155103592472913, |
|
"grad_norm": 0.33074842020507783, |
|
"learning_rate": 5.209528786931329e-06, |
|
"loss": 0.5143, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 1.4170309827029082, |
|
"grad_norm": 0.33129196603154915, |
|
"learning_rate": 5.183425449560322e-06, |
|
"loss": 0.5069, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 1.418551606158525, |
|
"grad_norm": 0.3450555489678585, |
|
"learning_rate": 5.1573721982835766e-06, |
|
"loss": 0.5194, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 1.4200722296141417, |
|
"grad_norm": 0.32000469036683216, |
|
"learning_rate": 5.131369189067611e-06, |
|
"loss": 0.4963, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 1.4215928530697586, |
|
"grad_norm": 0.3579257724532904, |
|
"learning_rate": 5.105416577578189e-06, |
|
"loss": 0.5009, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 1.4231134765253755, |
|
"grad_norm": 0.341889660403509, |
|
"learning_rate": 5.079514519179368e-06, |
|
"loss": 0.4955, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 1.424634099980992, |
|
"grad_norm": 0.3375109020144137, |
|
"learning_rate": 5.053663168932563e-06, |
|
"loss": 0.4925, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 1.426154723436609, |
|
"grad_norm": 0.3354140119830031, |
|
"learning_rate": 5.02786268159564e-06, |
|
"loss": 0.4828, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.4276753468922259, |
|
"grad_norm": 0.3411962173320927, |
|
"learning_rate": 5.002113211621957e-06, |
|
"loss": 0.5059, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 1.4291959703478425, |
|
"grad_norm": 0.3794888650970929, |
|
"learning_rate": 4.97641491315949e-06, |
|
"loss": 0.5182, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 1.4307165938034594, |
|
"grad_norm": 0.3438813382757282, |
|
"learning_rate": 4.950767940049846e-06, |
|
"loss": 0.5231, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 1.4322372172590763, |
|
"grad_norm": 0.33702340568088535, |
|
"learning_rate": 4.925172445827401e-06, |
|
"loss": 0.4898, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 1.433757840714693, |
|
"grad_norm": 0.33891170335451204, |
|
"learning_rate": 4.8996285837183344e-06, |
|
"loss": 0.4999, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 1.4352784641703098, |
|
"grad_norm": 0.3613620365932021, |
|
"learning_rate": 4.874136506639747e-06, |
|
"loss": 0.5109, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 1.4367990876259267, |
|
"grad_norm": 0.34896992869532173, |
|
"learning_rate": 4.848696367198726e-06, |
|
"loss": 0.503, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 1.4383197110815433, |
|
"grad_norm": 0.3319351812805159, |
|
"learning_rate": 4.823308317691442e-06, |
|
"loss": 0.5189, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 1.4398403345371602, |
|
"grad_norm": 0.3319386837420594, |
|
"learning_rate": 4.7979725101022175e-06, |
|
"loss": 0.4813, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 1.441360957992777, |
|
"grad_norm": 0.3397974585040773, |
|
"learning_rate": 4.772689096102645e-06, |
|
"loss": 0.5045, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.4428815814483937, |
|
"grad_norm": 0.31839220001298785, |
|
"learning_rate": 4.747458227050663e-06, |
|
"loss": 0.4815, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 1.4444022049040106, |
|
"grad_norm": 0.32870599013036034, |
|
"learning_rate": 4.7222800539896445e-06, |
|
"loss": 0.5142, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 1.4459228283596275, |
|
"grad_norm": 0.3342769560881234, |
|
"learning_rate": 4.697154727647507e-06, |
|
"loss": 0.4943, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 1.4474434518152441, |
|
"grad_norm": 0.34488780755334697, |
|
"learning_rate": 4.672082398435805e-06, |
|
"loss": 0.5137, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 1.448964075270861, |
|
"grad_norm": 0.3153153534150591, |
|
"learning_rate": 4.6470632164488295e-06, |
|
"loss": 0.4837, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 1.450484698726478, |
|
"grad_norm": 0.32145993722141425, |
|
"learning_rate": 4.622097331462696e-06, |
|
"loss": 0.4962, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 1.4520053221820945, |
|
"grad_norm": 0.33298486778180947, |
|
"learning_rate": 4.597184892934472e-06, |
|
"loss": 0.5103, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 1.4535259456377114, |
|
"grad_norm": 0.3450267637519744, |
|
"learning_rate": 4.572326050001267e-06, |
|
"loss": 0.4909, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 1.4550465690933283, |
|
"grad_norm": 0.3318326795828598, |
|
"learning_rate": 4.547520951479347e-06, |
|
"loss": 0.5057, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 1.456567192548945, |
|
"grad_norm": 0.33679166346861333, |
|
"learning_rate": 4.522769745863228e-06, |
|
"loss": 0.5057, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 1.4580878160045618, |
|
"grad_norm": 0.3395760337602621, |
|
"learning_rate": 4.498072581324811e-06, |
|
"loss": 0.5358, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 1.4596084394601787, |
|
"grad_norm": 0.3350852965124303, |
|
"learning_rate": 4.473429605712482e-06, |
|
"loss": 0.516, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 1.4611290629157954, |
|
"grad_norm": 0.3402104641226832, |
|
"learning_rate": 4.448840966550214e-06, |
|
"loss": 0.5042, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 1.4626496863714122, |
|
"grad_norm": 0.35213392448418906, |
|
"learning_rate": 4.424306811036717e-06, |
|
"loss": 0.5048, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 1.4641703098270291, |
|
"grad_norm": 0.30760979362082447, |
|
"learning_rate": 4.399827286044515e-06, |
|
"loss": 0.4976, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 1.4656909332826458, |
|
"grad_norm": 0.3713309741272073, |
|
"learning_rate": 4.375402538119116e-06, |
|
"loss": 0.4849, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 1.4672115567382626, |
|
"grad_norm": 0.32636579757729905, |
|
"learning_rate": 4.351032713478084e-06, |
|
"loss": 0.5011, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 1.4687321801938795, |
|
"grad_norm": 0.34427812460067464, |
|
"learning_rate": 4.326717958010203e-06, |
|
"loss": 0.4985, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 1.4702528036494962, |
|
"grad_norm": 0.3431253440023036, |
|
"learning_rate": 4.302458417274573e-06, |
|
"loss": 0.504, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 1.471773427105113, |
|
"grad_norm": 0.3219320912973509, |
|
"learning_rate": 4.278254236499767e-06, |
|
"loss": 0.4872, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 1.47329405056073, |
|
"grad_norm": 0.3259111453940081, |
|
"learning_rate": 4.2541055605829445e-06, |
|
"loss": 0.4915, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 1.4748146740163466, |
|
"grad_norm": 0.34356473045787483, |
|
"learning_rate": 4.2300125340889885e-06, |
|
"loss": 0.5041, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 1.4763352974719635, |
|
"grad_norm": 0.3302732477989796, |
|
"learning_rate": 4.2059753012496334e-06, |
|
"loss": 0.4975, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 1.4778559209275803, |
|
"grad_norm": 0.3139509766944064, |
|
"learning_rate": 4.181994005962612e-06, |
|
"loss": 0.4987, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 1.479376544383197, |
|
"grad_norm": 0.33007516038018775, |
|
"learning_rate": 4.158068791790798e-06, |
|
"loss": 0.5094, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 1.4808971678388139, |
|
"grad_norm": 0.3314396852932346, |
|
"learning_rate": 4.134199801961319e-06, |
|
"loss": 0.5214, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 1.4824177912944307, |
|
"grad_norm": 0.3265821672215369, |
|
"learning_rate": 4.110387179364735e-06, |
|
"loss": 0.4829, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 1.4839384147500474, |
|
"grad_norm": 0.3332694006250523, |
|
"learning_rate": 4.0866310665541576e-06, |
|
"loss": 0.5052, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 1.4854590382056643, |
|
"grad_norm": 0.3490720940072708, |
|
"learning_rate": 4.062931605744416e-06, |
|
"loss": 0.5092, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 1.4869796616612811, |
|
"grad_norm": 0.33398988538744084, |
|
"learning_rate": 4.0392889388111746e-06, |
|
"loss": 0.5188, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 1.4885002851168978, |
|
"grad_norm": 0.3270648319812971, |
|
"learning_rate": 4.015703207290125e-06, |
|
"loss": 0.5159, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 1.4900209085725147, |
|
"grad_norm": 0.32741030567450113, |
|
"learning_rate": 3.992174552376112e-06, |
|
"loss": 0.5025, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 1.4915415320281316, |
|
"grad_norm": 0.32463136930033865, |
|
"learning_rate": 3.968703114922282e-06, |
|
"loss": 0.4912, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 1.4930621554837482, |
|
"grad_norm": 0.3493187625119753, |
|
"learning_rate": 3.945289035439266e-06, |
|
"loss": 0.5022, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 1.494582778939365, |
|
"grad_norm": 0.3393915308815615, |
|
"learning_rate": 3.921932454094324e-06, |
|
"loss": 0.5121, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 1.496103402394982, |
|
"grad_norm": 0.3361281345069889, |
|
"learning_rate": 3.8986335107104904e-06, |
|
"loss": 0.5113, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 1.4976240258505986, |
|
"grad_norm": 0.33203560338943144, |
|
"learning_rate": 3.875392344765772e-06, |
|
"loss": 0.4754, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 1.4991446493062155, |
|
"grad_norm": 0.324328611770242, |
|
"learning_rate": 3.852209095392288e-06, |
|
"loss": 0.4993, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 1.5006652727618324, |
|
"grad_norm": 0.3558742459808321, |
|
"learning_rate": 3.829083901375433e-06, |
|
"loss": 0.5047, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 1.502185896217449, |
|
"grad_norm": 0.3338340547562902, |
|
"learning_rate": 3.8060169011530668e-06, |
|
"loss": 0.4759, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.5037065196730661, |
|
"grad_norm": 0.35120584802546506, |
|
"learning_rate": 3.783008232814671e-06, |
|
"loss": 0.5294, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 1.5052271431286828, |
|
"grad_norm": 0.3353936427085398, |
|
"learning_rate": 3.760058034100533e-06, |
|
"loss": 0.5284, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 1.5067477665842994, |
|
"grad_norm": 0.33688668654855014, |
|
"learning_rate": 3.7371664424008973e-06, |
|
"loss": 0.4959, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 1.5082683900399165, |
|
"grad_norm": 0.32266166581151934, |
|
"learning_rate": 3.714333594755177e-06, |
|
"loss": 0.4752, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 1.5097890134955332, |
|
"grad_norm": 0.33412231708735246, |
|
"learning_rate": 3.691559627851118e-06, |
|
"loss": 0.5179, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 1.5113096369511498, |
|
"grad_norm": 0.3243441675650943, |
|
"learning_rate": 3.6688446780239596e-06, |
|
"loss": 0.4943, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 1.512830260406767, |
|
"grad_norm": 0.327154574269167, |
|
"learning_rate": 3.646188881255658e-06, |
|
"loss": 0.492, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 1.5143508838623836, |
|
"grad_norm": 0.3192294634078481, |
|
"learning_rate": 3.623592373174045e-06, |
|
"loss": 0.4771, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 1.5158715073180002, |
|
"grad_norm": 0.3372732645788767, |
|
"learning_rate": 3.601055289052031e-06, |
|
"loss": 0.4949, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 1.5173921307736173, |
|
"grad_norm": 0.31829723772061774, |
|
"learning_rate": 3.578577763806769e-06, |
|
"loss": 0.49, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 1.518912754229234, |
|
"grad_norm": 0.3374508150505602, |
|
"learning_rate": 3.556159931998888e-06, |
|
"loss": 0.5138, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 1.5204333776848507, |
|
"grad_norm": 0.3368177667255345, |
|
"learning_rate": 3.5338019278316426e-06, |
|
"loss": 0.5332, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 1.5219540011404677, |
|
"grad_norm": 0.35595499914728906, |
|
"learning_rate": 3.5115038851501605e-06, |
|
"loss": 0.5093, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 1.5234746245960844, |
|
"grad_norm": 0.3549641645181648, |
|
"learning_rate": 3.4892659374405844e-06, |
|
"loss": 0.5251, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 1.524995248051701, |
|
"grad_norm": 0.32092817344554087, |
|
"learning_rate": 3.467088217829321e-06, |
|
"loss": 0.5086, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 1.5265158715073182, |
|
"grad_norm": 0.3163937191442778, |
|
"learning_rate": 3.4449708590822104e-06, |
|
"loss": 0.4997, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 1.5280364949629348, |
|
"grad_norm": 0.3204985454805181, |
|
"learning_rate": 3.422913993603756e-06, |
|
"loss": 0.523, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 1.5295571184185515, |
|
"grad_norm": 0.3282138238031523, |
|
"learning_rate": 3.4009177534363184e-06, |
|
"loss": 0.5096, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 1.5310777418741686, |
|
"grad_norm": 0.3243188648838239, |
|
"learning_rate": 3.378982270259323e-06, |
|
"loss": 0.4974, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 1.5325983653297852, |
|
"grad_norm": 0.32762061133060594, |
|
"learning_rate": 3.3571076753884802e-06, |
|
"loss": 0.5174, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 1.5341189887854019, |
|
"grad_norm": 0.3315702913947939, |
|
"learning_rate": 3.335294099774996e-06, |
|
"loss": 0.5317, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 1.535639612241019, |
|
"grad_norm": 0.3146514780388357, |
|
"learning_rate": 3.313541674004791e-06, |
|
"loss": 0.4823, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 1.5371602356966356, |
|
"grad_norm": 0.31324712226441537, |
|
"learning_rate": 3.291850528297705e-06, |
|
"loss": 0.4936, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 1.5386808591522523, |
|
"grad_norm": 0.3220338054628576, |
|
"learning_rate": 3.270220792506735e-06, |
|
"loss": 0.4897, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 1.5402014826078694, |
|
"grad_norm": 0.33327886740248275, |
|
"learning_rate": 3.2486525961172487e-06, |
|
"loss": 0.5118, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 1.541722106063486, |
|
"grad_norm": 0.31839568655177714, |
|
"learning_rate": 3.2271460682462175e-06, |
|
"loss": 0.4913, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 1.5432427295191027, |
|
"grad_norm": 0.3275702952073424, |
|
"learning_rate": 3.2057013376414218e-06, |
|
"loss": 0.5022, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 1.5447633529747198, |
|
"grad_norm": 0.3265904791104775, |
|
"learning_rate": 3.1843185326807096e-06, |
|
"loss": 0.5013, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 1.5462839764303364, |
|
"grad_norm": 0.34562281052781296, |
|
"learning_rate": 3.162997781371215e-06, |
|
"loss": 0.5245, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 1.547804599885953, |
|
"grad_norm": 0.3304005470169874, |
|
"learning_rate": 3.141739211348575e-06, |
|
"loss": 0.5034, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 1.5493252233415702, |
|
"grad_norm": 0.32880228905554193, |
|
"learning_rate": 3.1205429498761974e-06, |
|
"loss": 0.5015, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 1.5508458467971868, |
|
"grad_norm": 0.32336045302160143, |
|
"learning_rate": 3.0994091238444775e-06, |
|
"loss": 0.511, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 1.5523664702528035, |
|
"grad_norm": 0.3311162660651907, |
|
"learning_rate": 3.0783378597700453e-06, |
|
"loss": 0.5114, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 1.5538870937084206, |
|
"grad_norm": 0.3375600053400384, |
|
"learning_rate": 3.0573292837949997e-06, |
|
"loss": 0.4995, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 1.5554077171640373, |
|
"grad_norm": 0.3184801308022672, |
|
"learning_rate": 3.036383521686171e-06, |
|
"loss": 0.491, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 1.556928340619654, |
|
"grad_norm": 0.3453112317336798, |
|
"learning_rate": 3.015500698834343e-06, |
|
"loss": 0.5348, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 1.558448964075271, |
|
"grad_norm": 0.3178400015966866, |
|
"learning_rate": 2.9946809402535396e-06, |
|
"loss": 0.5184, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 1.5599695875308877, |
|
"grad_norm": 0.32719134398419775, |
|
"learning_rate": 2.9739243705802274e-06, |
|
"loss": 0.515, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 1.5614902109865043, |
|
"grad_norm": 0.3205800396648508, |
|
"learning_rate": 2.953231114072617e-06, |
|
"loss": 0.4964, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 1.5630108344421214, |
|
"grad_norm": 0.316255767582057, |
|
"learning_rate": 2.9326012946098793e-06, |
|
"loss": 0.4772, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 1.564531457897738, |
|
"grad_norm": 0.3175652603893164, |
|
"learning_rate": 2.9120350356914355e-06, |
|
"loss": 0.5086, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 1.5660520813533547, |
|
"grad_norm": 0.32582238160345084, |
|
"learning_rate": 2.891532460436206e-06, |
|
"loss": 0.513, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 1.5675727048089718, |
|
"grad_norm": 0.3296575006626612, |
|
"learning_rate": 2.8710936915818587e-06, |
|
"loss": 0.5023, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 1.5690933282645885, |
|
"grad_norm": 0.31611337448881216, |
|
"learning_rate": 2.8507188514840983e-06, |
|
"loss": 0.5156, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 1.5706139517202051, |
|
"grad_norm": 0.31450333160108834, |
|
"learning_rate": 2.8304080621159222e-06, |
|
"loss": 0.5039, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 1.5721345751758222, |
|
"grad_norm": 0.3213166122638692, |
|
"learning_rate": 2.8101614450668923e-06, |
|
"loss": 0.4891, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 1.5736551986314389, |
|
"grad_norm": 0.3410745502394496, |
|
"learning_rate": 2.7899791215423954e-06, |
|
"loss": 0.5091, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 1.5751758220870555, |
|
"grad_norm": 0.3888744291624106, |
|
"learning_rate": 2.76986121236294e-06, |
|
"loss": 0.5075, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 1.5766964455426726, |
|
"grad_norm": 0.32027232788552623, |
|
"learning_rate": 2.7498078379634194e-06, |
|
"loss": 0.4863, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 1.5782170689982893, |
|
"grad_norm": 0.3219460654212709, |
|
"learning_rate": 2.7298191183923903e-06, |
|
"loss": 0.4919, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 1.579737692453906, |
|
"grad_norm": 0.3189327947333556, |
|
"learning_rate": 2.709895173311352e-06, |
|
"loss": 0.507, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 1.581258315909523, |
|
"grad_norm": 0.34438583434356085, |
|
"learning_rate": 2.690036121994041e-06, |
|
"loss": 0.4899, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 1.5827789393651397, |
|
"grad_norm": 0.3398976699129184, |
|
"learning_rate": 2.6702420833257135e-06, |
|
"loss": 0.5225, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 1.5842995628207563, |
|
"grad_norm": 0.3102911393257611, |
|
"learning_rate": 2.6505131758024167e-06, |
|
"loss": 0.5059, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 1.5858201862763734, |
|
"grad_norm": 0.324270478165599, |
|
"learning_rate": 2.630849517530311e-06, |
|
"loss": 0.4981, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 1.58734080973199, |
|
"grad_norm": 0.3546913242376741, |
|
"learning_rate": 2.6112512262249243e-06, |
|
"loss": 0.5361, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 1.5888614331876068, |
|
"grad_norm": 0.34869377738298407, |
|
"learning_rate": 2.591718419210495e-06, |
|
"loss": 0.5103, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 1.5903820566432239, |
|
"grad_norm": 0.3123532728864231, |
|
"learning_rate": 2.572251213419218e-06, |
|
"loss": 0.4932, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 1.5919026800988405, |
|
"grad_norm": 0.32330942665052126, |
|
"learning_rate": 2.5528497253905883e-06, |
|
"loss": 0.5037, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 1.5934233035544574, |
|
"grad_norm": 0.31181836853952816, |
|
"learning_rate": 2.5335140712706637e-06, |
|
"loss": 0.4866, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 1.5949439270100743, |
|
"grad_norm": 0.3030338685118879, |
|
"learning_rate": 2.5142443668114184e-06, |
|
"loss": 0.4618, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 1.596464550465691, |
|
"grad_norm": 0.3270760691359473, |
|
"learning_rate": 2.495040727369997e-06, |
|
"loss": 0.5345, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 1.5979851739213078, |
|
"grad_norm": 0.31027653370341124, |
|
"learning_rate": 2.4759032679080646e-06, |
|
"loss": 0.4881, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 1.5995057973769247, |
|
"grad_norm": 0.3062497695212509, |
|
"learning_rate": 2.4568321029910926e-06, |
|
"loss": 0.4787, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 1.6010264208325413, |
|
"grad_norm": 0.32621381275453964, |
|
"learning_rate": 2.4378273467876895e-06, |
|
"loss": 0.525, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 1.6025470442881582, |
|
"grad_norm": 0.3318069849664119, |
|
"learning_rate": 2.4188891130689163e-06, |
|
"loss": 0.4909, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 1.604067667743775, |
|
"grad_norm": 0.32860192649298986, |
|
"learning_rate": 2.4000175152075832e-06, |
|
"loss": 0.5047, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 1.6055882911993917, |
|
"grad_norm": 0.32129364609495753, |
|
"learning_rate": 2.3812126661776048e-06, |
|
"loss": 0.49, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 1.6071089146550086, |
|
"grad_norm": 0.34479880933344564, |
|
"learning_rate": 2.3624746785533015e-06, |
|
"loss": 0.5202, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 1.6086295381106255, |
|
"grad_norm": 0.32050670560042915, |
|
"learning_rate": 2.3438036645087323e-06, |
|
"loss": 0.5045, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 1.6101501615662421, |
|
"grad_norm": 0.3236770692201634, |
|
"learning_rate": 2.325199735817016e-06, |
|
"loss": 0.5007, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 1.611670785021859, |
|
"grad_norm": 0.3182094882577364, |
|
"learning_rate": 2.306663003849674e-06, |
|
"loss": 0.4972, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 1.6131914084774759, |
|
"grad_norm": 0.31720365603712886, |
|
"learning_rate": 2.2881935795759588e-06, |
|
"loss": 0.4716, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 1.6147120319330925, |
|
"grad_norm": 0.32974314670568317, |
|
"learning_rate": 2.2697915735621873e-06, |
|
"loss": 0.484, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 1.6162326553887094, |
|
"grad_norm": 0.30577586884242175, |
|
"learning_rate": 2.2514570959710736e-06, |
|
"loss": 0.4735, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 1.6177532788443263, |
|
"grad_norm": 0.3197001472064691, |
|
"learning_rate": 2.2331902565610855e-06, |
|
"loss": 0.5218, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 1.619273902299943, |
|
"grad_norm": 0.3166847446707726, |
|
"learning_rate": 2.214991164685776e-06, |
|
"loss": 0.4903, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 1.6207945257555598, |
|
"grad_norm": 0.3195718257381146, |
|
"learning_rate": 2.1968599292931262e-06, |
|
"loss": 0.4971, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 1.6223151492111767, |
|
"grad_norm": 0.31426671771687575, |
|
"learning_rate": 2.178796658924904e-06, |
|
"loss": 0.5064, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 1.6238357726667934, |
|
"grad_norm": 0.3212716102723848, |
|
"learning_rate": 2.160801461715996e-06, |
|
"loss": 0.5079, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 1.6253563961224102, |
|
"grad_norm": 0.3191289490920015, |
|
"learning_rate": 2.1428744453937935e-06, |
|
"loss": 0.4761, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 1.626877019578027, |
|
"grad_norm": 0.3159849608628463, |
|
"learning_rate": 2.125015717277504e-06, |
|
"loss": 0.5033, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 1.6283976430336438, |
|
"grad_norm": 0.31696066878201823, |
|
"learning_rate": 2.1072253842775457e-06, |
|
"loss": 0.4788, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 1.6299182664892606, |
|
"grad_norm": 0.3258225759542187, |
|
"learning_rate": 2.089503552894881e-06, |
|
"loss": 0.5099, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 1.6314388899448775, |
|
"grad_norm": 0.3097721381921143, |
|
"learning_rate": 2.0718503292204057e-06, |
|
"loss": 0.4837, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 1.6329595134004942, |
|
"grad_norm": 0.32471560408203043, |
|
"learning_rate": 2.054265818934283e-06, |
|
"loss": 0.523, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 1.634480136856111, |
|
"grad_norm": 0.30086700392629573, |
|
"learning_rate": 2.036750127305341e-06, |
|
"loss": 0.4751, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 1.636000760311728, |
|
"grad_norm": 0.306302856546338, |
|
"learning_rate": 2.0193033591904125e-06, |
|
"loss": 0.4734, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 1.6375213837673446, |
|
"grad_norm": 0.30581159890991627, |
|
"learning_rate": 2.001925619033737e-06, |
|
"loss": 0.4881, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 1.6390420072229615, |
|
"grad_norm": 0.3148442376719998, |
|
"learning_rate": 1.9846170108663187e-06, |
|
"loss": 0.4862, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 1.6405626306785783, |
|
"grad_norm": 0.3187412303526785, |
|
"learning_rate": 1.9673776383052978e-06, |
|
"loss": 0.5088, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 1.642083254134195, |
|
"grad_norm": 0.3237394151376111, |
|
"learning_rate": 1.9502076045533484e-06, |
|
"loss": 0.5127, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 1.6436038775898119, |
|
"grad_norm": 0.30397789760314514, |
|
"learning_rate": 1.9331070123980477e-06, |
|
"loss": 0.4906, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 1.6451245010454287, |
|
"grad_norm": 0.31316588193164063, |
|
"learning_rate": 1.916075964211268e-06, |
|
"loss": 0.496, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 1.6466451245010454, |
|
"grad_norm": 0.33024840735397576, |
|
"learning_rate": 1.8991145619485479e-06, |
|
"loss": 0.5203, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 1.6481657479566623, |
|
"grad_norm": 0.3409025779210624, |
|
"learning_rate": 1.8822229071485085e-06, |
|
"loss": 0.5002, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 1.6496863714122791, |
|
"grad_norm": 0.3287972930467973, |
|
"learning_rate": 1.8654011009322228e-06, |
|
"loss": 0.4995, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 1.6512069948678958, |
|
"grad_norm": 0.32623077457196087, |
|
"learning_rate": 1.8486492440026244e-06, |
|
"loss": 0.487, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 1.6527276183235127, |
|
"grad_norm": 0.3106687896956913, |
|
"learning_rate": 1.83196743664389e-06, |
|
"loss": 0.4946, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 1.6542482417791295, |
|
"grad_norm": 0.32812705879714626, |
|
"learning_rate": 1.8153557787208556e-06, |
|
"loss": 0.4903, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 1.6557688652347462, |
|
"grad_norm": 0.31890170792842726, |
|
"learning_rate": 1.7988143696784124e-06, |
|
"loss": 0.4821, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 1.657289488690363, |
|
"grad_norm": 0.31284347487731023, |
|
"learning_rate": 1.7823433085409e-06, |
|
"loss": 0.4931, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 1.65881011214598, |
|
"grad_norm": 0.30982765014249525, |
|
"learning_rate": 1.7659426939115348e-06, |
|
"loss": 0.4683, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 1.6603307356015966, |
|
"grad_norm": 0.3164620121768728, |
|
"learning_rate": 1.7496126239717968e-06, |
|
"loss": 0.5007, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 1.6618513590572135, |
|
"grad_norm": 0.3479489881630459, |
|
"learning_rate": 1.7333531964808702e-06, |
|
"loss": 0.5055, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 1.6633719825128304, |
|
"grad_norm": 0.3208880246840334, |
|
"learning_rate": 1.7171645087750255e-06, |
|
"loss": 0.4876, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 1.664892605968447, |
|
"grad_norm": 0.32500628409148635, |
|
"learning_rate": 1.7010466577670633e-06, |
|
"loss": 0.5185, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 1.666413229424064, |
|
"grad_norm": 0.3151870780999753, |
|
"learning_rate": 1.684999739945713e-06, |
|
"loss": 0.5224, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 1.6679338528796808, |
|
"grad_norm": 0.30364068565385566, |
|
"learning_rate": 1.669023851375082e-06, |
|
"loss": 0.4843, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 1.6694544763352974, |
|
"grad_norm": 0.32280756678101397, |
|
"learning_rate": 1.6531190876940478e-06, |
|
"loss": 0.5129, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 1.6709750997909143, |
|
"grad_norm": 0.3058304028983153, |
|
"learning_rate": 1.6372855441157166e-06, |
|
"loss": 0.4708, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 1.6724957232465312, |
|
"grad_norm": 0.34052506206539823, |
|
"learning_rate": 1.6215233154268233e-06, |
|
"loss": 0.5272, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 1.6740163467021478, |
|
"grad_norm": 0.32203401759522543, |
|
"learning_rate": 1.6058324959871942e-06, |
|
"loss": 0.4975, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 1.6755369701577647, |
|
"grad_norm": 0.3192131968898841, |
|
"learning_rate": 1.5902131797291616e-06, |
|
"loss": 0.4892, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 1.6770575936133816, |
|
"grad_norm": 0.30564977758041467, |
|
"learning_rate": 1.5746654601570045e-06, |
|
"loss": 0.4891, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 1.6785782170689982, |
|
"grad_norm": 0.3174124356131291, |
|
"learning_rate": 1.559189430346397e-06, |
|
"loss": 0.4948, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 1.6800988405246151, |
|
"grad_norm": 0.3094722335828649, |
|
"learning_rate": 1.5437851829438422e-06, |
|
"loss": 0.488, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 1.681619463980232, |
|
"grad_norm": 0.3131260119981047, |
|
"learning_rate": 1.5284528101661273e-06, |
|
"loss": 0.4664, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 1.6831400874358486, |
|
"grad_norm": 0.31529735958006433, |
|
"learning_rate": 1.5131924037997535e-06, |
|
"loss": 0.4951, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 1.6846607108914655, |
|
"grad_norm": 0.30173098838134094, |
|
"learning_rate": 1.4980040552004121e-06, |
|
"loss": 0.4903, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 1.6861813343470824, |
|
"grad_norm": 0.30988057448857725, |
|
"learning_rate": 1.4828878552924145e-06, |
|
"loss": 0.49, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 1.687701957802699, |
|
"grad_norm": 0.3077026541913675, |
|
"learning_rate": 1.4678438945681649e-06, |
|
"loss": 0.4849, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 1.689222581258316, |
|
"grad_norm": 0.31664882915707754, |
|
"learning_rate": 1.4528722630876041e-06, |
|
"loss": 0.4972, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 1.6907432047139328, |
|
"grad_norm": 0.31667781766589775, |
|
"learning_rate": 1.4379730504776807e-06, |
|
"loss": 0.4907, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 1.6922638281695495, |
|
"grad_norm": 0.30777745107859716, |
|
"learning_rate": 1.4231463459318138e-06, |
|
"loss": 0.4791, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 1.6937844516251663, |
|
"grad_norm": 0.3802082342644203, |
|
"learning_rate": 1.4083922382093484e-06, |
|
"loss": 0.4925, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 1.6953050750807832, |
|
"grad_norm": 0.319724112059326, |
|
"learning_rate": 1.3937108156350437e-06, |
|
"loss": 0.5069, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 1.6968256985363999, |
|
"grad_norm": 0.32744646943463035, |
|
"learning_rate": 1.379102166098519e-06, |
|
"loss": 0.4892, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 1.6983463219920167, |
|
"grad_norm": 0.33236823986934483, |
|
"learning_rate": 1.3645663770537586e-06, |
|
"loss": 0.5106, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 1.6998669454476336, |
|
"grad_norm": 0.2988679855910158, |
|
"learning_rate": 1.350103535518552e-06, |
|
"loss": 0.4792, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 1.7013875689032503, |
|
"grad_norm": 0.31594423002452454, |
|
"learning_rate": 1.3357137280740085e-06, |
|
"loss": 0.5113, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 1.7029081923588671, |
|
"grad_norm": 0.30589751459809705, |
|
"learning_rate": 1.3213970408640052e-06, |
|
"loss": 0.4611, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 1.704428815814484, |
|
"grad_norm": 0.32052631965810274, |
|
"learning_rate": 1.307153559594711e-06, |
|
"loss": 0.5103, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 1.7059494392701007, |
|
"grad_norm": 0.3293578066194276, |
|
"learning_rate": 1.2929833695340287e-06, |
|
"loss": 0.4955, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 1.7074700627257176, |
|
"grad_norm": 0.30301511320734814, |
|
"learning_rate": 1.2788865555111225e-06, |
|
"loss": 0.457, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 1.7089906861813344, |
|
"grad_norm": 0.31166180214752437, |
|
"learning_rate": 1.264863201915883e-06, |
|
"loss": 0.4976, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 1.710511309636951, |
|
"grad_norm": 0.3163518521392497, |
|
"learning_rate": 1.2509133926984408e-06, |
|
"loss": 0.4966, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 1.712031933092568, |
|
"grad_norm": 0.3540193363362305, |
|
"learning_rate": 1.2370372113686581e-06, |
|
"loss": 0.5157, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 1.7135525565481848, |
|
"grad_norm": 0.3054649208614663, |
|
"learning_rate": 1.2232347409956184e-06, |
|
"loss": 0.4674, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 1.7150731800038015, |
|
"grad_norm": 0.31124066442535125, |
|
"learning_rate": 1.2095060642071457e-06, |
|
"loss": 0.4922, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 1.7165938034594184, |
|
"grad_norm": 0.31790128208883095, |
|
"learning_rate": 1.1958512631893e-06, |
|
"loss": 0.4973, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 1.7181144269150352, |
|
"grad_norm": 0.3165756985087125, |
|
"learning_rate": 1.1822704196858915e-06, |
|
"loss": 0.503, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 1.719635050370652, |
|
"grad_norm": 0.32017884174072114, |
|
"learning_rate": 1.1687636149979783e-06, |
|
"loss": 0.4865, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 1.7211556738262688, |
|
"grad_norm": 0.31138001106614027, |
|
"learning_rate": 1.155330929983396e-06, |
|
"loss": 0.5083, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 1.7226762972818856, |
|
"grad_norm": 0.3178786337486812, |
|
"learning_rate": 1.1419724450562654e-06, |
|
"loss": 0.4909, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 1.7241969207375023, |
|
"grad_norm": 0.30532938139355004, |
|
"learning_rate": 1.1286882401865111e-06, |
|
"loss": 0.472, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 1.7257175441931192, |
|
"grad_norm": 0.3296827200280989, |
|
"learning_rate": 1.1154783948993806e-06, |
|
"loss": 0.5244, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 1.727238167648736, |
|
"grad_norm": 0.31508066429540393, |
|
"learning_rate": 1.1023429882749769e-06, |
|
"loss": 0.5102, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 1.7287587911043527, |
|
"grad_norm": 0.3196487275495865, |
|
"learning_rate": 1.0892820989477799e-06, |
|
"loss": 0.4768, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 1.7302794145599696, |
|
"grad_norm": 0.30207628232666384, |
|
"learning_rate": 1.0762958051061657e-06, |
|
"loss": 0.4691, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 1.7318000380155865, |
|
"grad_norm": 0.3054462008267432, |
|
"learning_rate": 1.0633841844919598e-06, |
|
"loss": 0.4862, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 1.7333206614712031, |
|
"grad_norm": 0.3078006467585517, |
|
"learning_rate": 1.0505473143999502e-06, |
|
"loss": 0.4875, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 1.73484128492682, |
|
"grad_norm": 0.3053091170945442, |
|
"learning_rate": 1.0377852716774458e-06, |
|
"loss": 0.5073, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 1.7363619083824369, |
|
"grad_norm": 0.3076690595978604, |
|
"learning_rate": 1.0250981327237949e-06, |
|
"loss": 0.4949, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 1.7378825318380535, |
|
"grad_norm": 0.32895341988079463, |
|
"learning_rate": 1.012485973489944e-06, |
|
"loss": 0.4841, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 1.7394031552936704, |
|
"grad_norm": 0.3253480760618867, |
|
"learning_rate": 9.99948869477969e-07, |
|
"loss": 0.5004, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 1.7409237787492873, |
|
"grad_norm": 0.3259517227944411, |
|
"learning_rate": 9.874868957406476e-07, |
|
"loss": 0.5077, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 1.742444402204904, |
|
"grad_norm": 0.30986669451274373, |
|
"learning_rate": 9.751001268809732e-07, |
|
"loss": 0.4904, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 1.7439650256605208, |
|
"grad_norm": 0.3174807363161579, |
|
"learning_rate": 9.627886370517466e-07, |
|
"loss": 0.5058, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 1.7454856491161377, |
|
"grad_norm": 0.3060614656643119, |
|
"learning_rate": 9.505524999551009e-07, |
|
"loss": 0.4807, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 1.7470062725717543, |
|
"grad_norm": 0.3104685490140639, |
|
"learning_rate": 9.383917888420816e-07, |
|
"loss": 0.4892, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 1.7485268960273712, |
|
"grad_norm": 0.31773900649056236, |
|
"learning_rate": 9.263065765122013e-07, |
|
"loss": 0.4974, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 1.750047519482988, |
|
"grad_norm": 0.3218554742602446, |
|
"learning_rate": 9.142969353129946e-07, |
|
"loss": 0.4846, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 1.7515681429386047, |
|
"grad_norm": 0.3060712871920658, |
|
"learning_rate": 9.023629371396e-07, |
|
"loss": 0.4787, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 1.7530887663942216, |
|
"grad_norm": 0.3057039287255903, |
|
"learning_rate": 8.905046534343228e-07, |
|
"loss": 0.4888, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 1.7546093898498385, |
|
"grad_norm": 0.33483369302675053, |
|
"learning_rate": 8.787221551862048e-07, |
|
"loss": 0.5001, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 1.7561300133054552, |
|
"grad_norm": 0.3104906521034538, |
|
"learning_rate": 8.670155129306e-07, |
|
"loss": 0.496, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 1.757650636761072, |
|
"grad_norm": 0.3073575609788421, |
|
"learning_rate": 8.55384796748759e-07, |
|
"loss": 0.4905, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 1.759171260216689, |
|
"grad_norm": 0.3150067268140591, |
|
"learning_rate": 8.438300762674001e-07, |
|
"loss": 0.4872, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 1.7606918836723056, |
|
"grad_norm": 0.30605216319902684, |
|
"learning_rate": 8.323514206583039e-07, |
|
"loss": 0.4935, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 1.7622125071279224, |
|
"grad_norm": 0.3177954531343794, |
|
"learning_rate": 8.20948898637881e-07, |
|
"loss": 0.4916, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 1.7637331305835393, |
|
"grad_norm": 0.31828297184631565, |
|
"learning_rate": 8.09622578466782e-07, |
|
"loss": 0.4982, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 1.765253754039156, |
|
"grad_norm": 0.3112173769721995, |
|
"learning_rate": 7.983725279494758e-07, |
|
"loss": 0.4857, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 1.7667743774947728, |
|
"grad_norm": 0.31585454715709904, |
|
"learning_rate": 7.871988144338443e-07, |
|
"loss": 0.4784, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 1.7682950009503897, |
|
"grad_norm": 0.32127393064790427, |
|
"learning_rate": 7.761015048107861e-07, |
|
"loss": 0.5075, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 1.7698156244060064, |
|
"grad_norm": 0.30794451953754653, |
|
"learning_rate": 7.650806655138031e-07, |
|
"loss": 0.5038, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 1.7713362478616232, |
|
"grad_norm": 0.31181423286855536, |
|
"learning_rate": 7.541363625186259e-07, |
|
"loss": 0.5005, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 1.7728568713172401, |
|
"grad_norm": 0.32245652816729514, |
|
"learning_rate": 7.432686613427891e-07, |
|
"loss": 0.5095, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 1.7743774947728568, |
|
"grad_norm": 0.3242738181594943, |
|
"learning_rate": 7.324776270452638e-07, |
|
"loss": 0.52, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 1.7758981182284737, |
|
"grad_norm": 0.3110243759601029, |
|
"learning_rate": 7.217633242260516e-07, |
|
"loss": 0.464, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 1.7774187416840905, |
|
"grad_norm": 0.3319624462691413, |
|
"learning_rate": 7.111258170258142e-07, |
|
"loss": 0.5098, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 1.7789393651397072, |
|
"grad_norm": 0.3068597543181992, |
|
"learning_rate": 7.005651691254696e-07, |
|
"loss": 0.4815, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 1.780459988595324, |
|
"grad_norm": 0.3157827076249044, |
|
"learning_rate": 6.900814437458295e-07, |
|
"loss": 0.5024, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 1.781980612050941, |
|
"grad_norm": 0.30638021830468265, |
|
"learning_rate": 6.79674703647207e-07, |
|
"loss": 0.4798, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 1.7835012355065576, |
|
"grad_norm": 0.33125316725002335, |
|
"learning_rate": 6.693450111290479e-07, |
|
"loss": 0.5069, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 1.7850218589621745, |
|
"grad_norm": 0.325386774890156, |
|
"learning_rate": 6.59092428029562e-07, |
|
"loss": 0.5246, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 1.7865424824177913, |
|
"grad_norm": 0.2985022483714369, |
|
"learning_rate": 6.489170157253382e-07, |
|
"loss": 0.4852, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 1.788063105873408, |
|
"grad_norm": 0.3102718525215574, |
|
"learning_rate": 6.388188351309907e-07, |
|
"loss": 0.5006, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 1.7895837293290249, |
|
"grad_norm": 0.31889832653696915, |
|
"learning_rate": 6.287979466987909e-07, |
|
"loss": 0.49, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 1.7911043527846418, |
|
"grad_norm": 0.31148436424823517, |
|
"learning_rate": 6.188544104183059e-07, |
|
"loss": 0.4733, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 1.7926249762402584, |
|
"grad_norm": 0.31317825721031184, |
|
"learning_rate": 6.089882858160299e-07, |
|
"loss": 0.4879, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 1.7941455996958753, |
|
"grad_norm": 0.31818518365850107, |
|
"learning_rate": 5.991996319550448e-07, |
|
"loss": 0.5198, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 1.7956662231514922, |
|
"grad_norm": 0.3120540099643981, |
|
"learning_rate": 5.894885074346511e-07, |
|
"loss": 0.4969, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 1.7971868466071088, |
|
"grad_norm": 0.31290703317847507, |
|
"learning_rate": 5.79854970390031e-07, |
|
"loss": 0.4902, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 1.7987074700627257, |
|
"grad_norm": 0.3128686096799172, |
|
"learning_rate": 5.702990784918843e-07, |
|
"loss": 0.4837, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 1.8002280935183426, |
|
"grad_norm": 0.3152764097292228, |
|
"learning_rate": 5.608208889460964e-07, |
|
"loss": 0.5082, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 1.8017487169739592, |
|
"grad_norm": 0.3238711254005141, |
|
"learning_rate": 5.514204584933931e-07, |
|
"loss": 0.4932, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 1.803269340429576, |
|
"grad_norm": 0.32742976059180334, |
|
"learning_rate": 5.420978434089952e-07, |
|
"loss": 0.5006, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 1.804789963885193, |
|
"grad_norm": 0.3120702271210168, |
|
"learning_rate": 5.328530995022905e-07, |
|
"loss": 0.5077, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 1.8063105873408096, |
|
"grad_norm": 0.3176329082136289, |
|
"learning_rate": 5.236862821164854e-07, |
|
"loss": 0.5182, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 1.8078312107964265, |
|
"grad_norm": 0.3014290469656112, |
|
"learning_rate": 5.145974461282964e-07, |
|
"loss": 0.484, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 1.8093518342520434, |
|
"grad_norm": 0.3214449630653542, |
|
"learning_rate": 5.055866459475958e-07, |
|
"loss": 0.4808, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 1.81087245770766, |
|
"grad_norm": 0.32637831086774916, |
|
"learning_rate": 4.966539355171074e-07, |
|
"loss": 0.5317, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 1.812393081163277, |
|
"grad_norm": 0.3111952143588352, |
|
"learning_rate": 4.877993683120647e-07, |
|
"loss": 0.48, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 1.8139137046188938, |
|
"grad_norm": 0.3159439961230018, |
|
"learning_rate": 4.790229973399132e-07, |
|
"loss": 0.5355, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 1.8154343280745104, |
|
"grad_norm": 0.32606917992981577, |
|
"learning_rate": 4.7032487513997023e-07, |
|
"loss": 0.4854, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 1.8169549515301273, |
|
"grad_norm": 0.3099232220873522, |
|
"learning_rate": 4.617050537831259e-07, |
|
"loss": 0.4744, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 1.8184755749857442, |
|
"grad_norm": 0.3189690329444183, |
|
"learning_rate": 4.5316358487152045e-07, |
|
"loss": 0.5154, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 1.8199961984413608, |
|
"grad_norm": 0.3245800528868221, |
|
"learning_rate": 4.447005195382475e-07, |
|
"loss": 0.486, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 1.8215168218969777, |
|
"grad_norm": 0.3265685397280179, |
|
"learning_rate": 4.3631590844703997e-07, |
|
"loss": 0.5012, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 1.8230374453525946, |
|
"grad_norm": 0.3158110920991978, |
|
"learning_rate": 4.2800980179196426e-07, |
|
"loss": 0.4975, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 1.8245580688082113, |
|
"grad_norm": 0.30590044892564083, |
|
"learning_rate": 4.197822492971273e-07, |
|
"loss": 0.4761, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 1.8260786922638281, |
|
"grad_norm": 0.3296107652675287, |
|
"learning_rate": 4.1163330021637403e-07, |
|
"loss": 0.5024, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 1.827599315719445, |
|
"grad_norm": 0.31081520867244933, |
|
"learning_rate": 4.0356300333299696e-07, |
|
"loss": 0.4861, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 1.8291199391750617, |
|
"grad_norm": 0.31577863324052, |
|
"learning_rate": 3.9557140695943544e-07, |
|
"loss": 0.5023, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 1.8306405626306785, |
|
"grad_norm": 0.30718130485703893, |
|
"learning_rate": 3.8765855893699576e-07, |
|
"loss": 0.4847, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 1.8321611860862954, |
|
"grad_norm": 0.3044770096969053, |
|
"learning_rate": 3.798245066355609e-07, |
|
"loss": 0.4774, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 1.833681809541912, |
|
"grad_norm": 0.3233110025620584, |
|
"learning_rate": 3.7206929695330764e-07, |
|
"loss": 0.516, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 1.835202432997529, |
|
"grad_norm": 0.3184528078482147, |
|
"learning_rate": 3.643929763164223e-07, |
|
"loss": 0.5002, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 1.8367230564531458, |
|
"grad_norm": 0.3162950503495864, |
|
"learning_rate": 3.567955906788252e-07, |
|
"loss": 0.5208, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 1.8382436799087625, |
|
"grad_norm": 0.3032249377512437, |
|
"learning_rate": 3.492771855219034e-07, |
|
"loss": 0.4749, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 1.8397643033643793, |
|
"grad_norm": 0.32645828201108573, |
|
"learning_rate": 3.418378058542228e-07, |
|
"loss": 0.5212, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 1.8412849268199962, |
|
"grad_norm": 0.31381954625492664, |
|
"learning_rate": 3.344774962112713e-07, |
|
"loss": 0.4697, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 1.8428055502756129, |
|
"grad_norm": 0.3163864792118917, |
|
"learning_rate": 3.2719630065518326e-07, |
|
"loss": 0.5161, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 1.8443261737312298, |
|
"grad_norm": 0.29974584672007193, |
|
"learning_rate": 3.199942627744915e-07, |
|
"loss": 0.4769, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 1.8458467971868466, |
|
"grad_norm": 0.32581659125357904, |
|
"learning_rate": 3.1287142568384115e-07, |
|
"loss": 0.485, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 1.8473674206424633, |
|
"grad_norm": 0.32074295034449724, |
|
"learning_rate": 3.058278320237554e-07, |
|
"loss": 0.4896, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 1.8488880440980802, |
|
"grad_norm": 0.30909473328904485, |
|
"learning_rate": 2.9886352396036584e-07, |
|
"loss": 0.4768, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 1.850408667553697, |
|
"grad_norm": 0.32336521053629386, |
|
"learning_rate": 2.919785431851674e-07, |
|
"loss": 0.4999, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 1.8519292910093137, |
|
"grad_norm": 0.305906498433603, |
|
"learning_rate": 2.851729309147642e-07, |
|
"loss": 0.4998, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 1.8534499144649306, |
|
"grad_norm": 0.30991453199000357, |
|
"learning_rate": 2.7844672789062724e-07, |
|
"loss": 0.5011, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 1.8549705379205474, |
|
"grad_norm": 0.314943371794473, |
|
"learning_rate": 2.717999743788416e-07, |
|
"loss": 0.5096, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 1.856491161376164, |
|
"grad_norm": 0.3150834801717786, |
|
"learning_rate": 2.652327101698796e-07, |
|
"loss": 0.504, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 1.858011784831781, |
|
"grad_norm": 0.32620085243083274, |
|
"learning_rate": 2.587449745783506e-07, |
|
"loss": 0.5239, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 1.8595324082873979, |
|
"grad_norm": 0.31548675603667015, |
|
"learning_rate": 2.5233680644276673e-07, |
|
"loss": 0.4889, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 1.8610530317430145, |
|
"grad_norm": 0.32956663567551076, |
|
"learning_rate": 2.4600824412532154e-07, |
|
"loss": 0.5108, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 1.8625736551986314, |
|
"grad_norm": 0.29811803143834026, |
|
"learning_rate": 2.3975932551164486e-07, |
|
"loss": 0.4616, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 1.8640942786542483, |
|
"grad_norm": 0.3118381439138631, |
|
"learning_rate": 2.335900880105892e-07, |
|
"loss": 0.5051, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 1.865614902109865, |
|
"grad_norm": 0.30830068205472616, |
|
"learning_rate": 2.27500568553992e-07, |
|
"loss": 0.5012, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 1.867135525565482, |
|
"grad_norm": 0.304947912980323, |
|
"learning_rate": 2.2149080359646954e-07, |
|
"loss": 0.4583, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 1.8686561490210987, |
|
"grad_norm": 0.3114996653388056, |
|
"learning_rate": 2.1556082911518989e-07, |
|
"loss": 0.4898, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 1.8701767724767153, |
|
"grad_norm": 0.30177416022258724, |
|
"learning_rate": 2.0971068060965908e-07, |
|
"loss": 0.4965, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 1.8716973959323324, |
|
"grad_norm": 0.3309823897680051, |
|
"learning_rate": 2.0394039310150775e-07, |
|
"loss": 0.5185, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 1.873218019387949, |
|
"grad_norm": 0.32115239410578134, |
|
"learning_rate": 1.9825000113428182e-07, |
|
"loss": 0.5228, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 1.8747386428435657, |
|
"grad_norm": 0.3075640176251186, |
|
"learning_rate": 1.9263953877323952e-07, |
|
"loss": 0.4882, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 1.8762592662991828, |
|
"grad_norm": 0.31724674747730425, |
|
"learning_rate": 1.8710903960514225e-07, |
|
"loss": 0.5207, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 1.8777798897547995, |
|
"grad_norm": 0.3164911565117331, |
|
"learning_rate": 1.8165853673805754e-07, |
|
"loss": 0.5032, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 1.8793005132104161, |
|
"grad_norm": 0.30881540155384896, |
|
"learning_rate": 1.7628806280115286e-07, |
|
"loss": 0.5033, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 1.8808211366660332, |
|
"grad_norm": 0.31442427065508066, |
|
"learning_rate": 1.7099764994451522e-07, |
|
"loss": 0.5076, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 1.8823417601216499, |
|
"grad_norm": 0.31278434986007647, |
|
"learning_rate": 1.65787329838945e-07, |
|
"loss": 0.4989, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 1.8838623835772665, |
|
"grad_norm": 0.3025781565220422, |
|
"learning_rate": 1.60657133675771e-07, |
|
"loss": 0.5014, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 1.8853830070328836, |
|
"grad_norm": 0.30236403268840834, |
|
"learning_rate": 1.556070921666637e-07, |
|
"loss": 0.4653, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 1.8869036304885003, |
|
"grad_norm": 0.32144829401638425, |
|
"learning_rate": 1.5063723554345349e-07, |
|
"loss": 0.5022, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 1.888424253944117, |
|
"grad_norm": 0.3081811486598015, |
|
"learning_rate": 1.4574759355794846e-07, |
|
"loss": 0.468, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 1.889944877399734, |
|
"grad_norm": 0.3110080086508686, |
|
"learning_rate": 1.4093819548175095e-07, |
|
"loss": 0.4821, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 1.8914655008553507, |
|
"grad_norm": 0.30999522834801807, |
|
"learning_rate": 1.3620907010609206e-07, |
|
"loss": 0.475, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 1.8929861243109674, |
|
"grad_norm": 0.3244026681645176, |
|
"learning_rate": 1.3156024574165258e-07, |
|
"loss": 0.4967, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 1.8945067477665845, |
|
"grad_norm": 0.30596722710536933, |
|
"learning_rate": 1.2699175021839463e-07, |
|
"loss": 0.4794, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 1.896027371222201, |
|
"grad_norm": 0.33281008993735, |
|
"learning_rate": 1.2250361088539598e-07, |
|
"loss": 0.5202, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 1.8975479946778178, |
|
"grad_norm": 0.3092714395241654, |
|
"learning_rate": 1.1809585461068329e-07, |
|
"loss": 0.4897, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 1.8990686181334349, |
|
"grad_norm": 0.30328721741220116, |
|
"learning_rate": 1.1376850778108004e-07, |
|
"loss": 0.4734, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 1.9005892415890515, |
|
"grad_norm": 0.31671920046718893, |
|
"learning_rate": 1.0952159630203802e-07, |
|
"loss": 0.495, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 1.9021098650446682, |
|
"grad_norm": 0.301311429340991, |
|
"learning_rate": 1.0535514559748699e-07, |
|
"loss": 0.4879, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 1.9036304885002853, |
|
"grad_norm": 0.31829608262735665, |
|
"learning_rate": 1.0126918060968266e-07, |
|
"loss": 0.5054, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 1.905151111955902, |
|
"grad_norm": 0.3087155473121018, |
|
"learning_rate": 9.726372579905783e-08, |
|
"loss": 0.4972, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 1.9066717354115186, |
|
"grad_norm": 0.3080173664665244, |
|
"learning_rate": 9.33388051440734e-08, |
|
"loss": 0.5339, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 1.9081923588671357, |
|
"grad_norm": 0.3282773688288346, |
|
"learning_rate": 8.949444214107697e-08, |
|
"loss": 0.4775, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 1.9097129823227523, |
|
"grad_norm": 0.33035543404288714, |
|
"learning_rate": 8.573065980416298e-08, |
|
"loss": 0.5071, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 1.911233605778369, |
|
"grad_norm": 0.3215706061649872, |
|
"learning_rate": 8.204748066503122e-08, |
|
"loss": 0.5094, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 1.912754229233986, |
|
"grad_norm": 0.2988811222168615, |
|
"learning_rate": 7.844492677285754e-08, |
|
"loss": 0.4984, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 1.9142748526896027, |
|
"grad_norm": 0.31431504366013957, |
|
"learning_rate": 7.492301969415391e-08, |
|
"loss": 0.5072, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 1.9157954761452194, |
|
"grad_norm": 0.3079234038112652, |
|
"learning_rate": 7.148178051264804e-08, |
|
"loss": 0.4883, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 1.9173160996008365, |
|
"grad_norm": 0.30748269412345614, |
|
"learning_rate": 6.812122982915408e-08, |
|
"loss": 0.5152, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 1.9188367230564531, |
|
"grad_norm": 0.3111607368191631, |
|
"learning_rate": 6.484138776144467e-08, |
|
"loss": 0.4992, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 1.9203573465120698, |
|
"grad_norm": 0.3130854739102124, |
|
"learning_rate": 6.164227394413516e-08, |
|
"loss": 0.4947, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 1.921877969967687, |
|
"grad_norm": 0.29735483922959655, |
|
"learning_rate": 5.852390752856624e-08, |
|
"loss": 0.4717, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 1.9233985934233035, |
|
"grad_norm": 0.30465543049913124, |
|
"learning_rate": 5.548630718268813e-08, |
|
"loss": 0.4977, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 1.9249192168789202, |
|
"grad_norm": 0.3133152057012344, |
|
"learning_rate": 5.252949109094924e-08, |
|
"loss": 0.4839, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 1.9264398403345373, |
|
"grad_norm": 0.32301869827083196, |
|
"learning_rate": 4.965347695418483e-08, |
|
"loss": 0.4755, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 1.927960463790154, |
|
"grad_norm": 0.30143456487467507, |
|
"learning_rate": 4.685828198951479e-08, |
|
"loss": 0.4901, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 1.9294810872457706, |
|
"grad_norm": 0.3198896489356579, |
|
"learning_rate": 4.4143922930239736e-08, |
|
"loss": 0.5014, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 1.9310017107013877, |
|
"grad_norm": 0.29774733042477136, |
|
"learning_rate": 4.1510416025737274e-08, |
|
"loss": 0.4598, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 1.9325223341570044, |
|
"grad_norm": 0.3117201437762231, |
|
"learning_rate": 3.895777704137174e-08, |
|
"loss": 0.4718, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 1.934042957612621, |
|
"grad_norm": 0.30970105466061604, |
|
"learning_rate": 3.648602125839037e-08, |
|
"loss": 0.4925, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 1.9355635810682381, |
|
"grad_norm": 0.3032281003832038, |
|
"learning_rate": 3.409516347384058e-08, |
|
"loss": 0.4876, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 1.9370842045238548, |
|
"grad_norm": 0.29947248473886334, |
|
"learning_rate": 3.178521800047818e-08, |
|
"loss": 0.4652, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 1.9386048279794714, |
|
"grad_norm": 0.3101511675568205, |
|
"learning_rate": 2.9556198666678676e-08, |
|
"loss": 0.487, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 1.9401254514350885, |
|
"grad_norm": 0.3031822788379352, |
|
"learning_rate": 2.7408118816360456e-08, |
|
"loss": 0.4906, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 1.9416460748907052, |
|
"grad_norm": 0.3022071103062291, |
|
"learning_rate": 2.5340991308897615e-08, |
|
"loss": 0.4749, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 1.9431666983463218, |
|
"grad_norm": 0.3376492826020699, |
|
"learning_rate": 2.3354828519049215e-08, |
|
"loss": 0.5193, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 1.944687321801939, |
|
"grad_norm": 0.3049763317749277, |
|
"learning_rate": 2.144964233688558e-08, |
|
"loss": 0.492, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 1.9462079452575556, |
|
"grad_norm": 0.31915116281549216, |
|
"learning_rate": 1.9625444167708558e-08, |
|
"loss": 0.5077, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 1.9477285687131722, |
|
"grad_norm": 0.29789807615910413, |
|
"learning_rate": 1.7882244931992866e-08, |
|
"loss": 0.4802, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 1.9492491921687893, |
|
"grad_norm": 0.29936729470728324, |
|
"learning_rate": 1.622005506531838e-08, |
|
"loss": 0.4683, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 1.950769815624406, |
|
"grad_norm": 0.30557270295233097, |
|
"learning_rate": 1.4638884518302449e-08, |
|
"loss": 0.493, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 1.9522904390800226, |
|
"grad_norm": 0.3152644678698385, |
|
"learning_rate": 1.313874275654573e-08, |
|
"loss": 0.4872, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 1.9538110625356397, |
|
"grad_norm": 0.32155409817146613, |
|
"learning_rate": 1.1719638760576535e-08, |
|
"loss": 0.4898, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 1.9553316859912564, |
|
"grad_norm": 0.322441859921911, |
|
"learning_rate": 1.0381581025792154e-08, |
|
"loss": 0.5123, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 1.9568523094468733, |
|
"grad_norm": 0.3075162182309576, |
|
"learning_rate": 9.124577562409214e-09, |
|
"loss": 0.5075, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 1.9583729329024901, |
|
"grad_norm": 0.31214568646689916, |
|
"learning_rate": 7.948635895418555e-09, |
|
"loss": 0.5012, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 1.9598935563581068, |
|
"grad_norm": 0.3160685661400899, |
|
"learning_rate": 6.853763064538582e-09, |
|
"loss": 0.5009, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 1.9614141798137237, |
|
"grad_norm": 0.31359686192724745, |
|
"learning_rate": 5.8399656241716505e-09, |
|
"loss": 0.4962, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 1.9629348032693406, |
|
"grad_norm": 0.31993025547008763, |
|
"learning_rate": 4.907249643367956e-09, |
|
"loss": 0.4837, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 1.9644554267249572, |
|
"grad_norm": 0.30154655369767214, |
|
"learning_rate": 4.055620705787927e-09, |
|
"loss": 0.4907, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 1.965976050180574, |
|
"grad_norm": 0.3082339737443702, |
|
"learning_rate": 3.2850839096646157e-09, |
|
"loss": 0.49, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 1.967496673636191, |
|
"grad_norm": 0.31262848669610493, |
|
"learning_rate": 2.5956438677796293e-09, |
|
"loss": 0.5033, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 1.9690172970918076, |
|
"grad_norm": 0.3078427537941908, |
|
"learning_rate": 1.9873047074345453e-09, |
|
"loss": 0.4793, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 1.9705379205474245, |
|
"grad_norm": 0.3074339790589038, |
|
"learning_rate": 1.4600700704193227e-09, |
|
"loss": 0.4843, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 1.9720585440030414, |
|
"grad_norm": 0.31784494867260554, |
|
"learning_rate": 1.0139431130017695e-09, |
|
"loss": 0.4984, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 1.973579167458658, |
|
"grad_norm": 0.32037784960225285, |
|
"learning_rate": 6.489265058974569e-10, |
|
"loss": 0.5143, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 1.975099790914275, |
|
"grad_norm": 0.3115474055785006, |
|
"learning_rate": 3.6502243426069227e-10, |
|
"loss": 0.4958, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 1.9766204143698918, |
|
"grad_norm": 0.30740455562554947, |
|
"learning_rate": 1.622325976709804e-10, |
|
"loss": 0.4825, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 1.9781410378255084, |
|
"grad_norm": 0.3192120605150713, |
|
"learning_rate": 4.055821011798022e-11, |
|
"loss": 0.5028, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 1.9796616612811253, |
|
"grad_norm": 0.3128442843272166, |
|
"learning_rate": 0.0, |
|
"loss": 0.4886, |
|
"step": 1314 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1314, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 329, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 4190576087400448.0, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|