{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 234,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.008547008547008548,
      "grad_norm": 0.6883277297019958,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 1.0565,
      "step": 1
    },
    {
      "epoch": 0.017094017094017096,
      "grad_norm": 0.5167361497879028,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.8421,
      "step": 2
    },
    {
      "epoch": 0.02564102564102564,
      "grad_norm": 0.8402963876724243,
      "learning_rate": 3e-06,
      "loss": 1.1245,
      "step": 3
    },
    {
      "epoch": 0.03418803418803419,
      "grad_norm": 0.930655300617218,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.432,
      "step": 4
    },
    {
      "epoch": 0.042735042735042736,
      "grad_norm": 0.5283745527267456,
      "learning_rate": 5e-06,
      "loss": 0.941,
      "step": 5
    },
    {
      "epoch": 0.05128205128205128,
      "grad_norm": 1.1349669694900513,
      "learning_rate": 6e-06,
      "loss": 1.3429,
      "step": 6
    },
    {
      "epoch": 0.05982905982905983,
      "grad_norm": 1.173917293548584,
      "learning_rate": 7e-06,
      "loss": 0.9637,
      "step": 7
    },
    {
      "epoch": 0.06837606837606838,
      "grad_norm": 0.6507728099822998,
      "learning_rate": 8.000000000000001e-06,
      "loss": 1.0163,
      "step": 8
    },
    {
      "epoch": 0.07692307692307693,
      "grad_norm": 0.6534399390220642,
      "learning_rate": 9e-06,
      "loss": 0.9108,
      "step": 9
    },
    {
      "epoch": 0.08547008547008547,
      "grad_norm": 0.8090460300445557,
      "learning_rate": 1e-05,
      "loss": 1.1224,
      "step": 10
    },
    {
      "epoch": 0.09401709401709402,
      "grad_norm": 1.183127760887146,
      "learning_rate": 9.999882372979835e-06,
      "loss": 1.1556,
      "step": 11
    },
    {
      "epoch": 0.10256410256410256,
      "grad_norm": 1.1587895154953003,
      "learning_rate": 9.999529497453782e-06,
      "loss": 0.9223,
      "step": 12
    },
    {
      "epoch": 0.1111111111111111,
      "grad_norm": 0.7878014445304871,
      "learning_rate": 9.998941390024924e-06,
      "loss": 1.0363,
      "step": 13
    },
    {
      "epoch": 0.11965811965811966,
      "grad_norm": 1.0422732830047607,
      "learning_rate": 9.998118078364186e-06,
      "loss": 1.1158,
      "step": 14
    },
    {
      "epoch": 0.1282051282051282,
      "grad_norm": 0.8618931174278259,
      "learning_rate": 9.99705960120905e-06,
      "loss": 1.1986,
      "step": 15
    },
    {
      "epoch": 0.13675213675213677,
      "grad_norm": 0.7314261198043823,
      "learning_rate": 9.99576600836172e-06,
      "loss": 0.9607,
      "step": 16
    },
    {
      "epoch": 0.1452991452991453,
      "grad_norm": 0.8001905679702759,
      "learning_rate": 9.994237360686784e-06,
      "loss": 1.3201,
      "step": 17
    },
    {
      "epoch": 0.15384615384615385,
      "grad_norm": 0.6340293288230896,
      "learning_rate": 9.992473730108354e-06,
      "loss": 0.8039,
      "step": 18
    },
    {
      "epoch": 0.1623931623931624,
      "grad_norm": 1.0305331945419312,
      "learning_rate": 9.990475199606672e-06,
      "loss": 0.987,
      "step": 19
    },
    {
      "epoch": 0.17094017094017094,
      "grad_norm": 0.5756571292877197,
      "learning_rate": 9.988241863214212e-06,
      "loss": 0.856,
      "step": 20
    },
    {
      "epoch": 0.1794871794871795,
      "grad_norm": 0.7210500836372375,
      "learning_rate": 9.985773826011256e-06,
      "loss": 0.9009,
      "step": 21
    },
    {
      "epoch": 0.18803418803418803,
      "grad_norm": 0.6321185231208801,
      "learning_rate": 9.98307120412095e-06,
      "loss": 0.9718,
      "step": 22
    },
    {
      "epoch": 0.19658119658119658,
      "grad_norm": 0.7177990078926086,
      "learning_rate": 9.980134124703837e-06,
      "loss": 1.0357,
      "step": 23
    },
    {
      "epoch": 0.20512820512820512,
      "grad_norm": 0.695940375328064,
      "learning_rate": 9.976962725951878e-06,
      "loss": 1.0613,
      "step": 24
    },
    {
      "epoch": 0.21367521367521367,
      "grad_norm": 0.7316240072250366,
      "learning_rate": 9.973557157081946e-06,
      "loss": 1.5432,
      "step": 25
    },
    {
      "epoch": 0.2222222222222222,
      "grad_norm": 1.1439138650894165,
      "learning_rate": 9.969917578328808e-06,
      "loss": 1.3765,
      "step": 26
    },
    {
      "epoch": 0.23076923076923078,
      "grad_norm": 0.7460082173347473,
      "learning_rate": 9.966044160937588e-06,
      "loss": 0.8814,
      "step": 27
    },
    {
      "epoch": 0.23931623931623933,
      "grad_norm": 0.6455249190330505,
      "learning_rate": 9.961937087155697e-06,
      "loss": 0.889,
      "step": 28
    },
    {
      "epoch": 0.24786324786324787,
      "grad_norm": 0.7218654155731201,
      "learning_rate": 9.957596550224285e-06,
      "loss": 1.1877,
      "step": 29
    },
    {
      "epoch": 0.2564102564102564,
      "grad_norm": 0.7643616795539856,
      "learning_rate": 9.953022754369115e-06,
      "loss": 1.133,
      "step": 30
    },
    {
      "epoch": 0.26495726495726496,
      "grad_norm": 0.4708094298839569,
      "learning_rate": 9.94821591479098e-06,
      "loss": 0.8308,
      "step": 31
    },
    {
      "epoch": 0.27350427350427353,
      "grad_norm": 0.5885545611381531,
      "learning_rate": 9.943176257655567e-06,
      "loss": 0.8915,
      "step": 32
    },
    {
      "epoch": 0.28205128205128205,
      "grad_norm": 0.7514286637306213,
      "learning_rate": 9.937904020082815e-06,
      "loss": 1.063,
      "step": 33
    },
    {
      "epoch": 0.2905982905982906,
      "grad_norm": 0.541725754737854,
      "learning_rate": 9.932399450135765e-06,
      "loss": 0.9508,
      "step": 34
    },
    {
      "epoch": 0.29914529914529914,
      "grad_norm": 0.5545334815979004,
      "learning_rate": 9.92666280680888e-06,
      "loss": 0.8066,
      "step": 35
    },
    {
      "epoch": 0.3076923076923077,
      "grad_norm": 0.47642382979393005,
      "learning_rate": 9.920694360015864e-06,
      "loss": 0.755,
      "step": 36
    },
    {
      "epoch": 0.3162393162393162,
      "grad_norm": 0.5091294050216675,
      "learning_rate": 9.914494390576958e-06,
      "loss": 0.7879,
      "step": 37
    },
    {
      "epoch": 0.3247863247863248,
      "grad_norm": 0.46325746178627014,
      "learning_rate": 9.908063190205739e-06,
      "loss": 0.83,
      "step": 38
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 0.5515424609184265,
      "learning_rate": 9.901401061495379e-06,
      "loss": 0.8476,
      "step": 39
    },
    {
      "epoch": 0.3418803418803419,
      "grad_norm": 0.5145699977874756,
      "learning_rate": 9.894508317904418e-06,
      "loss": 0.9449,
      "step": 40
    },
    {
      "epoch": 0.3504273504273504,
      "grad_norm": 0.46632590889930725,
      "learning_rate": 9.88738528374201e-06,
      "loss": 0.9886,
      "step": 41
    },
    {
      "epoch": 0.358974358974359,
      "grad_norm": 0.41940009593963623,
      "learning_rate": 9.880032294152673e-06,
      "loss": 0.7544,
      "step": 42
    },
    {
      "epoch": 0.36752136752136755,
      "grad_norm": 0.3798862099647522,
      "learning_rate": 9.872449695100503e-06,
      "loss": 0.77,
      "step": 43
    },
    {
      "epoch": 0.37606837606837606,
      "grad_norm": 0.571315348148346,
      "learning_rate": 9.864637843352916e-06,
      "loss": 1.1002,
      "step": 44
    },
    {
      "epoch": 0.38461538461538464,
      "grad_norm": 0.44546273350715637,
      "learning_rate": 9.856597106463847e-06,
      "loss": 0.8818,
      "step": 45
    },
    {
      "epoch": 0.39316239316239315,
      "grad_norm": 0.6359449028968811,
      "learning_rate": 9.848327862756466e-06,
      "loss": 0.8167,
      "step": 46
    },
    {
      "epoch": 0.4017094017094017,
      "grad_norm": 0.5933560729026794,
      "learning_rate": 9.839830501305371e-06,
      "loss": 0.873,
      "step": 47
    },
    {
      "epoch": 0.41025641025641024,
      "grad_norm": 0.36119118332862854,
      "learning_rate": 9.831105421918287e-06,
      "loss": 0.776,
      "step": 48
    },
    {
      "epoch": 0.4188034188034188,
      "grad_norm": 0.4318462312221527,
      "learning_rate": 9.822153035117246e-06,
      "loss": 0.7745,
      "step": 49
    },
    {
      "epoch": 0.42735042735042733,
      "grad_norm": 0.5515265464782715,
      "learning_rate": 9.812973762119282e-06,
      "loss": 1.0686,
      "step": 50
    },
    {
      "epoch": 0.4358974358974359,
      "grad_norm": 0.3906237781047821,
      "learning_rate": 9.803568034816606e-06,
      "loss": 0.7159,
      "step": 51
    },
    {
      "epoch": 0.4444444444444444,
      "grad_norm": 0.3262993395328522,
      "learning_rate": 9.793936295756292e-06,
      "loss": 0.7008,
      "step": 52
    },
    {
      "epoch": 0.452991452991453,
      "grad_norm": 0.43187564611434937,
      "learning_rate": 9.784078998119442e-06,
      "loss": 0.7201,
      "step": 53
    },
    {
      "epoch": 0.46153846153846156,
      "grad_norm": 0.3680849075317383,
      "learning_rate": 9.773996605699876e-06,
      "loss": 1.0274,
      "step": 54
    },
    {
      "epoch": 0.4700854700854701,
      "grad_norm": 0.32845616340637207,
      "learning_rate": 9.763689592882307e-06,
      "loss": 0.6843,
      "step": 55
    },
    {
      "epoch": 0.47863247863247865,
      "grad_norm": 0.5680167078971863,
      "learning_rate": 9.753158444620013e-06,
      "loss": 1.1483,
      "step": 56
    },
    {
      "epoch": 0.48717948717948717,
      "grad_norm": 0.4027453660964966,
      "learning_rate": 9.742403656412033e-06,
      "loss": 0.6624,
      "step": 57
    },
    {
      "epoch": 0.49572649572649574,
      "grad_norm": 0.42083829641342163,
      "learning_rate": 9.73142573427984e-06,
      "loss": 0.8074,
      "step": 58
    },
    {
      "epoch": 0.5042735042735043,
      "grad_norm": 0.43723517656326294,
      "learning_rate": 9.720225194743544e-06,
      "loss": 0.7623,
      "step": 59
    },
    {
      "epoch": 0.5128205128205128,
      "grad_norm": 0.37138086557388306,
      "learning_rate": 9.70880256479758e-06,
      "loss": 0.7541,
      "step": 60
    },
    {
      "epoch": 0.5213675213675214,
      "grad_norm": 0.38942328095436096,
      "learning_rate": 9.697158381885915e-06,
      "loss": 0.7369,
      "step": 61
    },
    {
      "epoch": 0.5299145299145299,
      "grad_norm": 0.35463273525238037,
      "learning_rate": 9.685293193876766e-06,
      "loss": 0.6687,
      "step": 62
    },
    {
      "epoch": 0.5384615384615384,
      "grad_norm": 0.443660706281662,
      "learning_rate": 9.673207559036817e-06,
      "loss": 1.3078,
      "step": 63
    },
    {
      "epoch": 0.5470085470085471,
      "grad_norm": 0.42827773094177246,
      "learning_rate": 9.660902046004954e-06,
      "loss": 0.7356,
      "step": 64
    },
    {
      "epoch": 0.5555555555555556,
      "grad_norm": 0.6239178776741028,
      "learning_rate": 9.648377233765507e-06,
      "loss": 0.6916,
      "step": 65
    },
    {
      "epoch": 0.5641025641025641,
      "grad_norm": 0.40673717856407166,
      "learning_rate": 9.635633711621014e-06,
      "loss": 0.728,
      "step": 66
    },
    {
      "epoch": 0.5726495726495726,
      "grad_norm": 0.4105391800403595,
      "learning_rate": 9.622672079164487e-06,
      "loss": 0.811,
      "step": 67
    },
    {
      "epoch": 0.5811965811965812,
      "grad_norm": 0.37009334564208984,
      "learning_rate": 9.60949294625121e-06,
      "loss": 0.6723,
      "step": 68
    },
    {
      "epoch": 0.5897435897435898,
      "grad_norm": 0.37860628962516785,
      "learning_rate": 9.596096932970035e-06,
      "loss": 0.7644,
      "step": 69
    },
    {
      "epoch": 0.5982905982905983,
      "grad_norm": 0.36861270666122437,
      "learning_rate": 9.582484669614212e-06,
      "loss": 0.7353,
      "step": 70
    },
    {
      "epoch": 0.6068376068376068,
      "grad_norm": 0.3790634274482727,
      "learning_rate": 9.568656796651733e-06,
      "loss": 0.8376,
      "step": 71
    },
    {
      "epoch": 0.6153846153846154,
      "grad_norm": 0.5848673582077026,
      "learning_rate": 9.554613964695189e-06,
      "loss": 1.3309,
      "step": 72
    },
    {
      "epoch": 0.6239316239316239,
      "grad_norm": 0.3627384305000305,
      "learning_rate": 9.540356834471178e-06,
      "loss": 0.6774,
      "step": 73
    },
    {
      "epoch": 0.6324786324786325,
      "grad_norm": 0.37787535786628723,
      "learning_rate": 9.525886076789195e-06,
      "loss": 0.703,
      "step": 74
    },
    {
      "epoch": 0.6410256410256411,
      "grad_norm": 0.32273605465888977,
      "learning_rate": 9.511202372510083e-06,
      "loss": 0.7019,
      "step": 75
    },
    {
      "epoch": 0.6495726495726496,
      "grad_norm": 0.30288276076316833,
      "learning_rate": 9.496306412513989e-06,
      "loss": 0.7098,
      "step": 76
    },
    {
      "epoch": 0.6581196581196581,
      "grad_norm": 0.47629785537719727,
      "learning_rate": 9.481198897667875e-06,
      "loss": 0.8417,
      "step": 77
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 0.29766926169395447,
      "learning_rate": 9.465880538792519e-06,
      "loss": 0.6709,
      "step": 78
    },
    {
      "epoch": 0.6752136752136753,
      "grad_norm": 0.33883240818977356,
      "learning_rate": 9.450352056629083e-06,
      "loss": 0.6712,
      "step": 79
    },
    {
      "epoch": 0.6837606837606838,
      "grad_norm": 0.3106386959552765,
      "learning_rate": 9.434614181805203e-06,
      "loss": 0.6535,
      "step": 80
    },
    {
      "epoch": 0.6923076923076923,
      "grad_norm": 0.38624322414398193,
      "learning_rate": 9.418667654800607e-06,
      "loss": 0.7493,
      "step": 81
    },
    {
      "epoch": 0.7008547008547008,
      "grad_norm": 0.46079033613204956,
      "learning_rate": 9.402513225912273e-06,
      "loss": 1.1914,
      "step": 82
    },
    {
      "epoch": 0.7094017094017094,
      "grad_norm": 0.4166659414768219,
      "learning_rate": 9.386151655219137e-06,
      "loss": 1.1341,
      "step": 83
    },
    {
      "epoch": 0.717948717948718,
      "grad_norm": 0.3459385931491852,
      "learning_rate": 9.369583712546322e-06,
      "loss": 1.1233,
      "step": 84
    },
    {
      "epoch": 0.7264957264957265,
      "grad_norm": 0.303739994764328,
      "learning_rate": 9.352810177428917e-06,
      "loss": 0.6361,
      "step": 85
    },
    {
      "epoch": 0.7350427350427351,
      "grad_norm": 0.31175675988197327,
      "learning_rate": 9.335831839075303e-06,
      "loss": 0.5938,
      "step": 86
    },
    {
      "epoch": 0.7435897435897436,
      "grad_norm": 0.3335458040237427,
      "learning_rate": 9.318649496330021e-06,
      "loss": 0.673,
      "step": 87
    },
    {
      "epoch": 0.7521367521367521,
      "grad_norm": 0.5561854839324951,
      "learning_rate": 9.30126395763618e-06,
      "loss": 1.0438,
      "step": 88
    },
    {
      "epoch": 0.7606837606837606,
      "grad_norm": 0.39674779772758484,
      "learning_rate": 9.283676040997426e-06,
      "loss": 1.2274,
      "step": 89
    },
    {
      "epoch": 0.7692307692307693,
      "grad_norm": 0.46839889883995056,
      "learning_rate": 9.265886573939448e-06,
      "loss": 1.0736,
      "step": 90
    },
    {
      "epoch": 0.7777777777777778,
      "grad_norm": 0.329444020986557,
      "learning_rate": 9.247896393471045e-06,
      "loss": 0.6996,
      "step": 91
    },
    {
      "epoch": 0.7863247863247863,
      "grad_norm": 0.37539413571357727,
      "learning_rate": 9.229706346044749e-06,
      "loss": 0.6772,
      "step": 92
    },
    {
      "epoch": 0.7948717948717948,
      "grad_norm": 0.3232697546482086,
      "learning_rate": 9.211317287516985e-06,
      "loss": 0.6433,
      "step": 93
    },
    {
      "epoch": 0.8034188034188035,
      "grad_norm": 0.4283379912376404,
      "learning_rate": 9.19273008310782e-06,
      "loss": 0.808,
      "step": 94
    },
    {
      "epoch": 0.811965811965812,
      "grad_norm": 0.40039879083633423,
      "learning_rate": 9.173945607360238e-06,
      "loss": 0.6781,
      "step": 95
    },
    {
      "epoch": 0.8205128205128205,
      "grad_norm": 0.421421617269516,
      "learning_rate": 9.154964744099006e-06,
      "loss": 1.1649,
      "step": 96
    },
    {
      "epoch": 0.8290598290598291,
      "grad_norm": 0.37563416361808777,
      "learning_rate": 9.135788386389077e-06,
      "loss": 0.6748,
      "step": 97
    },
    {
      "epoch": 0.8376068376068376,
      "grad_norm": 0.34847089648246765,
      "learning_rate": 9.116417436493574e-06,
      "loss": 1.2002,
      "step": 98
    },
    {
      "epoch": 0.8461538461538461,
      "grad_norm": 0.38143283128738403,
      "learning_rate": 9.096852805831348e-06,
      "loss": 0.8034,
      "step": 99
    },
    {
      "epoch": 0.8547008547008547,
      "grad_norm": 0.43068060278892517,
      "learning_rate": 9.077095414934076e-06,
      "loss": 0.7409,
      "step": 100
    },
    {
      "epoch": 0.8632478632478633,
      "grad_norm": 0.4279479384422302,
      "learning_rate": 9.057146193402968e-06,
      "loss": 1.0627,
      "step": 101
    },
    {
      "epoch": 0.8717948717948718,
      "grad_norm": 0.4032224416732788,
      "learning_rate": 9.037006079865017e-06,
      "loss": 1.1393,
      "step": 102
    },
    {
      "epoch": 0.8803418803418803,
      "grad_norm": 0.36322587728500366,
      "learning_rate": 9.016676021928838e-06,
      "loss": 0.9575,
      "step": 103
    },
    {
      "epoch": 0.8888888888888888,
      "grad_norm": 0.42848172783851624,
      "learning_rate": 8.996156976140088e-06,
      "loss": 1.1044,
      "step": 104
    },
    {
      "epoch": 0.8974358974358975,
      "grad_norm": 0.38128426671028137,
      "learning_rate": 8.975449907936447e-06,
      "loss": 1.2012,
      "step": 105
    },
    {
      "epoch": 0.905982905982906,
      "grad_norm": 0.8348135948181152,
      "learning_rate": 8.95455579160221e-06,
      "loss": 1.1161,
      "step": 106
    },
    {
      "epoch": 0.9145299145299145,
      "grad_norm": 0.599600613117218,
      "learning_rate": 8.933475610222435e-06,
      "loss": 0.8809,
      "step": 107
    },
    {
      "epoch": 0.9230769230769231,
      "grad_norm": 0.34604817628860474,
      "learning_rate": 8.91221035563669e-06,
      "loss": 1.1079,
      "step": 108
    },
    {
      "epoch": 0.9316239316239316,
      "grad_norm": 0.6436942219734192,
      "learning_rate": 8.890761028392385e-06,
      "loss": 1.136,
      "step": 109
    },
    {
      "epoch": 0.9401709401709402,
      "grad_norm": 0.44971659779548645,
      "learning_rate": 8.869128637697702e-06,
      "loss": 0.8062,
      "step": 110
    },
    {
      "epoch": 0.9487179487179487,
      "grad_norm": 0.3893284201622009,
      "learning_rate": 8.847314201374102e-06,
      "loss": 0.7011,
      "step": 111
    },
    {
      "epoch": 0.9572649572649573,
      "grad_norm": 0.39437901973724365,
      "learning_rate": 8.82531874580844e-06,
      "loss": 0.6845,
      "step": 112
    },
    {
      "epoch": 0.9658119658119658,
      "grad_norm": 0.39099910855293274,
      "learning_rate": 8.803143305904676e-06,
      "loss": 0.6957,
      "step": 113
    },
    {
      "epoch": 0.9743589743589743,
      "grad_norm": 0.3814919590950012,
      "learning_rate": 8.780788925035178e-06,
      "loss": 0.8374,
      "step": 114
    },
    {
      "epoch": 0.9829059829059829,
      "grad_norm": 0.31528154015541077,
      "learning_rate": 8.758256654991627e-06,
      "loss": 0.601,
      "step": 115
    },
    {
      "epoch": 0.9914529914529915,
      "grad_norm": 0.45662426948547363,
      "learning_rate": 8.735547555935538e-06,
      "loss": 0.7883,
      "step": 116
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.3865978419780731,
      "learning_rate": 8.712662696348371e-06,
      "loss": 0.6754,
      "step": 117
    },
    {
      "epoch": 1.0085470085470085,
      "grad_norm": 0.337187260389328,
      "learning_rate": 8.689603152981262e-06,
      "loss": 0.6326,
      "step": 118
    },
    {
      "epoch": 1.017094017094017,
      "grad_norm": 0.38046014308929443,
      "learning_rate": 8.666370010804361e-06,
      "loss": 0.6708,
      "step": 119
    },
    {
      "epoch": 1.0256410256410255,
      "grad_norm": 0.42673853039741516,
      "learning_rate": 8.642964362955781e-06,
      "loss": 0.6928,
      "step": 120
    },
    {
      "epoch": 1.0341880341880343,
      "grad_norm": 0.45885011553764343,
      "learning_rate": 8.619387310690167e-06,
      "loss": 0.6886,
      "step": 121
    },
    {
      "epoch": 1.0427350427350428,
      "grad_norm": 0.4303334057331085,
      "learning_rate": 8.59563996332688e-06,
      "loss": 1.3497,
      "step": 122
    },
    {
      "epoch": 1.0512820512820513,
      "grad_norm": 0.5063712000846863,
      "learning_rate": 8.5717234381978e-06,
      "loss": 1.1424,
      "step": 123
    },
    {
      "epoch": 1.0598290598290598,
      "grad_norm": 0.43861711025238037,
      "learning_rate": 8.547638860594765e-06,
      "loss": 1.1289,
      "step": 124
    },
    {
      "epoch": 1.0683760683760684,
      "grad_norm": 0.43634119629859924,
      "learning_rate": 8.523387363716611e-06,
      "loss": 0.7524,
      "step": 125
    },
    {
      "epoch": 1.0769230769230769,
      "grad_norm": 0.3733837604522705,
      "learning_rate": 8.498970088615861e-06,
      "loss": 0.6589,
      "step": 126
    },
    {
      "epoch": 1.0854700854700854,
      "grad_norm": 0.32617077231407166,
      "learning_rate": 8.474388184145043e-06,
      "loss": 1.2309,
      "step": 127
    },
    {
      "epoch": 1.0940170940170941,
      "grad_norm": 0.35106804966926575,
      "learning_rate": 8.449642806902623e-06,
      "loss": 0.6126,
      "step": 128
    },
    {
      "epoch": 1.1025641025641026,
      "grad_norm": 0.4258238971233368,
      "learning_rate": 8.424735121178598e-06,
      "loss": 0.6661,
      "step": 129
    },
    {
      "epoch": 1.1111111111111112,
      "grad_norm": 0.4120415151119232,
      "learning_rate": 8.399666298899706e-06,
      "loss": 0.7212,
      "step": 130
    },
    {
      "epoch": 1.1196581196581197,
      "grad_norm": 0.4039503037929535,
      "learning_rate": 8.374437519574296e-06,
      "loss": 1.0448,
      "step": 131
    },
    {
      "epoch": 1.1282051282051282,
      "grad_norm": 0.33159151673316956,
      "learning_rate": 8.349049970236822e-06,
      "loss": 1.1204,
      "step": 132
    },
    {
      "epoch": 1.1367521367521367,
      "grad_norm": 0.4609539210796356,
      "learning_rate": 8.32350484539199e-06,
      "loss": 0.7522,
      "step": 133
    },
    {
      "epoch": 1.1452991452991452,
      "grad_norm": 0.34498193860054016,
      "learning_rate": 8.29780334695857e-06,
      "loss": 1.0665,
      "step": 134
    },
    {
      "epoch": 1.1538461538461537,
      "grad_norm": 0.3104630410671234,
      "learning_rate": 8.271946684212832e-06,
      "loss": 0.5928,
      "step": 135
    },
    {
      "epoch": 1.1623931623931625,
      "grad_norm": 0.4486801326274872,
      "learning_rate": 8.245936073731654e-06,
      "loss": 0.6778,
      "step": 136
    },
    {
      "epoch": 1.170940170940171,
      "grad_norm": 0.41299891471862793,
      "learning_rate": 8.219772739335272e-06,
      "loss": 1.6928,
      "step": 137
    },
    {
      "epoch": 1.1794871794871795,
      "grad_norm": 0.41245394945144653,
      "learning_rate": 8.193457912029713e-06,
      "loss": 0.6847,
      "step": 138
    },
    {
      "epoch": 1.188034188034188,
      "grad_norm": 0.3258431553840637,
      "learning_rate": 8.166992829948868e-06,
      "loss": 0.5718,
      "step": 139
    },
    {
      "epoch": 1.1965811965811965,
      "grad_norm": 0.5331162214279175,
      "learning_rate": 8.140378738296233e-06,
      "loss": 0.7571,
      "step": 140
    },
    {
      "epoch": 1.205128205128205,
      "grad_norm": 0.36795511841773987,
      "learning_rate": 8.113616889286325e-06,
      "loss": 0.6596,
      "step": 141
    },
    {
      "epoch": 1.2136752136752136,
      "grad_norm": 0.35999539494514465,
      "learning_rate": 8.086708542085769e-06,
      "loss": 1.1737,
      "step": 142
    },
    {
      "epoch": 1.2222222222222223,
      "grad_norm": 0.4550798237323761,
      "learning_rate": 8.05965496275404e-06,
      "loss": 0.6575,
      "step": 143
    },
    {
      "epoch": 1.2307692307692308,
      "grad_norm": 0.43874284625053406,
      "learning_rate": 8.032457424183909e-06,
      "loss": 0.7127,
      "step": 144
    },
    {
      "epoch": 1.2393162393162394,
      "grad_norm": 0.39959272742271423,
      "learning_rate": 8.005117206041544e-06,
      "loss": 0.7145,
      "step": 145
    },
    {
      "epoch": 1.2478632478632479,
      "grad_norm": 0.4271208941936493,
      "learning_rate": 7.977635594706298e-06,
      "loss": 0.7244,
      "step": 146
    },
    {
      "epoch": 1.2564102564102564,
      "grad_norm": 0.4204410910606384,
      "learning_rate": 7.950013883210198e-06,
      "loss": 0.6295,
      "step": 147
    },
    {
      "epoch": 1.264957264957265,
      "grad_norm": 0.40335509181022644,
      "learning_rate": 7.922253371177081e-06,
      "loss": 0.7212,
      "step": 148
    },
    {
      "epoch": 1.2735042735042734,
      "grad_norm": 0.3271823227405548,
      "learning_rate": 7.894355364761476e-06,
      "loss": 0.9603,
      "step": 149
    },
    {
      "epoch": 1.282051282051282,
      "grad_norm": 0.4051213562488556,
      "learning_rate": 7.866321176587129e-06,
      "loss": 1.1063,
      "step": 150
    },
    {
      "epoch": 1.2905982905982907,
      "grad_norm": 0.3575092852115631,
      "learning_rate": 7.838152125685245e-06,
      "loss": 0.5582,
      "step": 151
    },
    {
      "epoch": 1.2991452991452992,
      "grad_norm": 0.39023974537849426,
      "learning_rate": 7.809849537432432e-06,
      "loss": 0.6651,
      "step": 152
    },
    {
      "epoch": 1.3076923076923077,
      "grad_norm": 0.45742174983024597,
      "learning_rate": 7.781414743488338e-06,
      "loss": 0.7104,
      "step": 153
    },
    {
      "epoch": 1.3162393162393162,
      "grad_norm": 0.3916301727294922,
      "learning_rate": 7.752849081732993e-06,
      "loss": 0.7525,
      "step": 154
    },
    {
      "epoch": 1.3247863247863247,
      "grad_norm": 0.41341787576675415,
      "learning_rate": 7.724153896203868e-06,
      "loss": 0.6589,
      "step": 155
    },
    {
      "epoch": 1.3333333333333333,
      "grad_norm": 0.36725375056266785,
      "learning_rate": 7.695330537032629e-06,
      "loss": 0.6316,
      "step": 156
    },
    {
      "epoch": 1.341880341880342,
      "grad_norm": 0.39961159229278564,
      "learning_rate": 7.666380360381616e-06,
      "loss": 1.2004,
      "step": 157
    },
    {
      "epoch": 1.3504273504273505,
      "grad_norm": 0.5076507925987244,
      "learning_rate": 7.637304728380036e-06,
      "loss": 0.7745,
      "step": 158
    },
    {
      "epoch": 1.358974358974359,
      "grad_norm": 0.47983452677726746,
      "learning_rate": 7.608105009059867e-06,
      "loss": 0.8066,
      "step": 159
    },
    {
      "epoch": 1.3675213675213675,
      "grad_norm": 0.4021775722503662,
      "learning_rate": 7.578782576291501e-06,
      "loss": 0.5962,
      "step": 160
    },
    {
      "epoch": 1.376068376068376,
      "grad_norm": 0.5335017442703247,
      "learning_rate": 7.5493388097190915e-06,
      "loss": 0.8891,
      "step": 161
    },
    {
      "epoch": 1.3846153846153846,
      "grad_norm": 0.42396119236946106,
      "learning_rate": 7.51977509469565e-06,
      "loss": 0.6718,
      "step": 162
    },
    {
      "epoch": 1.393162393162393,
      "grad_norm": 0.4155985713005066,
      "learning_rate": 7.490092822217856e-06,
      "loss": 0.9571,
      "step": 163
    },
    {
      "epoch": 1.4017094017094016,
      "grad_norm": 0.5259201526641846,
      "learning_rate": 7.460293388860616e-06,
      "loss": 0.8977,
      "step": 164
    },
    {
      "epoch": 1.4102564102564101,
      "grad_norm": 0.4060882031917572,
      "learning_rate": 7.4303781967113494e-06,
      "loss": 0.7018,
      "step": 165
    },
    {
      "epoch": 1.4188034188034189,
      "grad_norm": 0.32119300961494446,
      "learning_rate": 7.400348653304022e-06,
      "loss": 0.5588,
      "step": 166
    },
    {
      "epoch": 1.4273504273504274,
      "grad_norm": 0.42005738615989685,
      "learning_rate": 7.370206171552914e-06,
      "loss": 0.9614,
      "step": 167
    },
    {
      "epoch": 1.435897435897436,
      "grad_norm": 0.3684864938259125,
      "learning_rate": 7.3399521696861505e-06,
      "loss": 0.6402,
      "step": 168
    },
    {
      "epoch": 1.4444444444444444,
      "grad_norm": 0.4385385811328888,
      "learning_rate": 7.309588071178968e-06,
      "loss": 0.6126,
      "step": 169
    },
    {
      "epoch": 1.452991452991453,
      "grad_norm": 0.414637953042984,
      "learning_rate": 7.2791153046867344e-06,
      "loss": 0.7569,
      "step": 170
    },
    {
      "epoch": 1.4615384615384617,
      "grad_norm": 0.3897780179977417,
      "learning_rate": 7.248535303977739e-06,
      "loss": 0.6537,
      "step": 171
    },
    {
      "epoch": 1.4700854700854702,
      "grad_norm": 0.37271955609321594,
      "learning_rate": 7.217849507865724e-06,
      "loss": 0.6451,
      "step": 172
    },
    {
      "epoch": 1.4786324786324787,
      "grad_norm": 0.4022608697414398,
      "learning_rate": 7.187059360142194e-06,
      "loss": 0.7482,
      "step": 173
    },
    {
      "epoch": 1.4871794871794872,
      "grad_norm": 0.3631649315357208,
      "learning_rate": 7.156166309508482e-06,
      "loss": 0.9156,
      "step": 174
    },
    {
      "epoch": 1.4957264957264957,
      "grad_norm": 0.4740133583545685,
      "learning_rate": 7.125171809507581e-06,
      "loss": 0.6974,
      "step": 175
    },
    {
      "epoch": 1.5042735042735043,
      "grad_norm": 0.49716681241989136,
      "learning_rate": 7.094077318455762e-06,
      "loss": 1.2114,
      "step": 176
    },
    {
      "epoch": 1.5128205128205128,
      "grad_norm": 0.449844628572464,
      "learning_rate": 7.062884299373955e-06,
      "loss": 0.6517,
      "step": 177
    },
    {
      "epoch": 1.5213675213675213,
      "grad_norm": 0.38638660311698914,
      "learning_rate": 7.031594219918916e-06,
      "loss": 0.6244,
      "step": 178
    },
    {
      "epoch": 1.5299145299145298,
      "grad_norm": 0.44147396087646484,
      "learning_rate": 7.000208552314166e-06,
      "loss": 0.5929,
      "step": 179
    },
    {
      "epoch": 1.5384615384615383,
      "grad_norm": 0.36652877926826477,
      "learning_rate": 6.96872877328073e-06,
      "loss": 0.6289,
      "step": 180
    },
    {
      "epoch": 1.547008547008547,
      "grad_norm": 0.353456974029541,
      "learning_rate": 6.937156363967647e-06,
      "loss": 0.5993,
      "step": 181
    },
    {
      "epoch": 1.5555555555555556,
      "grad_norm": 0.4075149893760681,
      "learning_rate": 6.905492809882286e-06,
      "loss": 1.1381,
      "step": 182
    },
    {
      "epoch": 1.564102564102564,
      "grad_norm": 0.375399649143219,
      "learning_rate": 6.873739600820457e-06,
      "loss": 0.5813,
      "step": 183
    },
    {
      "epoch": 1.5726495726495726,
      "grad_norm": 0.5181817412376404,
      "learning_rate": 6.841898230796302e-06,
      "loss": 0.7546,
      "step": 184
    },
    {
      "epoch": 1.5811965811965814,
      "grad_norm": 0.40129345655441284,
      "learning_rate": 6.809970197972014e-06,
      "loss": 0.666,
      "step": 185
    },
    {
      "epoch": 1.5897435897435899,
      "grad_norm": 0.44013726711273193,
      "learning_rate": 6.777957004587332e-06,
      "loss": 0.6635,
      "step": 186
    },
    {
      "epoch": 1.5982905982905984,
      "grad_norm": 0.9672113060951233,
      "learning_rate": 6.745860156888878e-06,
      "loss": 1.0801,
      "step": 187
    },
    {
      "epoch": 1.606837606837607,
      "grad_norm": 0.6321570873260498,
      "learning_rate": 6.713681165059271e-06,
      "loss": 0.6552,
      "step": 188
    },
    {
      "epoch": 1.6153846153846154,
      "grad_norm": 0.42415156960487366,
      "learning_rate": 6.68142154314608e-06,
      "loss": 0.7572,
      "step": 189
    },
    {
      "epoch": 1.623931623931624,
      "grad_norm": 0.41570088267326355,
      "learning_rate": 6.6490828089905854e-06,
      "loss": 0.9682,
      "step": 190
    },
    {
      "epoch": 1.6324786324786325,
      "grad_norm": 0.7180127501487732,
      "learning_rate": 6.616666484156358e-06,
      "loss": 0.9209,
      "step": 191
    },
    {
      "epoch": 1.641025641025641,
      "grad_norm": 0.41402408480644226,
      "learning_rate": 6.584174093857676e-06,
      "loss": 0.6809,
      "step": 192
    },
    {
      "epoch": 1.6495726495726495,
      "grad_norm": 0.4954575002193451,
      "learning_rate": 6.551607166887761e-06,
      "loss": 0.7514,
      "step": 193
    },
    {
      "epoch": 1.658119658119658,
      "grad_norm": 0.9597253799438477,
      "learning_rate": 6.5189672355468415e-06,
      "loss": 0.5929,
      "step": 194
    },
    {
      "epoch": 1.6666666666666665,
      "grad_norm": 0.49050456285476685,
      "learning_rate": 6.486255835570063e-06,
      "loss": 0.6365,
      "step": 195
    },
    {
      "epoch": 1.6752136752136753,
      "grad_norm": 0.3644927144050598,
      "learning_rate": 6.453474506055228e-06,
      "loss": 0.9474,
      "step": 196
    },
    {
      "epoch": 1.6837606837606838,
      "grad_norm": 0.41037657856941223,
      "learning_rate": 6.420624789390378e-06,
      "loss": 0.7692,
      "step": 197
    },
    {
      "epoch": 1.6923076923076923,
      "grad_norm": 0.33042111992836,
      "learning_rate": 6.387708231181229e-06,
      "loss": 0.644,
      "step": 198
    },
    {
      "epoch": 1.7008547008547008,
      "grad_norm": 0.4650563597679138,
      "learning_rate": 6.354726380178442e-06,
      "loss": 1.209,
      "step": 199
    },
    {
      "epoch": 1.7094017094017095,
      "grad_norm": 0.41142725944519043,
      "learning_rate": 6.3216807882047585e-06,
      "loss": 0.7169,
      "step": 200
    },
    {
      "epoch": 1.717948717948718,
      "grad_norm": 0.48036524653434753,
      "learning_rate": 6.288573010081984e-06,
      "loss": 0.7699,
      "step": 201
    },
    {
      "epoch": 1.7264957264957266,
      "grad_norm": 0.6143119931221008,
      "learning_rate": 6.255404603557833e-06,
      "loss": 0.7177,
      "step": 202
    },
    {
      "epoch": 1.735042735042735,
      "grad_norm": 0.42116302251815796,
      "learning_rate": 6.222177129232634e-06,
      "loss": 0.6262,
      "step": 203
    },
    {
      "epoch": 1.7435897435897436,
      "grad_norm": 0.42195364832878113,
      "learning_rate": 6.188892150485904e-06,
      "loss": 0.9916,
      "step": 204
    },
    {
      "epoch": 1.7521367521367521,
      "grad_norm": 0.46677255630493164,
      "learning_rate": 6.155551233402789e-06,
      "loss": 1.2428,
      "step": 205
    },
    {
      "epoch": 1.7606837606837606,
      "grad_norm": 0.5056412816047668,
      "learning_rate": 6.122155946700381e-06,
      "loss": 0.744,
      "step": 206
    },
    {
      "epoch": 1.7692307692307692,
      "grad_norm": 0.5227958559989929,
      "learning_rate": 6.088707861653904e-06,
      "loss": 0.7133,
      "step": 207
    },
    {
      "epoch": 1.7777777777777777,
      "grad_norm": 0.4398983418941498,
      "learning_rate": 6.0552085520227875e-06,
      "loss": 0.5776,
      "step": 208
    },
    {
      "epoch": 1.7863247863247862,
      "grad_norm": 0.42121821641921997,
      "learning_rate": 6.021659593976621e-06,
      "loss": 0.6745,
      "step": 209
    },
    {
      "epoch": 1.7948717948717947,
      "grad_norm": 0.4671107232570648,
      "learning_rate": 5.988062566020987e-06,
      "loss": 0.7452,
      "step": 210
    },
    {
      "epoch": 1.8034188034188035,
      "grad_norm": 0.45300018787384033,
      "learning_rate": 5.954419048923202e-06,
      "loss": 0.7965,
      "step": 211
    },
    {
      "epoch": 1.811965811965812,
      "grad_norm": 0.4954420030117035,
      "learning_rate": 5.920730625637934e-06,
      "loss": 0.8834,
      "step": 212
    },
    {
      "epoch": 1.8205128205128205,
      "grad_norm": 0.5425894260406494,
      "learning_rate": 5.886998881232715e-06,
      "loss": 0.7124,
      "step": 213
    },
    {
      "epoch": 1.8290598290598292,
      "grad_norm": 0.40424826741218567,
      "learning_rate": 5.853225402813381e-06,
      "loss": 0.713,
      "step": 214
    },
    {
      "epoch": 1.8376068376068377,
      "grad_norm": 0.3879939019680023,
      "learning_rate": 5.819411779449381e-06,
      "loss": 0.5891,
      "step": 215
    },
    {
      "epoch": 1.8461538461538463,
      "grad_norm": 0.44357284903526306,
      "learning_rate": 5.785559602099019e-06,
      "loss": 0.6287,
      "step": 216
    },
    {
      "epoch": 1.8547008547008548,
      "grad_norm": 0.3938916325569153,
      "learning_rate": 5.751670463534594e-06,
      "loss": 0.7215,
      "step": 217
    },
    {
      "epoch": 1.8632478632478633,
      "grad_norm": 0.39076554775238037,
      "learning_rate": 5.7177459582674595e-06,
      "loss": 0.6089,
      "step": 218
    },
    {
      "epoch": 1.8717948717948718,
      "grad_norm": 0.43660053610801697,
      "learning_rate": 5.683787682473003e-06,
      "loss": 0.6207,
      "step": 219
    },
    {
      "epoch": 1.8803418803418803,
      "grad_norm": 0.46270671486854553,
      "learning_rate": 5.649797233915539e-06,
      "loss": 0.6384,
      "step": 220
    },
    {
      "epoch": 1.8888888888888888,
      "grad_norm": 0.5016070604324341,
      "learning_rate": 5.615776211873142e-06,
      "loss": 0.7007,
      "step": 221
    },
    {
      "epoch": 1.8974358974358974,
      "grad_norm": 0.4464798867702484,
      "learning_rate": 5.5817262170623865e-06,
      "loss": 0.6267,
      "step": 222
    },
    {
      "epoch": 1.9059829059829059,
      "grad_norm": 0.47871559858322144,
      "learning_rate": 5.547648851563047e-06,
      "loss": 0.6108,
      "step": 223
    },
    {
      "epoch": 1.9145299145299144,
      "grad_norm": 0.4208378791809082,
      "learning_rate": 5.513545718742702e-06,
      "loss": 0.6503,
      "step": 224
    },
    {
      "epoch": 1.9230769230769231,
      "grad_norm": 0.4062391519546509,
      "learning_rate": 5.479418423181311e-06,
      "loss": 0.922,
      "step": 225
    },
    {
      "epoch": 1.9316239316239316,
      "grad_norm": 0.4971669018268585,
      "learning_rate": 5.4452685705957084e-06,
      "loss": 0.6235,
      "step": 226
    },
    {
      "epoch": 1.9401709401709402,
      "grad_norm": 0.45603546500205994,
      "learning_rate": 5.411097767764053e-06,
      "loss": 0.9878,
      "step": 227
    },
    {
      "epoch": 1.9487179487179487,
      "grad_norm": 0.419859915971756,
      "learning_rate": 5.376907622450229e-06,
      "loss": 0.5956,
      "step": 228
    },
    {
      "epoch": 1.9572649572649574,
      "grad_norm": 0.5258283615112305,
      "learning_rate": 5.342699743328203e-06,
      "loss": 0.6999,
      "step": 229
    },
    {
      "epoch": 1.965811965811966,
      "grad_norm": 0.46300017833709717,
      "learning_rate": 5.308475739906329e-06,
      "loss": 0.7178,
      "step": 230
    },
    {
      "epoch": 1.9743589743589745,
      "grad_norm": 0.5326732993125916,
      "learning_rate": 5.2742372224516235e-06,
      "loss": 0.6377,
      "step": 231
    },
    {
      "epoch": 1.982905982905983,
      "grad_norm": 0.4621569812297821,
      "learning_rate": 5.2399858019140005e-06,
      "loss": 0.6213,
      "step": 232
    },
    {
      "epoch": 1.9914529914529915,
      "grad_norm": 0.43373093008995056,
      "learning_rate": 5.205723089850472e-06,
      "loss": 0.6279,
      "step": 233
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.738778829574585,
      "learning_rate": 5.171450698349329e-06,
      "loss": 0.7957,
      "step": 234
    }
  ],
  "logging_steps": 1,
  "max_steps": 468,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 117,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 8.702116131294413e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}