{ |
|
"best_metric": 1.284598708152771, |
|
"best_model_checkpoint": "miner_id_24/checkpoint-200", |
|
"epoch": 0.006826113296415437, |
|
"eval_steps": 50, |
|
"global_step": 200, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 3.413056648207719e-05, |
|
"grad_norm": 0.44594427943229675, |
|
"learning_rate": 1e-05, |
|
"loss": 1.2863, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 3.413056648207719e-05, |
|
"eval_loss": 1.5722088813781738, |
|
"eval_runtime": 1610.3713, |
|
"eval_samples_per_second": 30.643, |
|
"eval_steps_per_second": 7.661, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 6.826113296415438e-05, |
|
"grad_norm": 0.46026965975761414, |
|
"learning_rate": 2e-05, |
|
"loss": 1.2411, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.00010239169944623155, |
|
"grad_norm": 0.6382265090942383, |
|
"learning_rate": 3e-05, |
|
"loss": 1.4992, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.00013652226592830876, |
|
"grad_norm": 0.5791121125221252, |
|
"learning_rate": 4e-05, |
|
"loss": 1.3482, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.00017065283241038593, |
|
"grad_norm": 0.5219406485557556, |
|
"learning_rate": 5e-05, |
|
"loss": 1.2532, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0002047833988924631, |
|
"grad_norm": 0.5231647491455078, |
|
"learning_rate": 6e-05, |
|
"loss": 1.2907, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.0002389139653745403, |
|
"grad_norm": 0.3957732915878296, |
|
"learning_rate": 7e-05, |
|
"loss": 1.183, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.0002730445318566175, |
|
"grad_norm": 0.400023877620697, |
|
"learning_rate": 8e-05, |
|
"loss": 1.1117, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.0003071750983386947, |
|
"grad_norm": 0.47502192854881287, |
|
"learning_rate": 9e-05, |
|
"loss": 1.1937, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.00034130566482077187, |
|
"grad_norm": 0.4597507119178772, |
|
"learning_rate": 0.0001, |
|
"loss": 1.2562, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.00037543623130284904, |
|
"grad_norm": 0.4608520567417145, |
|
"learning_rate": 9.999316524962345e-05, |
|
"loss": 1.2301, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.0004095667977849262, |
|
"grad_norm": 0.4520881175994873, |
|
"learning_rate": 9.997266286704631e-05, |
|
"loss": 1.2709, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.00044369736426700345, |
|
"grad_norm": 0.41737860441207886, |
|
"learning_rate": 9.993849845741524e-05, |
|
"loss": 1.1905, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.0004778279307490806, |
|
"grad_norm": 0.3545536994934082, |
|
"learning_rate": 9.989068136093873e-05, |
|
"loss": 1.3463, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.0005119584972311578, |
|
"grad_norm": 0.3553808033466339, |
|
"learning_rate": 9.98292246503335e-05, |
|
"loss": 1.216, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.000546089063713235, |
|
"grad_norm": 0.40968960523605347, |
|
"learning_rate": 9.975414512725057e-05, |
|
"loss": 1.3641, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.0005802196301953121, |
|
"grad_norm": 0.42688941955566406, |
|
"learning_rate": 9.966546331768191e-05, |
|
"loss": 1.1641, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.0006143501966773894, |
|
"grad_norm": 0.42863839864730835, |
|
"learning_rate": 9.956320346634876e-05, |
|
"loss": 1.2128, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.0006484807631594665, |
|
"grad_norm": 0.42905810475349426, |
|
"learning_rate": 9.944739353007344e-05, |
|
"loss": 1.1894, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.0006826113296415437, |
|
"grad_norm": 0.3967270255088806, |
|
"learning_rate": 9.931806517013612e-05, |
|
"loss": 1.2708, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.000716741896123621, |
|
"grad_norm": 0.3916252851486206, |
|
"learning_rate": 9.917525374361912e-05, |
|
"loss": 1.2985, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.0007508724626056981, |
|
"grad_norm": 0.3845914602279663, |
|
"learning_rate": 9.901899829374047e-05, |
|
"loss": 1.2068, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.0007850030290877753, |
|
"grad_norm": 0.41597360372543335, |
|
"learning_rate": 9.884934153917997e-05, |
|
"loss": 1.2379, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.0008191335955698524, |
|
"grad_norm": 0.43235287070274353, |
|
"learning_rate": 9.86663298624003e-05, |
|
"loss": 1.2474, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.0008532641620519297, |
|
"grad_norm": 0.43992671370506287, |
|
"learning_rate": 9.847001329696653e-05, |
|
"loss": 1.2688, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.0008873947285340069, |
|
"grad_norm": 0.4149838089942932, |
|
"learning_rate": 9.826044551386744e-05, |
|
"loss": 1.186, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.000921525295016084, |
|
"grad_norm": 0.42388981580734253, |
|
"learning_rate": 9.803768380684242e-05, |
|
"loss": 1.2362, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.0009556558614981612, |
|
"grad_norm": 0.43185508251190186, |
|
"learning_rate": 9.780178907671789e-05, |
|
"loss": 1.2924, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.0009897864279802384, |
|
"grad_norm": 0.5838025808334351, |
|
"learning_rate": 9.755282581475769e-05, |
|
"loss": 1.5337, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.0010239169944623156, |
|
"grad_norm": 0.44131457805633545, |
|
"learning_rate": 9.729086208503174e-05, |
|
"loss": 1.5396, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.0010580475609443928, |
|
"grad_norm": 0.4512113928794861, |
|
"learning_rate": 9.701596950580806e-05, |
|
"loss": 1.2793, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.00109217812742647, |
|
"grad_norm": 0.46696820855140686, |
|
"learning_rate": 9.672822322997305e-05, |
|
"loss": 1.1158, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.001126308693908547, |
|
"grad_norm": 0.44508466124534607, |
|
"learning_rate": 9.642770192448536e-05, |
|
"loss": 1.2374, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.0011604392603906243, |
|
"grad_norm": 0.4683876037597656, |
|
"learning_rate": 9.611448774886924e-05, |
|
"loss": 1.1863, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.0011945698268727015, |
|
"grad_norm": 0.5023918151855469, |
|
"learning_rate": 9.578866633275288e-05, |
|
"loss": 1.3757, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.0012287003933547788, |
|
"grad_norm": 0.4710521697998047, |
|
"learning_rate": 9.545032675245813e-05, |
|
"loss": 1.188, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.001262830959836856, |
|
"grad_norm": 0.5157263278961182, |
|
"learning_rate": 9.509956150664796e-05, |
|
"loss": 1.5428, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.001296961526318933, |
|
"grad_norm": 0.518281102180481, |
|
"learning_rate": 9.473646649103818e-05, |
|
"loss": 1.421, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.0013310920928010102, |
|
"grad_norm": 0.5908358693122864, |
|
"learning_rate": 9.43611409721806e-05, |
|
"loss": 1.4305, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.0013652226592830875, |
|
"grad_norm": 0.5308020114898682, |
|
"learning_rate": 9.397368756032445e-05, |
|
"loss": 1.3517, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.0013993532257651647, |
|
"grad_norm": 0.5744770169258118, |
|
"learning_rate": 9.357421218136386e-05, |
|
"loss": 1.389, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.001433483792247242, |
|
"grad_norm": 0.6788293719291687, |
|
"learning_rate": 9.316282404787871e-05, |
|
"loss": 1.3143, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.001467614358729319, |
|
"grad_norm": 0.5195122361183167, |
|
"learning_rate": 9.273963562927695e-05, |
|
"loss": 1.1217, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.0015017449252113962, |
|
"grad_norm": 0.6682026982307434, |
|
"learning_rate": 9.230476262104677e-05, |
|
"loss": 1.4026, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.0015358754916934734, |
|
"grad_norm": 0.690555214881897, |
|
"learning_rate": 9.185832391312644e-05, |
|
"loss": 1.4181, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.0015700060581755506, |
|
"grad_norm": 0.745463490486145, |
|
"learning_rate": 9.140044155740101e-05, |
|
"loss": 1.3662, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.0016041366246576279, |
|
"grad_norm": 0.6858728528022766, |
|
"learning_rate": 9.093124073433463e-05, |
|
"loss": 1.5098, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.0016382671911397049, |
|
"grad_norm": 0.7358869910240173, |
|
"learning_rate": 9.045084971874738e-05, |
|
"loss": 1.3523, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.001672397757621782, |
|
"grad_norm": 0.7893625497817993, |
|
"learning_rate": 8.995939984474624e-05, |
|
"loss": 1.5544, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.0017065283241038593, |
|
"grad_norm": 0.8945049047470093, |
|
"learning_rate": 8.945702546981969e-05, |
|
"loss": 1.4609, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.0017065283241038593, |
|
"eval_loss": 1.3237718343734741, |
|
"eval_runtime": 1618.0666, |
|
"eval_samples_per_second": 30.498, |
|
"eval_steps_per_second": 7.625, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.0017406588905859366, |
|
"grad_norm": 0.31223592162132263, |
|
"learning_rate": 8.894386393810563e-05, |
|
"loss": 1.0558, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.0017747894570680138, |
|
"grad_norm": 0.3674425780773163, |
|
"learning_rate": 8.842005554284296e-05, |
|
"loss": 1.1294, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.0018089200235500908, |
|
"grad_norm": 0.42357581853866577, |
|
"learning_rate": 8.788574348801675e-05, |
|
"loss": 1.2837, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.001843050590032168, |
|
"grad_norm": 0.38247671723365784, |
|
"learning_rate": 8.73410738492077e-05, |
|
"loss": 1.1485, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.0018771811565142453, |
|
"grad_norm": 0.3712633550167084, |
|
"learning_rate": 8.678619553365659e-05, |
|
"loss": 1.2744, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.0019113117229963225, |
|
"grad_norm": 0.3071327805519104, |
|
"learning_rate": 8.622126023955446e-05, |
|
"loss": 1.1759, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.0019454422894783997, |
|
"grad_norm": 0.3012136220932007, |
|
"learning_rate": 8.564642241456986e-05, |
|
"loss": 1.2486, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.0019795728559604767, |
|
"grad_norm": 0.30189332365989685, |
|
"learning_rate": 8.506183921362443e-05, |
|
"loss": 1.3821, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.002013703422442554, |
|
"grad_norm": 0.31529420614242554, |
|
"learning_rate": 8.44676704559283e-05, |
|
"loss": 1.023, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.002047833988924631, |
|
"grad_norm": 0.328961580991745, |
|
"learning_rate": 8.386407858128706e-05, |
|
"loss": 1.2987, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.0020819645554067084, |
|
"grad_norm": 0.36653995513916016, |
|
"learning_rate": 8.32512286056924e-05, |
|
"loss": 1.1651, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.0021160951218887857, |
|
"grad_norm": 0.30454298853874207, |
|
"learning_rate": 8.262928807620843e-05, |
|
"loss": 1.1648, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.002150225688370863, |
|
"grad_norm": 0.32735389471054077, |
|
"learning_rate": 8.199842702516583e-05, |
|
"loss": 1.0601, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.00218435625485294, |
|
"grad_norm": 0.33563855290412903, |
|
"learning_rate": 8.135881792367686e-05, |
|
"loss": 1.15, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.002218486821335017, |
|
"grad_norm": 0.342314749956131, |
|
"learning_rate": 8.07106356344834e-05, |
|
"loss": 1.1782, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.002252617387817094, |
|
"grad_norm": 0.37911802530288696, |
|
"learning_rate": 8.005405736415126e-05, |
|
"loss": 1.3051, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.0022867479542991714, |
|
"grad_norm": 0.3477848470211029, |
|
"learning_rate": 7.938926261462366e-05, |
|
"loss": 1.3239, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.0023208785207812486, |
|
"grad_norm": 0.40505796670913696, |
|
"learning_rate": 7.871643313414718e-05, |
|
"loss": 1.2536, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.002355009087263326, |
|
"grad_norm": 0.3553071916103363, |
|
"learning_rate": 7.803575286758364e-05, |
|
"loss": 1.2349, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.002389139653745403, |
|
"grad_norm": 0.359052836894989, |
|
"learning_rate": 7.734740790612136e-05, |
|
"loss": 1.3748, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.0024232702202274803, |
|
"grad_norm": 0.3827054500579834, |
|
"learning_rate": 7.66515864363997e-05, |
|
"loss": 1.3906, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.0024574007867095575, |
|
"grad_norm": 0.4195806682109833, |
|
"learning_rate": 7.594847868906076e-05, |
|
"loss": 1.2736, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.0024915313531916347, |
|
"grad_norm": 0.36370524764060974, |
|
"learning_rate": 7.52382768867422e-05, |
|
"loss": 1.3726, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.002525661919673712, |
|
"grad_norm": 0.37772655487060547, |
|
"learning_rate": 7.452117519152542e-05, |
|
"loss": 1.2047, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.0025597924861557888, |
|
"grad_norm": 0.4216993451118469, |
|
"learning_rate": 7.379736965185368e-05, |
|
"loss": 1.3595, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.002593923052637866, |
|
"grad_norm": 0.485747754573822, |
|
"learning_rate": 7.30670581489344e-05, |
|
"loss": 1.3527, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.0026280536191199432, |
|
"grad_norm": 0.3874759078025818, |
|
"learning_rate": 7.233044034264034e-05, |
|
"loss": 1.2473, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.0026621841856020205, |
|
"grad_norm": 0.41956737637519836, |
|
"learning_rate": 7.158771761692464e-05, |
|
"loss": 1.3199, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.0026963147520840977, |
|
"grad_norm": 0.4455489218235016, |
|
"learning_rate": 7.083909302476453e-05, |
|
"loss": 1.3745, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.002730445318566175, |
|
"grad_norm": 0.46923041343688965, |
|
"learning_rate": 7.008477123264848e-05, |
|
"loss": 1.2943, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.002764575885048252, |
|
"grad_norm": 0.4096168577671051, |
|
"learning_rate": 6.932495846462261e-05, |
|
"loss": 1.3311, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.0027987064515303294, |
|
"grad_norm": 0.4408420920372009, |
|
"learning_rate": 6.855986244591104e-05, |
|
"loss": 1.2125, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.0028328370180124066, |
|
"grad_norm": 0.4137131869792938, |
|
"learning_rate": 6.778969234612584e-05, |
|
"loss": 1.3847, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.002866967584494484, |
|
"grad_norm": 0.46820157766342163, |
|
"learning_rate": 6.701465872208216e-05, |
|
"loss": 1.468, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.0029010981509765606, |
|
"grad_norm": 0.4669029712677002, |
|
"learning_rate": 6.623497346023418e-05, |
|
"loss": 1.4026, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.002935228717458638, |
|
"grad_norm": 0.4239863455295563, |
|
"learning_rate": 6.545084971874738e-05, |
|
"loss": 1.3297, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.002969359283940715, |
|
"grad_norm": 0.5485544800758362, |
|
"learning_rate": 6.466250186922325e-05, |
|
"loss": 1.2713, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.0030034898504227923, |
|
"grad_norm": 0.4871569573879242, |
|
"learning_rate": 6.387014543809223e-05, |
|
"loss": 1.3044, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.0030376204169048696, |
|
"grad_norm": 0.45997199416160583, |
|
"learning_rate": 6.307399704769099e-05, |
|
"loss": 1.2952, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.003071750983386947, |
|
"grad_norm": 0.5230398774147034, |
|
"learning_rate": 6.227427435703997e-05, |
|
"loss": 1.3958, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.003105881549869024, |
|
"grad_norm": 0.5710857510566711, |
|
"learning_rate": 6.147119600233758e-05, |
|
"loss": 1.3733, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.0031400121163511012, |
|
"grad_norm": 0.5563080906867981, |
|
"learning_rate": 6.066498153718735e-05, |
|
"loss": 1.2037, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.0031741426828331785, |
|
"grad_norm": 0.5526666045188904, |
|
"learning_rate": 5.985585137257401e-05, |
|
"loss": 1.3215, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.0032082732493152557, |
|
"grad_norm": 0.5478617548942566, |
|
"learning_rate": 5.90440267166055e-05, |
|
"loss": 1.4201, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.0032424038157973325, |
|
"grad_norm": 0.6000781655311584, |
|
"learning_rate": 5.8229729514036705e-05, |
|
"loss": 1.5723, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.0032765343822794097, |
|
"grad_norm": 0.5676822662353516, |
|
"learning_rate": 5.74131823855921e-05, |
|
"loss": 1.4396, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.003310664948761487, |
|
"grad_norm": 0.5717068314552307, |
|
"learning_rate": 5.6594608567103456e-05, |
|
"loss": 1.4501, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.003344795515243564, |
|
"grad_norm": 0.7923446297645569, |
|
"learning_rate": 5.577423184847932e-05, |
|
"loss": 1.4407, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.0033789260817256414, |
|
"grad_norm": 0.7019022107124329, |
|
"learning_rate": 5.495227651252315e-05, |
|
"loss": 1.5061, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.0034130566482077187, |
|
"grad_norm": 0.753588855266571, |
|
"learning_rate": 5.4128967273616625e-05, |
|
"loss": 1.4598, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.0034130566482077187, |
|
"eval_loss": 1.2931729555130005, |
|
"eval_runtime": 1618.5571, |
|
"eval_samples_per_second": 30.488, |
|
"eval_steps_per_second": 7.622, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.003447187214689796, |
|
"grad_norm": 0.21361371874809265, |
|
"learning_rate": 5.330452921628497e-05, |
|
"loss": 1.1346, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.003481317781171873, |
|
"grad_norm": 0.2689608335494995, |
|
"learning_rate": 5.247918773366112e-05, |
|
"loss": 1.2523, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.0035154483476539503, |
|
"grad_norm": 0.2706867456436157, |
|
"learning_rate": 5.165316846586541e-05, |
|
"loss": 1.0295, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.0035495789141360276, |
|
"grad_norm": 0.3404642641544342, |
|
"learning_rate": 5.0826697238317935e-05, |
|
"loss": 1.0159, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.0035837094806181044, |
|
"grad_norm": 0.3048684895038605, |
|
"learning_rate": 5e-05, |
|
"loss": 1.1554, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.0036178400471001816, |
|
"grad_norm": 0.30554166436195374, |
|
"learning_rate": 4.917330276168208e-05, |
|
"loss": 1.193, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.003651970613582259, |
|
"grad_norm": 0.2992115616798401, |
|
"learning_rate": 4.834683153413459e-05, |
|
"loss": 1.0538, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.003686101180064336, |
|
"grad_norm": 0.311595618724823, |
|
"learning_rate": 4.7520812266338885e-05, |
|
"loss": 1.2201, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.0037202317465464133, |
|
"grad_norm": 0.2934740483760834, |
|
"learning_rate": 4.669547078371504e-05, |
|
"loss": 1.218, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.0037543623130284905, |
|
"grad_norm": 0.32242047786712646, |
|
"learning_rate": 4.5871032726383386e-05, |
|
"loss": 1.1889, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.0037884928795105677, |
|
"grad_norm": 0.30930933356285095, |
|
"learning_rate": 4.504772348747687e-05, |
|
"loss": 1.0954, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.003822623445992645, |
|
"grad_norm": 0.3188520073890686, |
|
"learning_rate": 4.4225768151520694e-05, |
|
"loss": 1.2221, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.003856754012474722, |
|
"grad_norm": 0.37747716903686523, |
|
"learning_rate": 4.3405391432896555e-05, |
|
"loss": 1.1932, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.0038908845789567994, |
|
"grad_norm": 0.315521240234375, |
|
"learning_rate": 4.2586817614407895e-05, |
|
"loss": 1.174, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.003925015145438877, |
|
"grad_norm": 0.36168909072875977, |
|
"learning_rate": 4.17702704859633e-05, |
|
"loss": 1.4362, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.0039591457119209535, |
|
"grad_norm": 0.3890553116798401, |
|
"learning_rate": 4.095597328339452e-05, |
|
"loss": 1.2651, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.003993276278403031, |
|
"grad_norm": 0.372367799282074, |
|
"learning_rate": 4.0144148627425993e-05, |
|
"loss": 1.1706, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.004027406844885108, |
|
"grad_norm": 0.3782535195350647, |
|
"learning_rate": 3.933501846281267e-05, |
|
"loss": 1.3138, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.004061537411367186, |
|
"grad_norm": 0.3837883174419403, |
|
"learning_rate": 3.852880399766243e-05, |
|
"loss": 1.3965, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.004095667977849262, |
|
"grad_norm": 0.34615853428840637, |
|
"learning_rate": 3.772572564296005e-05, |
|
"loss": 1.1693, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.004129798544331339, |
|
"grad_norm": 0.4134136140346527, |
|
"learning_rate": 3.6926002952309016e-05, |
|
"loss": 1.3328, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.004163929110813417, |
|
"grad_norm": 0.44440752267837524, |
|
"learning_rate": 3.612985456190778e-05, |
|
"loss": 1.2767, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.004198059677295494, |
|
"grad_norm": 0.36634060740470886, |
|
"learning_rate": 3.533749813077677e-05, |
|
"loss": 1.1117, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.004232190243777571, |
|
"grad_norm": 0.392229825258255, |
|
"learning_rate": 3.4549150281252636e-05, |
|
"loss": 1.2552, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.004266320810259648, |
|
"grad_norm": 0.3841002583503723, |
|
"learning_rate": 3.3765026539765834e-05, |
|
"loss": 1.2483, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.004300451376741726, |
|
"grad_norm": 0.39788320660591125, |
|
"learning_rate": 3.298534127791785e-05, |
|
"loss": 1.3653, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.0043345819432238026, |
|
"grad_norm": 0.40383124351501465, |
|
"learning_rate": 3.221030765387417e-05, |
|
"loss": 1.2413, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.00436871250970588, |
|
"grad_norm": 0.4268058240413666, |
|
"learning_rate": 3.144013755408895e-05, |
|
"loss": 1.3326, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.004402843076187957, |
|
"grad_norm": 0.41969579458236694, |
|
"learning_rate": 3.0675041535377405e-05, |
|
"loss": 1.4618, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.004436973642670034, |
|
"grad_norm": 0.40193435549736023, |
|
"learning_rate": 2.991522876735154e-05, |
|
"loss": 1.3925, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.0044711042091521115, |
|
"grad_norm": 0.44596028327941895, |
|
"learning_rate": 2.916090697523549e-05, |
|
"loss": 1.3146, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.004505234775634188, |
|
"grad_norm": 0.42286020517349243, |
|
"learning_rate": 2.8412282383075363e-05, |
|
"loss": 1.3824, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.004539365342116266, |
|
"grad_norm": 0.5087577104568481, |
|
"learning_rate": 2.766955965735968e-05, |
|
"loss": 1.4045, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.004573495908598343, |
|
"grad_norm": 0.4941883087158203, |
|
"learning_rate": 2.693294185106562e-05, |
|
"loss": 1.2844, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.00460762647508042, |
|
"grad_norm": 0.4566398859024048, |
|
"learning_rate": 2.6202630348146324e-05, |
|
"loss": 1.3141, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.004641757041562497, |
|
"grad_norm": 0.48468443751335144, |
|
"learning_rate": 2.547882480847461e-05, |
|
"loss": 1.5077, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.004675887608044575, |
|
"grad_norm": 0.46569719910621643, |
|
"learning_rate": 2.476172311325783e-05, |
|
"loss": 1.4478, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.004710018174526652, |
|
"grad_norm": 0.49201467633247375, |
|
"learning_rate": 2.405152131093926e-05, |
|
"loss": 1.3089, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.004744148741008729, |
|
"grad_norm": 0.505203902721405, |
|
"learning_rate": 2.3348413563600325e-05, |
|
"loss": 1.324, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.004778279307490806, |
|
"grad_norm": 0.5393547415733337, |
|
"learning_rate": 2.2652592093878666e-05, |
|
"loss": 1.6897, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.004812409873972883, |
|
"grad_norm": 0.46957552433013916, |
|
"learning_rate": 2.196424713241637e-05, |
|
"loss": 1.1522, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.004846540440454961, |
|
"grad_norm": 0.5212751030921936, |
|
"learning_rate": 2.128356686585282e-05, |
|
"loss": 1.3623, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.004880671006937037, |
|
"grad_norm": 0.5395988821983337, |
|
"learning_rate": 2.061073738537635e-05, |
|
"loss": 1.3394, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.004914801573419115, |
|
"grad_norm": 0.5298926830291748, |
|
"learning_rate": 1.9945942635848748e-05, |
|
"loss": 1.3597, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.004948932139901192, |
|
"grad_norm": 0.5359431505203247, |
|
"learning_rate": 1.928936436551661e-05, |
|
"loss": 1.3546, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.0049830627063832695, |
|
"grad_norm": 0.6278144121170044, |
|
"learning_rate": 1.8641182076323148e-05, |
|
"loss": 1.365, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.005017193272865346, |
|
"grad_norm": 0.646981418132782, |
|
"learning_rate": 1.800157297483417e-05, |
|
"loss": 1.4498, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.005051323839347424, |
|
"grad_norm": 0.5855222344398499, |
|
"learning_rate": 1.7370711923791567e-05, |
|
"loss": 1.3973, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.005085454405829501, |
|
"grad_norm": 0.7290293574333191, |
|
"learning_rate": 1.6748771394307585e-05, |
|
"loss": 1.6799, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.0051195849723115775, |
|
"grad_norm": 0.8321658968925476, |
|
"learning_rate": 1.6135921418712956e-05, |
|
"loss": 1.6338, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.0051195849723115775, |
|
"eval_loss": 1.2857680320739746, |
|
"eval_runtime": 1616.0774, |
|
"eval_samples_per_second": 30.535, |
|
"eval_steps_per_second": 7.634, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.005153715538793655, |
|
"grad_norm": 0.20292364060878754, |
|
"learning_rate": 1.553232954407171e-05, |
|
"loss": 1.071, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.005187846105275732, |
|
"grad_norm": 0.22954697906970978, |
|
"learning_rate": 1.4938160786375572e-05, |
|
"loss": 1.1002, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.00522197667175781, |
|
"grad_norm": 0.2702932059764862, |
|
"learning_rate": 1.435357758543015e-05, |
|
"loss": 1.2949, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.0052561072382398865, |
|
"grad_norm": 0.44548317790031433, |
|
"learning_rate": 1.3778739760445552e-05, |
|
"loss": 1.2602, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.005290237804721964, |
|
"grad_norm": 0.3724050223827362, |
|
"learning_rate": 1.3213804466343421e-05, |
|
"loss": 1.2083, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.005324368371204041, |
|
"grad_norm": 0.3160119354724884, |
|
"learning_rate": 1.2658926150792322e-05, |
|
"loss": 1.2519, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.005358498937686119, |
|
"grad_norm": 0.27721965312957764, |
|
"learning_rate": 1.2114256511983274e-05, |
|
"loss": 1.2035, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.005392629504168195, |
|
"grad_norm": 0.2958066761493683, |
|
"learning_rate": 1.157994445715706e-05, |
|
"loss": 1.3437, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.005426760070650273, |
|
"grad_norm": 0.30009925365448, |
|
"learning_rate": 1.1056136061894384e-05, |
|
"loss": 1.1908, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.00546089063713235, |
|
"grad_norm": 0.34238624572753906, |
|
"learning_rate": 1.0542974530180327e-05, |
|
"loss": 1.2945, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.005495021203614427, |
|
"grad_norm": 0.3141269385814667, |
|
"learning_rate": 1.0040600155253765e-05, |
|
"loss": 1.0959, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.005529151770096504, |
|
"grad_norm": 0.29361629486083984, |
|
"learning_rate": 9.549150281252633e-06, |
|
"loss": 1.0722, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.005563282336578581, |
|
"grad_norm": 0.3349563479423523, |
|
"learning_rate": 9.068759265665384e-06, |
|
"loss": 1.3526, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.005597412903060659, |
|
"grad_norm": 0.35232019424438477, |
|
"learning_rate": 8.599558442598998e-06, |
|
"loss": 1.3667, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.0056315434695427356, |
|
"grad_norm": 0.36815640330314636, |
|
"learning_rate": 8.141676086873572e-06, |
|
"loss": 1.2818, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.005665674036024813, |
|
"grad_norm": 0.4468331038951874, |
|
"learning_rate": 7.695237378953223e-06, |
|
"loss": 1.4774, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.00569980460250689, |
|
"grad_norm": 0.3803599178791046, |
|
"learning_rate": 7.260364370723044e-06, |
|
"loss": 1.2564, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.005733935168988968, |
|
"grad_norm": 0.3637125492095947, |
|
"learning_rate": 6.837175952121306e-06, |
|
"loss": 1.1024, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.0057680657354710445, |
|
"grad_norm": 0.36403417587280273, |
|
"learning_rate": 6.425787818636131e-06, |
|
"loss": 1.1804, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.005802196301953121, |
|
"grad_norm": 0.40999406576156616, |
|
"learning_rate": 6.026312439675552e-06, |
|
"loss": 1.5286, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.005836326868435199, |
|
"grad_norm": 0.3812612295150757, |
|
"learning_rate": 5.6388590278194096e-06, |
|
"loss": 1.3586, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.005870457434917276, |
|
"grad_norm": 0.35636425018310547, |
|
"learning_rate": 5.263533508961827e-06, |
|
"loss": 1.2715, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.005904588001399353, |
|
"grad_norm": 0.40908992290496826, |
|
"learning_rate": 4.900438493352055e-06, |
|
"loss": 1.4648, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.00593871856788143, |
|
"grad_norm": 0.38302552700042725, |
|
"learning_rate": 4.549673247541875e-06, |
|
"loss": 1.3154, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.005972849134363508, |
|
"grad_norm": 0.3900665044784546, |
|
"learning_rate": 4.2113336672471245e-06, |
|
"loss": 1.4876, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.006006979700845585, |
|
"grad_norm": 0.4690256714820862, |
|
"learning_rate": 3.885512251130763e-06, |
|
"loss": 1.3807, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.006041110267327662, |
|
"grad_norm": 0.41209375858306885, |
|
"learning_rate": 3.5722980755146517e-06, |
|
"loss": 1.0445, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.006075240833809739, |
|
"grad_norm": 0.39221101999282837, |
|
"learning_rate": 3.271776770026963e-06, |
|
"loss": 1.1444, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.006109371400291816, |
|
"grad_norm": 0.4169705808162689, |
|
"learning_rate": 2.9840304941919415e-06, |
|
"loss": 1.2882, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.006143501966773894, |
|
"grad_norm": 0.4122096598148346, |
|
"learning_rate": 2.7091379149682685e-06, |
|
"loss": 1.3848, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.00617763253325597, |
|
"grad_norm": 0.3993953764438629, |
|
"learning_rate": 2.4471741852423237e-06, |
|
"loss": 1.2617, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.006211763099738048, |
|
"grad_norm": 0.5413970947265625, |
|
"learning_rate": 2.1982109232821178e-06, |
|
"loss": 1.4804, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.006245893666220125, |
|
"grad_norm": 0.4104618430137634, |
|
"learning_rate": 1.962316193157593e-06, |
|
"loss": 1.231, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.0062800242327022025, |
|
"grad_norm": 0.4876774251461029, |
|
"learning_rate": 1.7395544861325718e-06, |
|
"loss": 1.2455, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.006314154799184279, |
|
"grad_norm": 0.44276532530784607, |
|
"learning_rate": 1.5299867030334814e-06, |
|
"loss": 1.2322, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.006348285365666357, |
|
"grad_norm": 0.4874870777130127, |
|
"learning_rate": 1.333670137599713e-06, |
|
"loss": 1.3524, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.006382415932148434, |
|
"grad_norm": 0.5349838733673096, |
|
"learning_rate": 1.1506584608200367e-06, |
|
"loss": 1.231, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.006416546498630511, |
|
"grad_norm": 0.4433724582195282, |
|
"learning_rate": 9.810017062595322e-07, |
|
"loss": 1.2613, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.006450677065112588, |
|
"grad_norm": 0.4934666156768799, |
|
"learning_rate": 8.247462563808817e-07, |
|
"loss": 1.348, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.006484807631594665, |
|
"grad_norm": 0.5140841603279114, |
|
"learning_rate": 6.819348298638839e-07, |
|
"loss": 1.1634, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.006518938198076743, |
|
"grad_norm": 0.5503444075584412, |
|
"learning_rate": 5.526064699265753e-07, |
|
"loss": 1.4757, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.0065530687645588195, |
|
"grad_norm": 0.5057629346847534, |
|
"learning_rate": 4.367965336512403e-07, |
|
"loss": 1.3663, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.006587199331040897, |
|
"grad_norm": 0.5920835733413696, |
|
"learning_rate": 3.3453668231809286e-07, |
|
"loss": 1.3937, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.006621329897522974, |
|
"grad_norm": 0.5021880865097046, |
|
"learning_rate": 2.458548727494292e-07, |
|
"loss": 1.3377, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.006655460464005052, |
|
"grad_norm": 0.6006019711494446, |
|
"learning_rate": 1.7077534966650766e-07, |
|
"loss": 1.3383, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.006689591030487128, |
|
"grad_norm": 0.5368791818618774, |
|
"learning_rate": 1.0931863906127327e-07, |
|
"loss": 1.2476, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.006723721596969206, |
|
"grad_norm": 0.5480520129203796, |
|
"learning_rate": 6.150154258476315e-08, |
|
"loss": 1.3689, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.006757852163451283, |
|
"grad_norm": 0.6008732318878174, |
|
"learning_rate": 2.7337132953697554e-08, |
|
"loss": 1.398, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.00679198272993336, |
|
"grad_norm": 0.6981407403945923, |
|
"learning_rate": 6.834750376549792e-09, |
|
"loss": 1.5422, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.006826113296415437, |
|
"grad_norm": 0.7063643932342529, |
|
"learning_rate": 0.0, |
|
"loss": 1.6255, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.006826113296415437, |
|
"eval_loss": 1.284598708152771, |
|
"eval_runtime": 1618.0775, |
|
"eval_samples_per_second": 30.497, |
|
"eval_steps_per_second": 7.624, |
|
"step": 200 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 200, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 50, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.0 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 0 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 6.028005784682496e+16, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |