{
  "best_metric": 2.543463706970215,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 0.01797187401716314,
  "eval_steps": 50,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00017971874017163139,
      "grad_norm": 2.2578516006469727,
      "learning_rate": 1e-05,
      "loss": 4.1401,
      "step": 1
    },
    {
      "epoch": 0.00017971874017163139,
      "eval_loss": 4.801219940185547,
      "eval_runtime": 367.8891,
      "eval_samples_per_second": 25.475,
      "eval_steps_per_second": 6.369,
      "step": 1
    },
    {
      "epoch": 0.00035943748034326277,
      "grad_norm": 2.5639493465423584,
      "learning_rate": 2e-05,
      "loss": 4.1657,
      "step": 2
    },
    {
      "epoch": 0.0005391562205148942,
      "grad_norm": 2.663560628890991,
      "learning_rate": 3e-05,
      "loss": 4.4236,
      "step": 3
    },
    {
      "epoch": 0.0007188749606865255,
      "grad_norm": 2.5879647731781006,
      "learning_rate": 4e-05,
      "loss": 4.2015,
      "step": 4
    },
    {
      "epoch": 0.0008985937008581569,
      "grad_norm": 2.953747510910034,
      "learning_rate": 5e-05,
      "loss": 4.553,
      "step": 5
    },
    {
      "epoch": 0.0010783124410297883,
      "grad_norm": 2.942636489868164,
      "learning_rate": 6e-05,
      "loss": 4.6065,
      "step": 6
    },
    {
      "epoch": 0.0012580311812014197,
      "grad_norm": 2.7334063053131104,
      "learning_rate": 7e-05,
      "loss": 4.4443,
      "step": 7
    },
    {
      "epoch": 0.001437749921373051,
      "grad_norm": 2.822499990463257,
      "learning_rate": 8e-05,
      "loss": 4.668,
      "step": 8
    },
    {
      "epoch": 0.0016174686615446825,
      "grad_norm": 2.929219961166382,
      "learning_rate": 9e-05,
      "loss": 4.6229,
      "step": 9
    },
    {
      "epoch": 0.0017971874017163139,
      "grad_norm": 2.8781981468200684,
      "learning_rate": 0.0001,
      "loss": 4.6078,
      "step": 10
    },
    {
      "epoch": 0.0019769061418879455,
      "grad_norm": 3.6415092945098877,
      "learning_rate": 9.99695413509548e-05,
      "loss": 4.3571,
      "step": 11
    },
    {
      "epoch": 0.0021566248820595766,
      "grad_norm": 2.46899151802063,
      "learning_rate": 9.987820251299122e-05,
      "loss": 4.3218,
      "step": 12
    },
    {
      "epoch": 0.0023363436222312082,
      "grad_norm": 2.7327287197113037,
      "learning_rate": 9.972609476841367e-05,
      "loss": 4.6434,
      "step": 13
    },
    {
      "epoch": 0.0025160623624028394,
      "grad_norm": 3.258862257003784,
      "learning_rate": 9.951340343707852e-05,
      "loss": 4.4217,
      "step": 14
    },
    {
      "epoch": 0.002695781102574471,
      "grad_norm": 3.4570999145507812,
      "learning_rate": 9.924038765061042e-05,
      "loss": 4.7055,
      "step": 15
    },
    {
      "epoch": 0.002875499842746102,
      "grad_norm": 3.934077262878418,
      "learning_rate": 9.890738003669029e-05,
      "loss": 4.8773,
      "step": 16
    },
    {
      "epoch": 0.003055218582917734,
      "grad_norm": 4.6774163246154785,
      "learning_rate": 9.851478631379982e-05,
      "loss": 5.6978,
      "step": 17
    },
    {
      "epoch": 0.003234937323089365,
      "grad_norm": 5.593666076660156,
      "learning_rate": 9.806308479691595e-05,
      "loss": 4.3083,
      "step": 18
    },
    {
      "epoch": 0.0034146560632609966,
      "grad_norm": 6.528815746307373,
      "learning_rate": 9.755282581475769e-05,
      "loss": 3.6124,
      "step": 19
    },
    {
      "epoch": 0.0035943748034326277,
      "grad_norm": 5.437340259552002,
      "learning_rate": 9.698463103929542e-05,
      "loss": 3.3371,
      "step": 20
    },
    {
      "epoch": 0.0037740935436042593,
      "grad_norm": 3.9103472232818604,
      "learning_rate": 9.635919272833938e-05,
      "loss": 3.4655,
      "step": 21
    },
    {
      "epoch": 0.003953812283775891,
      "grad_norm": 3.729229211807251,
      "learning_rate": 9.567727288213005e-05,
      "loss": 2.9374,
      "step": 22
    },
    {
      "epoch": 0.004133531023947522,
      "grad_norm": 3.5457568168640137,
      "learning_rate": 9.493970231495835e-05,
      "loss": 2.954,
      "step": 23
    },
    {
      "epoch": 0.004313249764119153,
      "grad_norm": 3.179713487625122,
      "learning_rate": 9.414737964294636e-05,
      "loss": 2.623,
      "step": 24
    },
    {
      "epoch": 0.004492968504290785,
      "grad_norm": 2.981762409210205,
      "learning_rate": 9.330127018922194e-05,
      "loss": 2.6878,
      "step": 25
    },
    {
      "epoch": 0.0046726872444624165,
      "grad_norm": 2.9266059398651123,
      "learning_rate": 9.24024048078213e-05,
      "loss": 2.7867,
      "step": 26
    },
    {
      "epoch": 0.004852405984634048,
      "grad_norm": 2.7733054161071777,
      "learning_rate": 9.145187862775209e-05,
      "loss": 2.6444,
      "step": 27
    },
    {
      "epoch": 0.005032124724805679,
      "grad_norm": 3.0727224349975586,
      "learning_rate": 9.045084971874738e-05,
      "loss": 2.6041,
      "step": 28
    },
    {
      "epoch": 0.005211843464977311,
      "grad_norm": 2.8358943462371826,
      "learning_rate": 8.940053768033609e-05,
      "loss": 2.3593,
      "step": 29
    },
    {
      "epoch": 0.005391562205148942,
      "grad_norm": 2.822882890701294,
      "learning_rate": 8.83022221559489e-05,
      "loss": 2.3585,
      "step": 30
    },
    {
      "epoch": 0.005571280945320573,
      "grad_norm": 3.0162363052368164,
      "learning_rate": 8.715724127386972e-05,
      "loss": 2.7616,
      "step": 31
    },
    {
      "epoch": 0.005750999685492204,
      "grad_norm": 2.5073037147521973,
      "learning_rate": 8.596699001693255e-05,
      "loss": 2.2075,
      "step": 32
    },
    {
      "epoch": 0.005930718425663836,
      "grad_norm": 2.8125205039978027,
      "learning_rate": 8.473291852294987e-05,
      "loss": 2.7952,
      "step": 33
    },
    {
      "epoch": 0.006110437165835468,
      "grad_norm": 2.533489227294922,
      "learning_rate": 8.345653031794292e-05,
      "loss": 2.4959,
      "step": 34
    },
    {
      "epoch": 0.006290155906007099,
      "grad_norm": 2.484999179840088,
      "learning_rate": 8.213938048432697e-05,
      "loss": 2.3936,
      "step": 35
    },
    {
      "epoch": 0.00646987464617873,
      "grad_norm": 2.747763156890869,
      "learning_rate": 8.07830737662829e-05,
      "loss": 2.757,
      "step": 36
    },
    {
      "epoch": 0.006649593386350362,
      "grad_norm": 2.4683239459991455,
      "learning_rate": 7.938926261462366e-05,
      "loss": 2.2322,
      "step": 37
    },
    {
      "epoch": 0.006829312126521993,
      "grad_norm": 2.535304546356201,
      "learning_rate": 7.795964517353735e-05,
      "loss": 2.2977,
      "step": 38
    },
    {
      "epoch": 0.007009030866693624,
      "grad_norm": 2.9011669158935547,
      "learning_rate": 7.649596321166024e-05,
      "loss": 2.6045,
      "step": 39
    },
    {
      "epoch": 0.0071887496068652555,
      "grad_norm": 2.4643850326538086,
      "learning_rate": 7.500000000000001e-05,
      "loss": 2.3071,
      "step": 40
    },
    {
      "epoch": 0.0073684683470368875,
      "grad_norm": 2.931058645248413,
      "learning_rate": 7.347357813929454e-05,
      "loss": 2.8353,
      "step": 41
    },
    {
      "epoch": 0.007548187087208519,
      "grad_norm": 2.6232857704162598,
      "learning_rate": 7.191855733945387e-05,
      "loss": 2.4672,
      "step": 42
    },
    {
      "epoch": 0.00772790582738015,
      "grad_norm": 2.5271997451782227,
      "learning_rate": 7.033683215379002e-05,
      "loss": 2.4418,
      "step": 43
    },
    {
      "epoch": 0.007907624567551782,
      "grad_norm": 2.7675743103027344,
      "learning_rate": 6.873032967079561e-05,
      "loss": 2.3251,
      "step": 44
    },
    {
      "epoch": 0.008087343307723413,
      "grad_norm": 2.661940813064575,
      "learning_rate": 6.710100716628344e-05,
      "loss": 2.2531,
      "step": 45
    },
    {
      "epoch": 0.008267062047895044,
      "grad_norm": 2.481865406036377,
      "learning_rate": 6.545084971874738e-05,
      "loss": 1.9369,
      "step": 46
    },
    {
      "epoch": 0.008446780788066675,
      "grad_norm": 2.444103956222534,
      "learning_rate": 6.378186779084995e-05,
      "loss": 2.0198,
      "step": 47
    },
    {
      "epoch": 0.008626499528238307,
      "grad_norm": 3.11004376411438,
      "learning_rate": 6.209609477998338e-05,
      "loss": 2.026,
      "step": 48
    },
    {
      "epoch": 0.008806218268409938,
      "grad_norm": 2.6639597415924072,
      "learning_rate": 6.0395584540887963e-05,
      "loss": 2.2112,
      "step": 49
    },
    {
      "epoch": 0.00898593700858157,
      "grad_norm": 2.8253867626190186,
      "learning_rate": 5.868240888334653e-05,
      "loss": 2.3106,
      "step": 50
    },
    {
      "epoch": 0.00898593700858157,
      "eval_loss": 2.702011823654175,
      "eval_runtime": 369.1223,
      "eval_samples_per_second": 25.39,
      "eval_steps_per_second": 6.347,
      "step": 50
    },
    {
      "epoch": 0.009165655748753202,
      "grad_norm": 4.447997570037842,
      "learning_rate": 5.695865504800327e-05,
      "loss": 3.1439,
      "step": 51
    },
    {
      "epoch": 0.009345374488924833,
      "grad_norm": 4.879358291625977,
      "learning_rate": 5.522642316338268e-05,
      "loss": 3.0015,
      "step": 52
    },
    {
      "epoch": 0.009525093229096464,
      "grad_norm": 3.736008882522583,
      "learning_rate": 5.348782368720626e-05,
      "loss": 2.9797,
      "step": 53
    },
    {
      "epoch": 0.009704811969268095,
      "grad_norm": 3.3615059852600098,
      "learning_rate": 5.174497483512506e-05,
      "loss": 3.3752,
      "step": 54
    },
    {
      "epoch": 0.009884530709439726,
      "grad_norm": 2.5511536598205566,
      "learning_rate": 5e-05,
      "loss": 2.8825,
      "step": 55
    },
    {
      "epoch": 0.010064249449611358,
      "grad_norm": 2.8620405197143555,
      "learning_rate": 4.825502516487497e-05,
      "loss": 2.9289,
      "step": 56
    },
    {
      "epoch": 0.010243968189782989,
      "grad_norm": 2.905618190765381,
      "learning_rate": 4.6512176312793736e-05,
      "loss": 3.1637,
      "step": 57
    },
    {
      "epoch": 0.010423686929954622,
      "grad_norm": 3.0646848678588867,
      "learning_rate": 4.477357683661734e-05,
      "loss": 3.0125,
      "step": 58
    },
    {
      "epoch": 0.010603405670126253,
      "grad_norm": 3.3001129627227783,
      "learning_rate": 4.3041344951996746e-05,
      "loss": 3.1014,
      "step": 59
    },
    {
      "epoch": 0.010783124410297884,
      "grad_norm": 3.306954860687256,
      "learning_rate": 4.131759111665349e-05,
      "loss": 3.4818,
      "step": 60
    },
    {
      "epoch": 0.010962843150469515,
      "grad_norm": 3.376537561416626,
      "learning_rate": 3.960441545911204e-05,
      "loss": 3.5095,
      "step": 61
    },
    {
      "epoch": 0.011142561890641146,
      "grad_norm": 3.216480016708374,
      "learning_rate": 3.790390522001662e-05,
      "loss": 3.4539,
      "step": 62
    },
    {
      "epoch": 0.011322280630812778,
      "grad_norm": 3.4549150466918945,
      "learning_rate": 3.6218132209150045e-05,
      "loss": 3.3442,
      "step": 63
    },
    {
      "epoch": 0.011501999370984409,
      "grad_norm": 3.040135145187378,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 3.3466,
      "step": 64
    },
    {
      "epoch": 0.011681718111156042,
      "grad_norm": 3.064269542694092,
      "learning_rate": 3.289899283371657e-05,
      "loss": 3.2623,
      "step": 65
    },
    {
      "epoch": 0.011861436851327673,
      "grad_norm": 3.851158618927002,
      "learning_rate": 3.12696703292044e-05,
      "loss": 3.8449,
      "step": 66
    },
    {
      "epoch": 0.012041155591499304,
      "grad_norm": 3.46075177192688,
      "learning_rate": 2.9663167846209998e-05,
      "loss": 3.4347,
      "step": 67
    },
    {
      "epoch": 0.012220874331670935,
      "grad_norm": 3.6675915718078613,
      "learning_rate": 2.8081442660546125e-05,
      "loss": 4.0072,
      "step": 68
    },
    {
      "epoch": 0.012400593071842566,
      "grad_norm": 3.480902671813965,
      "learning_rate": 2.6526421860705473e-05,
      "loss": 2.4953,
      "step": 69
    },
    {
      "epoch": 0.012580311812014197,
      "grad_norm": 4.579424858093262,
      "learning_rate": 2.500000000000001e-05,
      "loss": 2.6752,
      "step": 70
    },
    {
      "epoch": 0.012760030552185829,
      "grad_norm": 3.595804214477539,
      "learning_rate": 2.350403678833976e-05,
      "loss": 2.4067,
      "step": 71
    },
    {
      "epoch": 0.01293974929235746,
      "grad_norm": 3.2332661151885986,
      "learning_rate": 2.2040354826462668e-05,
      "loss": 2.1416,
      "step": 72
    },
    {
      "epoch": 0.013119468032529093,
      "grad_norm": 3.2234134674072266,
      "learning_rate": 2.061073738537635e-05,
      "loss": 2.7848,
      "step": 73
    },
    {
      "epoch": 0.013299186772700724,
      "grad_norm": 3.04959774017334,
      "learning_rate": 1.9216926233717085e-05,
      "loss": 2.432,
      "step": 74
    },
    {
      "epoch": 0.013478905512872355,
      "grad_norm": 2.643514633178711,
      "learning_rate": 1.7860619515673033e-05,
      "loss": 2.0934,
      "step": 75
    },
    {
      "epoch": 0.013658624253043986,
      "grad_norm": 3.055098056793213,
      "learning_rate": 1.6543469682057106e-05,
      "loss": 2.3272,
      "step": 76
    },
    {
      "epoch": 0.013838342993215617,
      "grad_norm": 2.4809956550598145,
      "learning_rate": 1.526708147705013e-05,
      "loss": 2.2483,
      "step": 77
    },
    {
      "epoch": 0.014018061733387249,
      "grad_norm": 2.64147686958313,
      "learning_rate": 1.4033009983067452e-05,
      "loss": 2.3055,
      "step": 78
    },
    {
      "epoch": 0.01419778047355888,
      "grad_norm": 2.2932167053222656,
      "learning_rate": 1.2842758726130283e-05,
      "loss": 2.0433,
      "step": 79
    },
    {
      "epoch": 0.014377499213730511,
      "grad_norm": 2.3609931468963623,
      "learning_rate": 1.1697777844051105e-05,
      "loss": 2.3359,
      "step": 80
    },
    {
      "epoch": 0.014557217953902144,
      "grad_norm": 2.249638080596924,
      "learning_rate": 1.0599462319663905e-05,
      "loss": 2.1387,
      "step": 81
    },
    {
      "epoch": 0.014736936694073775,
      "grad_norm": 2.8411879539489746,
      "learning_rate": 9.549150281252633e-06,
      "loss": 2.463,
      "step": 82
    },
    {
      "epoch": 0.014916655434245406,
      "grad_norm": 2.662808418273926,
      "learning_rate": 8.548121372247918e-06,
      "loss": 2.5389,
      "step": 83
    },
    {
      "epoch": 0.015096374174417037,
      "grad_norm": 2.8071436882019043,
      "learning_rate": 7.597595192178702e-06,
      "loss": 2.1706,
      "step": 84
    },
    {
      "epoch": 0.015276092914588668,
      "grad_norm": 2.656956911087036,
      "learning_rate": 6.698729810778065e-06,
      "loss": 2.3759,
      "step": 85
    },
    {
      "epoch": 0.0154558116547603,
      "grad_norm": 3.031149387359619,
      "learning_rate": 5.852620357053651e-06,
      "loss": 2.2298,
      "step": 86
    },
    {
      "epoch": 0.015635530394931933,
      "grad_norm": 2.755890369415283,
      "learning_rate": 5.060297685041659e-06,
      "loss": 2.4255,
      "step": 87
    },
    {
      "epoch": 0.015815249135103564,
      "grad_norm": 2.5731019973754883,
      "learning_rate": 4.322727117869951e-06,
      "loss": 2.2818,
      "step": 88
    },
    {
      "epoch": 0.015994967875275195,
      "grad_norm": 2.7685189247131348,
      "learning_rate": 3.6408072716606346e-06,
      "loss": 2.3691,
      "step": 89
    },
    {
      "epoch": 0.016174686615446826,
      "grad_norm": 2.6368041038513184,
      "learning_rate": 3.0153689607045845e-06,
      "loss": 2.4944,
      "step": 90
    },
    {
      "epoch": 0.016354405355618457,
      "grad_norm": 2.858224630355835,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 2.2883,
      "step": 91
    },
    {
      "epoch": 0.01653412409579009,
      "grad_norm": 2.793339252471924,
      "learning_rate": 1.9369152030840556e-06,
      "loss": 2.4953,
      "step": 92
    },
    {
      "epoch": 0.01671384283596172,
      "grad_norm": 2.8187801837921143,
      "learning_rate": 1.4852136862001764e-06,
      "loss": 2.2414,
      "step": 93
    },
    {
      "epoch": 0.01689356157613335,
      "grad_norm": 2.9361603260040283,
      "learning_rate": 1.0926199633097157e-06,
      "loss": 2.4021,
      "step": 94
    },
    {
      "epoch": 0.017073280316304982,
      "grad_norm": 2.7744662761688232,
      "learning_rate": 7.596123493895991e-07,
      "loss": 2.1991,
      "step": 95
    },
    {
      "epoch": 0.017252999056476613,
      "grad_norm": 2.6500556468963623,
      "learning_rate": 4.865965629214819e-07,
      "loss": 2.2625,
      "step": 96
    },
    {
      "epoch": 0.017432717796648244,
      "grad_norm": 2.4501571655273438,
      "learning_rate": 2.7390523158633554e-07,
      "loss": 2.1335,
      "step": 97
    },
    {
      "epoch": 0.017612436536819875,
      "grad_norm": 2.76723575592041,
      "learning_rate": 1.2179748700879012e-07,
      "loss": 2.0047,
      "step": 98
    },
    {
      "epoch": 0.017792155276991507,
      "grad_norm": 2.838305711746216,
      "learning_rate": 3.04586490452119e-08,
      "loss": 2.3863,
      "step": 99
    },
    {
      "epoch": 0.01797187401716314,
      "grad_norm": 3.0587007999420166,
      "learning_rate": 0.0,
      "loss": 2.2002,
      "step": 100
    },
    {
      "epoch": 0.01797187401716314,
      "eval_loss": 2.543463706970215,
      "eval_runtime": 368.7,
      "eval_samples_per_second": 25.419,
      "eval_steps_per_second": 6.355,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3.609045705424896e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}