|
{
  "best_metric": 2.786036252975464,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 0.03195270998921596,
  "eval_steps": 50,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0003195270998921596,
      "grad_norm": 0.4542631208896637,
      "learning_rate": 5.000000000000001e-07,
      "loss": 15.1568,
      "step": 1
    },
    {
      "epoch": 0.0003195270998921596,
      "eval_loss": 2.822023630142212,
      "eval_runtime": 886.3385,
      "eval_samples_per_second": 5.947,
      "eval_steps_per_second": 1.487,
      "step": 1
    },
    {
      "epoch": 0.0006390541997843192,
      "grad_norm": 0.4103067219257355,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 15.4511,
      "step": 2
    },
    {
      "epoch": 0.0009585812996764789,
      "grad_norm": 0.44541919231414795,
      "learning_rate": 1.5e-06,
      "loss": 12.3824,
      "step": 3
    },
    {
      "epoch": 0.0012781083995686384,
      "grad_norm": 0.6393632292747498,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 15.0988,
      "step": 4
    },
    {
      "epoch": 0.001597635499460798,
      "grad_norm": 0.6381840109825134,
      "learning_rate": 2.5e-06,
      "loss": 15.25,
      "step": 5
    },
    {
      "epoch": 0.0019171625993529577,
      "grad_norm": 0.7214181423187256,
      "learning_rate": 3e-06,
      "loss": 16.3453,
      "step": 6
    },
    {
      "epoch": 0.002236689699245117,
      "grad_norm": 0.8121290802955627,
      "learning_rate": 3.5e-06,
      "loss": 17.3123,
      "step": 7
    },
    {
      "epoch": 0.002556216799137277,
      "grad_norm": 0.7688579559326172,
      "learning_rate": 4.000000000000001e-06,
      "loss": 17.9848,
      "step": 8
    },
    {
      "epoch": 0.0028757438990294364,
      "grad_norm": 0.8018252849578857,
      "learning_rate": 4.5e-06,
      "loss": 16.204,
      "step": 9
    },
    {
      "epoch": 0.003195270998921596,
      "grad_norm": 0.8581808805465698,
      "learning_rate": 5e-06,
      "loss": 17.314,
      "step": 10
    },
    {
      "epoch": 0.0035147980988137558,
      "grad_norm": 0.8556950092315674,
      "learning_rate": 4.99847706754774e-06,
      "loss": 16.6607,
      "step": 11
    },
    {
      "epoch": 0.0038343251987059154,
      "grad_norm": 1.0415091514587402,
      "learning_rate": 4.993910125649561e-06,
      "loss": 17.439,
      "step": 12
    },
    {
      "epoch": 0.004153852298598075,
      "grad_norm": 0.9096566438674927,
      "learning_rate": 4.986304738420684e-06,
      "loss": 17.4097,
      "step": 13
    },
    {
      "epoch": 0.004473379398490234,
      "grad_norm": 1.0269314050674438,
      "learning_rate": 4.975670171853926e-06,
      "loss": 17.4317,
      "step": 14
    },
    {
      "epoch": 0.004792906498382394,
      "grad_norm": 1.1478707790374756,
      "learning_rate": 4.962019382530521e-06,
      "loss": 18.5223,
      "step": 15
    },
    {
      "epoch": 0.005112433598274554,
      "grad_norm": 1.636509895324707,
      "learning_rate": 4.9453690018345144e-06,
      "loss": 18.0751,
      "step": 16
    },
    {
      "epoch": 0.005431960698166713,
      "grad_norm": 1.154109001159668,
      "learning_rate": 4.925739315689991e-06,
      "loss": 17.521,
      "step": 17
    },
    {
      "epoch": 0.005751487798058873,
      "grad_norm": 1.0234571695327759,
      "learning_rate": 4.903154239845798e-06,
      "loss": 18.8366,
      "step": 18
    },
    {
      "epoch": 0.0060710148979510326,
      "grad_norm": 1.3324098587036133,
      "learning_rate": 4.8776412907378845e-06,
      "loss": 19.0732,
      "step": 19
    },
    {
      "epoch": 0.006390541997843192,
      "grad_norm": 1.424180030822754,
      "learning_rate": 4.849231551964771e-06,
      "loss": 19.0163,
      "step": 20
    },
    {
      "epoch": 0.006710069097735352,
      "grad_norm": 1.2417484521865845,
      "learning_rate": 4.817959636416969e-06,
      "loss": 19.9384,
      "step": 21
    },
    {
      "epoch": 0.0070295961976275115,
      "grad_norm": 1.5843204259872437,
      "learning_rate": 4.783863644106502e-06,
      "loss": 18.4232,
      "step": 22
    },
    {
      "epoch": 0.007349123297519671,
      "grad_norm": 1.378356695175171,
      "learning_rate": 4.746985115747918e-06,
      "loss": 19.6798,
      "step": 23
    },
    {
      "epoch": 0.007668650397411831,
      "grad_norm": 1.4657114744186401,
      "learning_rate": 4.707368982147318e-06,
      "loss": 19.7077,
      "step": 24
    },
    {
      "epoch": 0.00798817749730399,
      "grad_norm": 1.4137859344482422,
      "learning_rate": 4.665063509461098e-06,
      "loss": 19.1794,
      "step": 25
    },
    {
      "epoch": 0.00830770459719615,
      "grad_norm": 1.708204746246338,
      "learning_rate": 4.620120240391065e-06,
      "loss": 20.172,
      "step": 26
    },
    {
      "epoch": 0.00862723169708831,
      "grad_norm": 1.3741956949234009,
      "learning_rate": 4.572593931387604e-06,
      "loss": 19.5121,
      "step": 27
    },
    {
      "epoch": 0.008946758796980469,
      "grad_norm": 1.672687292098999,
      "learning_rate": 4.522542485937369e-06,
      "loss": 19.3691,
      "step": 28
    },
    {
      "epoch": 0.009266285896872629,
      "grad_norm": 1.9943945407867432,
      "learning_rate": 4.470026884016805e-06,
      "loss": 17.0405,
      "step": 29
    },
    {
      "epoch": 0.009585812996764788,
      "grad_norm": 2.3718981742858887,
      "learning_rate": 4.415111107797445e-06,
      "loss": 17.6282,
      "step": 30
    },
    {
      "epoch": 0.009905340096656948,
      "grad_norm": 2.2666637897491455,
      "learning_rate": 4.357862063693486e-06,
      "loss": 17.6547,
      "step": 31
    },
    {
      "epoch": 0.010224867196549107,
      "grad_norm": 2.4329729080200195,
      "learning_rate": 4.2983495008466285e-06,
      "loss": 18.943,
      "step": 32
    },
    {
      "epoch": 0.010544394296441268,
      "grad_norm": 2.7743804454803467,
      "learning_rate": 4.236645926147493e-06,
      "loss": 19.7421,
      "step": 33
    },
    {
      "epoch": 0.010863921396333426,
      "grad_norm": 2.731886148452759,
      "learning_rate": 4.172826515897146e-06,
      "loss": 24.2004,
      "step": 34
    },
    {
      "epoch": 0.011183448496225585,
      "grad_norm": 2.7625598907470703,
      "learning_rate": 4.106969024216348e-06,
      "loss": 26.9592,
      "step": 35
    },
    {
      "epoch": 0.011502975596117746,
      "grad_norm": 3.5860679149627686,
      "learning_rate": 4.039153688314146e-06,
      "loss": 28.8936,
      "step": 36
    },
    {
      "epoch": 0.011822502696009905,
      "grad_norm": 2.9752390384674072,
      "learning_rate": 3.969463130731183e-06,
      "loss": 28.598,
      "step": 37
    },
    {
      "epoch": 0.012142029795902065,
      "grad_norm": 3.910142183303833,
      "learning_rate": 3.897982258676867e-06,
      "loss": 29.005,
      "step": 38
    },
    {
      "epoch": 0.012461556895794224,
      "grad_norm": 3.509363889694214,
      "learning_rate": 3.824798160583012e-06,
      "loss": 29.2251,
      "step": 39
    },
    {
      "epoch": 0.012781083995686384,
      "grad_norm": 4.890401840209961,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 29.8219,
      "step": 40
    },
    {
      "epoch": 0.013100611095578543,
      "grad_norm": 4.316377639770508,
      "learning_rate": 3.6736789069647273e-06,
      "loss": 29.8349,
      "step": 41
    },
    {
      "epoch": 0.013420138195470704,
      "grad_norm": 5.008350372314453,
      "learning_rate": 3.595927866972694e-06,
      "loss": 31.3168,
      "step": 42
    },
    {
      "epoch": 0.013739665295362863,
      "grad_norm": 4.941371440887451,
      "learning_rate": 3.516841607689501e-06,
      "loss": 31.449,
      "step": 43
    },
    {
      "epoch": 0.014059192395255023,
      "grad_norm": 6.248507022857666,
      "learning_rate": 3.436516483539781e-06,
      "loss": 32.5496,
      "step": 44
    },
    {
      "epoch": 0.014378719495147182,
      "grad_norm": 5.927119255065918,
      "learning_rate": 3.3550503583141726e-06,
      "loss": 33.2277,
      "step": 45
    },
    {
      "epoch": 0.014698246595039342,
      "grad_norm": 6.774079322814941,
      "learning_rate": 3.272542485937369e-06,
      "loss": 33.6978,
      "step": 46
    },
    {
      "epoch": 0.015017773694931501,
      "grad_norm": 7.833065986633301,
      "learning_rate": 3.189093389542498e-06,
      "loss": 34.0778,
      "step": 47
    },
    {
      "epoch": 0.015337300794823662,
      "grad_norm": 8.638693809509277,
      "learning_rate": 3.1048047389991693e-06,
      "loss": 35.4129,
      "step": 48
    },
    {
      "epoch": 0.01565682789471582,
      "grad_norm": 10.667224884033203,
      "learning_rate": 3.019779227044398e-06,
      "loss": 38.1759,
      "step": 49
    },
    {
      "epoch": 0.01597635499460798,
      "grad_norm": 14.18898868560791,
      "learning_rate": 2.9341204441673267e-06,
      "loss": 42.3519,
      "step": 50
    },
    {
      "epoch": 0.01597635499460798,
      "eval_loss": 2.800990581512451,
      "eval_runtime": 885.3579,
      "eval_samples_per_second": 5.954,
      "eval_steps_per_second": 1.489,
      "step": 50
    },
    {
      "epoch": 0.01629588209450014,
      "grad_norm": 0.4598936140537262,
      "learning_rate": 2.847932752400164e-06,
      "loss": 14.898,
      "step": 51
    },
    {
      "epoch": 0.0166154091943923,
      "grad_norm": 0.5623840689659119,
      "learning_rate": 2.761321158169134e-06,
      "loss": 14.9036,
      "step": 52
    },
    {
      "epoch": 0.01693493629428446,
      "grad_norm": 0.5559747815132141,
      "learning_rate": 2.6743911843603134e-06,
      "loss": 13.1005,
      "step": 53
    },
    {
      "epoch": 0.01725446339417662,
      "grad_norm": 0.7431466579437256,
      "learning_rate": 2.587248741756253e-06,
      "loss": 16.5878,
      "step": 54
    },
    {
      "epoch": 0.017573990494068777,
      "grad_norm": 0.8783532977104187,
      "learning_rate": 2.5e-06,
      "loss": 17.6206,
      "step": 55
    },
    {
      "epoch": 0.017893517593960937,
      "grad_norm": 0.8926708698272705,
      "learning_rate": 2.4127512582437486e-06,
      "loss": 16.7598,
      "step": 56
    },
    {
      "epoch": 0.018213044693853098,
      "grad_norm": 0.993622899055481,
      "learning_rate": 2.325608815639687e-06,
      "loss": 18.1749,
      "step": 57
    },
    {
      "epoch": 0.018532571793745258,
      "grad_norm": 1.064647912979126,
      "learning_rate": 2.238678841830867e-06,
      "loss": 16.2944,
      "step": 58
    },
    {
      "epoch": 0.018852098893637415,
      "grad_norm": 1.0806492567062378,
      "learning_rate": 2.1520672475998374e-06,
      "loss": 15.8283,
      "step": 59
    },
    {
      "epoch": 0.019171625993529576,
      "grad_norm": 1.020936369895935,
      "learning_rate": 2.0658795558326745e-06,
      "loss": 17.8089,
      "step": 60
    },
    {
      "epoch": 0.019491153093421736,
      "grad_norm": 1.0202960968017578,
      "learning_rate": 1.9802207729556023e-06,
      "loss": 16.9171,
      "step": 61
    },
    {
      "epoch": 0.019810680193313897,
      "grad_norm": 1.1508572101593018,
      "learning_rate": 1.895195261000831e-06,
      "loss": 17.2636,
      "step": 62
    },
    {
      "epoch": 0.020130207293206054,
      "grad_norm": 1.0971193313598633,
      "learning_rate": 1.8109066104575023e-06,
      "loss": 17.0854,
      "step": 63
    },
    {
      "epoch": 0.020449734393098214,
      "grad_norm": 1.3636854887008667,
      "learning_rate": 1.7274575140626318e-06,
      "loss": 17.5989,
      "step": 64
    },
    {
      "epoch": 0.020769261492990375,
      "grad_norm": 1.2558183670043945,
      "learning_rate": 1.6449496416858285e-06,
      "loss": 17.343,
      "step": 65
    },
    {
      "epoch": 0.021088788592882535,
      "grad_norm": 1.7393505573272705,
      "learning_rate": 1.56348351646022e-06,
      "loss": 18.1173,
      "step": 66
    },
    {
      "epoch": 0.021408315692774692,
      "grad_norm": 1.3264778852462769,
      "learning_rate": 1.4831583923105e-06,
      "loss": 17.7952,
      "step": 67
    },
    {
      "epoch": 0.021727842792666853,
      "grad_norm": 1.246366024017334,
      "learning_rate": 1.4040721330273063e-06,
      "loss": 18.808,
      "step": 68
    },
    {
      "epoch": 0.022047369892559013,
      "grad_norm": 1.3943363428115845,
      "learning_rate": 1.3263210930352737e-06,
      "loss": 19.9147,
      "step": 69
    },
    {
      "epoch": 0.02236689699245117,
      "grad_norm": 1.5551766157150269,
      "learning_rate": 1.2500000000000007e-06,
      "loss": 18.7218,
      "step": 70
    },
    {
      "epoch": 0.02268642409234333,
      "grad_norm": 1.5984143018722534,
      "learning_rate": 1.1752018394169882e-06,
      "loss": 19.9538,
      "step": 71
    },
    {
      "epoch": 0.02300595119223549,
      "grad_norm": 1.7078524827957153,
      "learning_rate": 1.1020177413231334e-06,
      "loss": 19.0477,
      "step": 72
    },
    {
      "epoch": 0.023325478292127652,
      "grad_norm": 1.86801278591156,
      "learning_rate": 1.0305368692688175e-06,
      "loss": 19.7327,
      "step": 73
    },
    {
      "epoch": 0.02364500539201981,
      "grad_norm": 1.7517181634902954,
      "learning_rate": 9.608463116858544e-07,
      "loss": 19.406,
      "step": 74
    },
    {
      "epoch": 0.02396453249191197,
      "grad_norm": 1.7723991870880127,
      "learning_rate": 8.930309757836517e-07,
      "loss": 19.6767,
      "step": 75
    },
    {
      "epoch": 0.02428405959180413,
      "grad_norm": 1.7405762672424316,
      "learning_rate": 8.271734841028553e-07,
      "loss": 18.9852,
      "step": 76
    },
    {
      "epoch": 0.02460358669169629,
      "grad_norm": 2.2709810733795166,
      "learning_rate": 7.633540738525066e-07,
      "loss": 16.1077,
      "step": 77
    },
    {
      "epoch": 0.024923113791588448,
      "grad_norm": 2.5845401287078857,
      "learning_rate": 7.016504991533727e-07,
      "loss": 16.7456,
      "step": 78
    },
    {
      "epoch": 0.02524264089148061,
      "grad_norm": 2.6944775581359863,
      "learning_rate": 6.421379363065142e-07,
      "loss": 17.1069,
      "step": 79
    },
    {
      "epoch": 0.02556216799137277,
      "grad_norm": 2.878634214401245,
      "learning_rate": 5.848888922025553e-07,
      "loss": 18.3942,
      "step": 80
    },
    {
      "epoch": 0.02588169509126493,
      "grad_norm": 3.0304110050201416,
      "learning_rate": 5.299731159831953e-07,
      "loss": 17.8819,
      "step": 81
    },
    {
      "epoch": 0.026201222191157086,
      "grad_norm": 3.3057451248168945,
      "learning_rate": 4.774575140626317e-07,
      "loss": 18.5736,
      "step": 82
    },
    {
      "epoch": 0.026520749291049247,
      "grad_norm": 3.3953466415405273,
      "learning_rate": 4.27406068612396e-07,
      "loss": 23.0032,
      "step": 83
    },
    {
      "epoch": 0.026840276390941407,
      "grad_norm": 4.374539375305176,
      "learning_rate": 3.798797596089351e-07,
      "loss": 25.4286,
      "step": 84
    },
    {
      "epoch": 0.027159803490833568,
      "grad_norm": 3.490527391433716,
      "learning_rate": 3.3493649053890325e-07,
      "loss": 26.9727,
      "step": 85
    },
    {
      "epoch": 0.027479330590725725,
      "grad_norm": 4.934103488922119,
      "learning_rate": 2.9263101785268253e-07,
      "loss": 28.1652,
      "step": 86
    },
    {
      "epoch": 0.027798857690617886,
      "grad_norm": 4.795266151428223,
      "learning_rate": 2.53014884252083e-07,
      "loss": 30.8333,
      "step": 87
    },
    {
      "epoch": 0.028118384790510046,
      "grad_norm": 4.224905490875244,
      "learning_rate": 2.1613635589349756e-07,
      "loss": 27.3162,
      "step": 88
    },
    {
      "epoch": 0.028437911890402203,
      "grad_norm": 4.559455394744873,
      "learning_rate": 1.8204036358303173e-07,
      "loss": 29.1512,
      "step": 89
    },
    {
      "epoch": 0.028757438990294364,
      "grad_norm": 4.907705307006836,
      "learning_rate": 1.507684480352292e-07,
      "loss": 29.6763,
      "step": 90
    },
    {
      "epoch": 0.029076966090186524,
      "grad_norm": 5.672667026519775,
      "learning_rate": 1.223587092621162e-07,
      "loss": 30.8263,
      "step": 91
    },
    {
      "epoch": 0.029396493190078685,
      "grad_norm": 8.072755813598633,
      "learning_rate": 9.684576015420277e-08,
      "loss": 30.6112,
      "step": 92
    },
    {
      "epoch": 0.02971602028997084,
      "grad_norm": 6.152291774749756,
      "learning_rate": 7.426068431000883e-08,
      "loss": 31.3257,
      "step": 93
    },
    {
      "epoch": 0.030035547389863002,
      "grad_norm": 7.361919403076172,
      "learning_rate": 5.463099816548578e-08,
      "loss": 31.6235,
      "step": 94
    },
    {
      "epoch": 0.030355074489755163,
      "grad_norm": 6.712725639343262,
      "learning_rate": 3.798061746947995e-08,
      "loss": 33.694,
      "step": 95
    },
    {
      "epoch": 0.030674601589647323,
      "grad_norm": 8.276151657104492,
      "learning_rate": 2.4329828146074096e-08,
      "loss": 34.3615,
      "step": 96
    },
    {
      "epoch": 0.03099412868953948,
      "grad_norm": 7.104736804962158,
      "learning_rate": 1.3695261579316776e-08,
      "loss": 34.9578,
      "step": 97
    },
    {
      "epoch": 0.03131365578943164,
      "grad_norm": 9.987726211547852,
      "learning_rate": 6.089874350439507e-09,
      "loss": 34.2474,
      "step": 98
    },
    {
      "epoch": 0.0316331828893238,
      "grad_norm": 10.876480102539062,
      "learning_rate": 1.5229324522605949e-09,
      "loss": 38.6954,
      "step": 99
    },
    {
      "epoch": 0.03195270998921596,
      "grad_norm": 15.421370506286621,
      "learning_rate": 0.0,
      "loss": 41.5902,
      "step": 100
    },
    {
      "epoch": 0.03195270998921596,
      "eval_loss": 2.786036252975464,
      "eval_runtime": 886.1971,
      "eval_samples_per_second": 5.948,
      "eval_steps_per_second": 1.487,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.6533929242591232e+17,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}
|
|