{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.6807713911252575,
  "global_step": 25000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.09,
      "learning_rate": 4.995319228608875e-05,
      "loss": 3.2415,
      "step": 500
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.9906384572177495e-05,
      "loss": 3.1877,
      "step": 1000
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.9859576858266244e-05,
      "loss": 3.16,
      "step": 1500
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.9812769144354994e-05,
      "loss": 3.106,
      "step": 2000
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.9766055045871565e-05,
      "loss": 3.1023,
      "step": 2500
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.971924733196031e-05,
      "loss": 3.0976,
      "step": 3000
    },
    {
      "epoch": 0.66,
      "learning_rate": 4.967243961804906e-05,
      "loss": 3.0841,
      "step": 3500
    },
    {
      "epoch": 0.75,
      "learning_rate": 4.96256319041378e-05,
      "loss": 3.0686,
      "step": 4000
    },
    {
      "epoch": 0.84,
      "learning_rate": 4.957882419022655e-05,
      "loss": 3.0504,
      "step": 4500
    },
    {
      "epoch": 0.94,
      "learning_rate": 4.953211009174312e-05,
      "loss": 3.0647,
      "step": 5000
    },
    {
      "epoch": 1.03,
      "learning_rate": 4.948530237783187e-05,
      "loss": 2.9605,
      "step": 5500
    },
    {
      "epoch": 1.12,
      "learning_rate": 4.9438494663920614e-05,
      "loss": 2.83,
      "step": 6000
    },
    {
      "epoch": 1.22,
      "learning_rate": 4.9391686950009364e-05,
      "loss": 2.8551,
      "step": 6500
    },
    {
      "epoch": 1.31,
      "learning_rate": 4.9344972851525935e-05,
      "loss": 2.8582,
      "step": 7000
    },
    {
      "epoch": 1.4,
      "learning_rate": 4.9298165137614684e-05,
      "loss": 2.8605,
      "step": 7500
    },
    {
      "epoch": 1.5,
      "learning_rate": 4.9251357423703434e-05,
      "loss": 2.8591,
      "step": 8000
    },
    {
      "epoch": 1.59,
      "learning_rate": 4.920454970979218e-05,
      "loss": 2.8611,
      "step": 8500
    },
    {
      "epoch": 1.69,
      "learning_rate": 4.9157741995880926e-05,
      "loss": 2.8545,
      "step": 9000
    },
    {
      "epoch": 1.78,
      "learning_rate": 4.9110934281969676e-05,
      "loss": 2.8659,
      "step": 9500
    },
    {
      "epoch": 1.87,
      "learning_rate": 4.906412656805842e-05,
      "loss": 2.8525,
      "step": 10000
    },
    {
      "epoch": 1.97,
      "learning_rate": 4.901741246957498e-05,
      "loss": 2.868,
      "step": 10500
    },
    {
      "epoch": 2.06,
      "learning_rate": 4.897060475566373e-05,
      "loss": 2.7281,
      "step": 11000
    },
    {
      "epoch": 2.15,
      "learning_rate": 4.892379704175248e-05,
      "loss": 2.6595,
      "step": 11500
    },
    {
      "epoch": 2.25,
      "learning_rate": 4.887698932784123e-05,
      "loss": 2.6782,
      "step": 12000
    },
    {
      "epoch": 2.34,
      "learning_rate": 4.8830181613929975e-05,
      "loss": 2.6556,
      "step": 12500
    },
    {
      "epoch": 2.43,
      "learning_rate": 4.8783373900018725e-05,
      "loss": 2.6926,
      "step": 13000
    },
    {
      "epoch": 2.53,
      "learning_rate": 4.8736566186107474e-05,
      "loss": 2.6984,
      "step": 13500
    },
    {
      "epoch": 2.62,
      "learning_rate": 4.8689758472196224e-05,
      "loss": 2.6981,
      "step": 14000
    },
    {
      "epoch": 2.71,
      "learning_rate": 4.8643044373712795e-05,
      "loss": 2.702,
      "step": 14500
    },
    {
      "epoch": 2.81,
      "learning_rate": 4.859623665980154e-05,
      "loss": 2.689,
      "step": 15000
    },
    {
      "epoch": 2.9,
      "learning_rate": 4.854942894589029e-05,
      "loss": 2.6981,
      "step": 15500
    },
    {
      "epoch": 3.0,
      "learning_rate": 4.850262123197904e-05,
      "loss": 2.7248,
      "step": 16000
    },
    {
      "epoch": 3.09,
      "learning_rate": 4.845581351806778e-05,
      "loss": 2.5182,
      "step": 16500
    },
    {
      "epoch": 3.18,
      "learning_rate": 4.840900580415653e-05,
      "loss": 2.5234,
      "step": 17000
    },
    {
      "epoch": 3.28,
      "learning_rate": 4.8362291705673094e-05,
      "loss": 2.5263,
      "step": 17500
    },
    {
      "epoch": 3.37,
      "learning_rate": 4.8315483991761844e-05,
      "loss": 2.5354,
      "step": 18000
    },
    {
      "epoch": 3.46,
      "learning_rate": 4.826867627785059e-05,
      "loss": 2.5474,
      "step": 18500
    },
    {
      "epoch": 3.56,
      "learning_rate": 4.8221868563939336e-05,
      "loss": 2.5553,
      "step": 19000
    },
    {
      "epoch": 3.65,
      "learning_rate": 4.8175060850028086e-05,
      "loss": 2.5713,
      "step": 19500
    },
    {
      "epoch": 3.74,
      "learning_rate": 4.812834675154466e-05,
      "loss": 2.5589,
      "step": 20000
    },
    {
      "epoch": 3.84,
      "learning_rate": 4.8081539037633407e-05,
      "loss": 2.5645,
      "step": 20500
    },
    {
      "epoch": 3.93,
      "learning_rate": 4.8034731323722156e-05,
      "loss": 2.5756,
      "step": 21000
    },
    {
      "epoch": 4.03,
      "learning_rate": 4.79879236098109e-05,
      "loss": 2.5252,
      "step": 21500
    },
    {
      "epoch": 4.12,
      "learning_rate": 4.794120951132747e-05,
      "loss": 2.3718,
      "step": 22000
    },
    {
      "epoch": 4.21,
      "learning_rate": 4.789440179741621e-05,
      "loss": 2.3778,
      "step": 22500
    },
    {
      "epoch": 4.31,
      "learning_rate": 4.784759408350496e-05,
      "loss": 2.3979,
      "step": 23000
    },
    {
      "epoch": 4.4,
      "learning_rate": 4.7800786369593706e-05,
      "loss": 2.4116,
      "step": 23500
    },
    {
      "epoch": 4.49,
      "learning_rate": 4.7753978655682455e-05,
      "loss": 2.4175,
      "step": 24000
    },
    {
      "epoch": 4.59,
      "learning_rate": 4.7707264557199026e-05,
      "loss": 2.441,
      "step": 24500
    },
    {
      "epoch": 4.68,
      "learning_rate": 4.7660456843287776e-05,
      "loss": 2.4519,
      "step": 25000
    }
  ],
  "max_steps": 534100,
  "num_train_epochs": 100,
  "total_flos": 1.30646016e+16,
  "trial_name": null,
  "trial_params": null
}