{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 711,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "grad_norm": 6.304776350824905,
      "learning_rate": 2.7777777777777776e-07,
      "loss": 1.2835,
      "step": 1
    },
    {
      "epoch": 0.02,
      "grad_norm": 6.1444154892612906,
      "learning_rate": 1.3888888888888892e-06,
      "loss": 1.2052,
      "step": 5
    },
    {
      "epoch": 0.04,
      "grad_norm": 3.829285889985009,
      "learning_rate": 2.7777777777777783e-06,
      "loss": 1.2292,
      "step": 10
    },
    {
      "epoch": 0.06,
      "grad_norm": 1.1679705762984034,
      "learning_rate": 4.166666666666667e-06,
      "loss": 1.1597,
      "step": 15
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.4357027628580525,
      "learning_rate": 5.555555555555557e-06,
      "loss": 1.1284,
      "step": 20
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.9452863213714234,
      "learning_rate": 6.944444444444445e-06,
      "loss": 1.1531,
      "step": 25
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.8873033736663324,
      "learning_rate": 8.333333333333334e-06,
      "loss": 1.1549,
      "step": 30
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.7881640401882983,
      "learning_rate": 9.722222222222223e-06,
      "loss": 1.1306,
      "step": 35
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.7249086340452754,
      "learning_rate": 1.1111111111111113e-05,
      "loss": 1.0851,
      "step": 40
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.6764364880756644,
      "learning_rate": 1.25e-05,
      "loss": 1.139,
      "step": 45
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.6897544761050797,
      "learning_rate": 1.388888888888889e-05,
      "loss": 1.1317,
      "step": 50
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.6816142474216428,
      "learning_rate": 1.5277777777777777e-05,
      "loss": 1.1313,
      "step": 55
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.6811212493739357,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 1.138,
      "step": 60
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.6862844341255663,
      "learning_rate": 1.8055555555555558e-05,
      "loss": 1.1483,
      "step": 65
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.4787128965985026,
      "learning_rate": 1.9444444444444445e-05,
      "loss": 1.1395,
      "step": 70
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.7775487986058592,
      "learning_rate": 1.999891231617599e-05,
      "loss": 1.1161,
      "step": 75
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.7115088691454458,
      "learning_rate": 1.9992266216318037e-05,
      "loss": 1.0932,
      "step": 80
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.6818793389006765,
      "learning_rate": 1.997958229642588e-05,
      "loss": 1.1178,
      "step": 85
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.6928106880338162,
      "learning_rate": 1.996086822074945e-05,
      "loss": 1.1298,
      "step": 90
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.6676196732940871,
      "learning_rate": 1.9936135297256183e-05,
      "loss": 1.1094,
      "step": 95
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.713377771129619,
      "learning_rate": 1.9905398470798207e-05,
      "loss": 1.1224,
      "step": 100
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.6855059849013185,
      "learning_rate": 1.9868676314081907e-05,
      "loss": 1.1123,
      "step": 105
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.660806750689474,
      "learning_rate": 1.9825991016445387e-05,
      "loss": 1.1044,
      "step": 110
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.6638994109236709,
      "learning_rate": 1.9777368370450582e-05,
      "loss": 1.0811,
      "step": 115
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.6862841124125318,
      "learning_rate": 1.9722837756298112e-05,
      "loss": 1.1057,
      "step": 120
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.6713561495513073,
      "learning_rate": 1.9662432124074325e-05,
      "loss": 1.1247,
      "step": 125
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.6865654618803174,
      "learning_rate": 1.9596187973841218e-05,
      "loss": 1.0959,
      "step": 130
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.6512909442458682,
      "learning_rate": 1.9524145333581315e-05,
      "loss": 1.1178,
      "step": 135
    },
    {
      "epoch": 0.59,
      "grad_norm": 1.542422418836288,
      "learning_rate": 1.9446347735010765e-05,
      "loss": 1.0984,
      "step": 140
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.7206951642675766,
      "learning_rate": 1.9362842187275354e-05,
      "loss": 1.0812,
      "step": 145
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.6984462641482525,
      "learning_rate": 1.9273679148545246e-05,
      "loss": 1.1129,
      "step": 150
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.6657063460800421,
      "learning_rate": 1.917891249552568e-05,
      "loss": 1.1147,
      "step": 155
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.6732684805189473,
      "learning_rate": 1.9078599490901984e-05,
      "loss": 1.0988,
      "step": 160
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.6969660113279658,
      "learning_rate": 1.897280074873868e-05,
      "loss": 1.0901,
      "step": 165
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.6545636951842201,
      "learning_rate": 1.8861580197853423e-05,
      "loss": 1.0808,
      "step": 170
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.728879426031119,
      "learning_rate": 1.8745005043188104e-05,
      "loss": 1.1007,
      "step": 175
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.6989476047236611,
      "learning_rate": 1.862314572520028e-05,
      "loss": 1.0972,
      "step": 180
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.6937493330837292,
      "learning_rate": 1.8496075877299585e-05,
      "loss": 1.0751,
      "step": 185
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.7237715465824409,
      "learning_rate": 1.8363872281354796e-05,
      "loss": 1.1005,
      "step": 190
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.665001160804127,
      "learning_rate": 1.8226614821298444e-05,
      "loss": 1.0979,
      "step": 195
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.6512333420285555,
      "learning_rate": 1.808438643485698e-05,
      "loss": 1.0663,
      "step": 200
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.6670845393437954,
      "learning_rate": 1.793727306343574e-05,
      "loss": 1.0479,
      "step": 205
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.660996369149025,
      "learning_rate": 1.7785363600188894e-05,
      "loss": 1.0631,
      "step": 210
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.6383025721204546,
      "learning_rate": 1.762874983630582e-05,
      "loss": 1.0594,
      "step": 215
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.6481663495293073,
      "learning_rate": 1.7467526405546344e-05,
      "loss": 1.1117,
      "step": 220
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.6526574717781632,
      "learning_rate": 1.7301790727058344e-05,
      "loss": 1.074,
      "step": 225
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.6602485216423672,
      "learning_rate": 1.7131642946512314e-05,
      "loss": 1.1002,
      "step": 230
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.703340110707448,
      "learning_rate": 1.6957185875588403e-05,
      "loss": 1.0963,
      "step": 235
    },
    {
      "epoch": 1.0,
      "eval_loss": 1.0788308382034302,
      "eval_runtime": 5.3118,
      "eval_samples_per_second": 277.497,
      "eval_steps_per_second": 4.518,
      "step": 237
    },
    {
      "epoch": 1.01,
      "grad_norm": 0.7492129279971655,
      "learning_rate": 1.6778524929852513e-05,
      "loss": 1.028,
      "step": 240
    },
    {
      "epoch": 1.03,
      "grad_norm": 0.8102380476622988,
      "learning_rate": 1.659576806505905e-05,
      "loss": 0.9794,
      "step": 245
    },
    {
      "epoch": 1.05,
      "grad_norm": 0.7121110613571635,
      "learning_rate": 1.640902571191869e-05,
      "loss": 0.9648,
      "step": 250
    },
    {
      "epoch": 1.08,
      "grad_norm": 0.7426962668349942,
      "learning_rate": 1.6218410709370735e-05,
      "loss": 0.9826,
      "step": 255
    },
    {
      "epoch": 1.1,
      "grad_norm": 0.675584007232439,
      "learning_rate": 1.6024038236400246e-05,
      "loss": 0.9454,
      "step": 260
    },
    {
      "epoch": 1.12,
      "grad_norm": 0.7129690280611058,
      "learning_rate": 1.582602574244121e-05,
      "loss": 0.9403,
      "step": 265
    },
    {
      "epoch": 1.14,
      "grad_norm": 0.6747033998323487,
      "learning_rate": 1.562449287640781e-05,
      "loss": 0.9864,
      "step": 270
    },
    {
      "epoch": 1.16,
      "grad_norm": 0.6837814023465153,
      "learning_rate": 1.5419561414396657e-05,
      "loss": 0.9864,
      "step": 275
    },
    {
      "epoch": 1.18,
      "grad_norm": 0.6969571002742431,
      "learning_rate": 1.5211355186103655e-05,
      "loss": 0.9567,
      "step": 280
    },
    {
      "epoch": 1.2,
      "grad_norm": 0.6630210250243682,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.9811,
      "step": 285
    },
    {
      "epoch": 1.22,
      "grad_norm": 0.6589068053142794,
      "learning_rate": 1.4785623567312492e-05,
      "loss": 0.9389,
      "step": 290
    },
    {
      "epoch": 1.24,
      "grad_norm": 0.6878317017661794,
      "learning_rate": 1.4568355424854113e-05,
      "loss": 1.0157,
      "step": 295
    },
    {
      "epoch": 1.27,
      "grad_norm": 0.7179535464777137,
      "learning_rate": 1.4348326856751496e-05,
      "loss": 0.9787,
      "step": 300
    },
    {
      "epoch": 1.29,
      "grad_norm": 0.6829129430840285,
      "learning_rate": 1.412567081511659e-05,
      "loss": 0.9639,
      "step": 305
    },
    {
      "epoch": 1.31,
      "grad_norm": 0.7030815356191583,
      "learning_rate": 1.3900521839710428e-05,
      "loss": 0.9486,
      "step": 310
    },
    {
      "epoch": 1.33,
      "grad_norm": 0.67559393512531,
      "learning_rate": 1.367301597664757e-05,
      "loss": 0.9795,
      "step": 315
    },
    {
      "epoch": 1.35,
      "grad_norm": 0.6780812345565891,
      "learning_rate": 1.3443290696190335e-05,
      "loss": 0.9785,
      "step": 320
    },
    {
      "epoch": 1.37,
      "grad_norm": 0.6710329710920228,
      "learning_rate": 1.3211484809682482e-05,
      "loss": 0.992,
      "step": 325
    },
    {
      "epoch": 1.39,
      "grad_norm": 0.6741452885754833,
      "learning_rate": 1.2977738385672558e-05,
      "loss": 1.0023,
      "step": 330
    },
    {
      "epoch": 1.41,
      "grad_norm": 0.6632729564225439,
      "learning_rate": 1.2742192665277569e-05,
      "loss": 0.995,
      "step": 335
    },
    {
      "epoch": 1.43,
      "grad_norm": 0.6601370650300007,
      "learning_rate": 1.250498997683813e-05,
      "loss": 0.9764,
      "step": 340
    },
    {
      "epoch": 1.46,
      "grad_norm": 0.6652011155656283,
      "learning_rate": 1.2266273649916669e-05,
      "loss": 0.9537,
      "step": 345
    },
    {
      "epoch": 1.48,
      "grad_norm": 0.6446717094616893,
      "learning_rate": 1.202618792869063e-05,
      "loss": 0.9525,
      "step": 350
    },
    {
      "epoch": 1.5,
      "grad_norm": 0.6721085026757567,
      "learning_rate": 1.178487788479303e-05,
      "loss": 0.9629,
      "step": 355
    },
    {
      "epoch": 1.52,
      "grad_norm": 0.6629783449328003,
      "learning_rate": 1.1542489329653024e-05,
      "loss": 0.9842,
      "step": 360
    },
    {
      "epoch": 1.54,
      "grad_norm": 0.6522335344513943,
      "learning_rate": 1.129916872638945e-05,
      "loss": 1.0048,
      "step": 365
    },
    {
      "epoch": 1.56,
      "grad_norm": 0.6781899139396206,
      "learning_rate": 1.1055063101310581e-05,
      "loss": 0.9685,
      "step": 370
    },
    {
      "epoch": 1.58,
      "grad_norm": 0.6691172096591039,
      "learning_rate": 1.08103199550736e-05,
      "loss": 0.9747,
      "step": 375
    },
    {
      "epoch": 1.6,
      "grad_norm": 0.6373869560949246,
      "learning_rate": 1.0565087173557396e-05,
      "loss": 0.9686,
      "step": 380
    },
    {
      "epoch": 1.62,
      "grad_norm": 0.6517373978992131,
      "learning_rate": 1.0319512938502654e-05,
      "loss": 0.9714,
      "step": 385
    },
    {
      "epoch": 1.65,
      "grad_norm": 0.6580577126773488,
      "learning_rate": 1.0073745637973125e-05,
      "loss": 0.9747,
      "step": 390
    },
    {
      "epoch": 1.67,
      "grad_norm": 0.6489932524871953,
      "learning_rate": 9.827933776692236e-06,
      "loss": 0.9874,
      "step": 395
    },
    {
      "epoch": 1.69,
      "grad_norm": 0.6407459560571601,
      "learning_rate": 9.582225886309217e-06,
      "loss": 0.9618,
      "step": 400
    },
    {
      "epoch": 1.71,
      "grad_norm": 0.6629610117016943,
      "learning_rate": 9.336770435648963e-06,
      "loss": 0.992,
      "step": 405
    },
    {
      "epoch": 1.73,
      "grad_norm": 0.6667119859349727,
      "learning_rate": 9.091715740999829e-06,
      "loss": 0.971,
      "step": 410
    },
    {
      "epoch": 1.75,
      "grad_norm": 0.6886862002448931,
      "learning_rate": 8.84720987649363e-06,
      "loss": 0.9556,
      "step": 415
    },
    {
      "epoch": 1.77,
      "grad_norm": 0.6426160780174832,
      "learning_rate": 8.60340058463194e-06,
      "loss": 0.9879,
      "step": 420
    },
    {
      "epoch": 1.79,
      "grad_norm": 0.6346609133677081,
      "learning_rate": 8.360435187012789e-06,
      "loss": 1.0061,
      "step": 425
    },
    {
      "epoch": 1.81,
      "grad_norm": 0.6483257723288812,
      "learning_rate": 8.118460495311687e-06,
      "loss": 1.0141,
      "step": 430
    },
    {
      "epoch": 1.84,
      "grad_norm": 0.7171449794590815,
      "learning_rate": 7.877622722570772e-06,
      "loss": 0.9675,
      "step": 435
    },
    {
      "epoch": 1.86,
      "grad_norm": 0.650114145091982,
      "learning_rate": 7.638067394849672e-06,
      "loss": 0.9742,
      "step": 440
    },
    {
      "epoch": 1.88,
      "grad_norm": 0.6929157157409924,
      "learning_rate": 7.3999392632914936e-06,
      "loss": 0.9872,
      "step": 445
    },
    {
      "epoch": 1.9,
      "grad_norm": 0.6361399427239406,
      "learning_rate": 7.163382216657033e-06,
      "loss": 0.9685,
      "step": 450
    },
    {
      "epoch": 1.92,
      "grad_norm": 0.6216240484506025,
      "learning_rate": 6.928539194380101e-06,
      "loss": 0.9648,
      "step": 455
    },
    {
      "epoch": 1.94,
      "grad_norm": 0.6162980824439727,
      "learning_rate": 6.6955521001964526e-06,
      "loss": 0.9608,
      "step": 460
    },
    {
      "epoch": 1.96,
      "grad_norm": 0.662006443614462,
      "learning_rate": 6.464561716398565e-06,
      "loss": 0.9555,
      "step": 465
    },
    {
      "epoch": 1.98,
      "grad_norm": 0.6525504751262218,
      "learning_rate": 6.2357076187680325e-06,
      "loss": 0.9779,
      "step": 470
    },
    {
      "epoch": 2.0,
      "eval_loss": 1.0727574825286865,
      "eval_runtime": 4.9769,
      "eval_samples_per_second": 296.167,
      "eval_steps_per_second": 4.822,
      "step": 474
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.9211908456070792,
      "learning_rate": 6.009128092236983e-06,
      "loss": 0.9304,
      "step": 475
    },
    {
      "epoch": 2.03,
      "grad_norm": 0.8532374349387694,
      "learning_rate": 5.78496004732952e-06,
      "loss": 0.8546,
      "step": 480
    },
    {
      "epoch": 2.05,
      "grad_norm": 0.7234550887455584,
      "learning_rate": 5.563338937433622e-06,
      "loss": 0.8819,
      "step": 485
    },
    {
      "epoch": 2.07,
      "grad_norm": 0.7126403603814161,
      "learning_rate": 5.344398676953526e-06,
      "loss": 0.9011,
      "step": 490
    },
    {
      "epoch": 2.09,
      "grad_norm": 0.640441506166026,
      "learning_rate": 5.128271560392037e-06,
      "loss": 0.8714,
      "step": 495
    },
    {
      "epoch": 2.11,
      "grad_norm": 0.6599554320566688,
      "learning_rate": 4.915088182411674e-06,
      "loss": 0.8918,
      "step": 500
    },
    {
      "epoch": 2.13,
      "grad_norm": 0.6420656415982475,
      "learning_rate": 4.7049773589229306e-06,
      "loss": 0.8936,
      "step": 505
    },
    {
      "epoch": 2.15,
      "grad_norm": 0.6624281801845274,
      "learning_rate": 4.498066049247344e-06,
      "loss": 0.9076,
      "step": 510
    },
    {
      "epoch": 2.17,
      "grad_norm": 0.6494624453990643,
      "learning_rate": 4.29447927940242e-06,
      "loss": 0.874,
      "step": 515
    },
    {
      "epoch": 2.19,
      "grad_norm": 0.62980438510625,
      "learning_rate": 4.094340066554742e-06,
      "loss": 0.8797,
      "step": 520
    },
    {
      "epoch": 2.22,
      "grad_norm": 0.6496965620768959,
      "learning_rate": 3.897769344686929e-06,
      "loss": 0.8783,
      "step": 525
    },
    {
      "epoch": 2.24,
      "grad_norm": 0.623988823063145,
      "learning_rate": 3.7048858915233665e-06,
      "loss": 0.8991,
      "step": 530
    },
    {
      "epoch": 2.26,
      "grad_norm": 0.6235243813957997,
      "learning_rate": 3.5158062567588468e-06,
      "loss": 0.8587,
      "step": 535
    },
    {
      "epoch": 2.28,
      "grad_norm": 0.6224524258972475,
      "learning_rate": 3.330644691633492e-06,
      "loss": 0.8742,
      "step": 540
    },
    {
      "epoch": 2.3,
      "grad_norm": 0.6267167014433769,
      "learning_rate": 3.149513079896521e-06,
      "loss": 0.9013,
      "step": 545
    },
    {
      "epoch": 2.32,
      "grad_norm": 0.6252594161210878,
      "learning_rate": 2.9725208702005736e-06,
      "loss": 0.8905,
      "step": 550
    },
    {
      "epoch": 2.34,
      "grad_norm": 0.6204178434773648,
      "learning_rate": 2.7997750099674282e-06,
      "loss": 0.8819,
      "step": 555
    },
    {
      "epoch": 2.36,
      "grad_norm": 0.6234939504584522,
      "learning_rate": 2.631379880765107e-06,
      "loss": 0.8844,
      "step": 560
    },
    {
      "epoch": 2.38,
      "grad_norm": 0.6404757813962814,
      "learning_rate": 2.467437235235378e-06,
      "loss": 0.8643,
      "step": 565
    },
    {
      "epoch": 2.41,
      "grad_norm": 0.6154655956232923,
      "learning_rate": 2.3080461356097938e-06,
      "loss": 0.8835,
      "step": 570
    },
    {
      "epoch": 2.43,
      "grad_norm": 0.6226597834743854,
      "learning_rate": 2.153302893851401e-06,
      "loss": 0.8547,
      "step": 575
    },
    {
      "epoch": 2.45,
      "grad_norm": 0.635711672458865,
      "learning_rate": 2.0033010134583085e-06,
      "loss": 0.8799,
      "step": 580
    },
    {
      "epoch": 2.47,
      "grad_norm": 0.6222216060732768,
      "learning_rate": 1.8581311329642592e-06,
      "loss": 0.8614,
      "step": 585
    },
    {
      "epoch": 2.49,
      "grad_norm": 0.6211538083313471,
      "learning_rate": 1.7178809711703525e-06,
      "loss": 0.8851,
      "step": 590
    },
    {
      "epoch": 2.51,
      "grad_norm": 0.6264778347828893,
      "learning_rate": 1.5826352741410333e-06,
      "loss": 0.8841,
      "step": 595
    },
    {
      "epoch": 2.53,
      "grad_norm": 0.6194165165737189,
      "learning_rate": 1.452475763996326e-06,
      "loss": 0.8519,
      "step": 600
    },
    {
      "epoch": 2.55,
      "grad_norm": 0.6156200040809161,
      "learning_rate": 1.3274810895313083e-06,
      "loss": 0.8595,
      "step": 605
    },
    {
      "epoch": 2.57,
      "grad_norm": 0.6268920763538539,
      "learning_rate": 1.207726778692625e-06,
      "loss": 0.8668,
      "step": 610
    },
    {
      "epoch": 2.59,
      "grad_norm": 0.6237602437342175,
      "learning_rate": 1.0932851929407828e-06,
      "loss": 0.8984,
      "step": 615
    },
    {
      "epoch": 2.62,
      "grad_norm": 0.6098041198161336,
      "learning_rate": 9.842254835257792e-07,
      "loss": 0.8889,
      "step": 620
    },
    {
      "epoch": 2.64,
      "grad_norm": 0.6047803478738842,
      "learning_rate": 8.806135497025181e-07,
      "loss": 0.8712,
      "step": 625
    },
    {
      "epoch": 2.66,
      "grad_norm": 0.6161378090035984,
      "learning_rate": 7.825119989112173e-07,
      "loss": 0.8885,
      "step": 630
    },
    {
      "epoch": 2.68,
      "grad_norm": 0.6233431696221546,
      "learning_rate": 6.899801089469205e-07,
      "loss": 0.8852,
      "step": 635
    },
    {
      "epoch": 2.7,
      "grad_norm": 0.6391554220139581,
      "learning_rate": 6.030737921409169e-07,
      "loss": 0.8773,
      "step": 640
    },
    {
      "epoch": 2.72,
      "grad_norm": 0.6245630190412572,
      "learning_rate": 5.218455615757601e-07,
      "loss": 0.8805,
      "step": 645
    },
    {
      "epoch": 2.74,
      "grad_norm": 0.6100529800808214,
      "learning_rate": 4.4634449935427203e-07,
      "loss": 0.8605,
      "step": 650
    },
    {
      "epoch": 2.76,
      "grad_norm": 0.6121972838225812,
      "learning_rate": 3.7661622694171393e-07,
      "loss": 0.9044,
      "step": 655
    },
    {
      "epoch": 2.78,
      "grad_norm": 0.6334525599084514,
      "learning_rate": 3.127028775990515e-07,
      "loss": 0.8752,
      "step": 660
    },
    {
      "epoch": 2.81,
      "grad_norm": 0.6246892070368814,
      "learning_rate": 2.546430709239578e-07,
      "loss": 0.873,
      "step": 665
    },
    {
      "epoch": 2.83,
      "grad_norm": 0.6189951160404621,
      "learning_rate": 2.02471889514948e-07,
      "loss": 0.8616,
      "step": 670
    },
    {
      "epoch": 2.85,
      "grad_norm": 0.612318312224187,
      "learning_rate": 1.562208577727442e-07,
      "loss": 0.8419,
      "step": 675
    },
    {
      "epoch": 2.87,
      "grad_norm": 0.597268698517341,
      "learning_rate": 1.1591792285167603e-07,
      "loss": 0.863,
      "step": 680
    },
    {
      "epoch": 2.89,
      "grad_norm": 0.6176692124122442,
      "learning_rate": 8.158743777263334e-08,
      "loss": 0.8801,
      "step": 685
    },
    {
      "epoch": 2.91,
      "grad_norm": 0.6020045294431875,
      "learning_rate": 5.325014670776951e-08,
      "loss": 0.8632,
      "step": 690
    },
    {
      "epoch": 2.93,
      "grad_norm": 0.6387655106971034,
      "learning_rate": 3.092317244584919e-08,
      "loss": 0.9029,
      "step": 695
    },
    {
      "epoch": 2.95,
      "grad_norm": 0.6272250942520349,
      "learning_rate": 1.4620006045816814e-08,
      "loss": 0.9094,
      "step": 700
    },
    {
      "epoch": 2.97,
      "grad_norm": 0.6245247048439003,
      "learning_rate": 4.3504986848297295e-09,
      "loss": 0.8741,
      "step": 705
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.6085774534918146,
      "learning_rate": 1.2085570569642102e-10,
      "loss": 0.8734,
      "step": 710
    },
    {
      "epoch": 3.0,
      "eval_loss": 1.0886437892913818,
      "eval_runtime": 5.0486,
      "eval_samples_per_second": 291.961,
      "eval_steps_per_second": 4.754,
      "step": 711
    },
    {
      "epoch": 3.0,
      "step": 711,
      "total_flos": 277125248581632.0,
      "train_loss": 0.9885472514458347,
      "train_runtime": 1164.5291,
      "train_samples_per_second": 77.885,
      "train_steps_per_second": 0.611
    }
  ],
  "logging_steps": 5,
  "max_steps": 711,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 100,
  "total_flos": 277125248581632.0,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}