{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9950738916256157,
  "eval_steps": 76,
  "global_step": 608,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.003284072249589491, "grad_norm": 6271.83657859153, "learning_rate": 0.0, "loss": 0.7244, "step": 1 },
    { "epoch": 0.003284072249589491, "eval_loss": 0.5559563636779785, "eval_runtime": 253.2891, "eval_samples_per_second": 8.157, "eval_steps_per_second": 0.257, "step": 1 },
    { "epoch": 0.006568144499178982, "grad_norm": 5855.051575469832, "learning_rate": 3.3333333333333334e-08, "loss": 0.7743, "step": 2 },
    { "epoch": 0.009852216748768473, "grad_norm": 5966.931878891552, "learning_rate": 6.666666666666667e-08, "loss": 0.7495, "step": 3 },
    { "epoch": 0.013136288998357963, "grad_norm": 2146.4401329128254, "learning_rate": 1e-07, "loss": 0.7853, "step": 4 },
    { "epoch": 0.016420361247947456, "grad_norm": 1479.4537989114276, "learning_rate": 1.3333333333333334e-07, "loss": 0.7486, "step": 5 },
    { "epoch": 0.019704433497536946, "grad_norm": 2073.875643154414, "learning_rate": 1.6666666666666665e-07, "loss": 0.7395, "step": 6 },
    { "epoch": 0.022988505747126436, "grad_norm": 3306.267015593175, "learning_rate": 2e-07, "loss": 0.7873, "step": 7 },
    { "epoch": 0.026272577996715927, "grad_norm": 5570.427974319344, "learning_rate": 2.3333333333333333e-07, "loss": 0.7897, "step": 8 },
    { "epoch": 0.029556650246305417, "grad_norm": 5192.155077763508, "learning_rate": 2.6666666666666667e-07, "loss": 0.7411, "step": 9 },
    { "epoch": 0.03284072249589491, "grad_norm": 3385.4892242590813, "learning_rate": 3e-07, "loss": 0.7656, "step": 10 },
    { "epoch": 0.0361247947454844, "grad_norm": 1680.342831450108, "learning_rate": 3.333333333333333e-07, "loss": 0.7801, "step": 11 },
    { "epoch": 0.03940886699507389, "grad_norm": 4799.608669273176, "learning_rate": 3.666666666666666e-07, "loss": 0.7407, "step": 12 },
    { "epoch": 0.042692939244663386, "grad_norm": 2371.102370606964, "learning_rate": 4e-07, "loss": 0.7861, "step": 13 },
    { "epoch": 0.04597701149425287, "grad_norm": 22928.120442114872, "learning_rate": 4.3333333333333335e-07, "loss": 0.7326, "step": 14 },
    { "epoch": 0.04926108374384237, "grad_norm": 5866.1404741418755, "learning_rate": 4.6666666666666666e-07, "loss": 0.7236, "step": 15 },
    { "epoch": 0.052545155993431854, "grad_norm": 10335.132176960362, "learning_rate": 5e-07, "loss": 0.7353, "step": 16 },
    { "epoch": 0.05582922824302135, "grad_norm": 11731.708675839953, "learning_rate": 5.333333333333333e-07, "loss": 0.7368, "step": 17 },
    { "epoch": 0.059113300492610835, "grad_norm": 8477.833222009613, "learning_rate": 5.666666666666666e-07, "loss": 0.7401, "step": 18 },
    { "epoch": 0.06239737274220033, "grad_norm": 5269.214248760142, "learning_rate": 6e-07, "loss": 0.7378, "step": 19 },
    { "epoch": 0.06568144499178982, "grad_norm": 2906.1574452333357, "learning_rate": 6.333333333333332e-07, "loss": 0.7661, "step": 20 },
    { "epoch": 0.06896551724137931, "grad_norm": 2376.80127554879, "learning_rate": 6.666666666666666e-07, "loss": 0.7457, "step": 21 },
    { "epoch": 0.0722495894909688, "grad_norm": 1796.841170343826, "learning_rate": 7e-07, "loss": 0.7252, "step": 22 },
    { "epoch": 0.0755336617405583, "grad_norm": 4583.563298783243, "learning_rate": 7.333333333333332e-07, "loss": 0.7417, "step": 23 },
    { "epoch": 0.07881773399014778, "grad_norm": 5571.34039922786, "learning_rate": 7.666666666666667e-07, "loss": 0.7449, "step": 24 },
    { "epoch": 0.08210180623973727, "grad_norm": 1788.7468612154946, "learning_rate": 8e-07, "loss": 0.7515, "step": 25 },
    { "epoch": 0.08538587848932677, "grad_norm": 3448.4947901602436, "learning_rate": 8.333333333333333e-07, "loss": 0.6949, "step": 26 },
    { "epoch": 0.08866995073891626, "grad_norm": 1036.8720881203337, "learning_rate": 8.666666666666667e-07, "loss": 0.7494, "step": 27 },
    { "epoch": 0.09195402298850575, "grad_norm": 2185.0194951434873, "learning_rate": 9e-07, "loss": 0.7197, "step": 28 },
    { "epoch": 0.09523809523809523, "grad_norm": 2269.166582616754, "learning_rate": 9.333333333333333e-07, "loss": 0.7033, "step": 29 },
    { "epoch": 0.09852216748768473, "grad_norm": 11624.24252175601, "learning_rate": 9.666666666666666e-07, "loss": 0.6986, "step": 30 },
    { "epoch": 0.10180623973727422, "grad_norm": 4520.614291277772, "learning_rate": 1e-06, "loss": 0.7107, "step": 31 },
    { "epoch": 0.10509031198686371, "grad_norm": 13701.111972610783, "learning_rate": 1.0333333333333333e-06, "loss": 0.7159, "step": 32 },
    { "epoch": 0.10837438423645321, "grad_norm": 7449.497627677214, "learning_rate": 1.0666666666666667e-06, "loss": 0.7135, "step": 33 },
    { "epoch": 0.1116584564860427, "grad_norm": 2245.9408876416637, "learning_rate": 1.1e-06, "loss": 0.6697, "step": 34 },
    { "epoch": 0.11494252873563218, "grad_norm": 926.1867127034129, "learning_rate": 1.1333333333333332e-06, "loss": 0.6825, "step": 35 },
    { "epoch": 0.11822660098522167, "grad_norm": 1329.9772406233828, "learning_rate": 1.1666666666666668e-06, "loss": 0.6795, "step": 36 },
    { "epoch": 0.12151067323481117, "grad_norm": 1000.9622533410908, "learning_rate": 1.2e-06, "loss": 0.7306, "step": 37 },
    { "epoch": 0.12479474548440066, "grad_norm": 993.8780279192466, "learning_rate": 1.2333333333333333e-06, "loss": 0.6348, "step": 38 },
    { "epoch": 0.12807881773399016, "grad_norm": 945.1494487867217, "learning_rate": 1.2666666666666665e-06, "loss": 0.6355, "step": 39 },
    { "epoch": 0.13136288998357964, "grad_norm": 179.6616198157567, "learning_rate": 1.3e-06, "loss": 0.65, "step": 40 },
    { "epoch": 0.13464696223316913, "grad_norm": 742.7710802743131, "learning_rate": 1.3333333333333332e-06, "loss": 0.6482, "step": 41 },
    { "epoch": 0.13793103448275862, "grad_norm": 587.9958891842592, "learning_rate": 1.3666666666666666e-06, "loss": 0.6399, "step": 42 },
    { "epoch": 0.1412151067323481, "grad_norm": 865.3394610899527, "learning_rate": 1.4e-06, "loss": 0.6455, "step": 43 },
    { "epoch": 0.1444991789819376, "grad_norm": 433.76765611396485, "learning_rate": 1.4333333333333333e-06, "loss": 0.6863, "step": 44 },
    { "epoch": 0.1477832512315271, "grad_norm": 112.10192933210595, "learning_rate": 1.4666666666666665e-06, "loss": 0.645, "step": 45 },
    { "epoch": 0.1510673234811166, "grad_norm": 2442.2715722240287, "learning_rate": 1.5e-06, "loss": 0.6667, "step": 46 },
    { "epoch": 0.15435139573070608, "grad_norm": 319.9299141888628, "learning_rate": 1.5333333333333334e-06, "loss": 0.631, "step": 47 },
    { "epoch": 0.15763546798029557, "grad_norm": 158.95826958739744, "learning_rate": 1.5666666666666666e-06, "loss": 0.6113, "step": 48 },
    { "epoch": 0.16091954022988506, "grad_norm": 215.59873480353468, "learning_rate": 1.6e-06, "loss": 0.6124, "step": 49 },
    { "epoch": 0.16420361247947454, "grad_norm": 126.6284211890604, "learning_rate": 1.6333333333333333e-06, "loss": 0.6154, "step": 50 },
    { "epoch": 0.16748768472906403, "grad_norm": 430.61576491190806, "learning_rate": 1.6666666666666667e-06, "loss": 0.6379, "step": 51 },
    { "epoch": 0.17077175697865354, "grad_norm": 338.16595207485216, "learning_rate": 1.6999999999999998e-06, "loss": 0.6133, "step": 52 },
    { "epoch": 0.17405582922824303, "grad_norm": 182.98276970174174, "learning_rate": 1.7333333333333334e-06, "loss": 0.6288, "step": 53 },
    { "epoch": 0.17733990147783252, "grad_norm": 183.8220113000725, "learning_rate": 1.7666666666666666e-06, "loss": 0.6234, "step": 54 },
    { "epoch": 0.180623973727422, "grad_norm": 66.92408391602912, "learning_rate": 1.8e-06, "loss": 0.6182, "step": 55 },
    { "epoch": 0.1839080459770115, "grad_norm": 203.51503397263764, "learning_rate": 1.833333333333333e-06, "loss": 0.6466, "step": 56 },
    { "epoch": 0.18719211822660098, "grad_norm": 63.90718235472869, "learning_rate": 1.8666666666666667e-06, "loss": 0.6321, "step": 57 },
    { "epoch": 0.19047619047619047, "grad_norm": 46.03571035353064, "learning_rate": 1.8999999999999998e-06, "loss": 0.5958, "step": 58 },
    { "epoch": 0.19376026272577998, "grad_norm": 47.046353460290206, "learning_rate": 1.933333333333333e-06, "loss": 0.6416, "step": 59 },
    { "epoch": 0.19704433497536947, "grad_norm": 72.74885795414816, "learning_rate": 1.9666666666666663e-06, "loss": 0.6279, "step": 60 },
    { "epoch": 0.20032840722495895, "grad_norm": 43.09881595994001, "learning_rate": 2e-06, "loss": 0.6432, "step": 61 },
    { "epoch": 0.20361247947454844, "grad_norm": 385.8401754134751, "learning_rate": 1.9999835673561283e-06, "loss": 0.6153, "step": 62 },
    { "epoch": 0.20689655172413793, "grad_norm": 55.270632684943024, "learning_rate": 1.999934269964577e-06, "loss": 0.606, "step": 63 },
    { "epoch": 0.21018062397372742, "grad_norm": 112.35685565413114, "learning_rate": 1.9998521094455194e-06, "loss": 0.6397, "step": 64 },
    { "epoch": 0.2134646962233169, "grad_norm": 145.4743296112891, "learning_rate": 1.999737088499184e-06, "loss": 0.6133, "step": 65 },
    { "epoch": 0.21674876847290642, "grad_norm": 92.85749788738731, "learning_rate": 1.9995892109057674e-06, "loss": 0.59, "step": 66 },
    { "epoch": 0.2200328407224959, "grad_norm": 119.32804840506846, "learning_rate": 1.9994084815253095e-06, "loss": 0.6377, "step": 67 },
    { "epoch": 0.2233169129720854, "grad_norm": 337.56982786001447, "learning_rate": 1.9991949062975332e-06, "loss": 0.5827, "step": 68 },
    { "epoch": 0.22660098522167488, "grad_norm": 159.57391171072265, "learning_rate": 1.99894849224165e-06, "loss": 0.5926, "step": 69 },
    { "epoch": 0.22988505747126436, "grad_norm": 94.69383065711324, "learning_rate": 1.998669247456129e-06, "loss": 0.6249, "step": 70 },
    { "epoch": 0.23316912972085385, "grad_norm": 30.865025764067703, "learning_rate": 1.9983571811184294e-06, "loss": 0.615, "step": 71 },
    { "epoch": 0.23645320197044334, "grad_norm": 365.12385141652493, "learning_rate": 1.9980123034847023e-06, "loss": 0.6231, "step": 72 },
    { "epoch": 0.23973727422003285, "grad_norm": 24.986994644338345, "learning_rate": 1.99763462588945e-06, "loss": 0.6005, "step": 73 },
    { "epoch": 0.24302134646962234, "grad_norm": 156.97247949274475, "learning_rate": 1.997224160745155e-06, "loss": 0.5986, "step": 74 },
    { "epoch": 0.24630541871921183, "grad_norm": 104.35232755227744, "learning_rate": 1.9967809215418726e-06, "loss": 0.616, "step": 75 },
    { "epoch": 0.24958949096880131, "grad_norm": 174.47979716876907, "learning_rate": 1.996304922846787e-06, "loss": 0.5997, "step": 76 },
    { "epoch": 0.24958949096880131, "eval_loss": 0.37612438201904297, "eval_runtime": 252.9482, "eval_samples_per_second": 8.168, "eval_steps_per_second": 0.257, "step": 76 },
    { "epoch": 0.25287356321839083, "grad_norm": 633.6612254567843, "learning_rate": 1.9957961803037326e-06, "loss": 0.5899, "step": 77 },
    { "epoch": 0.2561576354679803, "grad_norm": 138.79302081360208, "learning_rate": 1.995254710632678e-06, "loss": 0.638, "step": 78 },
    { "epoch": 0.2594417077175698, "grad_norm": 17.771737220416696, "learning_rate": 1.994680531629181e-06, "loss": 0.6313, "step": 79 },
    { "epoch": 0.2627257799671593, "grad_norm": 123.9631655047847, "learning_rate": 1.9940736621637997e-06, "loss": 0.6279, "step": 80 },
    { "epoch": 0.2660098522167488, "grad_norm": 203.7102057464755, "learning_rate": 1.9934341221814736e-06, "loss": 0.6131, "step": 81 },
    { "epoch": 0.26929392446633826, "grad_norm": 79.30066676169302, "learning_rate": 1.992761932700868e-06, "loss": 0.5929, "step": 82 },
    { "epoch": 0.27257799671592775, "grad_norm": 128.16665832374028, "learning_rate": 1.9920571158136835e-06, "loss": 0.6162, "step": 83 },
    { "epoch": 0.27586206896551724, "grad_norm": 100.90994251994853, "learning_rate": 1.99131969468393e-06, "loss": 0.5838, "step": 84 },
    { "epoch": 0.2791461412151067, "grad_norm": 68.97227461151734, "learning_rate": 1.990549693547166e-06, "loss": 0.5783, "step": 85 },
    { "epoch": 0.2824302134646962, "grad_norm": 65.63550727300574, "learning_rate": 1.989747137709699e-06, "loss": 0.5717, "step": 86 },
    { "epoch": 0.2857142857142857, "grad_norm": 105.48653502246952, "learning_rate": 1.988912053547758e-06, "loss": 0.6336, "step": 87 },
    { "epoch": 0.2889983579638752, "grad_norm": 151.45819328204334, "learning_rate": 1.988044468506625e-06, "loss": 0.5966, "step": 88 },
    { "epoch": 0.2922824302134647, "grad_norm": 73.02161989799154, "learning_rate": 1.9871444110997308e-06, "loss": 0.6377, "step": 89 },
    { "epoch": 0.2955665024630542, "grad_norm": 55.00481114192419, "learning_rate": 1.9862119109077224e-06, "loss": 0.6023, "step": 90 },
    { "epoch": 0.2988505747126437, "grad_norm": 30.849915628138252, "learning_rate": 1.985246998577486e-06, "loss": 0.5983, "step": 91 },
    { "epoch": 0.3021346469622332, "grad_norm": 105.68162830661484, "learning_rate": 1.9842497058211427e-06, "loss": 0.6191, "step": 92 },
    { "epoch": 0.3054187192118227, "grad_norm": 44.85816714268683, "learning_rate": 1.9832200654150074e-06, "loss": 0.6019, "step": 93 },
    { "epoch": 0.30870279146141216, "grad_norm": 94.09459294470936, "learning_rate": 1.982158111198507e-06, "loss": 0.6224, "step": 94 },
    { "epoch": 0.31198686371100165, "grad_norm": 72.91849721086875, "learning_rate": 1.9810638780730727e-06, "loss": 0.6135, "step": 95 },
    { "epoch": 0.31527093596059114, "grad_norm": 155.69433676198233, "learning_rate": 1.979937402000991e-06, "loss": 0.5619, "step": 96 },
    { "epoch": 0.3185550082101806, "grad_norm": 91.64919893957587, "learning_rate": 1.978778720004222e-06, "loss": 0.5789, "step": 97 },
    { "epoch": 0.3218390804597701, "grad_norm": 58.767110092919424, "learning_rate": 1.9775878701631836e-06, "loss": 0.6161, "step": 98 },
    { "epoch": 0.3251231527093596, "grad_norm": 86.39938638182066, "learning_rate": 1.976364891615498e-06, "loss": 0.5871, "step": 99 },
    { "epoch": 0.3284072249589491, "grad_norm": 30.43501640235004, "learning_rate": 1.975109824554707e-06, "loss": 0.6057, "step": 100 },
    { "epoch": 0.33169129720853857, "grad_norm": 65.6953811514428, "learning_rate": 1.9738227102289505e-06, "loss": 0.5836, "step": 101 },
    { "epoch": 0.33497536945812806, "grad_norm": 34.69550148683274, "learning_rate": 1.972503590939612e-06, "loss": 0.6239, "step": 102 },
    { "epoch": 0.33825944170771755, "grad_norm": 76.34647471342404, "learning_rate": 1.971152510039926e-06, "loss": 0.6006, "step": 103 },
    { "epoch": 0.3415435139573071, "grad_norm": 116.3926642735392, "learning_rate": 1.9697695119335546e-06, "loss": 0.5907, "step": 104 },
    { "epoch": 0.3448275862068966, "grad_norm": 26.910647996197213, "learning_rate": 1.968354642073129e-06, "loss": 0.5843, "step": 105 },
    { "epoch": 0.34811165845648606, "grad_norm": 125.40744182953463, "learning_rate": 1.9669079469587545e-06, "loss": 0.5907, "step": 106 },
    { "epoch": 0.35139573070607555, "grad_norm": 40.15061266508225, "learning_rate": 1.965429474136482e-06, "loss": 0.6412, "step": 107 },
    { "epoch": 0.35467980295566504, "grad_norm": 31.1618141731111, "learning_rate": 1.963919272196746e-06, "loss": 0.6126, "step": 108 },
    { "epoch": 0.3579638752052545, "grad_norm": 114.23556602704848, "learning_rate": 1.962377390772768e-06, "loss": 0.6034, "step": 109 },
    { "epoch": 0.361247947454844, "grad_norm": 40.58756337364396, "learning_rate": 1.960803880538925e-06, "loss": 0.6223, "step": 110 },
    { "epoch": 0.3645320197044335, "grad_norm": 30.422688962559107, "learning_rate": 1.9591987932090833e-06, "loss": 0.5825, "step": 111 },
    { "epoch": 0.367816091954023, "grad_norm": 51.96081689297722, "learning_rate": 1.9575621815348996e-06, "loss": 0.5922, "step": 112 },
    { "epoch": 0.37110016420361247, "grad_norm": 151.3652907897673, "learning_rate": 1.9558940993040883e-06, "loss": 0.6014, "step": 113 },
    { "epoch": 0.37438423645320196, "grad_norm": 179.13984258713396, "learning_rate": 1.9541946013386506e-06, "loss": 0.5812, "step": 114 },
    { "epoch": 0.37766830870279144, "grad_norm": 209.51648981274894, "learning_rate": 1.9524637434930776e-06, "loss": 0.5744, "step": 115 },
    { "epoch": 0.38095238095238093, "grad_norm": 77.78117906962474, "learning_rate": 1.950701582652509e-06, "loss": 0.5924, "step": 116 },
    { "epoch": 0.3842364532019704, "grad_norm": 56.596618911823576, "learning_rate": 1.9489081767308697e-06, "loss": 0.5951, "step": 117 },
    { "epoch": 0.38752052545155996, "grad_norm": 25.934351726934093, "learning_rate": 1.9470835846689596e-06, "loss": 0.6187, "step": 118 },
    { "epoch": 0.39080459770114945, "grad_norm": 78.54544999607046, "learning_rate": 1.9452278664325227e-06, "loss": 0.609, "step": 119 },
    { "epoch": 0.39408866995073893, "grad_norm": 79.08588241935573, "learning_rate": 1.943341083010272e-06, "loss": 0.56, "step": 120 },
    { "epoch": 0.3973727422003284, "grad_norm": 72.17985855738158, "learning_rate": 1.9414232964118892e-06, "loss": 0.5955, "step": 121 },
    { "epoch": 0.4006568144499179, "grad_norm": 56.33038272717607, "learning_rate": 1.9394745696659807e-06, "loss": 0.5745, "step": 122 },
    { "epoch": 0.4039408866995074, "grad_norm": 101.16342455706896, "learning_rate": 1.9374949668180134e-06, "loss": 0.5891, "step": 123 },
    { "epoch": 0.4072249589490969, "grad_norm": 67.57709272161114, "learning_rate": 1.935484552928204e-06, "loss": 0.6101, "step": 124 },
    { "epoch": 0.41050903119868637, "grad_norm": 22.702908704179823, "learning_rate": 1.9334433940693826e-06, "loss": 0.5904, "step": 125 },
    { "epoch": 0.41379310344827586, "grad_norm": 112.85103846233335, "learning_rate": 1.9313715573248235e-06, "loss": 0.5888, "step": 126 },
    { "epoch": 0.41707717569786534, "grad_norm": 74.44347415115806, "learning_rate": 1.929269110786037e-06, "loss": 0.6, "step": 127 },
    { "epoch": 0.42036124794745483, "grad_norm": 129.01611720733342, "learning_rate": 1.9271361235505337e-06, "loss": 0.5721, "step": 128 },
    { "epoch": 0.4236453201970443, "grad_norm": 21.624403814788998, "learning_rate": 1.9249726657195533e-06, "loss": 0.5868, "step": 129 },
    { "epoch": 0.4269293924466338, "grad_norm": 43.134924014466655, "learning_rate": 1.9227788083957586e-06, "loss": 0.5981, "step": 130 },
    { "epoch": 0.4302134646962233, "grad_norm": 102.45985784089031, "learning_rate": 1.9205546236809032e-06, "loss": 0.5906, "step": 131 },
    { "epoch": 0.43349753694581283, "grad_norm": 60.949447673351266, "learning_rate": 1.9183001846734574e-06, "loss": 0.6352, "step": 132 },
    { "epoch": 0.4367816091954023, "grad_norm": 401.41383723285713, "learning_rate": 1.9160155654662073e-06, "loss": 0.5842, "step": 133 },
    { "epoch": 0.4400656814449918, "grad_norm": 35.25770567828027, "learning_rate": 1.913700841143821e-06, "loss": 0.5862, "step": 134 },
    { "epoch": 0.4433497536945813, "grad_norm": 914.3282302161468, "learning_rate": 1.9113560877803796e-06, "loss": 0.5763, "step": 135 },
    { "epoch": 0.4466338259441708, "grad_norm": 35.54605377595188, "learning_rate": 1.908981382436876e-06, "loss": 0.5727, "step": 136 },
    { "epoch": 0.44991789819376027, "grad_norm": 43.4767061362858, "learning_rate": 1.906576803158686e-06, "loss": 0.5969, "step": 137 },
    { "epoch": 0.45320197044334976, "grad_norm": 23.151864647002014, "learning_rate": 1.904142428972999e-06, "loss": 0.5726, "step": 138 },
    { "epoch": 0.45648604269293924, "grad_norm": 19.480214528552803, "learning_rate": 1.9016783398862226e-06, "loss": 0.581, "step": 139 },
    { "epoch": 0.45977011494252873, "grad_norm": 38.790487317877094, "learning_rate": 1.8991846168813544e-06, "loss": 0.6188, "step": 140 },
    { "epoch": 0.4630541871921182, "grad_norm": 27.00623790662471, "learning_rate": 1.8966613419153178e-06, "loss": 0.6242, "step": 141 },
    { "epoch": 0.4663382594417077, "grad_norm": 22.367501221325167, "learning_rate": 1.8941085979162713e-06, "loss": 0.6027, "step": 142 },
    { "epoch": 0.4696223316912972, "grad_norm": 447.12696125749153, "learning_rate": 1.8915264687808804e-06, "loss": 0.5907, "step": 143 },
    { "epoch": 0.4729064039408867, "grad_norm": 32.44003369156479, "learning_rate": 1.8889150393715625e-06, "loss": 0.6062, "step": 144 },
    { "epoch": 0.47619047619047616, "grad_norm": 144.01477406191725, "learning_rate": 1.8862743955136963e-06, "loss": 0.6012, "step": 145 },
    { "epoch": 0.4794745484400657, "grad_norm": 75.34671278459541, "learning_rate": 1.8836046239928022e-06, "loss": 0.5843, "step": 146 },
    { "epoch": 0.4827586206896552, "grad_norm": 46.75651484429922, "learning_rate": 1.8809058125516893e-06, "loss": 0.5883, "step": 147 },
    { "epoch": 0.4860426929392447, "grad_norm": 92.53132858786016, "learning_rate": 1.8781780498875723e-06, "loss": 0.5989, "step": 148 },
    { "epoch": 0.48932676518883417, "grad_norm": 22.671976729298738, "learning_rate": 1.875421425649156e-06, "loss": 0.6094, "step": 149 },
    { "epoch": 0.49261083743842365, "grad_norm": 26.928492455992494, "learning_rate": 1.8726360304336893e-06, "loss": 0.5949, "step": 150 },
    { "epoch": 0.49589490968801314, "grad_norm": 286.98230638362656, "learning_rate": 1.8698219557839872e-06, "loss": 0.595, "step": 151 },
    { "epoch": 0.49917898193760263, "grad_norm": 79.72951321359584, "learning_rate": 1.8669792941854229e-06, "loss": 0.6124, "step": 152 },
    { "epoch": 0.49917898193760263, "eval_loss": 0.35242295265197754, "eval_runtime": 254.4417, "eval_samples_per_second": 8.12, "eval_steps_per_second": 0.255, "step": 152 },
    { "epoch": 0.5024630541871922, "grad_norm": 64.33414761071143, "learning_rate": 1.8641081390628876e-06, "loss": 0.585, "step": 153 },
    { "epoch": 0.5057471264367817, "grad_norm": 31.517353545501283, "learning_rate": 1.8612085847777212e-06, "loss": 0.602, "step": 154 },
    { "epoch": 0.5090311986863711, "grad_norm": 45.827071354966, "learning_rate": 1.858280726624609e-06, "loss": 0.5806, "step": 155 },
    { "epoch": 0.5123152709359606, "grad_norm": 18.978499335170554, "learning_rate": 1.855324660828452e-06, "loss": 0.6165, "step": 156 },
    { "epoch": 0.5155993431855501, "grad_norm": 67.94220614733077, "learning_rate": 1.8523404845412025e-06, "loss": 0.6229, "step": 157 },
    { "epoch": 0.5188834154351396, "grad_norm": 19.582077106202128, "learning_rate": 1.8493282958386739e-06, "loss": 0.5532, "step": 158 },
    { "epoch": 0.5221674876847291, "grad_norm": 53.75335205438214, "learning_rate": 1.846288193717314e-06, "loss": 0.6104, "step": 159 },
    { "epoch": 0.5254515599343186, "grad_norm": 38.261538748017415, "learning_rate": 1.8432202780909538e-06, "loss": 0.5848, "step": 160 },
    { "epoch": 0.5287356321839081, "grad_norm": 222.09670680924359, "learning_rate": 1.8401246497875235e-06, "loss": 0.5867, "step": 161 },
    { "epoch": 0.5320197044334976, "grad_norm": 571.9505490444766, "learning_rate": 1.8370014105457376e-06, "loss": 0.5695, "step": 162 },
    { "epoch": 0.535303776683087, "grad_norm": 48.34961117571771, "learning_rate": 1.8338506630117526e-06, "loss": 0.5905, "step": 163 },
    { "epoch": 0.5385878489326765, "grad_norm": 39.05700378122741, "learning_rate": 1.830672510735793e-06, "loss": 0.5984, "step": 164 },
    { "epoch": 0.541871921182266, "grad_norm": 58.20418532997132, "learning_rate": 1.8274670581687478e-06, "loss": 0.587, "step": 165 },
    { "epoch": 0.5451559934318555, "grad_norm": 245.8655111712564, "learning_rate": 1.8242344106587377e-06, "loss": 0.6031, "step": 166 },
    { "epoch": 0.548440065681445, "grad_norm": 105.7509385578569, "learning_rate": 1.8209746744476536e-06, "loss": 0.5801, "step": 167 },
    { "epoch": 0.5517241379310345, "grad_norm": 97.03979541618602, "learning_rate": 1.8176879566676639e-06, "loss": 0.6077, "step": 168 },
    { "epoch": 0.555008210180624, "grad_norm": 14.137405437866256, "learning_rate": 1.8143743653376943e-06, "loss": 0.592, "step": 169 },
    { "epoch": 0.5582922824302134, "grad_norm": 46.24134681947936, "learning_rate": 1.811034009359877e-06, "loss": 0.5912, "step": 170 },
    { "epoch": 0.5615763546798029, "grad_norm": 101.35151290055389, "learning_rate": 1.8076669985159725e-06, "loss": 0.6158, "step": 171 },
    { "epoch": 0.5648604269293924, "grad_norm": 140.1896691880712, "learning_rate": 1.8042734434637613e-06, "loss": 0.5692, "step": 172 },
    { "epoch": 0.5681444991789819, "grad_norm": 226.4192589085135, "learning_rate": 1.8008534557334063e-06, "loss": 0.5754, "step": 173 },
    { "epoch": 0.5714285714285714, "grad_norm": 45.671389908527075, "learning_rate": 1.7974071477237885e-06, "loss": 0.6016, "step": 174 },
    { "epoch": 0.5747126436781609, "grad_norm": 19.244548559672378, "learning_rate": 1.7939346326988125e-06, "loss": 0.5594, "step": 175 },
    { "epoch": 0.5779967159277504, "grad_norm": 201.20738634541007, "learning_rate": 1.7904360247836834e-06, "loss": 0.5781, "step": 176 },
    { "epoch": 0.5812807881773399, "grad_norm": 41.13648160887617, "learning_rate": 1.7869114389611573e-06, "loss": 0.5769, "step": 177 },
    { "epoch": 0.5845648604269293, "grad_norm": 287.9411519318713, "learning_rate": 1.7833609910677612e-06, "loss": 0.5819, "step": 178 },
    { "epoch": 0.5878489326765188, "grad_norm": 24.18855361442465, "learning_rate": 1.779784797789987e-06, "loss": 0.5964, "step": 179 },
    { "epoch": 0.5911330049261084, "grad_norm": 24.012668455231225, "learning_rate": 1.7761829766604554e-06, "loss": 0.6134, "step": 180 },
    { "epoch": 0.5944170771756979, "grad_norm": 28.51880265050756, "learning_rate": 1.772555646054055e-06, "loss": 0.602, "step": 181 },
    { "epoch": 0.5977011494252874, "grad_norm": 15.794558397944432, "learning_rate": 1.768902925184049e-06, "loss": 0.5821, "step": 182 },
    { "epoch": 0.6009852216748769, "grad_norm": 58.628563783312366, "learning_rate": 1.7652249340981604e-06, "loss": 0.6285, "step": 183 },
    { "epoch": 0.6042692939244664, "grad_norm": 17.619228357058972, "learning_rate": 1.7615217936746242e-06, "loss": 0.596, "step": 184 },
    { "epoch": 0.6075533661740559, "grad_norm": 207.93744899824176, "learning_rate": 1.7577936256182167e-06, "loss": 0.5917, "step": 185 },
    { "epoch": 0.6108374384236454, "grad_norm": 41.840591224822674, "learning_rate": 1.754040552456253e-06, "loss": 0.5866, "step": 186 },
    { "epoch": 0.6141215106732348, "grad_norm": 93.8154760971863, "learning_rate": 1.7502626975345626e-06, "loss": 0.6073, "step": 187 },
    { "epoch": 0.6174055829228243, "grad_norm": 17.492683199274612, "learning_rate": 1.746460185013435e-06, "loss": 0.5964, "step": 188 },
    { "epoch": 0.6206896551724138, "grad_norm": 29.276597988664253, "learning_rate": 1.742633139863538e-06, "loss": 0.5754, "step": 189 },
    { "epoch": 0.6239737274220033, "grad_norm": 68.18460724104617, "learning_rate": 1.7387816878618117e-06, "loss": 0.5803, "step": 190 },
    { "epoch": 0.6272577996715928, "grad_norm": 56.35078039350749, "learning_rate": 1.7349059555873343e-06, "loss": 0.5787, "step": 191 },
    { "epoch": 0.6305418719211823, "grad_norm": 43.29495360037644, "learning_rate": 1.7310060704171627e-06, "loss": 0.5522, "step": 192 },
    { "epoch": 0.6338259441707718, "grad_norm": 68.5551565674711, "learning_rate": 1.7270821605221446e-06, "loss": 0.6055, "step": 193 },
    { "epoch": 0.6371100164203612, "grad_norm": 117.39744403334397, "learning_rate": 1.723134354862708e-06, "loss": 0.5713, "step": 194 },
    { "epoch": 0.6403940886699507, "grad_norm": 189.04818071139826, "learning_rate": 1.7191627831846222e-06, "loss": 0.5748, "step": 195 },
    { "epoch": 0.6436781609195402, "grad_norm": 74.96348879368328, "learning_rate": 1.7151675760147325e-06, "loss": 0.5767, "step": 196 },
    { "epoch": 0.6469622331691297, "grad_norm": 19.283409944711153, "learning_rate": 1.7111488646566725e-06, "loss": 0.5835, "step": 197 },
    { "epoch": 0.6502463054187192, "grad_norm": 28.125800950282475, "learning_rate": 1.7071067811865474e-06, "loss": 0.6006, "step": 198 },
    { "epoch": 0.6535303776683087, "grad_norm": 197.02038970341124, "learning_rate": 1.7030414584485934e-06, "loss": 0.6071, "step": 199 },
    { "epoch": 0.6568144499178982, "grad_norm": 39.027824525327794, "learning_rate": 1.6989530300508123e-06, "loss": 0.5967, "step": 200 },
    { "epoch": 0.6600985221674877, "grad_norm": 31.571940924781416, "learning_rate": 1.6948416303605793e-06, "loss": 0.6396, "step": 201 },
    { "epoch": 0.6633825944170771, "grad_norm": 129.62214181745804, "learning_rate": 1.6907073945002288e-06, "loss": 0.5821, "step": 202 },
    { "epoch": 0.6666666666666666, "grad_norm": 162.51175499429485, "learning_rate": 1.6865504583426116e-06, "loss": 0.6123, "step": 203 },
    { "epoch": 0.6699507389162561, "grad_norm": 37.68050097895022, "learning_rate": 1.6823709585066306e-06, "loss": 0.578, "step": 204 },
    { "epoch": 0.6732348111658456, "grad_norm": 20.017550125639158, "learning_rate": 1.6781690323527509e-06, "loss": 0.5854, "step": 205 },
    { "epoch": 0.6765188834154351, "grad_norm": 76.61122117606573, "learning_rate": 1.6739448179784844e-06, "loss": 0.589, "step": 206 },
    { "epoch": 0.6798029556650246, "grad_norm": 22.411521664367136, "learning_rate": 1.6696984542138519e-06, "loss": 0.5844, "step": 207 },
    { "epoch": 0.6830870279146142, "grad_norm": 98.32888397272134, "learning_rate": 1.6654300806168206e-06, "loss": 0.5972, "step": 208 },
    { "epoch": 0.6863711001642037, "grad_norm": 25.354018912353993, "learning_rate": 1.661139837468717e-06, "loss": 0.5749, "step": 209 },
    { "epoch": 0.6896551724137931, "grad_norm": 109.57794389864921, "learning_rate": 1.6568278657696162e-06, "loss": 0.5318, "step": 210 },
    { "epoch": 0.6929392446633826, "grad_norm": 184.91691946093704, "learning_rate": 1.6524943072337092e-06, "loss": 0.5955, "step": 211 },
    { "epoch": 0.6962233169129721, "grad_norm": 51.18780040494431, "learning_rate": 1.6481393042846442e-06, "loss": 0.5899, "step": 212 },
    { "epoch": 0.6995073891625616, "grad_norm": 93.66347257011115, "learning_rate": 1.6437630000508462e-06, "loss": 0.5723, "step": 213 },
    { "epoch": 0.7027914614121511, "grad_norm": 49.27370857815919, "learning_rate": 1.6393655383608132e-06, "loss": 0.6148, "step": 214 },
    { "epoch": 0.7060755336617406, "grad_norm": 25.246859061737737, "learning_rate": 1.6349470637383888e-06, "loss": 0.589, "step": 215 },
    { "epoch": 0.7093596059113301, "grad_norm": 112.54633084879946, "learning_rate": 1.630507721398013e-06, "loss": 0.6262, "step": 216 },
    { "epoch": 0.7126436781609196, "grad_norm": 59.80877460933437, "learning_rate": 1.6260476572399493e-06, "loss": 0.573, "step": 217 },
    { "epoch": 0.715927750410509, "grad_norm": 221.05125034165957, "learning_rate": 1.6215670178454892e-06, "loss": 0.6132, "step": 218 },
    { "epoch": 0.7192118226600985, "grad_norm": 32.62986909195134, "learning_rate": 1.6170659504721363e-06, "loss": 0.5978, "step": 219 },
    { "epoch": 0.722495894909688, "grad_norm": 79.18706822806207, "learning_rate": 1.6125446030487642e-06, "loss": 0.6197, "step": 220 },
    { "epoch": 0.7257799671592775, "grad_norm": 646.9711401182135, "learning_rate": 1.6080031241707576e-06, "loss": 0.5909, "step": 221 },
    { "epoch": 0.729064039408867, "grad_norm": 227.7039164426062, "learning_rate": 1.6034416630951265e-06, "loss": 0.5617, "step": 222 },
    { "epoch": 0.7323481116584565, "grad_norm": 86.24663093418314, "learning_rate": 1.5988603697356009e-06, "loss": 0.568, "step": 223 },
    { "epoch": 0.735632183908046, "grad_norm": 28.186646002874514, "learning_rate": 1.5942593946577065e-06, "loss": 0.5739, "step": 224 },
    { "epoch": 0.7389162561576355, "grad_norm": 114.91672123046412, "learning_rate": 1.5896388890738127e-06, "loss": 0.5854, "step": 225 },
    { "epoch": 0.7422003284072249, "grad_norm": 110.20008502969989, "learning_rate": 1.5849990048381648e-06, "loss": 0.5792, "step": 226 },
    { "epoch": 0.7454844006568144, "grad_norm": 118.55251219758163, "learning_rate": 1.5803398944418933e-06, "loss": 0.5776, "step": 227 },
    { "epoch": 0.7487684729064039, "grad_norm": 55.68698315902024, "learning_rate": 1.575661711008002e-06, "loss": 0.5644, "step": 228 },
    { "epoch": 0.7487684729064039, "eval_loss": 0.3468731641769409, "eval_runtime": 254.0407, "eval_samples_per_second": 8.133, "eval_steps_per_second": 0.256, "step": 228 },
    { "epoch": 0.7520525451559934, "grad_norm": 32.913448787991214, "learning_rate": 1.570964608286336e-06, "loss": 0.5904, "step": 229 },
    { "epoch": 0.7553366174055829, "grad_norm": 48.81085602810963, "learning_rate": 1.566248740648527e-06, "loss": 0.585, "step": 230 },
    { "epoch": 0.7586206896551724, "grad_norm": 43.34405794275439, "learning_rate": 1.5615142630829228e-06, "loss": 0.5503, "step": 231 },
    { "epoch": 0.7619047619047619, "grad_norm": 24.78222801953508, "learning_rate": 1.5567613311894907e-06, "loss": 0.6158, "step": 232 },
    { "epoch": 0.7651888341543513, "grad_norm": 20.01938395622359, "learning_rate": 1.5519901011747043e-06, "loss": 0.5622, "step": 233 },
    { "epoch": 0.7684729064039408, "grad_norm": 128.40682635925788, "learning_rate": 1.5472007298464116e-06, "loss": 0.5866, "step": 234 },
    { "epoch": 0.7717569786535303, "grad_norm": 18.43305197325785, "learning_rate": 1.5423933746086793e-06, "loss": 0.5952, "step": 235 },
    { "epoch": 0.7750410509031199, "grad_norm": 100.43618051137392, "learning_rate": 1.5375681934566202e-06, "loss": 0.5882, "step": 236 },
    { "epoch": 0.7783251231527094, "grad_norm": 24.521874696776806, "learning_rate": 1.5327253449712018e-06, "loss": 0.5784, "step": 237 },
    { "epoch": 0.7816091954022989, "grad_norm": 123.57359702902865, "learning_rate": 1.5278649883140329e-06, "loss": 0.6413, "step": 238 },
    { "epoch": 0.7848932676518884, "grad_norm": 106.32565785443477, "learning_rate": 1.5229872832221333e-06, "loss": 0.5668, "step": 239 },
    { "epoch": 0.7881773399014779, "grad_norm": 99.10065342575062, "learning_rate": 1.5180923900026845e-06, "loss": 0.5928, "step": 240 },
    { "epoch": 0.7914614121510674, "grad_norm": 130.80914850175915, "learning_rate": 1.513180469527761e-06, "loss": 0.5926, "step": 241 },
    { "epoch": 0.7947454844006568, "grad_norm": 94.9924662311697, "learning_rate": 1.5082516832290421e-06, "loss": 0.6016, "step": 242 },
    { "epoch": 0.7980295566502463, "grad_norm": 197.53102698941842, "learning_rate": 1.5033061930925078e-06, "loss": 0.5731, "step": 243 },
    { "epoch": 0.8013136288998358, "grad_norm": 38.788949526243016, "learning_rate": 1.498344161653115e-06, "loss": 0.5963, "step": 244 },
    { "epoch": 0.8045977011494253, "grad_norm": 38.30742214846052, "learning_rate": 1.493365751989454e-06, "loss": 0.5638, "step": 245 },
    { "epoch": 0.8078817733990148, "grad_norm": 648.6594536144033, "learning_rate": 1.4883711277183915e-06, "loss": 0.5868, "step": 246 },
    { "epoch": 0.8111658456486043, "grad_norm": 265.95515256929093, "learning_rate": 1.4833604529896908e-06, "loss": 0.5512, "step": 247 },
    { "epoch": 0.8144499178981938, "grad_norm": 170.80635770181408, "learning_rate": 1.478333892480619e-06, "loss": 0.5994, "step": 248 },
    { "epoch": 0.8177339901477833, "grad_norm": 1398.0497669156412, "learning_rate": 1.4732916113905333e-06, "loss": 0.5887, "step": 249 },
    { "epoch": 0.8210180623973727, "grad_norm": 251.7115266399908, "learning_rate": 1.4682337754354533e-06, "loss": 0.5735, "step": 250 },
    { "epoch": 0.8243021346469622, "grad_norm": 124.88918006964796, "learning_rate": 1.4631605508426121e-06, "loss": 0.6044, "step": 251 },
    { "epoch": 0.8275862068965517, "grad_norm": 608.3413931999685, "learning_rate": 1.4580721043449966e-06, "loss": 0.5628, "step": 252 },
    { "epoch": 0.8308702791461412, "grad_norm": 1194.4815644571033, "learning_rate": 1.4529686031758642e-06, "loss": 0.5933, "step": 253 },
    { "epoch": 0.8341543513957307, "grad_norm": 113.35099537580673, "learning_rate": 1.4478502150632501e-06, "loss": 0.5861, "step": 254 },
    { "epoch": 0.8374384236453202, "grad_norm": 153.99285185333872, "learning_rate": 1.442717108224452e-06, "loss": 0.5812, "step": 255 },
    { "epoch": 0.8407224958949097, "grad_norm": 130.21821279469813, "learning_rate": 1.4375694513605034e-06, "loss": 0.5732, "step": 256 },
    { "epoch": 0.8440065681444991, "grad_norm": 35.99304972206288, "learning_rate": 1.4324074136506282e-06, "loss": 0.5845, "step": 257 },
    { "epoch": 0.8472906403940886, "grad_norm": 12.583104402647741, "learning_rate": 1.4272311647466808e-06, "loss": 0.6195, "step": 258 },
    { "epoch": 0.8505747126436781, "grad_norm": 161.8183385483987, "learning_rate": 1.4220408747675712e-06, "loss": 0.6064, "step": 259 },
    { "epoch": 0.8538587848932676, "grad_norm": 25.593864430902567, "learning_rate": 1.4168367142936734e-06, "loss": 0.5897, "step": 260 },
    { "epoch": 0.8571428571428571, "grad_norm": 60.9645827697532, "learning_rate": 1.411618854361218e-06, "loss": 0.6029, "step": 261 },
    { "epoch": 0.8604269293924466, "grad_norm": 313.86128927339445, "learning_rate": 1.4063874664566732e-06, "loss": 0.5766, "step": 262 },
    { "epoch": 0.8637110016420362, "grad_norm": 376.4774663209203, "learning_rate": 1.401142722511109e-06, "loss": 0.5905, "step": 263 },
    { "epoch": 0.8669950738916257, "grad_norm": 23.684255757274983, "learning_rate": 1.3958847948945428e-06, "loss": 0.5928, "step": 264 },
    { "epoch": 0.8702791461412152, "grad_norm": 18.327052041947013, "learning_rate": 1.3906138564102792e-06, "loss": 0.5659, "step": 265 },
    { "epoch": 0.8735632183908046, "grad_norm": 31.74783455878064, "learning_rate": 1.3853300802892282e-06, "loss": 0.5656, "step": 266 },
    { "epoch": 0.8768472906403941, "grad_norm": 419.0756338024995, "learning_rate": 1.3800336401842127e-06, "loss": 0.5986, "step": 267 },
    { "epoch": 0.8801313628899836, "grad_norm": 77.68111946128553, "learning_rate": 1.3747247101642602e-06, "loss": 0.6006, "step": 268 },
    { "epoch": 0.8834154351395731, "grad_norm": 75.41201589840144, "learning_rate": 1.3694034647088838e-06, "loss": 0.5778, "step": 269 },
    { "epoch": 0.8866995073891626, "grad_norm": 36.241115646339644, "learning_rate": 1.3640700787023462e-06, "loss": 0.565, "step": 270 },
    { "epoch": 0.8899835796387521, "grad_norm": 135.20927104693348, "learning_rate": 1.3587247274279139e-06, "loss": 0.5763, "step": 271 },
    { "epoch": 0.8932676518883416, "grad_norm": 87.93638862635923, "learning_rate": 1.3533675865620936e-06, "loss": 0.5963, "step": 272 },
    { "epoch": 0.896551724137931, "grad_norm": 29.613470120000667, "learning_rate": 1.3479988321688618e-06, "loss": 0.577, "step": 273 },
    { "epoch": 0.8998357963875205, "grad_norm": 14.319123531102576, "learning_rate": 1.3426186406938766e-06, "loss": 0.5961, "step": 274 },
    { "epoch": 0.90311986863711, "grad_norm": 169.5219058735149, "learning_rate": 1.337227188958679e-06, "loss": 0.5897, "step": 275 },
    { "epoch": 0.9064039408866995, "grad_norm": 38.00458314499751, "learning_rate": 1.331824654154881e-06, "loss": 0.5613, "step": 276 },
    { "epoch": 0.909688013136289, "grad_norm": 14.378667726194587, "learning_rate": 1.3264112138383443e-06, "loss": 0.6258, "step": 277 },
    { "epoch": 0.9129720853858785, "grad_norm": 42.72991885435684, "learning_rate": 1.320987045923342e-06, "loss": 0.5773, "step": 278 },
    { "epoch": 0.916256157635468, "grad_norm": 68.40318666339633, "learning_rate": 1.315552328676714e-06, "loss": 0.5839, "step": 279 },
    { "epoch": 0.9195402298850575, "grad_norm": 45.180537540210565, "learning_rate": 1.3101072407120055e-06, "loss": 0.5955, "step": 280 },
    { "epoch": 0.922824302134647, "grad_norm": 39.16288757906327, "learning_rate": 1.3046519609836e-06, "loss": 0.5927, "step": 281 },
    { "epoch": 0.9261083743842364, "grad_norm": 67.73988324657877, "learning_rate": 1.2991866687808353e-06, "loss": 0.5799, "step": 282 },
    { "epoch": 0.9293924466338259, "grad_norm": 353.64259849704393, "learning_rate": 1.2937115437221117e-06, "loss": 0.5764, "step": 283 },
    { "epoch": 0.9326765188834154, "grad_norm": 54.971519691287966, "learning_rate": 1.2882267657489908e-06, "loss": 0.5543, "step": 284 },
    { "epoch": 0.9359605911330049, "grad_norm": 22.759273671919335, "learning_rate": 1.2827325151202782e-06, "loss": 0.582, "step": 285 },
    { "epoch": 0.9392446633825944, "grad_norm": 14.41964639616655, "learning_rate": 1.2772289724061014e-06, "loss": 0.6231, "step": 286 },
    { "epoch": 0.9425287356321839, "grad_norm": 154.5418617403585, "learning_rate": 1.2717163184819759e-06, "loss": 0.6038, "step": 287 },
    { "epoch": 0.9458128078817734, "grad_norm": 14.212387952750024, "learning_rate": 1.2661947345228593e-06, "loss": 0.5684, "step": 288 },
    { "epoch": 0.9490968801313628, "grad_norm": 123.37078826032788, "learning_rate": 1.2606644019971966e-06, "loss": 0.5795, "step": 289 },
    { "epoch": 0.9523809523809523, "grad_norm": 17.029678211667036, "learning_rate": 1.255125502660958e-06, "loss": 0.6055, "step": 290 },
    { "epoch": 0.9556650246305419, "grad_norm": 43.985222402630065, "learning_rate": 1.2495782185516637e-06, "loss": 0.5785, "step": 291 },
    { "epoch": 0.9589490968801314, "grad_norm": 26.180114326110402, "learning_rate": 1.2440227319824022e-06, "loss": 0.5672, "step": 292 },
    { "epoch": 0.9622331691297209, "grad_norm": 58.05962982942946, "learning_rate": 1.2384592255358384e-06, "loss": 0.5723, "step": 293 },
    { "epoch": 0.9655172413793104, "grad_norm": 96.86728362154787, "learning_rate": 1.232887882058212e-06, "loss": 0.5667, "step": 294 },
    { "epoch": 0.9688013136288999, "grad_norm": 31.777624654282178, "learning_rate": 1.2273088846533302e-06, "loss": 0.6115, "step": 295 },
    { "epoch": 0.9720853858784894, "grad_norm": 83.38296129361547, "learning_rate": 1.2217224166765475e-06, "loss": 0.5732, "step": 296 },
    { "epoch": 0.9753694581280788, "grad_norm": 121.56881681339887, "learning_rate": 1.2161286617287418e-06, "loss": 0.5954, "step": 297 },
    { "epoch": 0.9786535303776683, "grad_norm": 54.8264040669832, "learning_rate": 1.2105278036502787e-06, "loss": 0.5931, "step": 298 },
    { "epoch": 0.9819376026272578, "grad_norm": 21.35246678957699, "learning_rate": 1.2049200265149707e-06, "loss": 0.5694, "step": 299 },
    { "epoch": 0.9852216748768473, "grad_norm": 1059.6690924678123, "learning_rate": 1.1993055146240272e-06, "loss": 0.5658, "step": 300 },
    { "epoch": 0.9885057471264368, "grad_norm": 26.188608952399726, "learning_rate": 1.1936844524999966e-06, "loss": 0.5654, "step": 301 },
    { "epoch": 0.9917898193760263, "grad_norm": 21.558049696210244, "learning_rate": 1.1880570248807032e-06, "loss": 0.5839, "step": 302 },
    { "epoch": 0.9950738916256158, "grad_norm": 17.803809249890314, "learning_rate": 1.1824234167131746e-06, "loss": 0.5563, "step": 303 },
    { "epoch": 0.9983579638752053, "grad_norm": 70.52662623832066, "learning_rate": 1.1767838131475654e-06, "loss": 0.5626, "step": 304 },
    { "epoch": 0.9983579638752053, "eval_loss": 0.3444424271583557, "eval_runtime": 255.3718, "eval_samples_per_second": 8.09, "eval_steps_per_second": 0.255, "step": 304 },
    { "epoch": 1.0, "grad_norm": 70.52662623832066, "learning_rate": 1.171138399531068e-06, "loss": 0.6032, "step": 305 },
    { "epoch": 1.0032840722495895, "grad_norm": 65.13191709579829, "learning_rate": 1.1654873614018266e-06, "loss": 0.5511, "step": 306 },
    { "epoch": 1.006568144499179, "grad_norm": 285.9719298771201, "learning_rate": 1.1598308844828345e-06, "loss": 0.6026, "step": 307 },
    { "epoch": 1.0098522167487685, "grad_norm": 37.39321171527541, "learning_rate": 1.154169154675834e-06, "loss": 0.5785, "step": 308 },
    { "epoch": 1.013136288998358, "grad_norm": 41.68071927705891, "learning_rate": 1.1485023580552039e-06, "loss": 0.6114, "step": 309 },
    { "epoch": 1.0164203612479474, "grad_norm": 135.40715808644197, "learning_rate": 1.1428306808618454e-06, "loss": 0.5782, "step": 310 },
    { "epoch": 1.019704433497537, "grad_norm": 22.790140257232764, "learning_rate": 1.137154309497062e-06, "loss": 0.5687, "step": 311 },
    { "epoch": 1.0229885057471264, "grad_norm": 141.91339180106723, "learning_rate": 1.131473430516432e-06, "loss": 0.6137, "step": 312 },
    { "epoch": 1.026272577996716, "grad_norm": 72.39990304750096, "learning_rate": 1.1257882306236775e-06, "loss": 0.6173, "step": 313 },
    { "epoch": 1.0295566502463054, "grad_norm": 44.689199344971264, "learning_rate": 1.1200988966645284e-06, "loss": 0.5684, "step": 314 },
    { "epoch": 1.0328407224958949, "grad_norm": 18.212438241962214, "learning_rate": 1.1144056156205831e-06, "loss": 0.5885, "step": 315 },
    { "epoch": 1.0361247947454844, "grad_norm": 32.514249930046795, "learning_rate": 1.108708574603161e-06, "loss": 0.6074, "step": 316 },
    { "epoch": 1.0394088669950738, "grad_norm": 17.685539914578786, "learning_rate": 1.1030079608471542e-06, "loss": 0.5701, "step": 317 },
    { "epoch": 1.0426929392446633, "grad_norm": 50.50561698330298, "learning_rate": 1.0973039617048747e-06, "loss": 0.6148, "step": 318 },
    { "epoch": 1.0459770114942528, "grad_norm": 30.494423044684428, "learning_rate": 1.0915967646398948e-06, "loss": 0.5631, "step": 319 },
    { "epoch": 1.0492610837438423, "grad_norm": 43.634293011722335, "learning_rate": 1.0858865572208891e-06, "loss": 0.5563, "step": 320 },
    { "epoch": 1.0525451559934318, "grad_norm": 34.28951266031906, "learning_rate": 1.0801735271154667e-06, "loss": 0.5703, "step": 321 },
    { "epoch": 1.0558292282430213, "grad_norm": 40.35540642489708, "learning_rate": 1.0744578620840063e-06, "loss": 0.5682, "step": 322 },
    { "epoch": 1.0591133004926108, "grad_norm": 90.22818294715287, "learning_rate": 1.068739749973484e-06, "loss": 0.5676, "step": 323 },
    { "epoch": 1.0623973727422003, "grad_norm": 27.34387701783233, "learning_rate": 1.0630193787112993e-06, "loss": 0.5751, "step": 324 },
    { "epoch": 1.0656814449917897, "grad_norm": 56.638276613527445, "learning_rate": 1.0572969362990997e-06, "loss": 0.6014, "step": 325 },
    { "epoch": 1.0689655172413792, "grad_norm": 15.866790094277658, "learning_rate": 1.0515726108066023e-06, "loss": 0.5916, "step": 326 },
    { "epoch": 1.0722495894909687, "grad_norm": 12.950547272697525, "learning_rate": 1.0458465903654105e-06, "loss": 0.5734, "step": 327 },
    { "epoch": 1.0755336617405582, "grad_norm": 17.279307686255628, "learning_rate": 1.0401190631628347e-06, "loss": 0.5899, "step": 328 },
    { "epoch": 1.0788177339901477, "grad_norm": 26.36195986983623, "learning_rate": 1.0343902174357038e-06, "loss": 0.5936, "step": 329 },
    { "epoch": 1.0821018062397372, "grad_norm": 53.631919940788016, "learning_rate": 1.0286602414641815e-06, "loss": 0.6059, "step": 330 },
    { "epoch": 1.0853858784893267, "grad_norm": 37.558445588790654, "learning_rate": 1.0229293235655768e-06, "loss": 0.5573, "step": 331 },
    { "epoch": 1.0886699507389164, "grad_norm": 15.958318674221253, "learning_rate": 1.017197652088155e-06, "loss": 0.6102, "step": 332 },
    { "epoch": 1.0919540229885056, "grad_norm": 40.26474283910818, "learning_rate": 1.0114654154049489e-06, "loss": 0.5895, "step": 333 },
    { "epoch": 1.0952380952380953, "grad_norm": 146.08840587905024, "learning_rate": 1.0057328019075668e-06, "loss": 0.5726, "step": 334 },
    { "epoch": 1.0985221674876848, "grad_norm": 202.1576115663822, "learning_rate": 1e-06, "loss": 0.577, "step": 335 },
    { "epoch": 1.1018062397372743, "grad_norm": 14.523463101670913,
| "learning_rate": 9.942671980924335e-07, | |
| "loss": 0.5872, | |
| "step": 336 | |
| }, | |
| { | |
| "epoch": 1.1050903119868638, | |
| "grad_norm": 32.09224127026215, | |
| "learning_rate": 9.885345845950508e-07, | |
| "loss": 0.6071, | |
| "step": 337 | |
| }, | |
| { | |
| "epoch": 1.1083743842364533, | |
| "grad_norm": 13.896096566133576, | |
| "learning_rate": 9.828023479118448e-07, | |
| "loss": 0.5989, | |
| "step": 338 | |
| }, | |
| { | |
| "epoch": 1.1116584564860428, | |
| "grad_norm": 58.06165751181001, | |
| "learning_rate": 9.770706764344234e-07, | |
| "loss": 0.5656, | |
| "step": 339 | |
| }, | |
| { | |
| "epoch": 1.1149425287356323, | |
| "grad_norm": 20.036356068409564, | |
| "learning_rate": 9.713397585358188e-07, | |
| "loss": 0.5858, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 1.1182266009852218, | |
| "grad_norm": 10.100931882003527, | |
| "learning_rate": 9.65609782564296e-07, | |
| "loss": 0.5855, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 1.1215106732348112, | |
| "grad_norm": 13.587121183019379, | |
| "learning_rate": 9.598809368371654e-07, | |
| "loss": 0.6379, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 1.1247947454844007, | |
| "grad_norm": 126.86325897606325, | |
| "learning_rate": 9.541534096345897e-07, | |
| "loss": 0.5546, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 1.1280788177339902, | |
| "grad_norm": 169.89985357006515, | |
| "learning_rate": 9.484273891933981e-07, | |
| "loss": 0.5563, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 1.1313628899835797, | |
| "grad_norm": 49.08212390882699, | |
| "learning_rate": 9.427030637009002e-07, | |
| "loss": 0.5699, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 1.1346469622331692, | |
| "grad_norm": 40.18400735284968, | |
| "learning_rate": 9.369806212887007e-07, | |
| "loss": 0.5715, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 1.1379310344827587, | |
| "grad_norm": 18.860982747952214, | |
| "learning_rate": 9.312602500265159e-07, | |
| "loss": 0.5648, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 1.1412151067323482, | |
| "grad_norm": 10.902272643558419, | |
| "learning_rate": 9.255421379159933e-07, | |
| "loss": 0.5735, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 1.1444991789819376, | |
| "grad_norm": 66.12684936925638, | |
| "learning_rate": 9.198264728845331e-07, | |
| "loss": 0.6154, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 1.1477832512315271, | |
| "grad_norm": 28.02996822136127, | |
| "learning_rate": 9.141134427791109e-07, | |
| "loss": 0.5778, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 1.1510673234811166, | |
| "grad_norm": 30.51347632697842, | |
| "learning_rate": 9.084032353601052e-07, | |
| "loss": 0.6041, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 1.154351395730706, | |
| "grad_norm": 49.552466990210895, | |
| "learning_rate": 9.026960382951252e-07, | |
| "loss": 0.5671, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 1.1576354679802956, | |
| "grad_norm": 44.54866711025706, | |
| "learning_rate": 8.969920391528457e-07, | |
| "loss": 0.5506, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 1.160919540229885, | |
| "grad_norm": 52.856706858816, | |
| "learning_rate": 8.912914253968391e-07, | |
| "loss": 0.554, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 1.1642036124794746, | |
| "grad_norm": 47.388855420890756, | |
| "learning_rate": 8.85594384379417e-07, | |
| "loss": 0.5551, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 1.167487684729064, | |
| "grad_norm": 30.628301446970205, | |
| "learning_rate": 8.799011033354715e-07, | |
| "loss": 0.5831, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 1.1707717569786535, | |
| "grad_norm": 22.952426384099383, | |
| "learning_rate": 8.742117693763227e-07, | |
| "loss": 0.5597, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 1.174055829228243, | |
| "grad_norm": 17.939719949680224, | |
| "learning_rate": 8.685265694835681e-07, | |
| "loss": 0.577, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 1.1773399014778325, | |
| "grad_norm": 27.110777787948248, | |
| "learning_rate": 8.628456905029382e-07, | |
| "loss": 0.5726, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 1.180623973727422, | |
| "grad_norm": 13.896701402873619, | |
| "learning_rate": 8.571693191381544e-07, | |
| "loss": 0.5623, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 1.1839080459770115, | |
| "grad_norm": 15.205774656115123, | |
| "learning_rate": 8.514976419447963e-07, | |
| "loss": 0.5943, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 1.187192118226601, | |
| "grad_norm": 45.58828461857186, | |
| "learning_rate": 8.458308453241663e-07, | |
| "loss": 0.5817, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 1.1904761904761905, | |
| "grad_norm": 116.8441584869502, | |
| "learning_rate": 8.401691155171652e-07, | |
| "loss": 0.5444, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 1.19376026272578, | |
| "grad_norm": 170.81143104953796, | |
| "learning_rate": 8.345126385981735e-07, | |
| "loss": 0.5904, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 1.1970443349753694, | |
| "grad_norm": 41.71381416465335, | |
| "learning_rate": 8.288616004689319e-07, | |
| "loss": 0.5784, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 1.200328407224959, | |
| "grad_norm": 115.88107721819807, | |
| "learning_rate": 8.23216186852435e-07, | |
| "loss": 0.594, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 1.2036124794745484, | |
| "grad_norm": 17.490648876213996, | |
| "learning_rate": 8.175765832868251e-07, | |
| "loss": 0.576, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 1.206896551724138, | |
| "grad_norm": 39.174813390569106, | |
| "learning_rate": 8.11942975119297e-07, | |
| "loss": 0.5627, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 1.2101806239737274, | |
| "grad_norm": 668.8532117187957, | |
| "learning_rate": 8.063155475000035e-07, | |
| "loss": 0.5987, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 1.2134646962233169, | |
| "grad_norm": 29.45366016595164, | |
| "learning_rate": 8.006944853759732e-07, | |
| "loss": 0.5788, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 1.2167487684729064, | |
| "grad_norm": 19.85954794293891, | |
| "learning_rate": 7.950799734850291e-07, | |
| "loss": 0.5552, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 1.2200328407224958, | |
| "grad_norm": 11.951933824951752, | |
| "learning_rate": 7.894721963497213e-07, | |
| "loss": 0.5991, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 1.2233169129720853, | |
| "grad_norm": 51.890411987411, | |
| "learning_rate": 7.838713382712583e-07, | |
| "loss": 0.5494, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 1.2266009852216748, | |
| "grad_norm": 46.51346153636678, | |
| "learning_rate": 7.78277583323452e-07, | |
| "loss": 0.5601, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 1.2298850574712643, | |
| "grad_norm": 32.19714846447316, | |
| "learning_rate": 7.726911153466697e-07, | |
| "loss": 0.5891, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 1.2331691297208538, | |
| "grad_norm": 30.416108844072596, | |
| "learning_rate": 7.671121179417879e-07, | |
| "loss": 0.57, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 1.2364532019704433, | |
| "grad_norm": 9.175156240446686, | |
| "learning_rate": 7.615407744641618e-07, | |
| "loss": 0.592, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.2397372742200328, | |
| "grad_norm": 16.89408306679232, | |
| "learning_rate": 7.559772680175978e-07, | |
| "loss": 0.5581, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 1.2430213464696223, | |
| "grad_norm": 36.581208571354075, | |
| "learning_rate": 7.504217814483363e-07, | |
| "loss": 0.5684, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 1.2463054187192117, | |
| "grad_norm": 197.1580272004894, | |
| "learning_rate": 7.448744973390422e-07, | |
| "loss": 0.5843, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.2463054187192117, | |
| "eval_loss": 0.3425952196121216, | |
| "eval_runtime": 254.289, | |
| "eval_samples_per_second": 8.125, | |
| "eval_steps_per_second": 0.256, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.2495894909688012, | |
| "grad_norm": 38.00508215291586, | |
| "learning_rate": 7.393355980028038e-07, | |
| "loss": 0.5705, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 1.2528735632183907, | |
| "grad_norm": 36.66893410967853, | |
| "learning_rate": 7.338052654771407e-07, | |
| "loss": 0.5621, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 1.2561576354679804, | |
| "grad_norm": 72.0846419386083, | |
| "learning_rate": 7.28283681518024e-07, | |
| "loss": 0.6053, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 1.2594417077175697, | |
| "grad_norm": 12.87863278880059, | |
| "learning_rate": 7.227710275938987e-07, | |
| "loss": 0.585, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 1.2627257799671594, | |
| "grad_norm": 13.782452001612908, | |
| "learning_rate": 7.172674848797217e-07, | |
| "loss": 0.5973, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 1.2660098522167487, | |
| "grad_norm": 208.71637259406816, | |
| "learning_rate": 7.117732342510092e-07, | |
| "loss": 0.586, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 1.2692939244663384, | |
| "grad_norm": 16.912304799585513, | |
| "learning_rate": 7.062884562778882e-07, | |
| "loss": 0.5636, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 1.2725779967159276, | |
| "grad_norm": 28.08532516731275, | |
| "learning_rate": 7.008133312191649e-07, | |
| "loss": 0.5897, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 1.2758620689655173, | |
| "grad_norm": 55.25592778507127, | |
| "learning_rate": 6.953480390164e-07, | |
| "loss": 0.5581, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 1.2791461412151066, | |
| "grad_norm": 52.471536677355836, | |
| "learning_rate": 6.898927592879944e-07, | |
| "loss": 0.5488, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 1.2824302134646963, | |
| "grad_norm": 33.60924814778164, | |
| "learning_rate": 6.844476713232862e-07, | |
| "loss": 0.5452, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 1.2857142857142856, | |
| "grad_norm": 44.60405738816861, | |
| "learning_rate": 6.79012954076658e-07, | |
| "loss": 0.6057, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 1.2889983579638753, | |
| "grad_norm": 52.13213673114732, | |
| "learning_rate": 6.735887861616555e-07, | |
| "loss": 0.5718, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 1.2922824302134646, | |
| "grad_norm": 15.614998167223849, | |
| "learning_rate": 6.681753458451189e-07, | |
| "loss": 0.6077, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 1.2955665024630543, | |
| "grad_norm": 21.557430927559825, | |
| "learning_rate": 6.627728110413213e-07, | |
| "loss": 0.5724, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 1.2988505747126438, | |
| "grad_norm": 23.88672470819002, | |
| "learning_rate": 6.573813593061235e-07, | |
| "loss": 0.5623, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 1.3021346469622332, | |
| "grad_norm": 95.59341827721282, | |
| "learning_rate": 6.520011678311381e-07, | |
| "loss": 0.5932, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 1.3054187192118227, | |
| "grad_norm": 44.267252382206344, | |
| "learning_rate": 6.466324134379065e-07, | |
| "loss": 0.5699, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 1.3087027914614122, | |
| "grad_norm": 20.628188906562613, | |
| "learning_rate": 6.412752725720864e-07, | |
| "loss": 0.5971, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 1.3119868637110017, | |
| "grad_norm": 16.82849216948797, | |
| "learning_rate": 6.359299212976534e-07, | |
| "loss": 0.5843, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.3152709359605912, | |
| "grad_norm": 18.08566660075251, | |
| "learning_rate": 6.305965352911161e-07, | |
| "loss": 0.5404, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 1.3185550082101807, | |
| "grad_norm": 49.91580760003982, | |
| "learning_rate": 6.252752898357397e-07, | |
| "loss": 0.5531, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 1.3218390804597702, | |
| "grad_norm": 15.520146318832731, | |
| "learning_rate": 6.199663598157875e-07, | |
| "loss": 0.5872, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 1.3251231527093597, | |
| "grad_norm": 14.249014867996666, | |
| "learning_rate": 6.146699197107715e-07, | |
| "loss": 0.5623, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 1.3284072249589491, | |
| "grad_norm": 13.497082574794579, | |
| "learning_rate": 6.093861435897207e-07, | |
| "loss": 0.5707, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 1.3316912972085386, | |
| "grad_norm": 26.344677464265622, | |
| "learning_rate": 6.041152051054575e-07, | |
| "loss": 0.5597, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 1.3349753694581281, | |
| "grad_norm": 14.044122777177984, | |
| "learning_rate": 5.988572774888912e-07, | |
| "loss": 0.5924, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 1.3382594417077176, | |
| "grad_norm": 52.51863026094184, | |
| "learning_rate": 5.936125335433264e-07, | |
| "loss": 0.5774, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 1.341543513957307, | |
| "grad_norm": 36.13736458430556, | |
| "learning_rate": 5.88381145638782e-07, | |
| "loss": 0.5676, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 1.3448275862068966, | |
| "grad_norm": 15.292686883515266, | |
| "learning_rate": 5.83163285706327e-07, | |
| "loss": 0.5533, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 1.348111658456486, | |
| "grad_norm": 87.55364165940539, | |
| "learning_rate": 5.779591252324286e-07, | |
| "loss": 0.5676, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 1.3513957307060755, | |
| "grad_norm": 90.41445749482193, | |
| "learning_rate": 5.72768835253319e-07, | |
| "loss": 0.6106, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 1.354679802955665, | |
| "grad_norm": 75.68880847383154, | |
| "learning_rate": 5.67592586349372e-07, | |
| "loss": 0.5798, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 1.3579638752052545, | |
| "grad_norm": 24.386055319065594, | |
| "learning_rate": 5.624305486394967e-07, | |
| "loss": 0.5831, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 1.361247947454844, | |
| "grad_norm": 218.5561635346433, | |
| "learning_rate": 5.57282891775548e-07, | |
| "loss": 0.5958, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 1.3645320197044335, | |
| "grad_norm": 38.47004577556696, | |
| "learning_rate": 5.5214978493675e-07, | |
| "loss": 0.5522, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 1.367816091954023, | |
| "grad_norm": 11.116447526825615, | |
| "learning_rate": 5.470313968241358e-07, | |
| "loss": 0.5655, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 1.3711001642036125, | |
| "grad_norm": 33.49087738887472, | |
| "learning_rate": 5.419278956550036e-07, | |
| "loss": 0.5818, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 1.374384236453202, | |
| "grad_norm": 45.327655233207565, | |
| "learning_rate": 5.368394491573876e-07, | |
| "loss": 0.5618, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 1.3776683087027914, | |
| "grad_norm": 266.5398840292457, | |
| "learning_rate": 5.317662245645469e-07, | |
| "loss": 0.5549, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 1.380952380952381, | |
| "grad_norm": 20.553987754267183, | |
| "learning_rate": 5.267083886094668e-07, | |
| "loss": 0.5719, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 1.3842364532019704, | |
| "grad_norm": 74.60343344764495, | |
| "learning_rate": 5.216661075193813e-07, | |
| "loss": 0.5709, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 1.38752052545156, | |
| "grad_norm": 81.24452905189378, | |
| "learning_rate": 5.166395470103091e-07, | |
| "loss": 0.5839, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 1.3908045977011494, | |
| "grad_norm": 61.57043976405265, | |
| "learning_rate": 5.116288722816085e-07, | |
| "loss": 0.5881, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 1.3940886699507389, | |
| "grad_norm": 66.61917309787378, | |
| "learning_rate": 5.066342480105459e-07, | |
| "loss": 0.5396, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 1.3973727422003284, | |
| "grad_norm": 32.18086853290272, | |
| "learning_rate": 5.016558383468851e-07, | |
| "loss": 0.5743, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 1.4006568144499179, | |
| "grad_norm": 20.082111947795827, | |
| "learning_rate": 4.966938069074921e-07, | |
| "loss": 0.5531, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 1.4039408866995073, | |
| "grad_norm": 35.324879089856665, | |
| "learning_rate": 4.91748316770958e-07, | |
| "loss": 0.5685, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 1.4072249589490968, | |
| "grad_norm": 52.60744635091702, | |
| "learning_rate": 4.868195304722391e-07, | |
| "loss": 0.5892, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 1.4105090311986863, | |
| "grad_norm": 32.96054620176144, | |
| "learning_rate": 4.819076099973152e-07, | |
| "loss": 0.5593, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 1.4137931034482758, | |
| "grad_norm": 203.82839428938274, | |
| "learning_rate": 4.770127167778669e-07, | |
| "loss": 0.5699, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 1.4170771756978653, | |
| "grad_norm": 47.57730637955671, | |
| "learning_rate": 4.7213501168596746e-07, | |
| "loss": 0.5789, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 1.4203612479474548, | |
| "grad_norm": 67.44290592976088, | |
| "learning_rate": 4.6727465502879846e-07, | |
| "loss": 0.5556, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 1.4236453201970443, | |
| "grad_norm": 44.034037450189444, | |
| "learning_rate": 4.6243180654337966e-07, | |
| "loss": 0.5549, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 1.4269293924466337, | |
| "grad_norm": 16.61985345751509, | |
| "learning_rate": 4.5760662539132077e-07, | |
| "loss": 0.5747, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 1.4302134646962232, | |
| "grad_norm": 12.882168633818488, | |
| "learning_rate": 4.5279927015358833e-07, | |
| "loss": 0.5717, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 1.4334975369458127, | |
| "grad_norm": 20.653148975087017, | |
| "learning_rate": 4.480098988252957e-07, | |
| "loss": 0.6128, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 1.4367816091954024, | |
| "grad_norm": 23.18939767829898, | |
| "learning_rate": 4.4323866881050945e-07, | |
| "loss": 0.5692, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 1.4400656814449917, | |
| "grad_norm": 31.061046848137774, | |
| "learning_rate": 4.384857369170771e-07, | |
| "loss": 0.5611, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 1.4433497536945814, | |
| "grad_norm": 62.563561066656575, | |
| "learning_rate": 4.337512593514728e-07, | |
| "loss": 0.56, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 1.4466338259441707, | |
| "grad_norm": 17.20298230988248, | |
| "learning_rate": 4.290353917136639e-07, | |
| "loss": 0.5496, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 1.4499178981937604, | |
| "grad_norm": 10.022400404488588, | |
| "learning_rate": 4.2433828899199807e-07, | |
| "loss": 0.5743, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 1.4532019704433496, | |
| "grad_norm": 29.312045763415277, | |
| "learning_rate": 4.1966010555810694e-07, | |
| "loss": 0.5445, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 1.4564860426929394, | |
| "grad_norm": 31.664457234542283, | |
| "learning_rate": 4.150009951618355e-07, | |
| "loss": 0.55, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 1.4597701149425286, | |
| "grad_norm": 23.68782329905495, | |
| "learning_rate": 4.103611109261872e-07, | |
| "loss": 0.5957, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 1.4630541871921183, | |
| "grad_norm": 15.601252467779627, | |
| "learning_rate": 4.0574060534229327e-07, | |
| "loss": 0.5965, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 1.4663382594417076, | |
| "grad_norm": 16.094218846568037, | |
| "learning_rate": 4.011396302643988e-07, | |
| "loss": 0.5731, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 1.4696223316912973, | |
| "grad_norm": 45.95169139269504, | |
| "learning_rate": 3.965583369048737e-07, | |
| "loss": 0.5778, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 1.4729064039408866, | |
| "grad_norm": 16.32693066785533, | |
| "learning_rate": 3.9199687582924246e-07, | |
| "loss": 0.582, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 1.4761904761904763, | |
| "grad_norm": 15.3081339931233, | |
| "learning_rate": 3.8745539695123577e-07, | |
| "loss": 0.5865, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 1.4794745484400658, | |
| "grad_norm": 41.26715984492636, | |
| "learning_rate": 3.829340495278639e-07, | |
| "loss": 0.5671, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 1.4827586206896552, | |
| "grad_norm": 14.84205956050775, | |
| "learning_rate": 3.7843298215451046e-07, | |
| "loss": 0.5676, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 1.4860426929392447, | |
| "grad_norm": 28.834583020994884, | |
| "learning_rate": 3.739523427600508e-07, | |
| "loss": 0.5831, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 1.4893267651888342, | |
| "grad_norm": 29.206427309967527, | |
| "learning_rate": 3.6949227860198707e-07, | |
| "loss": 0.5808, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 1.4926108374384237, | |
| "grad_norm": 21.208424115794667, | |
| "learning_rate": 3.6505293626161127e-07, | |
| "loss": 0.5701, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 1.4958949096880132, | |
| "grad_norm": 60.972107883689446, | |
| "learning_rate": 3.6063446163918663e-07, | |
| "loss": 0.5812, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 1.4958949096880132, | |
| "eval_loss": 0.34147635102272034, | |
| "eval_runtime": 253.4458, | |
| "eval_samples_per_second": 8.152, | |
| "eval_steps_per_second": 0.256, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 1.4991789819376027, | |
| "grad_norm": 102.55339346012296, | |
| "learning_rate": 3.5623699994915355e-07, | |
| "loss": 0.5955, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 1.5024630541871922, | |
| "grad_norm": 76.36859833115236, | |
| "learning_rate": 3.518606957153557e-07, | |
| "loss": 0.5682, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 1.5057471264367817, | |
| "grad_norm": 24.75361464219726, | |
| "learning_rate": 3.475056927662912e-07, | |
| "loss": 0.5787, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 1.5090311986863711, | |
| "grad_norm": 27.83323345966019, | |
| "learning_rate": 3.4317213423038384e-07, | |
| "loss": 0.562, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 1.5123152709359606, | |
| "grad_norm": 46.505159610329876, | |
| "learning_rate": 3.3886016253128327e-07, | |
| "loss": 0.5851, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 1.5155993431855501, | |
| "grad_norm": 28.610228426478272, | |
| "learning_rate": 3.345699193831795e-07, | |
| "loss": 0.6063, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 1.5188834154351396, | |
| "grad_norm": 15.024413491910803, | |
| "learning_rate": 3.303015457861478e-07, | |
| "loss": 0.5277, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 1.522167487684729, | |
| "grad_norm": 12.258290893394133, | |
| "learning_rate": 3.2605518202151574e-07, | |
| "loss": 0.5922, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 1.5254515599343186, | |
| "grad_norm": 104.31924222325128, | |
| "learning_rate": 3.2183096764724914e-07, | |
| "loss": 0.5649, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 1.528735632183908, | |
| "grad_norm": 42.86381800387519, | |
| "learning_rate": 3.1762904149336946e-07, | |
| "loss": 0.5743, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 1.5320197044334976, | |
| "grad_norm": 18.167710389726654, | |
| "learning_rate": 3.134495416573883e-07, | |
| "loss": 0.5572, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 1.535303776683087, | |
| "grad_norm": 15.343486359801176, | |
| "learning_rate": 3.092926054997711e-07, | |
| "loss": 0.5719, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 1.5385878489326765, | |
| "grad_norm": 25.820050498948465, | |
| "learning_rate": 3.0515836963942054e-07, | |
| "loss": 0.5785, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 1.541871921182266, | |
| "grad_norm": 47.76654830827422, | |
| "learning_rate": 3.01046969949188e-07, | |
| "loss": 0.571, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 1.5451559934318555, | |
| "grad_norm": 18.685300965930125, | |
| "learning_rate": 2.969585415514064e-07, | |
| "loss": 0.5899, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 1.548440065681445, | |
| "grad_norm": 33.45206718151848, | |
| "learning_rate": 2.9289321881345254e-07, | |
| "loss": 0.5661, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 1.5517241379310345, | |
| "grad_norm": 72.16443965311746, | |
| "learning_rate": 2.8885113534332737e-07, | |
| "loss": 0.5931, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 1.555008210180624, | |
| "grad_norm": 42.035242935885876, | |
| "learning_rate": 2.848324239852672e-07, | |
| "loss": 0.5587, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 1.5582922824302134, | |
| "grad_norm": 45.15146887717905, | |
| "learning_rate": 2.80837216815378e-07, | |
| "loss": 0.5728, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 1.561576354679803, | |
| "grad_norm": 21.81085786153228, | |
| "learning_rate": 2.768656451372919e-07, | |
| "loss": 0.6011, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 1.5648604269293924, | |
| "grad_norm": 14.491067969072652, | |
| "learning_rate": 2.729178394778554e-07, | |
| "loss": 0.5558, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 1.568144499178982, | |
| "grad_norm": 17.912743886068196, | |
| "learning_rate": 2.6899392958283706e-07, | |
| "loss": 0.5631, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 1.5714285714285714, | |
| "grad_norm": 64.4857516790575, | |
| "learning_rate": 2.6509404441266535e-07, | |
| "loss": 0.5841, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 1.5747126436781609, | |
| "grad_norm": 40.36341697674323, | |
| "learning_rate": 2.6121831213818826e-07, | |
| "loss": 0.5328, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 1.5779967159277504, | |
| "grad_norm": 28.64813563751768, | |
| "learning_rate": 2.573668601364623e-07, | |
| "loss": 0.5665, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 1.5812807881773399, | |
| "grad_norm": 20.644096206862233, | |
| "learning_rate": 2.5353981498656505e-07, | |
| "loss": 0.5579, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 1.5845648604269293, | |
| "grad_norm": 42.66803007868702, | |
| "learning_rate": 2.497373024654373e-07, | |
| "loss": 0.5692, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 1.5878489326765188, | |
| "grad_norm": 23.741999865754043, | |
| "learning_rate": 2.459594475437472e-07, | |
| "loss": 0.5731, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 1.5911330049261085, | |
| "grad_norm": 25.83928960154385, | |
| "learning_rate": 2.4220637438178313e-07, | |
| "loss": 0.5895, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 1.5944170771756978, | |
| "grad_norm": 53.46424769894938, | |
| "learning_rate": 2.3847820632537564e-07, | |
| "loss": 0.5807, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 1.5977011494252875, | |
| "grad_norm": 32.58548532914844, | |
| "learning_rate": 2.3477506590183972e-07, | |
| "loss": 0.5525, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 1.6009852216748768, | |
| "grad_norm": 40.67612047783848, | |
| "learning_rate": 2.310970748159511e-07, | |
| "loss": 0.6111, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 1.6042692939244665, | |
| "grad_norm": 31.120710466672246, | |
| "learning_rate": 2.2744435394594497e-07, | |
| "loss": 0.569, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 1.6075533661740558, | |
| "grad_norm": 11.431716307911014, | |
| "learning_rate": 2.2381702333954433e-07, | |
| "loss": 0.5804, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 1.6108374384236455, | |
| "grad_norm": 13.840123564647127, | |
| "learning_rate": 2.2021520221001299e-07, | |
| "loss": 0.57, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 1.6141215106732347, | |
| "grad_norm": 17.928907652041175, | |
| "learning_rate": 2.1663900893223897e-07, | |
| "loss": 0.5922, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 1.6174055829228244, | |
| "grad_norm": 57.74383968458873, | |
| "learning_rate": 2.1308856103884277e-07, | |
| "loss": 0.5681, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 1.6206896551724137, | |
| "grad_norm": 49.41941546684546, | |
| "learning_rate": 2.0956397521631662e-07, | |
| "loss": 0.5555, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 1.6239737274220034, | |
| "grad_norm": 30.887246526627205, | |
| "learning_rate": 2.0606536730118763e-07, | |
| "loss": 0.5666, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 1.6272577996715927, | |
| "grad_norm": 21.706062838691153, | |
| "learning_rate": 2.0259285227621147e-07, | |
| "loss": 0.5633, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 1.6305418719211824, | |
| "grad_norm": 31.266696706985737, | |
| "learning_rate": 1.991465442665937e-07, | |
| "loss": 0.5356, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 1.6338259441707716, | |
| "grad_norm": 29.3689515647996, | |
| "learning_rate": 1.9572655653623882e-07, | |
| "loss": 0.5912, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 1.6371100164203614, | |
| "grad_norm": 22.322656636658127, | |
| "learning_rate": 1.9233300148402764e-07, | |
| "loss": 0.5595, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 1.6403940886699506, | |
| "grad_norm": 44.171884336035255, | |
| "learning_rate": 1.8896599064012298e-07, | |
| "loss": 0.5643, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 1.6436781609195403, | |
| "grad_norm": 17.948977196325135, | |
| "learning_rate": 1.8562563466230575e-07, | |
| "loss": 0.5634, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 1.6469622331691296, | |
| "grad_norm": 11.97820725055819, | |
| "learning_rate": 1.8231204333233607e-07, | |
| "loss": 0.5598, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 1.6502463054187193, | |
| "grad_norm": 10.612778968035474, | |
| "learning_rate": 1.790253255523465e-07, | |
| "loss": 0.5803, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 1.6535303776683086, | |
| "grad_norm": 18.193226588472413, | |
| "learning_rate": 1.7576558934126217e-07, | |
| "loss": 0.5967, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 1.6568144499178983, | |
| "grad_norm": 12.00030723839093, | |
| "learning_rate": 1.7253294183125222e-07, | |
| "loss": 0.5796, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 1.6600985221674875, | |
| "grad_norm": 61.889660413482744, | |
| "learning_rate": 1.6932748926420693e-07, | |
| "loss": 0.6197, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 1.6633825944170773, | |
| "grad_norm": 14.289243326025748, | |
| "learning_rate": 1.6614933698824728e-07, | |
| "loss": 0.5715, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 1.6666666666666665, | |
| "grad_norm": 83.1922121560888, | |
| "learning_rate": 1.6299858945426248e-07, | |
| "loss": 0.6025, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 1.6699507389162562, | |
| "grad_norm": 21.289795508971476, | |
| "learning_rate": 1.5987535021247667e-07, | |
| "loss": 0.5613, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 1.6732348111658455, | |
| "grad_norm": 9.12840521329095, | |
| "learning_rate": 1.5677972190904621e-07, | |
| "loss": 0.5611, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 1.6765188834154352, | |
| "grad_norm": 80.54244783118818, | |
| "learning_rate": 1.5371180628268587e-07, | |
| "loss": 0.5773, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 1.6798029556650245, | |
| "grad_norm": 18.09168094972728, | |
| "learning_rate": 1.5067170416132603e-07, | |
| "loss": 0.5632, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 1.6830870279146142, | |
| "grad_norm": 76.91613553570883, | |
| "learning_rate": 1.476595154587973e-07, | |
| "loss": 0.5859, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 1.6863711001642037, | |
| "grad_norm": 13.09746011495923, | |
| "learning_rate": 1.446753391715484e-07, | |
| "loss": 0.5555, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 1.6896551724137931, | |
| "grad_norm": 26.40133497602893, | |
| "learning_rate": 1.4171927337539104e-07, | |
| "loss": 0.5222, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 1.6929392446633826, | |
| "grad_norm": 42.003291635263835, | |
| "learning_rate": 1.3879141522227877e-07, | |
| "loss": 0.5857, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 1.6962233169129721, | |
| "grad_norm": 15.138211036731974, | |
| "learning_rate": 1.3589186093711223e-07, | |
| "loss": 0.576, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 1.6995073891625616, | |
| "grad_norm": 15.749783656567669, | |
| "learning_rate": 1.3302070581457713e-07, | |
| "loss": 0.5616, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 1.702791461412151, | |
| "grad_norm": 19.998784663850575, | |
| "learning_rate": 1.3017804421601298e-07, | |
| "loss": 0.6002, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 1.7060755336617406, | |
| "grad_norm": 12.84175073867242, | |
| "learning_rate": 1.273639695663108e-07, | |
| "loss": 0.5693, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 1.70935960591133, | |
| "grad_norm": 48.66516503978152, | |
| "learning_rate": 1.2457857435084407e-07, | |
| "loss": 0.6153, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 1.7126436781609196, | |
| "grad_norm": 16.696763387794824, | |
| "learning_rate": 1.2182195011242747e-07, | |
| "loss": 0.5613, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 1.715927750410509, | |
| "grad_norm": 31.172316224112276, | |
| "learning_rate": 1.1909418744831046e-07, | |
| "loss": 0.605, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 1.7192118226600985, | |
| "grad_norm": 19.082497779204967, | |
| "learning_rate": 1.1639537600719761e-07, | |
| "loss": 0.5805, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 1.722495894909688, | |
| "grad_norm": 15.143716837248418, | |
| "learning_rate": 1.1372560448630375e-07, | |
| "loss": 0.608, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 1.7257799671592775, | |
| "grad_norm": 37.76170562223439, | |
| "learning_rate": 1.1108496062843741e-07, | |
| "loss": 0.5838, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 1.729064039408867, | |
| "grad_norm": 14.565254764998345, | |
| "learning_rate": 1.0847353121911951e-07, | |
| "loss": 0.5539, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 1.7323481116584565, | |
| "grad_norm": 32.47683113493387, | |
| "learning_rate": 1.0589140208372871e-07, | |
| "loss": 0.5575, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 1.735632183908046, | |
| "grad_norm": 23.18347373384476, | |
| "learning_rate": 1.0333865808468201e-07, | |
| "loss": 0.5566, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 1.7389162561576355, | |
| "grad_norm": 22.4422071324967, | |
| "learning_rate": 1.0081538311864568e-07, | |
| "loss": 0.576, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 1.742200328407225, | |
| "grad_norm": 14.059292999171634, | |
| "learning_rate": 9.83216601137773e-08, | |
| "loss": 0.5677, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 1.7454844006568144, | |
| "grad_norm": 22.050768611070254, | |
| "learning_rate": 9.58575710270011e-08, | |
| "loss": 0.5676, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 1.7454844006568144, | |
| "eval_loss": 0.34110337495803833, | |
| "eval_runtime": 255.1137, | |
| "eval_samples_per_second": 8.098, | |
| "eval_steps_per_second": 0.255, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 1.748768472906404, | |
| "grad_norm": 28.327968167510832, | |
| "learning_rate": 9.342319684131395e-08, | |
| "loss": 0.553, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 1.7520525451559934, | |
| "grad_norm": 23.765559507553224, | |
| "learning_rate": 9.101861756312368e-08, | |
| "loss": 0.5751, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 1.7553366174055829, | |
| "grad_norm": 38.32793241416281, | |
| "learning_rate": 8.864391221962064e-08, | |
| "loss": 0.5721, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 1.7586206896551724, | |
| "grad_norm": 23.1435189905778, | |
| "learning_rate": 8.62991588561791e-08, | |
| "loss": 0.5372, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 1.7619047619047619, | |
| "grad_norm": 64.41410656507954, | |
| "learning_rate": 8.398443453379266e-08, | |
| "loss": 0.597, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 1.7651888341543513, | |
| "grad_norm": 26.570215615082457, | |
| "learning_rate": 8.169981532654269e-08, | |
| "loss": 0.5425, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 1.7684729064039408, | |
| "grad_norm": 19.853728565908686, | |
| "learning_rate": 7.944537631909664e-08, | |
| "loss": 0.5781, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 1.7717569786535303, | |
| "grad_norm": 40.09614615765141, | |
| "learning_rate": 7.722119160424112e-08, | |
| "loss": 0.5738, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 1.7750410509031198, | |
| "grad_norm": 56.37553140710831, | |
| "learning_rate": 7.502733428044683e-08, | |
| "loss": 0.5788, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 1.7783251231527095, | |
| "grad_norm": 25.79428871095927, | |
| "learning_rate": 7.286387644946601e-08, | |
| "loss": 0.5614, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 1.7816091954022988, | |
| "grad_norm": 37.57206986640528, | |
| "learning_rate": 7.073088921396286e-08, | |
| "loss": 0.6319, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 1.7848932676518885, | |
| "grad_norm": 78.07770173709457, | |
| "learning_rate": 6.862844267517642e-08, | |
| "loss": 0.5585, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 1.7881773399014778, | |
| "grad_norm": 45.27512632946113, | |
| "learning_rate": 6.655660593061718e-08, | |
| "loss": 0.5835, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 1.7914614121510675, | |
| "grad_norm": 43.444015827492656, | |
| "learning_rate": 6.451544707179635e-08, | |
| "loss": 0.5839, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 1.7947454844006567, | |
| "grad_norm": 30.129718093780813, | |
| "learning_rate": 6.250503318198663e-08, | |
| "loss": 0.5922, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 1.7980295566502464, | |
| "grad_norm": 12.289775538527161, | |
| "learning_rate": 6.052543033401891e-08, | |
| "loss": 0.5659, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 1.8013136288998357, | |
| "grad_norm": 22.117467884787505, | |
| "learning_rate": 5.8576703588110953e-08, | |
| "loss": 0.5833, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 1.8045977011494254, | |
| "grad_norm": 18.824028673171902, | |
| "learning_rate": 5.665891698972769e-08, | |
| "loss": 0.5515, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 1.8078817733990147, | |
| "grad_norm": 15.156245839885349, | |
| "learning_rate": 5.4772133567477454e-08, | |
| "loss": 0.58, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 1.8111658456486044, | |
| "grad_norm": 17.302260811016588, | |
| "learning_rate": 5.291641533104052e-08, | |
| "loss": 0.5443, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 1.8144499178981937, | |
| "grad_norm": 72.23650640208795, | |
| "learning_rate": 5.109182326913053e-08, | |
| "loss": 0.5913, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 1.8177339901477834, | |
| "grad_norm": 41.839452624673, | |
| "learning_rate": 4.929841734749063e-08, | |
| "loss": 0.5826, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 1.8210180623973726, | |
| "grad_norm": 150.9975193542559, | |
| "learning_rate": 4.75362565069225e-08, | |
| "loss": 0.5672, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 1.8243021346469623, | |
| "grad_norm": 177.35572727759407, | |
| "learning_rate": 4.580539866134914e-08, | |
| "loss": 0.5962, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 1.8275862068965516, | |
| "grad_norm": 11.471174551449721, | |
| "learning_rate": 4.410590069591191e-08, | |
| "loss": 0.5568, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 1.8308702791461413, | |
| "grad_norm": 41.93391866191071, | |
| "learning_rate": 4.2437818465100306e-08, | |
| "loss": 0.5872, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 1.8341543513957306, | |
| "grad_norm": 49.54043761767362, | |
| "learning_rate": 4.080120679091681e-08, | |
| "loss": 0.5782, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 1.8374384236453203, | |
| "grad_norm": 43.81540298910706, | |
| "learning_rate": 3.919611946107493e-08, | |
| "loss": 0.5726, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 1.8407224958949095, | |
| "grad_norm": 23.38299802172784, | |
| "learning_rate": 3.762260922723182e-08, | |
| "loss": 0.5658, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 1.8440065681444993, | |
| "grad_norm": 24.53118761357852, | |
| "learning_rate": 3.6080727803253996e-08, | |
| "loss": 0.5715, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 1.8472906403940885, | |
| "grad_norm": 19.417907974207314, | |
| "learning_rate": 3.4570525863518164e-08, | |
| "loss": 0.591, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 1.8505747126436782, | |
| "grad_norm": 10.93698504246622, | |
| "learning_rate": 3.309205304124552e-08, | |
| "loss": 0.598, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 1.8538587848932675, | |
| "grad_norm": 14.732319976335372, | |
| "learning_rate": 3.164535792687095e-08, | |
| "loss": 0.5739, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 1.8571428571428572, | |
| "grad_norm": 17.119911911692302, | |
| "learning_rate": 3.0230488066445457e-08, | |
| "loss": 0.5932, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 1.8604269293924465, | |
| "grad_norm": 18.02807205348044, | |
| "learning_rate": 2.8847489960074133e-08, | |
| "loss": 0.5698, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 1.8637110016420362, | |
| "grad_norm": 42.32940645169373, | |
| "learning_rate": 2.749640906038797e-08, | |
| "loss": 0.5845, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 1.8669950738916257, | |
| "grad_norm": 21.291255161442834, | |
| "learning_rate": 2.617728977104927e-08, | |
| "loss": 0.5767, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 1.8702791461412152, | |
| "grad_norm": 126.41354720715246, | |
| "learning_rate": 2.4890175445293147e-08, | |
| "loss": 0.548, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 1.8735632183908046, | |
| "grad_norm": 89.21090716126868, | |
| "learning_rate": 2.3635108384502e-08, | |
| "loss": 0.5526, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 1.8768472906403941, | |
| "grad_norm": 28.90642350240842, | |
| "learning_rate": 2.2412129836816285e-08, | |
| "loss": 0.5923, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 1.8801313628899836, | |
| "grad_norm": 33.649895492267454, | |
| "learning_rate": 2.122127999577783e-08, | |
| "loss": 0.5915, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 1.883415435139573, | |
| "grad_norm": 42.351338285848115, | |
| "learning_rate": 2.0062597999009114e-08, | |
| "loss": 0.5692, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 1.8866995073891626, | |
| "grad_norm": 21.17999827332917, | |
| "learning_rate": 1.8936121926927507e-08, | |
| "loss": 0.5528, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 1.889983579638752, | |
| "grad_norm": 15.439339266974041, | |
| "learning_rate": 1.7841888801493176e-08, | |
| "loss": 0.5694, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 1.8932676518883416, | |
| "grad_norm": 13.470779589141687, | |
| "learning_rate": 1.6779934584992718e-08, | |
| "loss": 0.5879, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 1.896551724137931, | |
| "grad_norm": 27.077399389940133, | |
| "learning_rate": 1.575029417885687e-08, | |
| "loss": 0.5641, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 1.8998357963875205, | |
| "grad_norm": 19.647395918482665, | |
| "learning_rate": 1.4753001422514121e-08, | |
| "loss": 0.5752, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 1.90311986863711, | |
| "grad_norm": 33.07929236692647, | |
| "learning_rate": 1.3788089092277688e-08, | |
| "loss": 0.5829, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 1.9064039408866995, | |
| "grad_norm": 80.07673176809891, | |
| "learning_rate": 1.2855588900269054e-08, | |
| "loss": 0.5497, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 1.909688013136289, | |
| "grad_norm": 19.376860505012647, | |
| "learning_rate": 1.1955531493375138e-08, | |
| "loss": 0.6026, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 1.9129720853858785, | |
| "grad_norm": 35.47816644500678, | |
| "learning_rate": 1.108794645224187e-08, | |
| "loss": 0.5666, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 1.916256157635468, | |
| "grad_norm": 115.1418329735163, | |
| "learning_rate": 1.0252862290301089e-08, | |
| "loss": 0.5753, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 1.9195402298850575, | |
| "grad_norm": 30.510225961444675, | |
| "learning_rate": 9.450306452834178e-09, | |
| "loss": 0.5848, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 1.922824302134647, | |
| "grad_norm": 46.46127331757118, | |
| "learning_rate": 8.68030531606967e-09, | |
| "loss": 0.5815, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 1.9261083743842364, | |
| "grad_norm": 21.7313503869492, | |
| "learning_rate": 7.94288418631639e-09, | |
| "loss": 0.5717, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 1.929392446633826, | |
| "grad_norm": 127.17771447820743, | |
| "learning_rate": 7.2380672991319e-09, | |
| "loss": 0.571, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 1.9326765188834154, | |
| "grad_norm": 31.316585311430142, | |
| "learning_rate": 6.565877818526244e-09, | |
| "loss": 0.5453, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 1.935960591133005, | |
| "grad_norm": 51.54565942906159, | |
| "learning_rate": 5.926337836199891e-09, | |
| "loss": 0.5669, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 1.9392446633825944, | |
| "grad_norm": 17.804113285989903, | |
| "learning_rate": 5.319468370818536e-09, | |
| "loss": 0.601, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 1.9425287356321839, | |
| "grad_norm": 68.75929656403164, | |
| "learning_rate": 4.745289367321658e-09, | |
| "loss": 0.5976, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 1.9458128078817734, | |
| "grad_norm": 11.91134893164374, | |
| "learning_rate": 4.203819696267485e-09, | |
| "loss": 0.5482, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 1.9490968801313628, | |
| "grad_norm": 17.418945183974255, | |
| "learning_rate": 3.6950771532126003e-09, | |
| "loss": 0.5732, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 1.9523809523809523, | |
| "grad_norm": 12.085404257651767, | |
| "learning_rate": 3.219078458127078e-09, | |
| "loss": 0.5876, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 1.9556650246305418, | |
| "grad_norm": 28.855096188397088, | |
| "learning_rate": 2.775839254844925e-09, | |
| "loss": 0.5689, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 1.9589490968801315, | |
| "grad_norm": 40.37266990923965, | |
| "learning_rate": 2.3653741105499336e-09, | |
| "loss": 0.5541, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 1.9622331691297208, | |
| "grad_norm": 55.51188079956238, | |
| "learning_rate": 1.98769651529751e-09, | |
| "loss": 0.5642, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 1.9655172413793105, | |
| "grad_norm": 34.1692976262017, | |
| "learning_rate": 1.6428188815703626e-09, | |
| "loss": 0.5595, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 1.9688013136288998, | |
| "grad_norm": 14.947354957753587, | |
| "learning_rate": 1.330752543871161e-09, | |
| "loss": 0.6005, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 1.9720853858784895, | |
| "grad_norm": 45.62738122621848, | |
| "learning_rate": 1.0515077583498344e-09, | |
| "loss": 0.5665, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 1.9753694581280787, | |
| "grad_norm": 21.725111315877975, | |
| "learning_rate": 8.050937024666193e-10, | |
| "loss": 0.5893, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 1.9786535303776684, | |
| "grad_norm": 35.375136168343055, | |
| "learning_rate": 5.915184746904112e-10, | |
| "loss": 0.5841, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 1.9819376026272577, | |
| "grad_norm": 18.604441345264426, | |
| "learning_rate": 4.107890942325332e-10, | |
| "loss": 0.5554, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 1.9852216748768474, | |
| "grad_norm": 119.72001100708987, | |
| "learning_rate": 2.6291150081603207e-10, | |
| "loss": 0.5614, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 1.9885057471264367, | |
| "grad_norm": 16.41601120115481, | |
| "learning_rate": 1.4789055448061193e-10, | |
| "loss": 0.5533, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 1.9917898193760264, | |
| "grad_norm": 124.04621529731233, | |
| "learning_rate": 6.57300354227619e-11, | |
| "loss": 0.5695, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 1.9950738916256157, | |
| "grad_norm": 41.47359974511061, | |
| "learning_rate": 1.6432643871633346e-11, | |
| "loss": 0.5409, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 1.9950738916256157, | |
| "eval_loss": 0.34100207686424255, | |
| "eval_runtime": 255.0349, | |
| "eval_samples_per_second": 8.101, | |
| "eval_steps_per_second": 0.255, | |
| "step": 608 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 608, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 2, | |
| "save_steps": 152, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 5.488667559261635e+18, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |