diff --git "a/checkpoint-2000/trainer_state.json" "b/checkpoint-2000/trainer_state.json" deleted file mode 100644--- "a/checkpoint-2000/trainer_state.json" +++ /dev/null @@ -1,14034 +0,0 @@ -{ - "best_global_step": null, - "best_metric": null, - "best_model_checkpoint": null, - "epoch": 0.5310674455655868, - "eval_steps": 500, - "global_step": 2000, - "is_hyper_param_search": false, - "is_local_process_zero": true, - "is_world_process_zero": true, - "log_history": [ - { - "epoch": 0.0002655337227827934, - "grad_norm": 26.09913849342828, - "learning_rate": 0.0, - "loss": 3.2031, - "step": 1 - }, - { - "epoch": 0.0005310674455655868, - "grad_norm": 33.24418612316917, - "learning_rate": 2.6525198938992043e-08, - "loss": 3.4062, - "step": 2 - }, - { - "epoch": 0.0007966011683483802, - "grad_norm": 22.666174254080957, - "learning_rate": 5.3050397877984086e-08, - "loss": 3.5469, - "step": 3 - }, - { - "epoch": 0.0010621348911311736, - "grad_norm": 47.54041711728203, - "learning_rate": 7.957559681697613e-08, - "loss": 3.3906, - "step": 4 - }, - { - "epoch": 0.0013276686139139671, - "grad_norm": 28.74129018702557, - "learning_rate": 1.0610079575596817e-07, - "loss": 3.7344, - "step": 5 - }, - { - "epoch": 0.0015932023366967605, - "grad_norm": 34.823794266535295, - "learning_rate": 1.3262599469496022e-07, - "loss": 4.875, - "step": 6 - }, - { - "epoch": 0.0018587360594795538, - "grad_norm": 33.17874143897226, - "learning_rate": 1.5915119363395226e-07, - "loss": 4.8125, - "step": 7 - }, - { - "epoch": 0.002124269782262347, - "grad_norm": 22.916328083623917, - "learning_rate": 1.856763925729443e-07, - "loss": 3.5625, - "step": 8 - }, - { - "epoch": 0.002389803505045141, - "grad_norm": 22.722952034819503, - "learning_rate": 2.1220159151193635e-07, - "loss": 2.8281, - "step": 9 - }, - { - "epoch": 0.0026553372278279343, - "grad_norm": 71.13233477133232, - "learning_rate": 2.387267904509284e-07, - "loss": 4.2812, - "step": 10 - }, - { - "epoch": 0.0029208709506107276, - "grad_norm": 22.509369620011583, - "learning_rate": 2.6525198938992043e-07, - "loss": 2.6562, - "step": 11 - }, - { - "epoch": 0.003186404673393521, - "grad_norm": 36.02796517304586, - "learning_rate": 2.917771883289125e-07, - "loss": 3.9375, - "step": 12 - }, - { - "epoch": 0.0034519383961763143, - "grad_norm": 35.53736888706161, - "learning_rate": 3.183023872679045e-07, - "loss": 4.375, - "step": 13 - }, - { - "epoch": 0.0037174721189591076, - "grad_norm": 21.726341733924155, - "learning_rate": 3.4482758620689656e-07, - "loss": 2.8594, - "step": 14 - }, - { - "epoch": 0.003983005841741901, - "grad_norm": 26.02926274160067, - "learning_rate": 3.713527851458886e-07, - "loss": 3.3125, - "step": 15 - }, - { - "epoch": 0.004248539564524694, - "grad_norm": 33.549456836516455, - "learning_rate": 3.9787798408488065e-07, - "loss": 4.5, - "step": 16 - }, - { - "epoch": 0.004514073287307488, - "grad_norm": 5704.608112490134, - "learning_rate": 4.244031830238727e-07, - "loss": 2.6875, - "step": 17 - }, - { - "epoch": 0.004779607010090282, - "grad_norm": 46.28223578012096, - "learning_rate": 4.5092838196286473e-07, - "loss": 3.3906, - "step": 18 - }, - { - "epoch": 0.005045140732873075, - "grad_norm": 21.51924983534021, - "learning_rate": 4.774535809018568e-07, - "loss": 4.0938, - "step": 19 - }, - { - "epoch": 0.0053106744556558685, - "grad_norm": 27.91289735068091, - "learning_rate": 5.039787798408489e-07, - "loss": 3.4062, - "step": 20 - }, - { - "epoch": 0.0055762081784386614, - "grad_norm": 38.86707334268686, - "learning_rate": 
5.305039787798409e-07, - "loss": 4.5938, - "step": 21 - }, - { - "epoch": 0.005841741901221455, - "grad_norm": 46.02296280217304, - "learning_rate": 5.570291777188329e-07, - "loss": 4.5312, - "step": 22 - }, - { - "epoch": 0.006107275624004248, - "grad_norm": 29.064306674372673, - "learning_rate": 5.83554376657825e-07, - "loss": 3.7031, - "step": 23 - }, - { - "epoch": 0.006372809346787042, - "grad_norm": 22.0053607091662, - "learning_rate": 6.10079575596817e-07, - "loss": 4.0938, - "step": 24 - }, - { - "epoch": 0.006638343069569836, - "grad_norm": 25.74196411791827, - "learning_rate": 6.36604774535809e-07, - "loss": 3.1875, - "step": 25 - }, - { - "epoch": 0.006903876792352629, - "grad_norm": 28.41314685895795, - "learning_rate": 6.631299734748012e-07, - "loss": 3.5938, - "step": 26 - }, - { - "epoch": 0.007169410515135422, - "grad_norm": 24.75234923466472, - "learning_rate": 6.896551724137931e-07, - "loss": 2.6875, - "step": 27 - }, - { - "epoch": 0.007434944237918215, - "grad_norm": 30.66371803716353, - "learning_rate": 7.161803713527853e-07, - "loss": 4.0312, - "step": 28 - }, - { - "epoch": 0.007700477960701009, - "grad_norm": 29.00163943943682, - "learning_rate": 7.427055702917772e-07, - "loss": 3.3594, - "step": 29 - }, - { - "epoch": 0.007966011683483803, - "grad_norm": 25.77157343246292, - "learning_rate": 7.692307692307694e-07, - "loss": 3.1875, - "step": 30 - }, - { - "epoch": 0.008231545406266597, - "grad_norm": 22.334502595274675, - "learning_rate": 7.957559681697613e-07, - "loss": 4.7188, - "step": 31 - }, - { - "epoch": 0.008497079129049389, - "grad_norm": 32.603443673328215, - "learning_rate": 8.222811671087534e-07, - "loss": 3.8906, - "step": 32 - }, - { - "epoch": 0.008762612851832182, - "grad_norm": 24.08930675302932, - "learning_rate": 8.488063660477454e-07, - "loss": 2.8906, - "step": 33 - }, - { - "epoch": 0.009028146574614976, - "grad_norm": 32.39366263050705, - "learning_rate": 8.753315649867375e-07, - "loss": 4.125, - "step": 34 - }, - { - "epoch": 0.00929368029739777, - "grad_norm": 29.971764947175103, - "learning_rate": 9.018567639257295e-07, - "loss": 3.2344, - "step": 35 - }, - { - "epoch": 0.009559214020180564, - "grad_norm": 33.19925342837082, - "learning_rate": 9.283819628647216e-07, - "loss": 4.25, - "step": 36 - }, - { - "epoch": 0.009824747742963356, - "grad_norm": 24.097206276546917, - "learning_rate": 9.549071618037136e-07, - "loss": 2.7031, - "step": 37 - }, - { - "epoch": 0.01009028146574615, - "grad_norm": 25.033091440150386, - "learning_rate": 9.814323607427057e-07, - "loss": 3.5469, - "step": 38 - }, - { - "epoch": 0.010355815188528943, - "grad_norm": 19.83584675026491, - "learning_rate": 1.0079575596816979e-06, - "loss": 3.8125, - "step": 39 - }, - { - "epoch": 0.010621348911311737, - "grad_norm": 22.843498610408417, - "learning_rate": 1.0344827586206898e-06, - "loss": 4.0, - "step": 40 - }, - { - "epoch": 0.01088688263409453, - "grad_norm": 27.23163955174518, - "learning_rate": 1.0610079575596817e-06, - "loss": 4.2188, - "step": 41 - }, - { - "epoch": 0.011152416356877323, - "grad_norm": 31.562228731897743, - "learning_rate": 1.0875331564986739e-06, - "loss": 4.375, - "step": 42 - }, - { - "epoch": 0.011417950079660117, - "grad_norm": 22.949386153635178, - "learning_rate": 1.1140583554376658e-06, - "loss": 3.5156, - "step": 43 - }, - { - "epoch": 0.01168348380244291, - "grad_norm": 88.31997625312175, - "learning_rate": 1.140583554376658e-06, - "loss": 2.7812, - "step": 44 - }, - { - "epoch": 0.011949017525225704, - "grad_norm": 
14.729164979113321, - "learning_rate": 1.16710875331565e-06, - "loss": 2.2344, - "step": 45 - }, - { - "epoch": 0.012214551248008496, - "grad_norm": 26.318599853460583, - "learning_rate": 1.193633952254642e-06, - "loss": 3.8125, - "step": 46 - }, - { - "epoch": 0.01248008497079129, - "grad_norm": 36.435091476888246, - "learning_rate": 1.220159151193634e-06, - "loss": 3.5156, - "step": 47 - }, - { - "epoch": 0.012745618693574084, - "grad_norm": 21.353387655655027, - "learning_rate": 1.2466843501326261e-06, - "loss": 2.4219, - "step": 48 - }, - { - "epoch": 0.013011152416356878, - "grad_norm": 36.608824688888205, - "learning_rate": 1.273209549071618e-06, - "loss": 3.6562, - "step": 49 - }, - { - "epoch": 0.013276686139139671, - "grad_norm": 32.7469992104952, - "learning_rate": 1.29973474801061e-06, - "loss": 4.2812, - "step": 50 - }, - { - "epoch": 0.013542219861922463, - "grad_norm": 527.1528527718093, - "learning_rate": 1.3262599469496024e-06, - "loss": 2.4375, - "step": 51 - }, - { - "epoch": 0.013807753584705257, - "grad_norm": 27.990609419848134, - "learning_rate": 1.3527851458885943e-06, - "loss": 2.3906, - "step": 52 - }, - { - "epoch": 0.014073287307488051, - "grad_norm": 14.694389603614836, - "learning_rate": 1.3793103448275862e-06, - "loss": 3.2344, - "step": 53 - }, - { - "epoch": 0.014338821030270845, - "grad_norm": 20.735659157221477, - "learning_rate": 1.4058355437665782e-06, - "loss": 2.8281, - "step": 54 - }, - { - "epoch": 0.014604354753053638, - "grad_norm": 31.138348407935453, - "learning_rate": 1.4323607427055705e-06, - "loss": 3.8906, - "step": 55 - }, - { - "epoch": 0.01486988847583643, - "grad_norm": 29.62867209466913, - "learning_rate": 1.4588859416445625e-06, - "loss": 3.6875, - "step": 56 - }, - { - "epoch": 0.015135422198619224, - "grad_norm": 14.03880122581976, - "learning_rate": 1.4854111405835544e-06, - "loss": 2.1406, - "step": 57 - }, - { - "epoch": 0.015400955921402018, - "grad_norm": 33.23697742979803, - "learning_rate": 1.5119363395225464e-06, - "loss": 3.5, - "step": 58 - }, - { - "epoch": 0.015666489644184812, - "grad_norm": 29.453655249544546, - "learning_rate": 1.5384615384615387e-06, - "loss": 2.5312, - "step": 59 - }, - { - "epoch": 0.015932023366967606, - "grad_norm": 27.29098711329942, - "learning_rate": 1.5649867374005307e-06, - "loss": 2.6406, - "step": 60 - }, - { - "epoch": 0.0161975570897504, - "grad_norm": 28.33472292785293, - "learning_rate": 1.5915119363395226e-06, - "loss": 2.5, - "step": 61 - }, - { - "epoch": 0.016463090812533193, - "grad_norm": 16.521064455634754, - "learning_rate": 1.618037135278515e-06, - "loss": 2.4219, - "step": 62 - }, - { - "epoch": 0.016728624535315983, - "grad_norm": 13.384541738840948, - "learning_rate": 1.6445623342175069e-06, - "loss": 1.8672, - "step": 63 - }, - { - "epoch": 0.016994158258098777, - "grad_norm": 19.701703431707948, - "learning_rate": 1.6710875331564988e-06, - "loss": 2.4219, - "step": 64 - }, - { - "epoch": 0.01725969198088157, - "grad_norm": 32.143288806734, - "learning_rate": 1.6976127320954908e-06, - "loss": 2.6094, - "step": 65 - }, - { - "epoch": 0.017525225703664365, - "grad_norm": 30.63700955458724, - "learning_rate": 1.724137931034483e-06, - "loss": 2.8125, - "step": 66 - }, - { - "epoch": 0.01779075942644716, - "grad_norm": 18.61018137166964, - "learning_rate": 1.750663129973475e-06, - "loss": 2.125, - "step": 67 - }, - { - "epoch": 0.018056293149229952, - "grad_norm": 14.421577541639564, - "learning_rate": 1.777188328912467e-06, - "loss": 1.7578, - "step": 68 - }, - { - "epoch": 
0.018321826872012746, - "grad_norm": 29.39406132029808, - "learning_rate": 1.803713527851459e-06, - "loss": 2.4375, - "step": 69 - }, - { - "epoch": 0.01858736059479554, - "grad_norm": 21.159681548926745, - "learning_rate": 1.830238726790451e-06, - "loss": 2.0938, - "step": 70 - }, - { - "epoch": 0.018852894317578334, - "grad_norm": 27.539564746628287, - "learning_rate": 1.8567639257294432e-06, - "loss": 3.0156, - "step": 71 - }, - { - "epoch": 0.019118428040361127, - "grad_norm": 24.190559734623005, - "learning_rate": 1.8832891246684352e-06, - "loss": 3.0, - "step": 72 - }, - { - "epoch": 0.019383961763143918, - "grad_norm": 26.96417513483311, - "learning_rate": 1.909814323607427e-06, - "loss": 3.0156, - "step": 73 - }, - { - "epoch": 0.01964949548592671, - "grad_norm": 24.89585126504584, - "learning_rate": 1.9363395225464193e-06, - "loss": 2.2344, - "step": 74 - }, - { - "epoch": 0.019915029208709505, - "grad_norm": 56.411148213900134, - "learning_rate": 1.9628647214854114e-06, - "loss": 2.1406, - "step": 75 - }, - { - "epoch": 0.0201805629314923, - "grad_norm": 26.867726567521345, - "learning_rate": 1.989389920424403e-06, - "loss": 1.5469, - "step": 76 - }, - { - "epoch": 0.020446096654275093, - "grad_norm": 25.736893839761308, - "learning_rate": 2.0159151193633957e-06, - "loss": 2.5312, - "step": 77 - }, - { - "epoch": 0.020711630377057887, - "grad_norm": 29.10383691197578, - "learning_rate": 2.0424403183023874e-06, - "loss": 2.0312, - "step": 78 - }, - { - "epoch": 0.02097716409984068, - "grad_norm": 16.34789260778447, - "learning_rate": 2.0689655172413796e-06, - "loss": 2.6719, - "step": 79 - }, - { - "epoch": 0.021242697822623474, - "grad_norm": 17.443495153034082, - "learning_rate": 2.0954907161803713e-06, - "loss": 0.7266, - "step": 80 - }, - { - "epoch": 0.021508231545406268, - "grad_norm": 25.598705341280716, - "learning_rate": 2.1220159151193635e-06, - "loss": 1.2109, - "step": 81 - }, - { - "epoch": 0.02177376526818906, - "grad_norm": 143.5401268253218, - "learning_rate": 2.1485411140583556e-06, - "loss": 1.6016, - "step": 82 - }, - { - "epoch": 0.022039298990971852, - "grad_norm": 26.389545474475735, - "learning_rate": 2.1750663129973478e-06, - "loss": 1.8438, - "step": 83 - }, - { - "epoch": 0.022304832713754646, - "grad_norm": 23.96871143922354, - "learning_rate": 2.2015915119363395e-06, - "loss": 0.9219, - "step": 84 - }, - { - "epoch": 0.02257036643653744, - "grad_norm": 38.19763740874266, - "learning_rate": 2.2281167108753316e-06, - "loss": 0.8359, - "step": 85 - }, - { - "epoch": 0.022835900159320233, - "grad_norm": 29.364774056584984, - "learning_rate": 2.2546419098143238e-06, - "loss": 0.6602, - "step": 86 - }, - { - "epoch": 0.023101433882103027, - "grad_norm": 26.845223667722713, - "learning_rate": 2.281167108753316e-06, - "loss": 0.8086, - "step": 87 - }, - { - "epoch": 0.02336696760488582, - "grad_norm": 28.831828873775432, - "learning_rate": 2.307692307692308e-06, - "loss": 1.4062, - "step": 88 - }, - { - "epoch": 0.023632501327668615, - "grad_norm": 24.13587932409368, - "learning_rate": 2.3342175066313e-06, - "loss": 1.3828, - "step": 89 - }, - { - "epoch": 0.02389803505045141, - "grad_norm": 24.636469804747414, - "learning_rate": 2.360742705570292e-06, - "loss": 1.0547, - "step": 90 - }, - { - "epoch": 0.024163568773234202, - "grad_norm": 10.515597346677467, - "learning_rate": 2.387267904509284e-06, - "loss": 0.0923, - "step": 91 - }, - { - "epoch": 0.024429102496016993, - "grad_norm": 22.66977819594809, - "learning_rate": 2.4137931034482762e-06, - "loss": 
0.2832, - "step": 92 - }, - { - "epoch": 0.024694636218799786, - "grad_norm": 4.5274633525739185, - "learning_rate": 2.440318302387268e-06, - "loss": 0.0259, - "step": 93 - }, - { - "epoch": 0.02496016994158258, - "grad_norm": 9.36517984448498, - "learning_rate": 2.46684350132626e-06, - "loss": 0.0442, - "step": 94 - }, - { - "epoch": 0.025225703664365374, - "grad_norm": 11.897696771384094, - "learning_rate": 2.4933687002652523e-06, - "loss": 0.1138, - "step": 95 - }, - { - "epoch": 0.025491237387148168, - "grad_norm": 32.70173331678797, - "learning_rate": 2.5198938992042444e-06, - "loss": 6.4688, - "step": 96 - }, - { - "epoch": 0.02575677110993096, - "grad_norm": 0.031168975385739393, - "learning_rate": 2.546419098143236e-06, - "loss": 0.0003, - "step": 97 - }, - { - "epoch": 0.026022304832713755, - "grad_norm": 0.7313576810659675, - "learning_rate": 2.5729442970822283e-06, - "loss": 0.0075, - "step": 98 - }, - { - "epoch": 0.02628783855549655, - "grad_norm": 0.5685930769193894, - "learning_rate": 2.59946949602122e-06, - "loss": 0.0045, - "step": 99 - }, - { - "epoch": 0.026553372278279343, - "grad_norm": 4.772903853691814, - "learning_rate": 2.625994694960212e-06, - "loss": 0.2275, - "step": 100 - }, - { - "epoch": 0.026818906001062136, - "grad_norm": 5.102874825538071, - "learning_rate": 2.6525198938992047e-06, - "loss": 0.5703, - "step": 101 - }, - { - "epoch": 0.027084439723844927, - "grad_norm": 0.27402642551717465, - "learning_rate": 2.679045092838197e-06, - "loss": 0.0028, - "step": 102 - }, - { - "epoch": 0.02734997344662772, - "grad_norm": 4.6324372685319695, - "learning_rate": 2.7055702917771886e-06, - "loss": 0.1367, - "step": 103 - }, - { - "epoch": 0.027615507169410514, - "grad_norm": 0.038045626774934026, - "learning_rate": 2.7320954907161808e-06, - "loss": 0.0007, - "step": 104 - }, - { - "epoch": 0.027881040892193308, - "grad_norm": 0.6305735286653126, - "learning_rate": 2.7586206896551725e-06, - "loss": 0.0097, - "step": 105 - }, - { - "epoch": 0.028146574614976102, - "grad_norm": 0.019465351131816563, - "learning_rate": 2.7851458885941646e-06, - "loss": 0.0003, - "step": 106 - }, - { - "epoch": 0.028412108337758896, - "grad_norm": 0.03074317996513282, - "learning_rate": 2.8116710875331564e-06, - "loss": 0.0004, - "step": 107 - }, - { - "epoch": 0.02867764206054169, - "grad_norm": 4.503535246450842, - "learning_rate": 2.8381962864721485e-06, - "loss": 0.582, - "step": 108 - }, - { - "epoch": 0.028943175783324483, - "grad_norm": 4.8728912631764265, - "learning_rate": 2.864721485411141e-06, - "loss": 0.5469, - "step": 109 - }, - { - "epoch": 0.029208709506107277, - "grad_norm": 0.0095940645247496, - "learning_rate": 2.891246684350133e-06, - "loss": 0.0002, - "step": 110 - }, - { - "epoch": 0.02947424322889007, - "grad_norm": 5.105714132066514, - "learning_rate": 2.917771883289125e-06, - "loss": 0.7383, - "step": 111 - }, - { - "epoch": 0.02973977695167286, - "grad_norm": 0.18245285164952776, - "learning_rate": 2.944297082228117e-06, - "loss": 0.0027, - "step": 112 - }, - { - "epoch": 0.030005310674455655, - "grad_norm": 4.137453039540649, - "learning_rate": 2.970822281167109e-06, - "loss": 0.2158, - "step": 113 - }, - { - "epoch": 0.03027084439723845, - "grad_norm": 0.466146965565003, - "learning_rate": 2.997347480106101e-06, - "loss": 0.0085, - "step": 114 - }, - { - "epoch": 0.030536378120021242, - "grad_norm": 4.144598857842222, - "learning_rate": 3.0238726790450927e-06, - "loss": 0.4355, - "step": 115 - }, - { - "epoch": 0.030801911842804036, - "grad_norm": 
0.07481190979762373, - "learning_rate": 3.0503978779840853e-06, - "loss": 0.0012, - "step": 116 - }, - { - "epoch": 0.03106744556558683, - "grad_norm": 4.864460397183156, - "learning_rate": 3.0769230769230774e-06, - "loss": 0.4883, - "step": 117 - }, - { - "epoch": 0.031332979288369624, - "grad_norm": 0.046629399654510524, - "learning_rate": 3.103448275862069e-06, - "loss": 0.0008, - "step": 118 - }, - { - "epoch": 0.031598513011152414, - "grad_norm": 0.9844013414494834, - "learning_rate": 3.1299734748010613e-06, - "loss": 0.0006, - "step": 119 - }, - { - "epoch": 0.03186404673393521, - "grad_norm": 2.8873700648345726, - "learning_rate": 3.1564986737400535e-06, - "loss": 0.0525, - "step": 120 - }, - { - "epoch": 0.032129580456718, - "grad_norm": 13.401562271488295, - "learning_rate": 3.183023872679045e-06, - "loss": 2.1094, - "step": 121 - }, - { - "epoch": 0.0323951141795008, - "grad_norm": 0.9753972945569677, - "learning_rate": 3.2095490716180373e-06, - "loss": 0.0198, - "step": 122 - }, - { - "epoch": 0.03266064790228359, - "grad_norm": 0.052264498037133865, - "learning_rate": 3.23607427055703e-06, - "loss": 0.0011, - "step": 123 - }, - { - "epoch": 0.032926181625066386, - "grad_norm": 0.0275297136646831, - "learning_rate": 3.2625994694960216e-06, - "loss": 0.0006, - "step": 124 - }, - { - "epoch": 0.03319171534784918, - "grad_norm": 0.1637629675751948, - "learning_rate": 3.2891246684350138e-06, - "loss": 0.0037, - "step": 125 - }, - { - "epoch": 0.03345724907063197, - "grad_norm": 4.78491469984592, - "learning_rate": 3.3156498673740055e-06, - "loss": 0.3535, - "step": 126 - }, - { - "epoch": 0.033722782793414764, - "grad_norm": 15.570720009635428, - "learning_rate": 3.3421750663129977e-06, - "loss": 2.7969, - "step": 127 - }, - { - "epoch": 0.033988316516197555, - "grad_norm": 4.505829037318201, - "learning_rate": 3.3687002652519894e-06, - "loss": 0.3105, - "step": 128 - }, - { - "epoch": 0.03425385023898035, - "grad_norm": 0.042401600725809944, - "learning_rate": 3.3952254641909815e-06, - "loss": 0.0009, - "step": 129 - }, - { - "epoch": 0.03451938396176314, - "grad_norm": 0.3272160377202937, - "learning_rate": 3.4217506631299737e-06, - "loss": 0.0074, - "step": 130 - }, - { - "epoch": 0.03478491768454594, - "grad_norm": 0.20468371949697137, - "learning_rate": 3.448275862068966e-06, - "loss": 0.0036, - "step": 131 - }, - { - "epoch": 0.03505045140732873, - "grad_norm": 4.559385816252051, - "learning_rate": 3.474801061007958e-06, - "loss": 0.2773, - "step": 132 - }, - { - "epoch": 0.03531598513011153, - "grad_norm": 0.44947961387998925, - "learning_rate": 3.50132625994695e-06, - "loss": 0.0093, - "step": 133 - }, - { - "epoch": 0.03558151885289432, - "grad_norm": 2.628443790962567, - "learning_rate": 3.527851458885942e-06, - "loss": 0.0767, - "step": 134 - }, - { - "epoch": 0.035847052575677114, - "grad_norm": 0.3344294580416105, - "learning_rate": 3.554376657824934e-06, - "loss": 0.0079, - "step": 135 - }, - { - "epoch": 0.036112586298459905, - "grad_norm": 6.1109142288253375, - "learning_rate": 3.5809018567639257e-06, - "loss": 0.1455, - "step": 136 - }, - { - "epoch": 0.036378120021242695, - "grad_norm": 0.19257798325675396, - "learning_rate": 3.607427055702918e-06, - "loss": 0.0043, - "step": 137 - }, - { - "epoch": 0.03664365374402549, - "grad_norm": 3.878928845456885, - "learning_rate": 3.6339522546419104e-06, - "loss": 0.1934, - "step": 138 - }, - { - "epoch": 0.03690918746680828, - "grad_norm": 1.5262893890482263, - "learning_rate": 3.660477453580902e-06, - "loss": 0.032, - 
"step": 139 - }, - { - "epoch": 0.03717472118959108, - "grad_norm": 0.19902989840213414, - "learning_rate": 3.6870026525198943e-06, - "loss": 0.0045, - "step": 140 - }, - { - "epoch": 0.03744025491237387, - "grad_norm": 0.5136742696527371, - "learning_rate": 3.7135278514588865e-06, - "loss": 0.0104, - "step": 141 - }, - { - "epoch": 0.03770578863515667, - "grad_norm": 3.86476349919558, - "learning_rate": 3.740053050397878e-06, - "loss": 0.1367, - "step": 142 - }, - { - "epoch": 0.03797132235793946, - "grad_norm": 0.796785610921372, - "learning_rate": 3.7665782493368703e-06, - "loss": 0.0194, - "step": 143 - }, - { - "epoch": 0.038236856080722255, - "grad_norm": 1.5271845218247893, - "learning_rate": 3.793103448275862e-06, - "loss": 0.0288, - "step": 144 - }, - { - "epoch": 0.038502389803505045, - "grad_norm": 2.427314819468491, - "learning_rate": 3.819628647214854e-06, - "loss": 0.0576, - "step": 145 - }, - { - "epoch": 0.038767923526287836, - "grad_norm": 4.117849401975011, - "learning_rate": 3.846153846153847e-06, - "loss": 0.2441, - "step": 146 - }, - { - "epoch": 0.03903345724907063, - "grad_norm": 0.15723067876063995, - "learning_rate": 3.8726790450928385e-06, - "loss": 0.0039, - "step": 147 - }, - { - "epoch": 0.03929899097185342, - "grad_norm": 3.7869596148640228, - "learning_rate": 3.89920424403183e-06, - "loss": 0.1641, - "step": 148 - }, - { - "epoch": 0.03956452469463622, - "grad_norm": 0.37297101582033887, - "learning_rate": 3.925729442970823e-06, - "loss": 0.0067, - "step": 149 - }, - { - "epoch": 0.03983005841741901, - "grad_norm": 3.6378474369832037, - "learning_rate": 3.9522546419098145e-06, - "loss": 0.0957, - "step": 150 - }, - { - "epoch": 0.04009559214020181, - "grad_norm": 0.43825791965553007, - "learning_rate": 3.978779840848806e-06, - "loss": 0.0105, - "step": 151 - }, - { - "epoch": 0.0403611258629846, - "grad_norm": 3.833802266035273, - "learning_rate": 4.005305039787799e-06, - "loss": 0.1934, - "step": 152 - }, - { - "epoch": 0.040626659585767395, - "grad_norm": 0.27648447133818854, - "learning_rate": 4.031830238726791e-06, - "loss": 0.0059, - "step": 153 - }, - { - "epoch": 0.040892193308550186, - "grad_norm": 0.5502618375049242, - "learning_rate": 4.058355437665783e-06, - "loss": 0.0117, - "step": 154 - }, - { - "epoch": 0.041157727031332976, - "grad_norm": 0.3759989484991633, - "learning_rate": 4.084880636604775e-06, - "loss": 0.0093, - "step": 155 - }, - { - "epoch": 0.04142326075411577, - "grad_norm": 0.15725269535355169, - "learning_rate": 4.111405835543767e-06, - "loss": 0.0033, - "step": 156 - }, - { - "epoch": 0.041688794476898564, - "grad_norm": 0.5545627730013464, - "learning_rate": 4.137931034482759e-06, - "loss": 0.0137, - "step": 157 - }, - { - "epoch": 0.04195432819968136, - "grad_norm": 0.3259056057700917, - "learning_rate": 4.164456233421751e-06, - "loss": 0.0081, - "step": 158 - }, - { - "epoch": 0.04221986192246415, - "grad_norm": 0.1865224020504324, - "learning_rate": 4.190981432360743e-06, - "loss": 0.0041, - "step": 159 - }, - { - "epoch": 0.04248539564524695, - "grad_norm": 1.8307778726247308, - "learning_rate": 4.217506631299735e-06, - "loss": 0.0532, - "step": 160 - }, - { - "epoch": 0.04275092936802974, - "grad_norm": 3.8899387815695117, - "learning_rate": 4.244031830238727e-06, - "loss": 0.1553, - "step": 161 - }, - { - "epoch": 0.043016463090812536, - "grad_norm": 0.35213110395365343, - "learning_rate": 4.2705570291777195e-06, - "loss": 0.0063, - "step": 162 - }, - { - "epoch": 0.043281996813595326, - "grad_norm": 0.12739793280337483, - 
"learning_rate": 4.297082228116711e-06, - "loss": 0.0029, - "step": 163 - }, - { - "epoch": 0.04354753053637812, - "grad_norm": 3.7917485475147767, - "learning_rate": 4.323607427055703e-06, - "loss": 0.2559, - "step": 164 - }, - { - "epoch": 0.043813064259160914, - "grad_norm": 4.222145018176052, - "learning_rate": 4.3501326259946955e-06, - "loss": 0.3105, - "step": 165 - }, - { - "epoch": 0.044078597981943704, - "grad_norm": 0.1510587971806312, - "learning_rate": 4.376657824933687e-06, - "loss": 0.0036, - "step": 166 - }, - { - "epoch": 0.0443441317047265, - "grad_norm": 0.3746365601473909, - "learning_rate": 4.403183023872679e-06, - "loss": 0.0091, - "step": 167 - }, - { - "epoch": 0.04460966542750929, - "grad_norm": 3.6742314366524482, - "learning_rate": 4.4297082228116715e-06, - "loss": 0.1641, - "step": 168 - }, - { - "epoch": 0.04487519915029209, - "grad_norm": 4.169049834925408, - "learning_rate": 4.456233421750663e-06, - "loss": 0.1367, - "step": 169 - }, - { - "epoch": 0.04514073287307488, - "grad_norm": 23.2584513485522, - "learning_rate": 4.482758620689656e-06, - "loss": 2.375, - "step": 170 - }, - { - "epoch": 0.045406266595857676, - "grad_norm": 3.6642006588717737, - "learning_rate": 4.5092838196286476e-06, - "loss": 0.1465, - "step": 171 - }, - { - "epoch": 0.04567180031864047, - "grad_norm": 0.2890573481827513, - "learning_rate": 4.535809018567639e-06, - "loss": 0.0065, - "step": 172 - }, - { - "epoch": 0.045937334041423264, - "grad_norm": 3.68276451135959, - "learning_rate": 4.562334217506632e-06, - "loss": 0.1836, - "step": 173 - }, - { - "epoch": 0.046202867764206054, - "grad_norm": 2.4177768079368986, - "learning_rate": 4.588859416445624e-06, - "loss": 0.0479, - "step": 174 - }, - { - "epoch": 0.046468401486988845, - "grad_norm": 17.645780333783485, - "learning_rate": 4.615384615384616e-06, - "loss": 1.9531, - "step": 175 - }, - { - "epoch": 0.04673393520977164, - "grad_norm": 0.9556851330908339, - "learning_rate": 4.641909814323608e-06, - "loss": 0.0231, - "step": 176 - }, - { - "epoch": 0.04699946893255443, - "grad_norm": 0.15825725091826115, - "learning_rate": 4.6684350132626e-06, - "loss": 0.0035, - "step": 177 - }, - { - "epoch": 0.04726500265533723, - "grad_norm": 5.0505715269377065, - "learning_rate": 4.694960212201592e-06, - "loss": 0.0219, - "step": 178 - }, - { - "epoch": 0.04753053637812002, - "grad_norm": 0.19812718466796467, - "learning_rate": 4.721485411140584e-06, - "loss": 0.0051, - "step": 179 - }, - { - "epoch": 0.04779607010090282, - "grad_norm": 3.6161442909421218, - "learning_rate": 4.748010610079576e-06, - "loss": 0.165, - "step": 180 - }, - { - "epoch": 0.04806160382368561, - "grad_norm": 0.322834845757156, - "learning_rate": 4.774535809018568e-06, - "loss": 0.0065, - "step": 181 - }, - { - "epoch": 0.048327137546468404, - "grad_norm": 5.4547556811669935, - "learning_rate": 4.80106100795756e-06, - "loss": 2.125, - "step": 182 - }, - { - "epoch": 0.048592671269251195, - "grad_norm": 3.584757373914313, - "learning_rate": 4.8275862068965525e-06, - "loss": 0.1553, - "step": 183 - }, - { - "epoch": 0.048858204992033985, - "grad_norm": 0.7190093535962121, - "learning_rate": 4.854111405835544e-06, - "loss": 0.0187, - "step": 184 - }, - { - "epoch": 0.04912373871481678, - "grad_norm": 8.285280730208004, - "learning_rate": 4.880636604774536e-06, - "loss": 2.0156, - "step": 185 - }, - { - "epoch": 0.04938927243759957, - "grad_norm": 0.1589100928383723, - "learning_rate": 4.9071618037135285e-06, - "loss": 0.0035, - "step": 186 - }, - { - "epoch": 
0.04965480616038237, - "grad_norm": 1.9461569166969948, - "learning_rate": 4.93368700265252e-06, - "loss": 0.0479, - "step": 187 - }, - { - "epoch": 0.04992033988316516, - "grad_norm": 0.28568945003091645, - "learning_rate": 4.960212201591512e-06, - "loss": 0.0058, - "step": 188 - }, - { - "epoch": 0.05018587360594796, - "grad_norm": 0.19319543123307042, - "learning_rate": 4.9867374005305045e-06, - "loss": 0.0033, - "step": 189 - }, - { - "epoch": 0.05045140732873075, - "grad_norm": 4.037258353685831, - "learning_rate": 5.013262599469496e-06, - "loss": 0.0815, - "step": 190 - }, - { - "epoch": 0.050716941051513545, - "grad_norm": 1.3356221226261589, - "learning_rate": 5.039787798408489e-06, - "loss": 0.0354, - "step": 191 - }, - { - "epoch": 0.050982474774296335, - "grad_norm": 0.1480129388225318, - "learning_rate": 5.06631299734748e-06, - "loss": 0.0026, - "step": 192 - }, - { - "epoch": 0.05124800849707913, - "grad_norm": 3.5661629795250045, - "learning_rate": 5.092838196286472e-06, - "loss": 0.2441, - "step": 193 - }, - { - "epoch": 0.05151354221986192, - "grad_norm": 2.1523745771682172, - "learning_rate": 5.119363395225465e-06, - "loss": 0.0576, - "step": 194 - }, - { - "epoch": 0.05177907594264471, - "grad_norm": 0.9192144869318637, - "learning_rate": 5.145888594164457e-06, - "loss": 0.0208, - "step": 195 - }, - { - "epoch": 0.05204460966542751, - "grad_norm": 0.2690072757879695, - "learning_rate": 5.172413793103449e-06, - "loss": 0.0064, - "step": 196 - }, - { - "epoch": 0.0523101433882103, - "grad_norm": 2.7717636534246792, - "learning_rate": 5.19893899204244e-06, - "loss": 0.0884, - "step": 197 - }, - { - "epoch": 0.0525756771109931, - "grad_norm": 7.0745463915342635, - "learning_rate": 5.225464190981433e-06, - "loss": 2.0156, - "step": 198 - }, - { - "epoch": 0.05284121083377589, - "grad_norm": 0.19227286962350826, - "learning_rate": 5.251989389920424e-06, - "loss": 0.0045, - "step": 199 - }, - { - "epoch": 0.053106744556558685, - "grad_norm": 0.8537355801992382, - "learning_rate": 5.278514588859417e-06, - "loss": 0.0069, - "step": 200 - }, - { - "epoch": 0.053372278279341476, - "grad_norm": 0.11929159424700997, - "learning_rate": 5.3050397877984095e-06, - "loss": 0.0025, - "step": 201 - }, - { - "epoch": 0.05363781200212427, - "grad_norm": 0.07790410641989416, - "learning_rate": 5.331564986737401e-06, - "loss": 0.0013, - "step": 202 - }, - { - "epoch": 0.05390334572490706, - "grad_norm": 0.13268954955742412, - "learning_rate": 5.358090185676394e-06, - "loss": 0.0032, - "step": 203 - }, - { - "epoch": 0.054168879447689854, - "grad_norm": 4.439535441327748, - "learning_rate": 5.384615384615385e-06, - "loss": 0.1836, - "step": 204 - }, - { - "epoch": 0.05443441317047265, - "grad_norm": 0.2688219627795949, - "learning_rate": 5.411140583554377e-06, - "loss": 0.0047, - "step": 205 - }, - { - "epoch": 0.05469994689325544, - "grad_norm": 0.08483073451016193, - "learning_rate": 5.437665782493369e-06, - "loss": 0.0019, - "step": 206 - }, - { - "epoch": 0.05496548061603824, - "grad_norm": 4.028626588799805, - "learning_rate": 5.4641909814323615e-06, - "loss": 0.3203, - "step": 207 - }, - { - "epoch": 0.05523101433882103, - "grad_norm": 2.4610886737988866, - "learning_rate": 5.490716180371354e-06, - "loss": 0.0688, - "step": 208 - }, - { - "epoch": 0.055496548061603826, - "grad_norm": 1.133315536600772, - "learning_rate": 5.517241379310345e-06, - "loss": 0.0258, - "step": 209 - }, - { - "epoch": 0.055762081784386616, - "grad_norm": 0.7726546713585315, - "learning_rate": 
5.5437665782493376e-06, - "loss": 0.0152, - "step": 210 - }, - { - "epoch": 0.05602761550716941, - "grad_norm": 0.38371963709194457, - "learning_rate": 5.570291777188329e-06, - "loss": 0.0092, - "step": 211 - }, - { - "epoch": 0.056293149229952204, - "grad_norm": 0.13225755055257674, - "learning_rate": 5.596816976127322e-06, - "loss": 0.0023, - "step": 212 - }, - { - "epoch": 0.056558682952734994, - "grad_norm": 0.08917614772102375, - "learning_rate": 5.623342175066313e-06, - "loss": 0.002, - "step": 213 - }, - { - "epoch": 0.05682421667551779, - "grad_norm": 4.854086836177229, - "learning_rate": 5.649867374005305e-06, - "loss": 0.1035, - "step": 214 - }, - { - "epoch": 0.05708975039830058, - "grad_norm": 0.04007293390414322, - "learning_rate": 5.676392572944297e-06, - "loss": 0.0007, - "step": 215 - }, - { - "epoch": 0.05735528412108338, - "grad_norm": 0.2848845451163088, - "learning_rate": 5.70291777188329e-06, - "loss": 0.0064, - "step": 216 - }, - { - "epoch": 0.05762081784386617, - "grad_norm": 4.686394489350751, - "learning_rate": 5.729442970822282e-06, - "loss": 0.2246, - "step": 217 - }, - { - "epoch": 0.057886351566648966, - "grad_norm": 0.11120455969442722, - "learning_rate": 5.755968169761274e-06, - "loss": 0.0029, - "step": 218 - }, - { - "epoch": 0.05815188528943176, - "grad_norm": 4.663386143892064, - "learning_rate": 5.782493368700266e-06, - "loss": 0.1641, - "step": 219 - }, - { - "epoch": 0.058417419012214554, - "grad_norm": 0.24512762915181402, - "learning_rate": 5.809018567639257e-06, - "loss": 0.0036, - "step": 220 - }, - { - "epoch": 0.058682952734997344, - "grad_norm": 4.183804090865918, - "learning_rate": 5.83554376657825e-06, - "loss": 0.2148, - "step": 221 - }, - { - "epoch": 0.05894848645778014, - "grad_norm": 0.06396150026208348, - "learning_rate": 5.862068965517242e-06, - "loss": 0.0012, - "step": 222 - }, - { - "epoch": 0.05921402018056293, - "grad_norm": 0.1942578634333766, - "learning_rate": 5.888594164456234e-06, - "loss": 0.0037, - "step": 223 - }, - { - "epoch": 0.05947955390334572, - "grad_norm": 0.07594022266578823, - "learning_rate": 5.915119363395227e-06, - "loss": 0.002, - "step": 224 - }, - { - "epoch": 0.05974508762612852, - "grad_norm": 0.35116426830013603, - "learning_rate": 5.941644562334218e-06, - "loss": 0.0074, - "step": 225 - }, - { - "epoch": 0.06001062134891131, - "grad_norm": 1.578167800096405, - "learning_rate": 5.96816976127321e-06, - "loss": 0.0356, - "step": 226 - }, - { - "epoch": 0.06027615507169411, - "grad_norm": 1.1285595351705588, - "learning_rate": 5.994694960212202e-06, - "loss": 0.0255, - "step": 227 - }, - { - "epoch": 0.0605416887944769, - "grad_norm": 4.241743427696131, - "learning_rate": 6.0212201591511945e-06, - "loss": 0.4551, - "step": 228 - }, - { - "epoch": 0.060807222517259694, - "grad_norm": 7.930922977922742, - "learning_rate": 6.0477453580901854e-06, - "loss": 1.9453, - "step": 229 - }, - { - "epoch": 0.061072756240042485, - "grad_norm": 4.1445314479670925, - "learning_rate": 6.074270557029178e-06, - "loss": 0.4766, - "step": 230 - }, - { - "epoch": 0.06133828996282528, - "grad_norm": 0.022757441023624224, - "learning_rate": 6.1007957559681706e-06, - "loss": 0.0006, - "step": 231 - }, - { - "epoch": 0.06160382368560807, - "grad_norm": 0.04301908301005633, - "learning_rate": 6.127320954907162e-06, - "loss": 0.0012, - "step": 232 - }, - { - "epoch": 0.06186935740839086, - "grad_norm": 3.9993964088934497, - "learning_rate": 6.153846153846155e-06, - "loss": 0.5352, - "step": 233 - }, - { - "epoch": 
0.06213489113117366, - "grad_norm": 0.0384831919189867, - "learning_rate": 6.180371352785146e-06, - "loss": 0.0011, - "step": 234 - }, - { - "epoch": 0.06240042485395645, - "grad_norm": 0.11825509016660113, - "learning_rate": 6.206896551724138e-06, - "loss": 0.0033, - "step": 235 - }, - { - "epoch": 0.06266595857673925, - "grad_norm": 0.038756173266160945, - "learning_rate": 6.23342175066313e-06, - "loss": 0.0011, - "step": 236 - }, - { - "epoch": 0.06293149229952204, - "grad_norm": 0.0527621505367311, - "learning_rate": 6.259946949602123e-06, - "loss": 0.0016, - "step": 237 - }, - { - "epoch": 0.06319702602230483, - "grad_norm": 3.4032460542743235, - "learning_rate": 6.286472148541115e-06, - "loss": 0.4199, - "step": 238 - }, - { - "epoch": 0.06346255974508763, - "grad_norm": 1.5026047777466172, - "learning_rate": 6.312997347480107e-06, - "loss": 0.006, - "step": 239 - }, - { - "epoch": 0.06372809346787042, - "grad_norm": 0.09772198768152848, - "learning_rate": 6.339522546419099e-06, - "loss": 0.0028, - "step": 240 - }, - { - "epoch": 0.06399362719065321, - "grad_norm": 3.4974570386194848, - "learning_rate": 6.36604774535809e-06, - "loss": 0.3203, - "step": 241 - }, - { - "epoch": 0.064259160913436, - "grad_norm": 0.05852008470517513, - "learning_rate": 6.392572944297083e-06, - "loss": 0.0017, - "step": 242 - }, - { - "epoch": 0.0645246946362188, - "grad_norm": 0.6206245208833137, - "learning_rate": 6.419098143236075e-06, - "loss": 0.0187, - "step": 243 - }, - { - "epoch": 0.0647902283590016, - "grad_norm": 3.257984380379068, - "learning_rate": 6.445623342175067e-06, - "loss": 0.2988, - "step": 244 - }, - { - "epoch": 0.06505576208178439, - "grad_norm": 3.129808256412029, - "learning_rate": 6.47214854111406e-06, - "loss": 0.3672, - "step": 245 - }, - { - "epoch": 0.06532129580456718, - "grad_norm": 1.0556539855013083, - "learning_rate": 6.498673740053051e-06, - "loss": 0.0258, - "step": 246 - }, - { - "epoch": 0.06558682952734997, - "grad_norm": 2.2148968544822747, - "learning_rate": 6.525198938992043e-06, - "loss": 0.1367, - "step": 247 - }, - { - "epoch": 0.06585236325013277, - "grad_norm": 0.3150693176270268, - "learning_rate": 6.551724137931035e-06, - "loss": 0.0103, - "step": 248 - }, - { - "epoch": 0.06611789697291556, - "grad_norm": 0.9150893306492274, - "learning_rate": 6.5782493368700276e-06, - "loss": 0.0356, - "step": 249 - }, - { - "epoch": 0.06638343069569835, - "grad_norm": 0.28640145943412554, - "learning_rate": 6.6047745358090184e-06, - "loss": 0.0092, - "step": 250 - }, - { - "epoch": 0.06664896441848114, - "grad_norm": 0.463345992401992, - "learning_rate": 6.631299734748011e-06, - "loss": 0.0165, - "step": 251 - }, - { - "epoch": 0.06691449814126393, - "grad_norm": 1.1399589305877227, - "learning_rate": 6.657824933687003e-06, - "loss": 0.0393, - "step": 252 - }, - { - "epoch": 0.06718003186404674, - "grad_norm": 2.377572235801305, - "learning_rate": 6.684350132625995e-06, - "loss": 0.1641, - "step": 253 - }, - { - "epoch": 0.06744556558682953, - "grad_norm": 0.20240560295679277, - "learning_rate": 6.710875331564988e-06, - "loss": 0.0053, - "step": 254 - }, - { - "epoch": 0.06771109930961232, - "grad_norm": 0.5580660153168894, - "learning_rate": 6.737400530503979e-06, - "loss": 0.015, - "step": 255 - }, - { - "epoch": 0.06797663303239511, - "grad_norm": 0.34949012939902974, - "learning_rate": 6.763925729442971e-06, - "loss": 0.0104, - "step": 256 - }, - { - "epoch": 0.06824216675517791, - "grad_norm": 1.5003862236593613, - "learning_rate": 6.790450928381963e-06, - "loss": 
0.0688, - "step": 257 - }, - { - "epoch": 0.0685077004779607, - "grad_norm": 3.1203129924267152, - "learning_rate": 6.816976127320956e-06, - "loss": 0.2246, - "step": 258 - }, - { - "epoch": 0.0687732342007435, - "grad_norm": 1.051643999026055, - "learning_rate": 6.843501326259947e-06, - "loss": 0.0391, - "step": 259 - }, - { - "epoch": 0.06903876792352628, - "grad_norm": 0.19030670002007702, - "learning_rate": 6.87002652519894e-06, - "loss": 0.0058, - "step": 260 - }, - { - "epoch": 0.06930430164630907, - "grad_norm": 0.6772208181131325, - "learning_rate": 6.896551724137932e-06, - "loss": 0.0206, - "step": 261 - }, - { - "epoch": 0.06956983536909188, - "grad_norm": 2.2887370072088564, - "learning_rate": 6.923076923076923e-06, - "loss": 0.1035, - "step": 262 - }, - { - "epoch": 0.06983536909187467, - "grad_norm": 0.6143154696309661, - "learning_rate": 6.949602122015916e-06, - "loss": 0.0187, - "step": 263 - }, - { - "epoch": 0.07010090281465746, - "grad_norm": 2.9212908220956946, - "learning_rate": 6.976127320954908e-06, - "loss": 0.1641, - "step": 264 - }, - { - "epoch": 0.07036643653744025, - "grad_norm": 0.05930635612845523, - "learning_rate": 7.0026525198939e-06, - "loss": 0.0016, - "step": 265 - }, - { - "epoch": 0.07063197026022305, - "grad_norm": 0.21269981419083292, - "learning_rate": 7.029177718832891e-06, - "loss": 0.0045, - "step": 266 - }, - { - "epoch": 0.07089750398300584, - "grad_norm": 3.3491904755168838, - "learning_rate": 7.055702917771884e-06, - "loss": 0.2461, - "step": 267 - }, - { - "epoch": 0.07116303770578863, - "grad_norm": 3.011992184448476, - "learning_rate": 7.082228116710876e-06, - "loss": 0.2246, - "step": 268 - }, - { - "epoch": 0.07142857142857142, - "grad_norm": 3.317414315449119, - "learning_rate": 7.108753315649868e-06, - "loss": 0.2344, - "step": 269 - }, - { - "epoch": 0.07169410515135423, - "grad_norm": 0.3112825686788188, - "learning_rate": 7.1352785145888606e-06, - "loss": 0.0093, - "step": 270 - }, - { - "epoch": 0.07195963887413702, - "grad_norm": 0.05440553438613368, - "learning_rate": 7.1618037135278515e-06, - "loss": 0.0017, - "step": 271 - }, - { - "epoch": 0.07222517259691981, - "grad_norm": 2.1971208118181416, - "learning_rate": 7.188328912466844e-06, - "loss": 0.1367, - "step": 272 - }, - { - "epoch": 0.0724907063197026, - "grad_norm": 12.646903729517486, - "learning_rate": 7.214854111405836e-06, - "loss": 1.625, - "step": 273 - }, - { - "epoch": 0.07275624004248539, - "grad_norm": 0.5134455047996075, - "learning_rate": 7.241379310344828e-06, - "loss": 0.0166, - "step": 274 - }, - { - "epoch": 0.0730217737652682, - "grad_norm": 0.7126267670034715, - "learning_rate": 7.267904509283821e-06, - "loss": 0.0288, - "step": 275 - }, - { - "epoch": 0.07328730748805098, - "grad_norm": 2.247754974205674, - "learning_rate": 7.294429708222812e-06, - "loss": 0.1367, - "step": 276 - }, - { - "epoch": 0.07355284121083377, - "grad_norm": 2.0961429578052546, - "learning_rate": 7.320954907161804e-06, - "loss": 0.0967, - "step": 277 - }, - { - "epoch": 0.07381837493361657, - "grad_norm": 1.5658452401665721, - "learning_rate": 7.347480106100796e-06, - "loss": 0.0757, - "step": 278 - }, - { - "epoch": 0.07408390865639937, - "grad_norm": 1.3474463383395376, - "learning_rate": 7.374005305039789e-06, - "loss": 0.0635, - "step": 279 - }, - { - "epoch": 0.07434944237918216, - "grad_norm": 0.9079733138650365, - "learning_rate": 7.40053050397878e-06, - "loss": 0.0354, - "step": 280 - }, - { - "epoch": 0.07461497610196495, - "grad_norm": 1.423025471379327, - 
"learning_rate": 7.427055702917773e-06, - "loss": 0.0579, - "step": 281 - }, - { - "epoch": 0.07488050982474774, - "grad_norm": 29.742858003866257, - "learning_rate": 7.453580901856765e-06, - "loss": 2.2188, - "step": 282 - }, - { - "epoch": 0.07514604354753053, - "grad_norm": 1.1822814883714525, - "learning_rate": 7.480106100795756e-06, - "loss": 0.0579, - "step": 283 - }, - { - "epoch": 0.07541157727031333, - "grad_norm": 1.183988881108207, - "learning_rate": 7.506631299734749e-06, - "loss": 0.053, - "step": 284 - }, - { - "epoch": 0.07567711099309612, - "grad_norm": 1.2925771262928738, - "learning_rate": 7.533156498673741e-06, - "loss": 0.0579, - "step": 285 - }, - { - "epoch": 0.07594264471587892, - "grad_norm": 0.6760998544572231, - "learning_rate": 7.559681697612733e-06, - "loss": 0.0208, - "step": 286 - }, - { - "epoch": 0.0762081784386617, - "grad_norm": 1.0282421815983582, - "learning_rate": 7.586206896551724e-06, - "loss": 0.0396, - "step": 287 - }, - { - "epoch": 0.07647371216144451, - "grad_norm": 0.4234422853011549, - "learning_rate": 7.612732095490717e-06, - "loss": 0.015, - "step": 288 - }, - { - "epoch": 0.0767392458842273, - "grad_norm": 0.39859873725327066, - "learning_rate": 7.639257294429708e-06, - "loss": 0.0135, - "step": 289 - }, - { - "epoch": 0.07700477960701009, - "grad_norm": 0.16222305800718434, - "learning_rate": 7.6657824933687e-06, - "loss": 0.0052, - "step": 290 - }, - { - "epoch": 0.07727031332979288, - "grad_norm": 0.11630227304441901, - "learning_rate": 7.692307692307694e-06, - "loss": 0.0038, - "step": 291 - }, - { - "epoch": 0.07753584705257567, - "grad_norm": 0.03372528156970035, - "learning_rate": 7.718832891246685e-06, - "loss": 0.0008, - "step": 292 - }, - { - "epoch": 0.07780138077535848, - "grad_norm": 0.051940974929390066, - "learning_rate": 7.745358090185677e-06, - "loss": 0.0015, - "step": 293 - }, - { - "epoch": 0.07806691449814127, - "grad_norm": 3.0783353845274295, - "learning_rate": 7.771883289124669e-06, - "loss": 0.2988, - "step": 294 - }, - { - "epoch": 0.07833244822092406, - "grad_norm": 3.19288706413644, - "learning_rate": 7.79840848806366e-06, - "loss": 1.5, - "step": 295 - }, - { - "epoch": 0.07859798194370685, - "grad_norm": 0.043571411993144044, - "learning_rate": 7.824933687002652e-06, - "loss": 0.0012, - "step": 296 - }, - { - "epoch": 0.07886351566648965, - "grad_norm": 0.022251985441288053, - "learning_rate": 7.851458885941646e-06, - "loss": 0.0006, - "step": 297 - }, - { - "epoch": 0.07912904938927244, - "grad_norm": 0.08604971258954734, - "learning_rate": 7.877984084880637e-06, - "loss": 0.002, - "step": 298 - }, - { - "epoch": 0.07939458311205523, - "grad_norm": 0.019663492930382675, - "learning_rate": 7.904509283819629e-06, - "loss": 0.0007, - "step": 299 - }, - { - "epoch": 0.07966011683483802, - "grad_norm": 4.060927451869871, - "learning_rate": 7.93103448275862e-06, - "loss": 0.5469, - "step": 300 - }, - { - "epoch": 0.07992565055762081, - "grad_norm": 3.7022259212796285, - "learning_rate": 7.957559681697613e-06, - "loss": 0.5, - "step": 301 - }, - { - "epoch": 0.08019118428040362, - "grad_norm": 0.009537116683502751, - "learning_rate": 7.984084880636606e-06, - "loss": 0.0002, - "step": 302 - }, - { - "epoch": 0.0804567180031864, - "grad_norm": 0.15972134226047616, - "learning_rate": 8.010610079575598e-06, - "loss": 0.0013, - "step": 303 - }, - { - "epoch": 0.0807222517259692, - "grad_norm": 3.667758151535006, - "learning_rate": 8.03713527851459e-06, - "loss": 0.3203, - "step": 304 - }, - { - "epoch": 
0.08098778544875199, - "grad_norm": 0.11182970657148031, - "learning_rate": 8.063660477453583e-06, - "loss": 0.0036, - "step": 305 - }, - { - "epoch": 0.08125331917153479, - "grad_norm": 2.9682235846476854, - "learning_rate": 8.090185676392573e-06, - "loss": 0.3203, - "step": 306 - }, - { - "epoch": 0.08151885289431758, - "grad_norm": 2.6544777345452233, - "learning_rate": 8.116710875331566e-06, - "loss": 0.2148, - "step": 307 - }, - { - "epoch": 0.08178438661710037, - "grad_norm": 2.23551129108633, - "learning_rate": 8.143236074270558e-06, - "loss": 0.2129, - "step": 308 - }, - { - "epoch": 0.08204992033988316, - "grad_norm": 12.154843924500748, - "learning_rate": 8.16976127320955e-06, - "loss": 1.6797, - "step": 309 - }, - { - "epoch": 0.08231545406266595, - "grad_norm": 0.15211428151282252, - "learning_rate": 8.196286472148541e-06, - "loss": 0.006, - "step": 310 - }, - { - "epoch": 0.08258098778544876, - "grad_norm": 1.8572071183861802, - "learning_rate": 8.222811671087533e-06, - "loss": 0.1641, - "step": 311 - }, - { - "epoch": 0.08284652150823155, - "grad_norm": 0.04756323667504141, - "learning_rate": 8.249336870026527e-06, - "loss": 0.0015, - "step": 312 - }, - { - "epoch": 0.08311205523101434, - "grad_norm": 1.2740195810197956, - "learning_rate": 8.275862068965518e-06, - "loss": 0.0752, - "step": 313 - }, - { - "epoch": 0.08337758895379713, - "grad_norm": 2.2505964862303536, - "learning_rate": 8.30238726790451e-06, - "loss": 0.1553, - "step": 314 - }, - { - "epoch": 0.08364312267657993, - "grad_norm": 1.2815168496791627, - "learning_rate": 8.328912466843502e-06, - "loss": 0.0359, - "step": 315 - }, - { - "epoch": 0.08390865639936272, - "grad_norm": 0.6640260863661916, - "learning_rate": 8.355437665782494e-06, - "loss": 0.0356, - "step": 316 - }, - { - "epoch": 0.08417419012214551, - "grad_norm": 1.5201761587838876, - "learning_rate": 8.381962864721485e-06, - "loss": 0.1123, - "step": 317 - }, - { - "epoch": 0.0844397238449283, - "grad_norm": 8.490220248388562, - "learning_rate": 8.408488063660479e-06, - "loss": 1.625, - "step": 318 - }, - { - "epoch": 0.08470525756771109, - "grad_norm": 1.220932640130876, - "learning_rate": 8.43501326259947e-06, - "loss": 0.0762, - "step": 319 - }, - { - "epoch": 0.0849707912904939, - "grad_norm": 1.5989309966218423, - "learning_rate": 8.461538461538462e-06, - "loss": 0.1191, - "step": 320 - }, - { - "epoch": 0.08523632501327669, - "grad_norm": 0.9427180577076081, - "learning_rate": 8.488063660477454e-06, - "loss": 0.0583, - "step": 321 - }, - { - "epoch": 0.08550185873605948, - "grad_norm": 0.5733118835063594, - "learning_rate": 8.514588859416446e-06, - "loss": 0.0293, - "step": 322 - }, - { - "epoch": 0.08576739245884227, - "grad_norm": 0.14144397139040923, - "learning_rate": 8.541114058355439e-06, - "loss": 0.0066, - "step": 323 - }, - { - "epoch": 0.08603292618162507, - "grad_norm": 2.2159972152363077, - "learning_rate": 8.56763925729443e-06, - "loss": 0.2676, - "step": 324 - }, - { - "epoch": 0.08629845990440786, - "grad_norm": 1.6089684506296742, - "learning_rate": 8.594164456233422e-06, - "loss": 0.165, - "step": 325 - }, - { - "epoch": 0.08656399362719065, - "grad_norm": 1.907931611057086, - "learning_rate": 8.620689655172414e-06, - "loss": 0.2246, - "step": 326 - }, - { - "epoch": 0.08682952734997344, - "grad_norm": 1.3090038245570277, - "learning_rate": 8.647214854111406e-06, - "loss": 0.0894, - "step": 327 - }, - { - "epoch": 0.08709506107275625, - "grad_norm": 0.32857189533914255, - "learning_rate": 8.6737400530504e-06, - "loss": 0.0167, 
- "step": 328 - }, - { - "epoch": 0.08736059479553904, - "grad_norm": 1.6452882577007435, - "learning_rate": 8.700265251989391e-06, - "loss": 0.1934, - "step": 329 - }, - { - "epoch": 0.08762612851832183, - "grad_norm": 0.9569297269958028, - "learning_rate": 8.726790450928383e-06, - "loss": 0.0583, - "step": 330 - }, - { - "epoch": 0.08789166224110462, - "grad_norm": 1.5135689038875453, - "learning_rate": 8.753315649867374e-06, - "loss": 0.1641, - "step": 331 - }, - { - "epoch": 0.08815719596388741, - "grad_norm": 1.171616362989181, - "learning_rate": 8.779840848806366e-06, - "loss": 0.0967, - "step": 332 - }, - { - "epoch": 0.08842272968667021, - "grad_norm": 1.296129054455011, - "learning_rate": 8.806366047745358e-06, - "loss": 0.1279, - "step": 333 - }, - { - "epoch": 0.088688263409453, - "grad_norm": 1.7346960331999244, - "learning_rate": 8.832891246684351e-06, - "loss": 0.2031, - "step": 334 - }, - { - "epoch": 0.08895379713223579, - "grad_norm": 1.0461737141624319, - "learning_rate": 8.859416445623343e-06, - "loss": 0.0894, - "step": 335 - }, - { - "epoch": 0.08921933085501858, - "grad_norm": 0.6479647062091533, - "learning_rate": 8.885941644562335e-06, - "loss": 0.0439, - "step": 336 - }, - { - "epoch": 0.08948486457780139, - "grad_norm": 0.734572554709422, - "learning_rate": 8.912466843501327e-06, - "loss": 0.0479, - "step": 337 - }, - { - "epoch": 0.08975039830058418, - "grad_norm": 0.37460636172952666, - "learning_rate": 8.938992042440318e-06, - "loss": 0.0239, - "step": 338 - }, - { - "epoch": 0.09001593202336697, - "grad_norm": 0.7525268871585711, - "learning_rate": 8.965517241379312e-06, - "loss": 0.0579, - "step": 339 - }, - { - "epoch": 0.09028146574614976, - "grad_norm": 0.8611894174379208, - "learning_rate": 8.992042440318303e-06, - "loss": 0.0757, - "step": 340 - }, - { - "epoch": 0.09054699946893255, - "grad_norm": 0.7524042144808409, - "learning_rate": 9.018567639257295e-06, - "loss": 0.0579, - "step": 341 - }, - { - "epoch": 0.09081253319171535, - "grad_norm": 0.7389515011548652, - "learning_rate": 9.045092838196289e-06, - "loss": 0.0579, - "step": 342 - }, - { - "epoch": 0.09107806691449814, - "grad_norm": 0.717809617473176, - "learning_rate": 9.071618037135279e-06, - "loss": 0.0583, - "step": 343 - }, - { - "epoch": 0.09134360063728093, - "grad_norm": 3.9747880808948737, - "learning_rate": 9.098143236074272e-06, - "loss": 1.2812, - "step": 344 - }, - { - "epoch": 0.09160913436006372, - "grad_norm": 0.5440174756085783, - "learning_rate": 9.124668435013264e-06, - "loss": 0.0396, - "step": 345 - }, - { - "epoch": 0.09187466808284653, - "grad_norm": 0.5421286985756604, - "learning_rate": 9.151193633952255e-06, - "loss": 0.04, - "step": 346 - }, - { - "epoch": 0.09214020180562932, - "grad_norm": 0.7819761791319009, - "learning_rate": 9.177718832891247e-06, - "loss": 0.0698, - "step": 347 - }, - { - "epoch": 0.09240573552841211, - "grad_norm": 0.6347909735793549, - "learning_rate": 9.204244031830239e-06, - "loss": 0.0532, - "step": 348 - }, - { - "epoch": 0.0926712692511949, - "grad_norm": 6.09104951580698, - "learning_rate": 9.230769230769232e-06, - "loss": 1.3984, - "step": 349 - }, - { - "epoch": 0.09293680297397769, - "grad_norm": 1.041426014412066, - "learning_rate": 9.257294429708224e-06, - "loss": 0.1045, - "step": 350 - }, - { - "epoch": 0.0932023366967605, - "grad_norm": 0.47301291400697715, - "learning_rate": 9.283819628647216e-06, - "loss": 0.027, - "step": 351 - }, - { - "epoch": 0.09346787041954328, - "grad_norm": 0.8295019698863545, - "learning_rate": 
9.310344827586207e-06, - "loss": 0.0693, - "step": 352 - }, - { - "epoch": 0.09373340414232607, - "grad_norm": 0.5101258233711066, - "learning_rate": 9.3368700265252e-06, - "loss": 0.0361, - "step": 353 - }, - { - "epoch": 0.09399893786510886, - "grad_norm": 0.846504832852156, - "learning_rate": 9.363395225464191e-06, - "loss": 0.0757, - "step": 354 - }, - { - "epoch": 0.09426447158789167, - "grad_norm": 1.228530334987593, - "learning_rate": 9.389920424403184e-06, - "loss": 0.0825, - "step": 355 - }, - { - "epoch": 0.09453000531067446, - "grad_norm": 1.0130610331102055, - "learning_rate": 9.416445623342176e-06, - "loss": 0.0903, - "step": 356 - }, - { - "epoch": 0.09479553903345725, - "grad_norm": 3.315689651222163, - "learning_rate": 9.442970822281168e-06, - "loss": 0.0698, - "step": 357 - }, - { - "epoch": 0.09506107275624004, - "grad_norm": 1.1215958152396903, - "learning_rate": 9.46949602122016e-06, - "loss": 0.0967, - "step": 358 - }, - { - "epoch": 0.09532660647902283, - "grad_norm": 0.9299426765923084, - "learning_rate": 9.496021220159151e-06, - "loss": 0.0767, - "step": 359 - }, - { - "epoch": 0.09559214020180563, - "grad_norm": 0.7265450766257645, - "learning_rate": 9.522546419098145e-06, - "loss": 0.0588, - "step": 360 - }, - { - "epoch": 0.09585767392458842, - "grad_norm": 0.8861616356173306, - "learning_rate": 9.549071618037136e-06, - "loss": 0.0894, - "step": 361 - }, - { - "epoch": 0.09612320764737121, - "grad_norm": 0.5801872302587202, - "learning_rate": 9.575596816976128e-06, - "loss": 0.0398, - "step": 362 - }, - { - "epoch": 0.096388741370154, - "grad_norm": 0.8463320496115545, - "learning_rate": 9.60212201591512e-06, - "loss": 0.0698, - "step": 363 - }, - { - "epoch": 0.09665427509293681, - "grad_norm": 1.0159728812303754, - "learning_rate": 9.628647214854112e-06, - "loss": 0.0825, - "step": 364 - }, - { - "epoch": 0.0969198088157196, - "grad_norm": 0.716278656041375, - "learning_rate": 9.655172413793105e-06, - "loss": 0.064, - "step": 365 - }, - { - "epoch": 0.09718534253850239, - "grad_norm": 0.9734713463730997, - "learning_rate": 9.681697612732097e-06, - "loss": 0.1035, - "step": 366 - }, - { - "epoch": 0.09745087626128518, - "grad_norm": 0.6573062869557463, - "learning_rate": 9.708222811671088e-06, - "loss": 0.0583, - "step": 367 - }, - { - "epoch": 0.09771640998406797, - "grad_norm": 0.4000747396309669, - "learning_rate": 9.73474801061008e-06, - "loss": 0.0259, - "step": 368 - }, - { - "epoch": 0.09798194370685077, - "grad_norm": 0.7259390646225368, - "learning_rate": 9.761273209549072e-06, - "loss": 0.0698, - "step": 369 - }, - { - "epoch": 0.09824747742963356, - "grad_norm": 0.5866962892418395, - "learning_rate": 9.787798408488064e-06, - "loss": 0.0439, - "step": 370 - }, - { - "epoch": 0.09851301115241635, - "grad_norm": 0.5114555343916287, - "learning_rate": 9.814323607427057e-06, - "loss": 0.0354, - "step": 371 - }, - { - "epoch": 0.09877854487519915, - "grad_norm": 0.6409404463637564, - "learning_rate": 9.840848806366049e-06, - "loss": 0.0525, - "step": 372 - }, - { - "epoch": 0.09904407859798195, - "grad_norm": 1.1219844119808053, - "learning_rate": 9.86737400530504e-06, - "loss": 0.1206, - "step": 373 - }, - { - "epoch": 0.09930961232076474, - "grad_norm": 0.553871104594828, - "learning_rate": 9.893899204244032e-06, - "loss": 0.0396, - "step": 374 - }, - { - "epoch": 0.09957514604354753, - "grad_norm": 0.9103969438290072, - "learning_rate": 9.920424403183024e-06, - "loss": 0.0825, - "step": 375 - }, - { - "epoch": 0.09984067976633032, - "grad_norm": 
0.6220106264544292, - "learning_rate": 9.946949602122017e-06, - "loss": 0.053, - "step": 376 - }, - { - "epoch": 0.10010621348911311, - "grad_norm": 0.35674878941499344, - "learning_rate": 9.973474801061009e-06, - "loss": 0.0231, - "step": 377 - }, - { - "epoch": 0.10037174721189591, - "grad_norm": 0.829605031932029, - "learning_rate": 1e-05, - "loss": 0.0698, - "step": 378 - }, - { - "epoch": 0.1006372809346787, - "grad_norm": 5.884980972558981, - "learning_rate": 9.999999518029487e-06, - "loss": 1.0781, - "step": 379 - }, - { - "epoch": 0.1009028146574615, - "grad_norm": 0.5316843014168648, - "learning_rate": 9.999998072118034e-06, - "loss": 0.0396, - "step": 380 - }, - { - "epoch": 0.10116834838024429, - "grad_norm": 0.2781221112512736, - "learning_rate": 9.99999566226592e-06, - "loss": 0.0165, - "step": 381 - }, - { - "epoch": 0.10143388210302709, - "grad_norm": 0.48275929349374064, - "learning_rate": 9.999992288473615e-06, - "loss": 0.0393, - "step": 382 - }, - { - "epoch": 0.10169941582580988, - "grad_norm": 0.6278039868358899, - "learning_rate": 9.999987950741766e-06, - "loss": 0.0354, - "step": 383 - }, - { - "epoch": 0.10196494954859267, - "grad_norm": 0.6003466758282822, - "learning_rate": 9.99998264907121e-06, - "loss": 0.0361, - "step": 384 - }, - { - "epoch": 0.10223048327137546, - "grad_norm": 0.8912911423947107, - "learning_rate": 9.999976383462966e-06, - "loss": 0.0579, - "step": 385 - }, - { - "epoch": 0.10249601699415826, - "grad_norm": 0.7329391229020974, - "learning_rate": 9.999969153918246e-06, - "loss": 0.0435, - "step": 386 - }, - { - "epoch": 0.10276155071694106, - "grad_norm": 0.41829397558306736, - "learning_rate": 9.999960960438441e-06, - "loss": 0.0288, - "step": 387 - }, - { - "epoch": 0.10302708443972385, - "grad_norm": 0.9968145270135951, - "learning_rate": 9.999951803025134e-06, - "loss": 0.0757, - "step": 388 - }, - { - "epoch": 0.10329261816250664, - "grad_norm": 0.7676728389082504, - "learning_rate": 9.999941681680087e-06, - "loss": 0.053, - "step": 389 - }, - { - "epoch": 0.10355815188528943, - "grad_norm": 0.31126746105326625, - "learning_rate": 9.999930596405254e-06, - "loss": 0.0184, - "step": 390 - }, - { - "epoch": 0.10382368560807223, - "grad_norm": 0.5108289920792247, - "learning_rate": 9.999918547202769e-06, - "loss": 0.0354, - "step": 391 - }, - { - "epoch": 0.10408921933085502, - "grad_norm": 0.13796497678169306, - "learning_rate": 9.999905534074959e-06, - "loss": 0.0041, - "step": 392 - }, - { - "epoch": 0.10435475305363781, - "grad_norm": 0.7020072318414768, - "learning_rate": 9.999891557024329e-06, - "loss": 0.0435, - "step": 393 - }, - { - "epoch": 0.1046202867764206, - "grad_norm": 0.768122730327616, - "learning_rate": 9.999876616053574e-06, - "loss": 0.0435, - "step": 394 - }, - { - "epoch": 0.1048858204992034, - "grad_norm": 0.3543183385660938, - "learning_rate": 9.999860711165577e-06, - "loss": 0.0167, - "step": 395 - }, - { - "epoch": 0.1051513542219862, - "grad_norm": 0.23030281632455404, - "learning_rate": 9.999843842363402e-06, - "loss": 0.0115, - "step": 396 - }, - { - "epoch": 0.10541688794476899, - "grad_norm": 2.039085244924252, - "learning_rate": 9.999826009650302e-06, - "loss": 0.1279, - "step": 397 - }, - { - "epoch": 0.10568242166755178, - "grad_norm": 0.6863471545307963, - "learning_rate": 9.999807213029716e-06, - "loss": 0.0352, - "step": 398 - }, - { - "epoch": 0.10594795539033457, - "grad_norm": 0.2210748545563893, - "learning_rate": 9.999787452505265e-06, - "loss": 0.0103, - "step": 399 - }, - { - "epoch": 
0.10621348911311737, - "grad_norm": 0.5710642294027447, - "learning_rate": 9.99976672808076e-06, - "loss": 0.0286, - "step": 400 - }, - { - "epoch": 0.10647902283590016, - "grad_norm": 1.681840306945433, - "learning_rate": 9.999745039760197e-06, - "loss": 0.1367, - "step": 401 - }, - { - "epoch": 0.10674455655868295, - "grad_norm": 0.5509992577306293, - "learning_rate": 9.999722387547759e-06, - "loss": 0.0076, - "step": 402 - }, - { - "epoch": 0.10701009028146574, - "grad_norm": 0.8731295387203799, - "learning_rate": 9.999698771447808e-06, - "loss": 0.0258, - "step": 403 - }, - { - "epoch": 0.10727562400424855, - "grad_norm": 0.09492425648826142, - "learning_rate": 9.9996741914649e-06, - "loss": 0.0025, - "step": 404 - }, - { - "epoch": 0.10754115772703134, - "grad_norm": 3.5100388797466575, - "learning_rate": 9.999648647603774e-06, - "loss": 0.332, - "step": 405 - }, - { - "epoch": 0.10780669144981413, - "grad_norm": 1.412486941835509, - "learning_rate": 9.999622139869356e-06, - "loss": 0.0688, - "step": 406 - }, - { - "epoch": 0.10807222517259692, - "grad_norm": 0.4019838803661139, - "learning_rate": 9.999594668266754e-06, - "loss": 0.0184, - "step": 407 - }, - { - "epoch": 0.10833775889537971, - "grad_norm": 0.10033143456094161, - "learning_rate": 9.999566232801263e-06, - "loss": 0.0006, - "step": 408 - }, - { - "epoch": 0.10860329261816251, - "grad_norm": 2.671317904601799, - "learning_rate": 9.999536833478367e-06, - "loss": 0.2031, - "step": 409 - }, - { - "epoch": 0.1088688263409453, - "grad_norm": 1.4428641692398059, - "learning_rate": 9.999506470303736e-06, - "loss": 0.0889, - "step": 410 - }, - { - "epoch": 0.10913436006372809, - "grad_norm": 0.8240991675135017, - "learning_rate": 9.999475143283219e-06, - "loss": 0.032, - "step": 411 - }, - { - "epoch": 0.10939989378651088, - "grad_norm": 0.6500544444722121, - "learning_rate": 9.999442852422858e-06, - "loss": 0.0286, - "step": 412 - }, - { - "epoch": 0.10966542750929369, - "grad_norm": 0.43616099112846474, - "learning_rate": 9.999409597728878e-06, - "loss": 0.0165, - "step": 413 - }, - { - "epoch": 0.10993096123207648, - "grad_norm": 0.4976424228594336, - "learning_rate": 9.99937537920769e-06, - "loss": 0.0229, - "step": 414 - }, - { - "epoch": 0.11019649495485927, - "grad_norm": 0.1461188597520504, - "learning_rate": 9.99934019686589e-06, - "loss": 0.0059, - "step": 415 - }, - { - "epoch": 0.11046202867764206, - "grad_norm": 2.5259359528371155, - "learning_rate": 9.999304050710264e-06, - "loss": 0.2461, - "step": 416 - }, - { - "epoch": 0.11072756240042485, - "grad_norm": 0.020285773595493026, - "learning_rate": 9.999266940747776e-06, - "loss": 0.0006, - "step": 417 - }, - { - "epoch": 0.11099309612320765, - "grad_norm": 2.126872840900898, - "learning_rate": 9.999228866985585e-06, - "loss": 0.1367, - "step": 418 - }, - { - "epoch": 0.11125862984599044, - "grad_norm": 0.10900902708813702, - "learning_rate": 9.999189829431029e-06, - "loss": 0.0035, - "step": 419 - }, - { - "epoch": 0.11152416356877323, - "grad_norm": 1.6722307761042388, - "learning_rate": 9.999149828091632e-06, - "loss": 0.1279, - "step": 420 - }, - { - "epoch": 0.11178969729155602, - "grad_norm": 0.3758020820014381, - "learning_rate": 9.99910886297511e-06, - "loss": 0.0164, - "step": 421 - }, - { - "epoch": 0.11205523101433883, - "grad_norm": 0.9309889664471976, - "learning_rate": 9.999066934089355e-06, - "loss": 0.0476, - "step": 422 - }, - { - "epoch": 0.11232076473712162, - "grad_norm": 1.9032139694898287, - "learning_rate": 9.999024041442455e-06, - "loss": 
0.1641, - "step": 423 - }, - { - "epoch": 0.11258629845990441, - "grad_norm": 1.9393415053104626, - "learning_rate": 9.998980185042678e-06, - "loss": 0.1367, - "step": 424 - }, - { - "epoch": 0.1128518321826872, - "grad_norm": 1.7335978068394446, - "learning_rate": 9.99893536489848e-06, - "loss": 0.1553, - "step": 425 - }, - { - "epoch": 0.11311736590546999, - "grad_norm": 0.964411541181811, - "learning_rate": 9.998889581018498e-06, - "loss": 0.0525, - "step": 426 - }, - { - "epoch": 0.11338289962825279, - "grad_norm": 0.9732904594255166, - "learning_rate": 9.998842833411565e-06, - "loss": 0.0479, - "step": 427 - }, - { - "epoch": 0.11364843335103558, - "grad_norm": 0.3840885691796018, - "learning_rate": 9.998795122086687e-06, - "loss": 0.0184, - "step": 428 - }, - { - "epoch": 0.11391396707381837, - "grad_norm": 0.47672162986825717, - "learning_rate": 9.998746447053063e-06, - "loss": 0.0259, - "step": 429 - }, - { - "epoch": 0.11417950079660116, - "grad_norm": 0.06479878514865638, - "learning_rate": 9.998696808320083e-06, - "loss": 0.0018, - "step": 430 - }, - { - "epoch": 0.11444503451938397, - "grad_norm": 0.8617385798684972, - "learning_rate": 9.99864620589731e-06, - "loss": 0.0576, - "step": 431 - }, - { - "epoch": 0.11471056824216676, - "grad_norm": 0.25886899017260007, - "learning_rate": 9.998594639794502e-06, - "loss": 0.0095, - "step": 432 - }, - { - "epoch": 0.11497610196494955, - "grad_norm": 0.6784186872332681, - "learning_rate": 9.9985421100216e-06, - "loss": 0.0393, - "step": 433 - }, - { - "epoch": 0.11524163568773234, - "grad_norm": 1.7431553785848855, - "learning_rate": 9.998488616588733e-06, - "loss": 0.1455, - "step": 434 - }, - { - "epoch": 0.11550716941051513, - "grad_norm": 1.9060354513298985, - "learning_rate": 9.998434159506211e-06, - "loss": 0.1934, - "step": 435 - }, - { - "epoch": 0.11577270313329793, - "grad_norm": 0.19317549952322358, - "learning_rate": 9.998378738784536e-06, - "loss": 0.0092, - "step": 436 - }, - { - "epoch": 0.11603823685608072, - "grad_norm": 0.7688675078104924, - "learning_rate": 9.998322354434388e-06, - "loss": 0.0432, - "step": 437 - }, - { - "epoch": 0.11630377057886351, - "grad_norm": 0.7369207362136836, - "learning_rate": 9.998265006466642e-06, - "loss": 0.0432, - "step": 438 - }, - { - "epoch": 0.1165693043016463, - "grad_norm": 0.29436457021835755, - "learning_rate": 9.998206694892352e-06, - "loss": 0.0146, - "step": 439 - }, - { - "epoch": 0.11683483802442911, - "grad_norm": 0.9124118545569474, - "learning_rate": 9.998147419722759e-06, - "loss": 0.0525, - "step": 440 - }, - { - "epoch": 0.1171003717472119, - "grad_norm": 1.1218128409435468, - "learning_rate": 9.99808718096929e-06, - "loss": 0.0635, - "step": 441 - }, - { - "epoch": 0.11736590546999469, - "grad_norm": 1.149876905466913, - "learning_rate": 9.99802597864356e-06, - "loss": 0.0752, - "step": 442 - }, - { - "epoch": 0.11763143919277748, - "grad_norm": 1.0569853424545377, - "learning_rate": 9.997963812757368e-06, - "loss": 0.0635, - "step": 443 - }, - { - "epoch": 0.11789697291556028, - "grad_norm": 0.8351903364180481, - "learning_rate": 9.997900683322698e-06, - "loss": 0.0476, - "step": 444 - }, - { - "epoch": 0.11816250663834307, - "grad_norm": 1.7702924922542935, - "learning_rate": 9.997836590351721e-06, - "loss": 0.2246, - "step": 445 - }, - { - "epoch": 0.11842804036112586, - "grad_norm": 0.14841631560098084, - "learning_rate": 9.997771533856794e-06, - "loss": 0.0072, - "step": 446 - }, - { - "epoch": 0.11869357408390865, - "grad_norm": 0.2930240108885635, - 
"learning_rate": 9.997705513850458e-06, - "loss": 0.0165, - "step": 447 - }, - { - "epoch": 0.11895910780669144, - "grad_norm": 0.027324664896434907, - "learning_rate": 9.997638530345442e-06, - "loss": 0.001, - "step": 448 - }, - { - "epoch": 0.11922464152947425, - "grad_norm": 0.8050072457659047, - "learning_rate": 9.997570583354659e-06, - "loss": 0.0435, - "step": 449 - }, - { - "epoch": 0.11949017525225704, - "grad_norm": 0.8260959431214968, - "learning_rate": 9.997501672891208e-06, - "loss": 0.0579, - "step": 450 - }, - { - "epoch": 0.11975570897503983, - "grad_norm": 0.050172710801976175, - "learning_rate": 9.997431798968376e-06, - "loss": 0.0021, - "step": 451 - }, - { - "epoch": 0.12002124269782262, - "grad_norm": 0.3333884739733433, - "learning_rate": 9.99736096159963e-06, - "loss": 0.0147, - "step": 452 - }, - { - "epoch": 0.12028677642060542, - "grad_norm": 0.6853848215343389, - "learning_rate": 9.997289160798629e-06, - "loss": 0.0231, - "step": 453 - }, - { - "epoch": 0.12055231014338821, - "grad_norm": 6.935657752668328, - "learning_rate": 9.997216396579217e-06, - "loss": 1.2891, - "step": 454 - }, - { - "epoch": 0.120817843866171, - "grad_norm": 1.2388612554211353, - "learning_rate": 9.99714266895542e-06, - "loss": 0.0579, - "step": 455 - }, - { - "epoch": 0.1210833775889538, - "grad_norm": 0.8108343213894442, - "learning_rate": 9.997067977941453e-06, - "loss": 0.0354, - "step": 456 - }, - { - "epoch": 0.12134891131173658, - "grad_norm": 0.12285291127516768, - "learning_rate": 9.996992323551713e-06, - "loss": 0.0057, - "step": 457 - }, - { - "epoch": 0.12161444503451939, - "grad_norm": 1.8644514519390076, - "learning_rate": 9.996915705800786e-06, - "loss": 0.2031, - "step": 458 - }, - { - "epoch": 0.12187997875730218, - "grad_norm": 0.4192870410785925, - "learning_rate": 9.996838124703448e-06, - "loss": 0.0118, - "step": 459 - }, - { - "epoch": 0.12214551248008497, - "grad_norm": 13.085772336960922, - "learning_rate": 9.996759580274649e-06, - "loss": 1.1562, - "step": 460 - }, - { - "epoch": 0.12241104620286776, - "grad_norm": 1.9383018555391174, - "learning_rate": 9.996680072529536e-06, - "loss": 0.1455, - "step": 461 - }, - { - "epoch": 0.12267657992565056, - "grad_norm": 0.5114158494450113, - "learning_rate": 9.996599601483435e-06, - "loss": 0.0166, - "step": 462 - }, - { - "epoch": 0.12294211364843335, - "grad_norm": 2.071144791223387, - "learning_rate": 9.996518167151858e-06, - "loss": 0.3105, - "step": 463 - }, - { - "epoch": 0.12320764737121614, - "grad_norm": 0.3826668161577709, - "learning_rate": 9.996435769550509e-06, - "loss": 0.0184, - "step": 464 - }, - { - "epoch": 0.12347318109399893, - "grad_norm": 0.11580831376705496, - "learning_rate": 9.99635240869527e-06, - "loss": 0.0057, - "step": 465 - }, - { - "epoch": 0.12373871481678173, - "grad_norm": 2.0940806054901633, - "learning_rate": 9.996268084602213e-06, - "loss": 0.1836, - "step": 466 - }, - { - "epoch": 0.12400424853956453, - "grad_norm": 1.7889639735230736, - "learning_rate": 9.996182797287597e-06, - "loss": 0.1934, - "step": 467 - }, - { - "epoch": 0.12426978226234732, - "grad_norm": 0.274643361135419, - "learning_rate": 9.99609654676786e-06, - "loss": 0.0095, - "step": 468 - }, - { - "epoch": 0.12453531598513011, - "grad_norm": 0.37891937937045345, - "learning_rate": 9.996009333059633e-06, - "loss": 0.0206, - "step": 469 - }, - { - "epoch": 0.1248008497079129, - "grad_norm": 0.5048830191970928, - "learning_rate": 9.995921156179729e-06, - "loss": 0.0229, - "step": 470 - }, - { - "epoch": 
0.1250663834306957, - "grad_norm": 2.793136832206918, - "learning_rate": 9.995832016145149e-06, - "loss": 0.2988, - "step": 471 - }, - { - "epoch": 0.1253319171534785, - "grad_norm": 0.9181078850272798, - "learning_rate": 9.995741912973073e-06, - "loss": 0.0479, - "step": 472 - }, - { - "epoch": 0.12559745087626129, - "grad_norm": 0.17214860190032075, - "learning_rate": 9.995650846680878e-06, - "loss": 0.0025, - "step": 473 - }, - { - "epoch": 0.12586298459904408, - "grad_norm": 0.1930329238705798, - "learning_rate": 9.995558817286117e-06, - "loss": 0.0092, - "step": 474 - }, - { - "epoch": 0.12612851832182687, - "grad_norm": 0.3890866531462164, - "learning_rate": 9.995465824806534e-06, - "loss": 0.0206, - "step": 475 - }, - { - "epoch": 0.12639405204460966, - "grad_norm": 0.294426458994664, - "learning_rate": 9.995371869260057e-06, - "loss": 0.0146, - "step": 476 - }, - { - "epoch": 0.12665958576739245, - "grad_norm": 0.49377766768440445, - "learning_rate": 9.995276950664796e-06, - "loss": 0.0234, - "step": 477 - }, - { - "epoch": 0.12692511949017526, - "grad_norm": 1.6403902226241507, - "learning_rate": 9.995181069039055e-06, - "loss": 0.1035, - "step": 478 - }, - { - "epoch": 0.12719065321295805, - "grad_norm": 2.3189750279643517, - "learning_rate": 9.995084224401316e-06, - "loss": 0.1934, - "step": 479 - }, - { - "epoch": 0.12745618693574085, - "grad_norm": 4.2411804627535385, - "learning_rate": 9.99498641677025e-06, - "loss": 0.7969, - "step": 480 - }, - { - "epoch": 0.12772172065852364, - "grad_norm": 0.6775891633342407, - "learning_rate": 9.994887646164712e-06, - "loss": 0.0354, - "step": 481 - }, - { - "epoch": 0.12798725438130643, - "grad_norm": 0.3451145426368071, - "learning_rate": 9.994787912603747e-06, - "loss": 0.0167, - "step": 482 - }, - { - "epoch": 0.12825278810408922, - "grad_norm": 0.7928434044000325, - "learning_rate": 9.99468721610658e-06, - "loss": 0.0396, - "step": 483 - }, - { - "epoch": 0.128518321826872, - "grad_norm": 0.35058719889346857, - "learning_rate": 9.994585556692624e-06, - "loss": 0.0188, - "step": 484 - }, - { - "epoch": 0.1287838555496548, - "grad_norm": 0.10110600298106223, - "learning_rate": 9.994482934381478e-06, - "loss": 0.0037, - "step": 485 - }, - { - "epoch": 0.1290493892724376, - "grad_norm": 0.8111935229536589, - "learning_rate": 9.994379349192927e-06, - "loss": 0.0393, - "step": 486 - }, - { - "epoch": 0.1293149229952204, - "grad_norm": 0.36191339865591954, - "learning_rate": 9.994274801146942e-06, - "loss": 0.0165, - "step": 487 - }, - { - "epoch": 0.1295804567180032, - "grad_norm": 1.4295506331809191, - "learning_rate": 9.994169290263675e-06, - "loss": 0.0688, - "step": 488 - }, - { - "epoch": 0.12984599044078599, - "grad_norm": 1.4477435816269884, - "learning_rate": 9.99406281656347e-06, - "loss": 0.1191, - "step": 489 - }, - { - "epoch": 0.13011152416356878, - "grad_norm": 0.43333547219941215, - "learning_rate": 9.993955380066856e-06, - "loss": 0.0168, - "step": 490 - }, - { - "epoch": 0.13037705788635157, - "grad_norm": 1.135520056624293, - "learning_rate": 9.993846980794542e-06, - "loss": 0.063, - "step": 491 - }, - { - "epoch": 0.13064259160913436, - "grad_norm": 0.043875501106062434, - "learning_rate": 9.993737618767426e-06, - "loss": 0.0015, - "step": 492 - }, - { - "epoch": 0.13090812533191715, - "grad_norm": 0.6090554091571819, - "learning_rate": 9.993627294006592e-06, - "loss": 0.0286, - "step": 493 - }, - { - "epoch": 0.13117365905469994, - "grad_norm": 1.2750886594774946, - "learning_rate": 9.993516006533311e-06, - "loss": 
0.0576, - "step": 494 - }, - { - "epoch": 0.13143919277748273, - "grad_norm": 0.09866268692375627, - "learning_rate": 9.993403756369037e-06, - "loss": 0.0036, - "step": 495 - }, - { - "epoch": 0.13170472650026555, - "grad_norm": 2.5466564151997866, - "learning_rate": 9.993290543535412e-06, - "loss": 0.2148, - "step": 496 - }, - { - "epoch": 0.13197026022304834, - "grad_norm": 1.0356354392876912, - "learning_rate": 9.993176368054258e-06, - "loss": 0.0476, - "step": 497 - }, - { - "epoch": 0.13223579394583113, - "grad_norm": 0.1524377921395385, - "learning_rate": 9.993061229947591e-06, - "loss": 0.0072, - "step": 498 - }, - { - "epoch": 0.13250132766861392, - "grad_norm": 5.508400351087429, - "learning_rate": 9.992945129237607e-06, - "loss": 1.0156, - "step": 499 - }, - { - "epoch": 0.1327668613913967, - "grad_norm": 1.8391859755052025, - "learning_rate": 9.992828065946687e-06, - "loss": 0.2344, - "step": 500 - }, - { - "epoch": 0.1330323951141795, - "grad_norm": 0.2511312439828377, - "learning_rate": 9.992710040097402e-06, - "loss": 0.01, - "step": 501 - }, - { - "epoch": 0.1332979288369623, - "grad_norm": 1.6141514471649456, - "learning_rate": 9.992591051712503e-06, - "loss": 0.1738, - "step": 502 - }, - { - "epoch": 0.13356346255974508, - "grad_norm": 4.287684776700203, - "learning_rate": 9.992471100814934e-06, - "loss": 0.6641, - "step": 503 - }, - { - "epoch": 0.13382899628252787, - "grad_norm": 0.7498793642360017, - "learning_rate": 9.992350187427814e-06, - "loss": 0.0476, - "step": 504 - }, - { - "epoch": 0.13409453000531069, - "grad_norm": 0.7170932959418976, - "learning_rate": 9.992228311574462e-06, - "loss": 0.0393, - "step": 505 - }, - { - "epoch": 0.13436006372809348, - "grad_norm": 1.0630972502146814, - "learning_rate": 9.992105473278366e-06, - "loss": 0.0688, - "step": 506 - }, - { - "epoch": 0.13462559745087627, - "grad_norm": 1.605822286298563, - "learning_rate": 9.991981672563211e-06, - "loss": 0.0815, - "step": 507 - }, - { - "epoch": 0.13489113117365906, - "grad_norm": 0.606270866795599, - "learning_rate": 9.991856909452866e-06, - "loss": 0.032, - "step": 508 - }, - { - "epoch": 0.13515666489644185, - "grad_norm": 1.3621617997303943, - "learning_rate": 9.99173118397138e-06, - "loss": 0.1035, - "step": 509 - }, - { - "epoch": 0.13542219861922464, - "grad_norm": 0.12342567577614295, - "learning_rate": 9.991604496142997e-06, - "loss": 0.0064, - "step": 510 - }, - { - "epoch": 0.13568773234200743, - "grad_norm": 1.044690642843683, - "learning_rate": 9.991476845992137e-06, - "loss": 0.053, - "step": 511 - }, - { - "epoch": 0.13595326606479022, - "grad_norm": 0.35855152173267857, - "learning_rate": 9.991348233543408e-06, - "loss": 0.0206, - "step": 512 - }, - { - "epoch": 0.136218799787573, - "grad_norm": 1.57497690926948, - "learning_rate": 9.991218658821609e-06, - "loss": 0.2246, - "step": 513 - }, - { - "epoch": 0.13648433351035583, - "grad_norm": 0.1113998089521789, - "learning_rate": 9.991088121851716e-06, - "loss": 0.0057, - "step": 514 - }, - { - "epoch": 0.13674986723313862, - "grad_norm": 0.22281969208169422, - "learning_rate": 9.990956622658901e-06, - "loss": 0.0117, - "step": 515 - }, - { - "epoch": 0.1370154009559214, - "grad_norm": 0.2932759441130008, - "learning_rate": 9.990824161268509e-06, - "loss": 0.0148, - "step": 516 - }, - { - "epoch": 0.1372809346787042, - "grad_norm": 0.10622372878984855, - "learning_rate": 9.990690737706081e-06, - "loss": 0.0057, - "step": 517 - }, - { - "epoch": 0.137546468401487, - "grad_norm": 1.5597393946203308, - "learning_rate": 
9.990556351997338e-06, - "loss": 0.1113, - "step": 518 - }, - { - "epoch": 0.13781200212426978, - "grad_norm": 1.9903611428670367, - "learning_rate": 9.99042100416819e-06, - "loss": 0.0703, - "step": 519 - }, - { - "epoch": 0.13807753584705257, - "grad_norm": 0.35832314011240834, - "learning_rate": 9.990284694244729e-06, - "loss": 0.0206, - "step": 520 - }, - { - "epoch": 0.13834306956983536, - "grad_norm": 5.2897686383169455, - "learning_rate": 9.990147422253234e-06, - "loss": 0.8125, - "step": 521 - }, - { - "epoch": 0.13860860329261815, - "grad_norm": 0.11713567466813415, - "learning_rate": 9.990009188220166e-06, - "loss": 0.0039, - "step": 522 - }, - { - "epoch": 0.13887413701540097, - "grad_norm": 0.5823075698755563, - "learning_rate": 9.989869992172183e-06, - "loss": 0.0242, - "step": 523 - }, - { - "epoch": 0.13913967073818376, - "grad_norm": 0.5765768333479371, - "learning_rate": 9.989729834136113e-06, - "loss": 0.0193, - "step": 524 - }, - { - "epoch": 0.13940520446096655, - "grad_norm": 0.14344215523221354, - "learning_rate": 9.989588714138977e-06, - "loss": 0.0039, - "step": 525 - }, - { - "epoch": 0.13967073818374934, - "grad_norm": 1.7019424437997808, - "learning_rate": 9.989446632207989e-06, - "loss": 0.0752, - "step": 526 - }, - { - "epoch": 0.13993627190653213, - "grad_norm": 0.2075958214948594, - "learning_rate": 9.989303588370531e-06, - "loss": 0.0117, - "step": 527 - }, - { - "epoch": 0.14020180562931492, - "grad_norm": 0.15118506962449466, - "learning_rate": 9.989159582654187e-06, - "loss": 0.0082, - "step": 528 - }, - { - "epoch": 0.1404673393520977, - "grad_norm": 0.07268392646364422, - "learning_rate": 9.989014615086716e-06, - "loss": 0.0035, - "step": 529 - }, - { - "epoch": 0.1407328730748805, - "grad_norm": 1.4906910982100858, - "learning_rate": 9.988868685696067e-06, - "loss": 0.2129, - "step": 530 - }, - { - "epoch": 0.14099840679766332, - "grad_norm": 1.4326095317287415, - "learning_rate": 9.988721794510374e-06, - "loss": 0.1836, - "step": 531 - }, - { - "epoch": 0.1412639405204461, - "grad_norm": 0.5130258898072058, - "learning_rate": 9.988573941557956e-06, - "loss": 0.0177, - "step": 532 - }, - { - "epoch": 0.1415294742432289, - "grad_norm": 1.3951302100850165, - "learning_rate": 9.988425126867317e-06, - "loss": 0.1641, - "step": 533 - }, - { - "epoch": 0.1417950079660117, - "grad_norm": 1.5620173297534108, - "learning_rate": 9.988275350467145e-06, - "loss": 0.082, - "step": 534 - }, - { - "epoch": 0.14206054168879448, - "grad_norm": 1.5140269849638728, - "learning_rate": 9.988124612386319e-06, - "loss": 0.1367, - "step": 535 - }, - { - "epoch": 0.14232607541157727, - "grad_norm": 0.6063242319852478, - "learning_rate": 9.987972912653894e-06, - "loss": 0.0393, - "step": 536 - }, - { - "epoch": 0.14259160913436006, - "grad_norm": 0.7374682676713821, - "learning_rate": 9.987820251299121e-06, - "loss": 0.0435, - "step": 537 - }, - { - "epoch": 0.14285714285714285, - "grad_norm": 1.39588526651874, - "learning_rate": 9.987666628351432e-06, - "loss": 0.1553, - "step": 538 - }, - { - "epoch": 0.14312267657992564, - "grad_norm": 2.2114571113292394, - "learning_rate": 9.987512043840437e-06, - "loss": 0.1113, - "step": 539 - }, - { - "epoch": 0.14338821030270846, - "grad_norm": 0.42061182715165285, - "learning_rate": 9.987356497795944e-06, - "loss": 0.0286, - "step": 540 - }, - { - "epoch": 0.14365374402549125, - "grad_norm": 0.9900996921832054, - "learning_rate": 9.987199990247938e-06, - "loss": 0.0752, - "step": 541 - }, - { - "epoch": 0.14391927774827404, - 
"grad_norm": 0.7148831432764048, - "learning_rate": 9.987042521226594e-06, - "loss": 0.0476, - "step": 542 - }, - { - "epoch": 0.14418481147105683, - "grad_norm": 1.5856605293259478, - "learning_rate": 9.986884090762266e-06, - "loss": 0.1123, - "step": 543 - }, - { - "epoch": 0.14445034519383962, - "grad_norm": 0.17251194671640718, - "learning_rate": 9.986724698885502e-06, - "loss": 0.0058, - "step": 544 - }, - { - "epoch": 0.1447158789166224, - "grad_norm": 0.7025948256764871, - "learning_rate": 9.986564345627027e-06, - "loss": 0.0322, - "step": 545 - }, - { - "epoch": 0.1449814126394052, - "grad_norm": 0.039152169713499795, - "learning_rate": 9.98640303101776e-06, - "loss": 0.0011, - "step": 546 - }, - { - "epoch": 0.145246946362188, - "grad_norm": 0.7346842884881487, - "learning_rate": 9.986240755088797e-06, - "loss": 0.033, - "step": 547 - }, - { - "epoch": 0.14551248008497078, - "grad_norm": 0.8712665193969032, - "learning_rate": 9.986077517871424e-06, - "loss": 0.0476, - "step": 548 - }, - { - "epoch": 0.1457780138077536, - "grad_norm": 0.4876598992779223, - "learning_rate": 9.98591331939711e-06, - "loss": 0.0234, - "step": 549 - }, - { - "epoch": 0.1460435475305364, - "grad_norm": 0.5723125307338557, - "learning_rate": 9.985748159697511e-06, - "loss": 0.0432, - "step": 550 - }, - { - "epoch": 0.14630908125331918, - "grad_norm": 5.026172323034893, - "learning_rate": 9.985582038804469e-06, - "loss": 0.7734, - "step": 551 - }, - { - "epoch": 0.14657461497610197, - "grad_norm": 0.691831344385777, - "learning_rate": 9.985414956750008e-06, - "loss": 0.0289, - "step": 552 - }, - { - "epoch": 0.14684014869888476, - "grad_norm": 0.5313434024030962, - "learning_rate": 9.985246913566343e-06, - "loss": 0.0121, - "step": 553 - }, - { - "epoch": 0.14710568242166755, - "grad_norm": 0.5022681233131463, - "learning_rate": 9.985077909285867e-06, - "loss": 0.0391, - "step": 554 - }, - { - "epoch": 0.14737121614445034, - "grad_norm": 2.8350298800583738, - "learning_rate": 9.984907943941164e-06, - "loss": 0.2988, - "step": 555 - }, - { - "epoch": 0.14763674986723313, - "grad_norm": 0.5845567847490889, - "learning_rate": 9.984737017565003e-06, - "loss": 0.0435, - "step": 556 - }, - { - "epoch": 0.14790228359001592, - "grad_norm": 5.065900056975599, - "learning_rate": 9.984565130190331e-06, - "loss": 0.3242, - "step": 557 - }, - { - "epoch": 0.14816781731279874, - "grad_norm": 0.9255981265488107, - "learning_rate": 9.984392281850293e-06, - "loss": 0.0889, - "step": 558 - }, - { - "epoch": 0.14843335103558153, - "grad_norm": 0.7464338462336892, - "learning_rate": 9.984218472578205e-06, - "loss": 0.0576, - "step": 559 - }, - { - "epoch": 0.14869888475836432, - "grad_norm": 0.9740861593787775, - "learning_rate": 9.984043702407581e-06, - "loss": 0.0957, - "step": 560 - }, - { - "epoch": 0.1489644184811471, - "grad_norm": 0.4188005366867957, - "learning_rate": 9.983867971372113e-06, - "loss": 0.0286, - "step": 561 - }, - { - "epoch": 0.1492299522039299, - "grad_norm": 3.331842808759764, - "learning_rate": 9.983691279505678e-06, - "loss": 0.3398, - "step": 562 - }, - { - "epoch": 0.1494954859267127, - "grad_norm": 4.35094959797938, - "learning_rate": 9.983513626842342e-06, - "loss": 0.6523, - "step": 563 - }, - { - "epoch": 0.14976101964949548, - "grad_norm": 0.5123642715371224, - "learning_rate": 9.983335013416354e-06, - "loss": 0.0391, - "step": 564 - }, - { - "epoch": 0.15002655337227827, - "grad_norm": 0.796553563912584, - "learning_rate": 9.983155439262148e-06, - "loss": 0.0579, - "step": 565 - }, - { - 
"epoch": 0.15029208709506106, - "grad_norm": 0.3439490402717059, - "learning_rate": 9.982974904414344e-06, - "loss": 0.0229, - "step": 566 - }, - { - "epoch": 0.15055762081784388, - "grad_norm": 0.4106869676123719, - "learning_rate": 9.982793408907747e-06, - "loss": 0.0186, - "step": 567 - }, - { - "epoch": 0.15082315454062667, - "grad_norm": 0.7143183560111053, - "learning_rate": 9.982610952777347e-06, - "loss": 0.0393, - "step": 568 - }, - { - "epoch": 0.15108868826340946, - "grad_norm": 0.6128505770192598, - "learning_rate": 9.982427536058321e-06, - "loss": 0.0476, - "step": 569 - }, - { - "epoch": 0.15135422198619225, - "grad_norm": 0.2585949344039441, - "learning_rate": 9.98224315878603e-06, - "loss": 0.0165, - "step": 570 - }, - { - "epoch": 0.15161975570897504, - "grad_norm": 0.11924071710544426, - "learning_rate": 9.982057820996015e-06, - "loss": 0.0039, - "step": 571 - }, - { - "epoch": 0.15188528943175783, - "grad_norm": 0.6551332258487033, - "learning_rate": 9.98187152272401e-06, - "loss": 0.0391, - "step": 572 - }, - { - "epoch": 0.15215082315454062, - "grad_norm": 2.691563544451101, - "learning_rate": 9.981684264005934e-06, - "loss": 0.2031, - "step": 573 - }, - { - "epoch": 0.1524163568773234, - "grad_norm": 1.360641707061635, - "learning_rate": 9.981496044877882e-06, - "loss": 0.1934, - "step": 574 - }, - { - "epoch": 0.1526818906001062, - "grad_norm": 0.43346474177116345, - "learning_rate": 9.981306865376146e-06, - "loss": 0.0139, - "step": 575 - }, - { - "epoch": 0.15294742432288902, - "grad_norm": 0.24328200960959998, - "learning_rate": 9.981116725537195e-06, - "loss": 0.0165, - "step": 576 - }, - { - "epoch": 0.1532129580456718, - "grad_norm": 1.0426671122461846, - "learning_rate": 9.980925625397686e-06, - "loss": 0.0576, - "step": 577 - }, - { - "epoch": 0.1534784917684546, - "grad_norm": 0.1999935342186825, - "learning_rate": 9.98073356499446e-06, - "loss": 0.0131, - "step": 578 - }, - { - "epoch": 0.1537440254912374, - "grad_norm": 0.7190524116140479, - "learning_rate": 9.980540544364546e-06, - "loss": 0.0525, - "step": 579 - }, - { - "epoch": 0.15400955921402018, - "grad_norm": 2.4567461753510256, - "learning_rate": 9.980346563545156e-06, - "loss": 0.1035, - "step": 580 - }, - { - "epoch": 0.15427509293680297, - "grad_norm": 1.3907008477488672, - "learning_rate": 9.980151622573684e-06, - "loss": 0.1279, - "step": 581 - }, - { - "epoch": 0.15454062665958576, - "grad_norm": 1.6662654026394559, - "learning_rate": 9.979955721487716e-06, - "loss": 0.0752, - "step": 582 - }, - { - "epoch": 0.15480616038236855, - "grad_norm": 0.2905768001459995, - "learning_rate": 9.97975886032502e-06, - "loss": 0.0104, - "step": 583 - }, - { - "epoch": 0.15507169410515134, - "grad_norm": 0.5722935674089386, - "learning_rate": 9.979561039123544e-06, - "loss": 0.0391, - "step": 584 - }, - { - "epoch": 0.15533722782793416, - "grad_norm": 3.9956006342859354, - "learning_rate": 9.979362257921428e-06, - "loss": 0.1699, - "step": 585 - }, - { - "epoch": 0.15560276155071695, - "grad_norm": 1.4427656836880338, - "learning_rate": 9.979162516756995e-06, - "loss": 0.0889, - "step": 586 - }, - { - "epoch": 0.15586829527349974, - "grad_norm": 1.105527580649903, - "learning_rate": 9.978961815668753e-06, - "loss": 0.0752, - "step": 587 - }, - { - "epoch": 0.15613382899628253, - "grad_norm": 0.1918918255795427, - "learning_rate": 9.978760154695393e-06, - "loss": 0.0059, - "step": 588 - }, - { - "epoch": 0.15639936271906532, - "grad_norm": 0.04571838620835277, - "learning_rate": 9.978557533875797e-06, - 
"loss": 0.0011, - "step": 589 - }, - { - "epoch": 0.1566648964418481, - "grad_norm": 1.9273246134899191, - "learning_rate": 9.978353953249023e-06, - "loss": 0.1641, - "step": 590 - }, - { - "epoch": 0.1569304301646309, - "grad_norm": 1.3054086665266786, - "learning_rate": 9.97814941285432e-06, - "loss": 0.0693, - "step": 591 - }, - { - "epoch": 0.1571959638874137, - "grad_norm": 0.030533978901150845, - "learning_rate": 9.977943912731126e-06, - "loss": 0.0007, - "step": 592 - }, - { - "epoch": 0.15746149761019648, - "grad_norm": 0.9348392424847526, - "learning_rate": 9.977737452919051e-06, - "loss": 0.0525, - "step": 593 - }, - { - "epoch": 0.1577270313329793, - "grad_norm": 0.6672615655023215, - "learning_rate": 9.977530033457906e-06, - "loss": 0.0317, - "step": 594 - }, - { - "epoch": 0.1579925650557621, - "grad_norm": 0.9167557805831286, - "learning_rate": 9.977321654387671e-06, - "loss": 0.0525, - "step": 595 - }, - { - "epoch": 0.15825809877854488, - "grad_norm": 0.5548577985701605, - "learning_rate": 9.977112315748525e-06, - "loss": 0.0391, - "step": 596 - }, - { - "epoch": 0.15852363250132767, - "grad_norm": 1.0741347246530948, - "learning_rate": 9.976902017580824e-06, - "loss": 0.0815, - "step": 597 - }, - { - "epoch": 0.15878916622411046, - "grad_norm": 0.1973076257512853, - "learning_rate": 9.976690759925112e-06, - "loss": 0.0058, - "step": 598 - }, - { - "epoch": 0.15905469994689325, - "grad_norm": 0.37899730498629314, - "learning_rate": 9.976478542822114e-06, - "loss": 0.0187, - "step": 599 - }, - { - "epoch": 0.15932023366967604, - "grad_norm": 0.030062306649064036, - "learning_rate": 9.976265366312746e-06, - "loss": 0.0008, - "step": 600 - }, - { - "epoch": 0.15958576739245883, - "grad_norm": 0.8706157736283716, - "learning_rate": 9.976051230438107e-06, - "loss": 0.063, - "step": 601 - }, - { - "epoch": 0.15985130111524162, - "grad_norm": 0.06252640600648844, - "learning_rate": 9.975836135239475e-06, - "loss": 0.0019, - "step": 602 - }, - { - "epoch": 0.16011683483802444, - "grad_norm": 1.020235806615748, - "learning_rate": 9.975620080758321e-06, - "loss": 0.0576, - "step": 603 - }, - { - "epoch": 0.16038236856080723, - "grad_norm": 0.6968794165000528, - "learning_rate": 9.975403067036298e-06, - "loss": 0.0432, - "step": 604 - }, - { - "epoch": 0.16064790228359002, - "grad_norm": 2.0675935959691527, - "learning_rate": 9.975185094115243e-06, - "loss": 0.1455, - "step": 605 - }, - { - "epoch": 0.1609134360063728, - "grad_norm": 0.07887764560075496, - "learning_rate": 9.97496616203718e-06, - "loss": 0.0014, - "step": 606 - }, - { - "epoch": 0.1611789697291556, - "grad_norm": 1.0038423253490598, - "learning_rate": 9.974746270844313e-06, - "loss": 0.0476, - "step": 607 - }, - { - "epoch": 0.1614445034519384, - "grad_norm": 0.015512665639925233, - "learning_rate": 9.974525420579039e-06, - "loss": 0.0003, - "step": 608 - }, - { - "epoch": 0.16171003717472118, - "grad_norm": 0.1729310979679677, - "learning_rate": 9.97430361128393e-06, - "loss": 0.0065, - "step": 609 - }, - { - "epoch": 0.16197557089750397, - "grad_norm": 0.9007630781441461, - "learning_rate": 9.974080843001755e-06, - "loss": 0.0576, - "step": 610 - }, - { - "epoch": 0.16224110462028676, - "grad_norm": 0.8783601833486903, - "learning_rate": 9.973857115775455e-06, - "loss": 0.0342, - "step": 611 - }, - { - "epoch": 0.16250663834306958, - "grad_norm": 1.7341324095089548, - "learning_rate": 9.973632429648165e-06, - "loss": 0.0752, - "step": 612 - }, - { - "epoch": 0.16277217206585237, - "grad_norm": 1.4666956044492996, - 
"learning_rate": 9.9734067846632e-06, - "loss": 0.0815, - "step": 613 - }, - { - "epoch": 0.16303770578863516, - "grad_norm": 0.8113376617255486, - "learning_rate": 9.973180180864064e-06, - "loss": 0.0317, - "step": 614 - }, - { - "epoch": 0.16330323951141795, - "grad_norm": 2.5241610625807303, - "learning_rate": 9.972952618294442e-06, - "loss": 0.1738, - "step": 615 - }, - { - "epoch": 0.16356877323420074, - "grad_norm": 0.008095688510017955, - "learning_rate": 9.972724096998206e-06, - "loss": 0.0001, - "step": 616 - }, - { - "epoch": 0.16383430695698353, - "grad_norm": 0.9981390757792195, - "learning_rate": 9.97249461701941e-06, - "loss": 0.0576, - "step": 617 - }, - { - "epoch": 0.16409984067976632, - "grad_norm": 1.3058310882090556, - "learning_rate": 9.9722641784023e-06, - "loss": 0.0688, - "step": 618 - }, - { - "epoch": 0.1643653744025491, - "grad_norm": 1.0447117296584805, - "learning_rate": 9.972032781191297e-06, - "loss": 0.0576, - "step": 619 - }, - { - "epoch": 0.1646309081253319, - "grad_norm": 0.0992909067061662, - "learning_rate": 9.971800425431014e-06, - "loss": 0.0025, - "step": 620 - }, - { - "epoch": 0.16489644184811472, - "grad_norm": 2.5678651091451097, - "learning_rate": 9.971567111166246e-06, - "loss": 0.2461, - "step": 621 - }, - { - "epoch": 0.1651619755708975, - "grad_norm": 1.3926560176517229, - "learning_rate": 9.971332838441973e-06, - "loss": 0.0525, - "step": 622 - }, - { - "epoch": 0.1654275092936803, - "grad_norm": 0.1312253383616017, - "learning_rate": 9.97109760730336e-06, - "loss": 0.004, - "step": 623 - }, - { - "epoch": 0.1656930430164631, - "grad_norm": 2.1426039577089244, - "learning_rate": 9.970861417795757e-06, - "loss": 0.1836, - "step": 624 - }, - { - "epoch": 0.16595857673924588, - "grad_norm": 1.4296002668699905, - "learning_rate": 9.970624269964697e-06, - "loss": 0.0889, - "step": 625 - }, - { - "epoch": 0.16622411046202867, - "grad_norm": 0.40810968379724855, - "learning_rate": 9.970386163855903e-06, - "loss": 0.0229, - "step": 626 - }, - { - "epoch": 0.16648964418481146, - "grad_norm": 0.026519673065559527, - "learning_rate": 9.970147099515277e-06, - "loss": 0.0006, - "step": 627 - }, - { - "epoch": 0.16675517790759425, - "grad_norm": 0.9111033300992362, - "learning_rate": 9.969907076988907e-06, - "loss": 0.0576, - "step": 628 - }, - { - "epoch": 0.16702071163037704, - "grad_norm": 1.84251401314855, - "learning_rate": 9.969666096323068e-06, - "loss": 0.1641, - "step": 629 - }, - { - "epoch": 0.16728624535315986, - "grad_norm": 1.1977915299686255, - "learning_rate": 9.969424157564215e-06, - "loss": 0.0752, - "step": 630 - }, - { - "epoch": 0.16755177907594265, - "grad_norm": 1.1596796848128883, - "learning_rate": 9.969181260758995e-06, - "loss": 0.0476, - "step": 631 - }, - { - "epoch": 0.16781731279872544, - "grad_norm": 0.7311624184389818, - "learning_rate": 9.968937405954235e-06, - "loss": 0.0576, - "step": 632 - }, - { - "epoch": 0.16808284652150823, - "grad_norm": 0.43066270261344336, - "learning_rate": 9.968692593196944e-06, - "loss": 0.0317, - "step": 633 - }, - { - "epoch": 0.16834838024429102, - "grad_norm": 1.0570009180451556, - "learning_rate": 9.968446822534324e-06, - "loss": 0.1191, - "step": 634 - }, - { - "epoch": 0.16861391396707381, - "grad_norm": 1.7875303182362978, - "learning_rate": 9.968200094013751e-06, - "loss": 0.1279, - "step": 635 - }, - { - "epoch": 0.1688794476898566, - "grad_norm": 0.8109838110303486, - "learning_rate": 9.967952407682796e-06, - "loss": 0.0525, - "step": 636 - }, - { - "epoch": 0.1691449814126394, - 
"grad_norm": 0.995056083014455, - "learning_rate": 9.967703763589208e-06, - "loss": 0.0889, - "step": 637 - }, - { - "epoch": 0.16941051513542218, - "grad_norm": 0.7317688073360701, - "learning_rate": 9.967454161780923e-06, - "loss": 0.0576, - "step": 638 - }, - { - "epoch": 0.169676048858205, - "grad_norm": 0.27467746292088824, - "learning_rate": 9.967203602306062e-06, - "loss": 0.0206, - "step": 639 - }, - { - "epoch": 0.1699415825809878, - "grad_norm": 1.0717423043804712, - "learning_rate": 9.966952085212929e-06, - "loss": 0.1455, - "step": 640 - }, - { - "epoch": 0.17020711630377058, - "grad_norm": 0.21472091966860316, - "learning_rate": 9.966699610550013e-06, - "loss": 0.0146, - "step": 641 - }, - { - "epoch": 0.17047265002655337, - "grad_norm": 0.5320762490185771, - "learning_rate": 9.96644617836599e-06, - "loss": 0.0216, - "step": 642 - }, - { - "epoch": 0.17073818374933616, - "grad_norm": 0.9624120570928735, - "learning_rate": 9.966191788709716e-06, - "loss": 0.1035, - "step": 643 - }, - { - "epoch": 0.17100371747211895, - "grad_norm": 0.6950471061549814, - "learning_rate": 9.965936441630237e-06, - "loss": 0.0693, - "step": 644 - }, - { - "epoch": 0.17126925119490174, - "grad_norm": 1.060844145170164, - "learning_rate": 9.965680137176778e-06, - "loss": 0.1553, - "step": 645 - }, - { - "epoch": 0.17153478491768454, - "grad_norm": 0.6493031275434652, - "learning_rate": 9.965422875398756e-06, - "loss": 0.0576, - "step": 646 - }, - { - "epoch": 0.17180031864046733, - "grad_norm": 0.2802358255478571, - "learning_rate": 9.965164656345765e-06, - "loss": 0.0206, - "step": 647 - }, - { - "epoch": 0.17206585236325014, - "grad_norm": 1.1889779284390927, - "learning_rate": 9.964905480067585e-06, - "loss": 0.1113, - "step": 648 - }, - { - "epoch": 0.17233138608603293, - "grad_norm": 0.48129568513721377, - "learning_rate": 9.964645346614186e-06, - "loss": 0.0432, - "step": 649 - }, - { - "epoch": 0.17259691980881572, - "grad_norm": 0.5955170121717723, - "learning_rate": 9.964384256035715e-06, - "loss": 0.0579, - "step": 650 - }, - { - "epoch": 0.17286245353159851, - "grad_norm": 0.4053980801589404, - "learning_rate": 9.964122208382511e-06, - "loss": 0.0354, - "step": 651 - }, - { - "epoch": 0.1731279872543813, - "grad_norm": 0.43450493299788406, - "learning_rate": 9.96385920370509e-06, - "loss": 0.0391, - "step": 652 - }, - { - "epoch": 0.1733935209771641, - "grad_norm": 0.4490492804675279, - "learning_rate": 9.963595242054158e-06, - "loss": 0.0354, - "step": 653 - }, - { - "epoch": 0.17365905469994689, - "grad_norm": 0.5527458067392451, - "learning_rate": 9.963330323480604e-06, - "loss": 0.0525, - "step": 654 - }, - { - "epoch": 0.17392458842272968, - "grad_norm": 0.5425679761751954, - "learning_rate": 9.9630644480355e-06, - "loss": 0.0525, - "step": 655 - }, - { - "epoch": 0.1741901221455125, - "grad_norm": 0.35110925559228934, - "learning_rate": 9.962797615770105e-06, - "loss": 0.0286, - "step": 656 - }, - { - "epoch": 0.17445565586829528, - "grad_norm": 0.40723902213459534, - "learning_rate": 9.96252982673586e-06, - "loss": 0.0354, - "step": 657 - }, - { - "epoch": 0.17472118959107807, - "grad_norm": 0.29867369752067024, - "learning_rate": 9.962261080984394e-06, - "loss": 0.0229, - "step": 658 - }, - { - "epoch": 0.17498672331386086, - "grad_norm": 0.30130627923135, - "learning_rate": 9.961991378567514e-06, - "loss": 0.0229, - "step": 659 - }, - { - "epoch": 0.17525225703664365, - "grad_norm": 0.4704399733543959, - "learning_rate": 9.961720719537217e-06, - "loss": 0.0432, - "step": 660 - }, 
- { - "epoch": 0.17551779075942645, - "grad_norm": 0.8375442861352078, - "learning_rate": 9.961449103945684e-06, - "loss": 0.0889, - "step": 661 - }, - { - "epoch": 0.17578332448220924, - "grad_norm": 0.13114805545544933, - "learning_rate": 9.96117653184528e-06, - "loss": 0.0045, - "step": 662 - }, - { - "epoch": 0.17604885820499203, - "grad_norm": 0.31507559879440783, - "learning_rate": 9.960903003288551e-06, - "loss": 0.0104, - "step": 663 - }, - { - "epoch": 0.17631439192777482, - "grad_norm": 0.38090689585552884, - "learning_rate": 9.960628518328231e-06, - "loss": 0.0231, - "step": 664 - }, - { - "epoch": 0.17657992565055763, - "grad_norm": 0.36367373348514975, - "learning_rate": 9.960353077017238e-06, - "loss": 0.0286, - "step": 665 - }, - { - "epoch": 0.17684545937334042, - "grad_norm": 0.14099382022025622, - "learning_rate": 9.960076679408675e-06, - "loss": 0.0092, - "step": 666 - }, - { - "epoch": 0.17711099309612321, - "grad_norm": 0.3729088508977205, - "learning_rate": 9.959799325555825e-06, - "loss": 0.0317, - "step": 667 - }, - { - "epoch": 0.177376526818906, - "grad_norm": 0.20249754798930547, - "learning_rate": 9.95952101551216e-06, - "loss": 0.0035, - "step": 668 - }, - { - "epoch": 0.1776420605416888, - "grad_norm": 0.728096668821041, - "learning_rate": 9.959241749331339e-06, - "loss": 0.0391, - "step": 669 - }, - { - "epoch": 0.17790759426447159, - "grad_norm": 0.38161937312578453, - "learning_rate": 9.958961527067194e-06, - "loss": 0.0165, - "step": 670 - }, - { - "epoch": 0.17817312798725438, - "grad_norm": 1.1222249017413026, - "learning_rate": 9.958680348773753e-06, - "loss": 0.1455, - "step": 671 - }, - { - "epoch": 0.17843866171003717, - "grad_norm": 1.069255220495418, - "learning_rate": 9.958398214505225e-06, - "loss": 0.1279, - "step": 672 - }, - { - "epoch": 0.17870419543281996, - "grad_norm": 0.6232120562558745, - "learning_rate": 9.958115124315996e-06, - "loss": 0.0432, - "step": 673 - }, - { - "epoch": 0.17896972915560277, - "grad_norm": 0.5800413383472506, - "learning_rate": 9.95783107826065e-06, - "loss": 0.0125, - "step": 674 - }, - { - "epoch": 0.17923526287838557, - "grad_norm": 0.1579792821012369, - "learning_rate": 9.957546076393944e-06, - "loss": 0.0103, - "step": 675 - }, - { - "epoch": 0.17950079660116836, - "grad_norm": 0.8480282976053962, - "learning_rate": 9.957260118770822e-06, - "loss": 0.0815, - "step": 676 - }, - { - "epoch": 0.17976633032395115, - "grad_norm": 0.9382679997758676, - "learning_rate": 9.956973205446416e-06, - "loss": 0.0957, - "step": 677 - }, - { - "epoch": 0.18003186404673394, - "grad_norm": 0.6567431263540028, - "learning_rate": 9.956685336476037e-06, - "loss": 0.0525, - "step": 678 - }, - { - "epoch": 0.18029739776951673, - "grad_norm": 0.9345447093648367, - "learning_rate": 9.956396511915182e-06, - "loss": 0.0688, - "step": 679 - }, - { - "epoch": 0.18056293149229952, - "grad_norm": 1.028643546453173, - "learning_rate": 9.956106731819538e-06, - "loss": 0.0752, - "step": 680 - }, - { - "epoch": 0.1808284652150823, - "grad_norm": 1.1200381609657348, - "learning_rate": 9.955815996244967e-06, - "loss": 0.0889, - "step": 681 - }, - { - "epoch": 0.1810939989378651, - "grad_norm": 0.49251673343344965, - "learning_rate": 9.955524305247518e-06, - "loss": 0.0391, - "step": 682 - }, - { - "epoch": 0.18135953266064792, - "grad_norm": 0.5170146835568467, - "learning_rate": 9.955231658883432e-06, - "loss": 0.0391, - "step": 683 - }, - { - "epoch": 0.1816250663834307, - "grad_norm": 0.7825780874484762, - "learning_rate": 
9.954938057209121e-06, - "loss": 0.0525, - "step": 684 - }, - { - "epoch": 0.1818906001062135, - "grad_norm": 1.3290725760750859, - "learning_rate": 9.954643500281191e-06, - "loss": 0.1934, - "step": 685 - }, - { - "epoch": 0.1821561338289963, - "grad_norm": 1.2730693589730842, - "learning_rate": 9.95434798815643e-06, - "loss": 0.03, - "step": 686 - }, - { - "epoch": 0.18242166755177908, - "grad_norm": 0.29840206739057834, - "learning_rate": 9.954051520891808e-06, - "loss": 0.0229, - "step": 687 - }, - { - "epoch": 0.18268720127456187, - "grad_norm": 1.283727575547433, - "learning_rate": 9.95375409854448e-06, - "loss": 0.1836, - "step": 688 - }, - { - "epoch": 0.18295273499734466, - "grad_norm": 0.36148423987976935, - "learning_rate": 9.953455721171784e-06, - "loss": 0.0286, - "step": 689 - }, - { - "epoch": 0.18321826872012745, - "grad_norm": 0.26464506058473897, - "learning_rate": 9.953156388831246e-06, - "loss": 0.0184, - "step": 690 - }, - { - "epoch": 0.18348380244291024, - "grad_norm": 0.26815836434680423, - "learning_rate": 9.952856101580574e-06, - "loss": 0.0206, - "step": 691 - }, - { - "epoch": 0.18374933616569306, - "grad_norm": 0.9433969771302444, - "learning_rate": 9.952554859477661e-06, - "loss": 0.0815, - "step": 692 - }, - { - "epoch": 0.18401486988847585, - "grad_norm": 0.8371591636277368, - "learning_rate": 9.95225266258058e-06, - "loss": 0.0688, - "step": 693 - }, - { - "epoch": 0.18428040361125864, - "grad_norm": 0.31710782740299065, - "learning_rate": 9.95194951094759e-06, - "loss": 0.0255, - "step": 694 - }, - { - "epoch": 0.18454593733404143, - "grad_norm": 0.7792504139919215, - "learning_rate": 9.951645404637139e-06, - "loss": 0.063, - "step": 695 - }, - { - "epoch": 0.18481147105682422, - "grad_norm": 0.24864366834341872, - "learning_rate": 9.951340343707852e-06, - "loss": 0.0184, - "step": 696 - }, - { - "epoch": 0.185077004779607, - "grad_norm": 1.3198814057549204, - "learning_rate": 9.951034328218543e-06, - "loss": 0.0815, - "step": 697 - }, - { - "epoch": 0.1853425385023898, - "grad_norm": 0.1349697962445958, - "learning_rate": 9.950727358228207e-06, - "loss": 0.004, - "step": 698 - }, - { - "epoch": 0.1856080722251726, - "grad_norm": 0.7940800117641265, - "learning_rate": 9.950419433796026e-06, - "loss": 0.0525, - "step": 699 - }, - { - "epoch": 0.18587360594795538, - "grad_norm": 0.17975503032083975, - "learning_rate": 9.950110554981363e-06, - "loss": 0.0045, - "step": 700 - }, - { - "epoch": 0.1861391396707382, - "grad_norm": 0.48368320799159703, - "learning_rate": 9.949800721843765e-06, - "loss": 0.0391, - "step": 701 - }, - { - "epoch": 0.186404673393521, - "grad_norm": 0.31589438590765767, - "learning_rate": 9.949489934442966e-06, - "loss": 0.0255, - "step": 702 - }, - { - "epoch": 0.18667020711630378, - "grad_norm": 0.31533609255408135, - "learning_rate": 9.949178192838881e-06, - "loss": 0.0258, - "step": 703 - }, - { - "epoch": 0.18693574083908657, - "grad_norm": 1.3466582105948832, - "learning_rate": 9.94886549709161e-06, - "loss": 0.1279, - "step": 704 - }, - { - "epoch": 0.18720127456186936, - "grad_norm": 0.910289445658781, - "learning_rate": 9.948551847261439e-06, - "loss": 0.0815, - "step": 705 - }, - { - "epoch": 0.18746680828465215, - "grad_norm": 0.21688834388185138, - "learning_rate": 9.948237243408834e-06, - "loss": 0.0165, - "step": 706 - }, - { - "epoch": 0.18773234200743494, - "grad_norm": 0.021656248810182913, - "learning_rate": 9.947921685594448e-06, - "loss": 0.0005, - "step": 707 - }, - { - "epoch": 0.18799787573021773, - "grad_norm": 
0.9878506170969464, - "learning_rate": 9.947605173879116e-06, - "loss": 0.1279, - "step": 708 - }, - { - "epoch": 0.18826340945300052, - "grad_norm": 1.1026344698990722, - "learning_rate": 9.947287708323858e-06, - "loss": 0.1553, - "step": 709 - }, - { - "epoch": 0.18852894317578334, - "grad_norm": 0.6327076367042769, - "learning_rate": 9.946969288989878e-06, - "loss": 0.0579, - "step": 710 - }, - { - "epoch": 0.18879447689856613, - "grad_norm": 0.972281811713341, - "learning_rate": 9.946649915938562e-06, - "loss": 0.1367, - "step": 711 - }, - { - "epoch": 0.18906001062134892, - "grad_norm": 0.9045454418723899, - "learning_rate": 9.946329589231484e-06, - "loss": 0.1113, - "step": 712 - }, - { - "epoch": 0.1893255443441317, - "grad_norm": 1.1652383753491669, - "learning_rate": 9.946008308930396e-06, - "loss": 0.1035, - "step": 713 - }, - { - "epoch": 0.1895910780669145, - "grad_norm": 0.5983398386283125, - "learning_rate": 9.94568607509724e-06, - "loss": 0.0525, - "step": 714 - }, - { - "epoch": 0.1898566117896973, - "grad_norm": 0.9406342065879315, - "learning_rate": 9.945362887794139e-06, - "loss": 0.1279, - "step": 715 - }, - { - "epoch": 0.19012214551248008, - "grad_norm": 1.0678169277527272, - "learning_rate": 9.945038747083396e-06, - "loss": 0.1455, - "step": 716 - }, - { - "epoch": 0.19038767923526287, - "grad_norm": 0.6152395770617992, - "learning_rate": 9.944713653027505e-06, - "loss": 0.063, - "step": 717 - }, - { - "epoch": 0.19065321295804566, - "grad_norm": 0.7306184341026286, - "learning_rate": 9.944387605689139e-06, - "loss": 0.0693, - "step": 718 - }, - { - "epoch": 0.19091874668082848, - "grad_norm": 0.32716061364977655, - "learning_rate": 9.944060605131157e-06, - "loss": 0.0165, - "step": 719 - }, - { - "epoch": 0.19118428040361127, - "grad_norm": 0.9594346998876806, - "learning_rate": 9.943732651416597e-06, - "loss": 0.0957, - "step": 720 - }, - { - "epoch": 0.19144981412639406, - "grad_norm": 0.8678737283379834, - "learning_rate": 9.94340374460869e-06, - "loss": 0.1035, - "step": 721 - }, - { - "epoch": 0.19171534784917685, - "grad_norm": 0.5586020989155928, - "learning_rate": 9.943073884770844e-06, - "loss": 0.0579, - "step": 722 - }, - { - "epoch": 0.19198088157195964, - "grad_norm": 0.6710124613449318, - "learning_rate": 9.94274307196665e-06, - "loss": 0.0752, - "step": 723 - }, - { - "epoch": 0.19224641529474243, - "grad_norm": 0.11750671730822675, - "learning_rate": 9.942411306259887e-06, - "loss": 0.0041, - "step": 724 - }, - { - "epoch": 0.19251194901752522, - "grad_norm": 0.9492120597858461, - "learning_rate": 9.942078587714514e-06, - "loss": 0.1113, - "step": 725 - }, - { - "epoch": 0.192777482740308, - "grad_norm": 0.7578093561638131, - "learning_rate": 9.941744916394675e-06, - "loss": 0.0889, - "step": 726 - }, - { - "epoch": 0.1930430164630908, - "grad_norm": 0.0703405751086318, - "learning_rate": 9.9414102923647e-06, - "loss": 0.0017, - "step": 727 - }, - { - "epoch": 0.19330855018587362, - "grad_norm": 0.2720572743378476, - "learning_rate": 9.941074715689097e-06, - "loss": 0.0132, - "step": 728 - }, - { - "epoch": 0.1935740839086564, - "grad_norm": 0.47926399744773224, - "learning_rate": 9.940738186432565e-06, - "loss": 0.0479, - "step": 729 - }, - { - "epoch": 0.1938396176314392, - "grad_norm": 0.4960442155304019, - "learning_rate": 9.94040070465998e-06, - "loss": 0.053, - "step": 730 - }, - { - "epoch": 0.194105151354222, - "grad_norm": 0.6099810000661499, - "learning_rate": 9.940062270436406e-06, - "loss": 0.0752, - "step": 731 - }, - { - "epoch": 
0.19437068507700478, - "grad_norm": 0.5317125466849125, - "learning_rate": 9.93972288382709e-06, - "loss": 0.0579, - "step": 732 - }, - { - "epoch": 0.19463621879978757, - "grad_norm": 0.405350264279859, - "learning_rate": 9.93938254489746e-06, - "loss": 0.0359, - "step": 733 - }, - { - "epoch": 0.19490175252257036, - "grad_norm": 0.5550223239319415, - "learning_rate": 9.93904125371313e-06, - "loss": 0.063, - "step": 734 - }, - { - "epoch": 0.19516728624535315, - "grad_norm": 0.4548926771748096, - "learning_rate": 9.938699010339898e-06, - "loss": 0.0479, - "step": 735 - }, - { - "epoch": 0.19543281996813594, - "grad_norm": 0.4301648170034228, - "learning_rate": 9.93835581484374e-06, - "loss": 0.0432, - "step": 736 - }, - { - "epoch": 0.19569835369091876, - "grad_norm": 0.5759338926245092, - "learning_rate": 9.938011667290828e-06, - "loss": 0.0476, - "step": 737 - }, - { - "epoch": 0.19596388741370155, - "grad_norm": 0.47327836419648434, - "learning_rate": 9.9376665677475e-06, - "loss": 0.0525, - "step": 738 - }, - { - "epoch": 0.19622942113648434, - "grad_norm": 0.1587521869962895, - "learning_rate": 9.937320516280297e-06, - "loss": 0.0051, - "step": 739 - }, - { - "epoch": 0.19649495485926713, - "grad_norm": 0.29964993842166077, - "learning_rate": 9.936973512955927e-06, - "loss": 0.0132, - "step": 740 - }, - { - "epoch": 0.19676048858204992, - "grad_norm": 0.5192574350505638, - "learning_rate": 9.936625557841289e-06, - "loss": 0.0525, - "step": 741 - }, - { - "epoch": 0.1970260223048327, - "grad_norm": 0.6174706812099604, - "learning_rate": 9.936276651003465e-06, - "loss": 0.0693, - "step": 742 - }, - { - "epoch": 0.1972915560276155, - "grad_norm": 0.4060064868344745, - "learning_rate": 9.935926792509723e-06, - "loss": 0.0435, - "step": 743 - }, - { - "epoch": 0.1975570897503983, - "grad_norm": 0.6447420229580227, - "learning_rate": 9.935575982427507e-06, - "loss": 0.082, - "step": 744 - }, - { - "epoch": 0.19782262347318108, - "grad_norm": 0.8009583314059334, - "learning_rate": 9.935224220824452e-06, - "loss": 0.1191, - "step": 745 - }, - { - "epoch": 0.1980881571959639, - "grad_norm": 0.7682777474476555, - "learning_rate": 9.934871507768374e-06, - "loss": 0.0688, - "step": 746 - }, - { - "epoch": 0.1983536909187467, - "grad_norm": 0.27537017109972733, - "learning_rate": 9.934517843327269e-06, - "loss": 0.0104, - "step": 747 - }, - { - "epoch": 0.19861922464152948, - "grad_norm": 0.4708650125151908, - "learning_rate": 9.93416322756932e-06, - "loss": 0.0479, - "step": 748 - }, - { - "epoch": 0.19888475836431227, - "grad_norm": 0.706657435045738, - "learning_rate": 9.933807660562898e-06, - "loss": 0.082, - "step": 749 - }, - { - "epoch": 0.19915029208709506, - "grad_norm": 0.36488237415402874, - "learning_rate": 9.933451142376545e-06, - "loss": 0.0354, - "step": 750 - }, - { - "epoch": 0.19941582580987785, - "grad_norm": 0.4861196249654044, - "learning_rate": 9.933093673078994e-06, - "loss": 0.0525, - "step": 751 - }, - { - "epoch": 0.19968135953266064, - "grad_norm": 0.5291778145248267, - "learning_rate": 9.932735252739168e-06, - "loss": 0.0393, - "step": 752 - }, - { - "epoch": 0.19994689325544343, - "grad_norm": 0.35001230301241637, - "learning_rate": 9.93237588142616e-06, - "loss": 0.0354, - "step": 753 - }, - { - "epoch": 0.20021242697822622, - "grad_norm": 2.417388946512036, - "learning_rate": 9.932015559209255e-06, - "loss": 0.0277, - "step": 754 - }, - { - "epoch": 0.20047796070100904, - "grad_norm": 0.1251433570133762, - "learning_rate": 9.931654286157915e-06, - "loss": 0.0032, - 
"step": 755 - }, - { - "epoch": 0.20074349442379183, - "grad_norm": 0.209414281849796, - "learning_rate": 9.931292062341793e-06, - "loss": 0.0047, - "step": 756 - }, - { - "epoch": 0.20100902814657462, - "grad_norm": 0.008292667150507884, - "learning_rate": 9.930928887830721e-06, - "loss": 0.0002, - "step": 757 - }, - { - "epoch": 0.2012745618693574, - "grad_norm": 0.38647994443034445, - "learning_rate": 9.930564762694715e-06, - "loss": 0.0393, - "step": 758 - }, - { - "epoch": 0.2015400955921402, - "grad_norm": 0.6378261596904626, - "learning_rate": 9.930199687003972e-06, - "loss": 0.0579, - "step": 759 - }, - { - "epoch": 0.201805629314923, - "grad_norm": 2.3592667920352035, - "learning_rate": 9.929833660828878e-06, - "loss": 0.1455, - "step": 760 - }, - { - "epoch": 0.20207116303770578, - "grad_norm": 0.6334539242279383, - "learning_rate": 9.929466684239993e-06, - "loss": 0.0525, - "step": 761 - }, - { - "epoch": 0.20233669676048857, - "grad_norm": 0.30334797132909946, - "learning_rate": 9.929098757308071e-06, - "loss": 0.0286, - "step": 762 - }, - { - "epoch": 0.20260223048327136, - "grad_norm": 0.30034960277983547, - "learning_rate": 9.928729880104042e-06, - "loss": 0.0258, - "step": 763 - }, - { - "epoch": 0.20286776420605418, - "grad_norm": 0.7158437599898999, - "learning_rate": 9.928360052699019e-06, - "loss": 0.0579, - "step": 764 - }, - { - "epoch": 0.20313329792883697, - "grad_norm": 0.4427519026261093, - "learning_rate": 9.927989275164305e-06, - "loss": 0.0432, - "step": 765 - }, - { - "epoch": 0.20339883165161976, - "grad_norm": 1.223371374907059, - "learning_rate": 9.927617547571377e-06, - "loss": 0.0957, - "step": 766 - }, - { - "epoch": 0.20366436537440255, - "grad_norm": 0.8525259837165133, - "learning_rate": 9.927244869991905e-06, - "loss": 0.1191, - "step": 767 - }, - { - "epoch": 0.20392989909718534, - "grad_norm": 0.7529517092699473, - "learning_rate": 9.92687124249773e-06, - "loss": 0.0889, - "step": 768 - }, - { - "epoch": 0.20419543281996813, - "grad_norm": 0.4231343502487387, - "learning_rate": 9.926496665160887e-06, - "loss": 0.0393, - "step": 769 - }, - { - "epoch": 0.20446096654275092, - "grad_norm": 0.02781249909477673, - "learning_rate": 9.926121138053591e-06, - "loss": 0.0005, - "step": 770 - }, - { - "epoch": 0.2047265002655337, - "grad_norm": 0.32019858843907156, - "learning_rate": 9.925744661248238e-06, - "loss": 0.0286, - "step": 771 - }, - { - "epoch": 0.20499203398831653, - "grad_norm": 0.34210995299202096, - "learning_rate": 9.925367234817407e-06, - "loss": 0.0317, - "step": 772 - }, - { - "epoch": 0.20525756771109932, - "grad_norm": 0.29667952505177037, - "learning_rate": 9.924988858833862e-06, - "loss": 0.0258, - "step": 773 - }, - { - "epoch": 0.2055231014338821, - "grad_norm": 0.775170587796152, - "learning_rate": 9.924609533370551e-06, - "loss": 0.082, - "step": 774 - }, - { - "epoch": 0.2057886351566649, - "grad_norm": 0.8776207151240932, - "learning_rate": 9.924229258500601e-06, - "loss": 0.0688, - "step": 775 - }, - { - "epoch": 0.2060541688794477, - "grad_norm": 0.34921604699592496, - "learning_rate": 9.923848034297327e-06, - "loss": 0.0286, - "step": 776 - }, - { - "epoch": 0.20631970260223048, - "grad_norm": 0.7094770354343616, - "learning_rate": 9.923465860834224e-06, - "loss": 0.063, - "step": 777 - }, - { - "epoch": 0.20658523632501327, - "grad_norm": 0.8576553061781719, - "learning_rate": 9.923082738184969e-06, - "loss": 0.0688, - "step": 778 - }, - { - "epoch": 0.20685077004779606, - "grad_norm": 0.4617621570202573, - "learning_rate": 
9.922698666423424e-06, - "loss": 0.0126, - "step": 779 - }, - { - "epoch": 0.20711630377057885, - "grad_norm": 0.2701329483492822, - "learning_rate": 9.922313645623634e-06, - "loss": 0.0205, - "step": 780 - }, - { - "epoch": 0.20738183749336167, - "grad_norm": 1.2246770118493904, - "learning_rate": 9.921927675859828e-06, - "loss": 0.0576, - "step": 781 - }, - { - "epoch": 0.20764737121614446, - "grad_norm": 0.29636608608865783, - "learning_rate": 9.921540757206414e-06, - "loss": 0.0258, - "step": 782 - }, - { - "epoch": 0.20791290493892725, - "grad_norm": 1.8125953283598475, - "learning_rate": 9.921152889737985e-06, - "loss": 0.1367, - "step": 783 - }, - { - "epoch": 0.20817843866171004, - "grad_norm": 0.03047349596838622, - "learning_rate": 9.920764073529319e-06, - "loss": 0.0006, - "step": 784 - }, - { - "epoch": 0.20844397238449283, - "grad_norm": 0.16957814720325154, - "learning_rate": 9.920374308655375e-06, - "loss": 0.0041, - "step": 785 - }, - { - "epoch": 0.20870950610727562, - "grad_norm": 0.7111174333404663, - "learning_rate": 9.919983595191295e-06, - "loss": 0.0165, - "step": 786 - }, - { - "epoch": 0.2089750398300584, - "grad_norm": 1.244139227465599, - "learning_rate": 9.919591933212403e-06, - "loss": 0.0815, - "step": 787 - }, - { - "epoch": 0.2092405735528412, - "grad_norm": 0.34737267129055766, - "learning_rate": 9.919199322794209e-06, - "loss": 0.0286, - "step": 788 - }, - { - "epoch": 0.209506107275624, - "grad_norm": 0.9063191327124598, - "learning_rate": 9.9188057640124e-06, - "loss": 0.0479, - "step": 789 - }, - { - "epoch": 0.2097716409984068, - "grad_norm": 1.1996030088378673, - "learning_rate": 9.918411256942852e-06, - "loss": 0.1279, - "step": 790 - }, - { - "epoch": 0.2100371747211896, - "grad_norm": 0.3272617386081918, - "learning_rate": 9.918015801661622e-06, - "loss": 0.0231, - "step": 791 - }, - { - "epoch": 0.2103027084439724, - "grad_norm": 0.20150941486275553, - "learning_rate": 9.91761939824495e-06, - "loss": 0.0146, - "step": 792 - }, - { - "epoch": 0.21056824216675518, - "grad_norm": 0.9406775259417158, - "learning_rate": 9.917222046769252e-06, - "loss": 0.1035, - "step": 793 - }, - { - "epoch": 0.21083377588953797, - "grad_norm": 0.33726088549905536, - "learning_rate": 9.916823747311138e-06, - "loss": 0.0255, - "step": 794 - }, - { - "epoch": 0.21109930961232076, - "grad_norm": 0.24491234727145536, - "learning_rate": 9.916424499947395e-06, - "loss": 0.0184, - "step": 795 - }, - { - "epoch": 0.21136484333510355, - "grad_norm": 0.1878113074555544, - "learning_rate": 9.91602430475499e-06, - "loss": 0.0115, - "step": 796 - }, - { - "epoch": 0.21163037705788634, - "grad_norm": 0.030677918403306813, - "learning_rate": 9.91562316181108e-06, - "loss": 0.0008, - "step": 797 - }, - { - "epoch": 0.21189591078066913, - "grad_norm": 0.8660378755286174, - "learning_rate": 9.915221071193e-06, - "loss": 0.0476, - "step": 798 - }, - { - "epoch": 0.21216144450345195, - "grad_norm": 0.8549555937928719, - "learning_rate": 9.914818032978263e-06, - "loss": 0.0525, - "step": 799 - }, - { - "epoch": 0.21242697822623474, - "grad_norm": 0.6372317187162225, - "learning_rate": 9.914414047244578e-06, - "loss": 0.0317, - "step": 800 - }, - { - "epoch": 0.21269251194901753, - "grad_norm": 0.9907320369841311, - "learning_rate": 9.914009114069824e-06, - "loss": 0.0525, - "step": 801 - }, - { - "epoch": 0.21295804567180032, - "grad_norm": 1.3053615357778507, - "learning_rate": 9.913603233532067e-06, - "loss": 0.0391, - "step": 802 - }, - { - "epoch": 0.2132235793945831, - "grad_norm": 
0.5689635893757348, - "learning_rate": 9.913196405709557e-06, - "loss": 0.0286, - "step": 803 - }, - { - "epoch": 0.2134891131173659, - "grad_norm": 1.538445081246666, - "learning_rate": 9.912788630680727e-06, - "loss": 0.0525, - "step": 804 - }, - { - "epoch": 0.2137546468401487, - "grad_norm": 0.4053859965192937, - "learning_rate": 9.91237990852419e-06, - "loss": 0.0165, - "step": 805 - }, - { - "epoch": 0.21402018056293148, - "grad_norm": 0.12115693287671359, - "learning_rate": 9.911970239318743e-06, - "loss": 0.0072, - "step": 806 - }, - { - "epoch": 0.21428571428571427, - "grad_norm": 0.02612612601513698, - "learning_rate": 9.911559623143364e-06, - "loss": 0.0004, - "step": 807 - }, - { - "epoch": 0.2145512480084971, - "grad_norm": 0.155521917709447, - "learning_rate": 9.911148060077216e-06, - "loss": 0.0092, - "step": 808 - }, - { - "epoch": 0.21481678173127988, - "grad_norm": 0.800502691704114, - "learning_rate": 9.910735550199647e-06, - "loss": 0.0317, - "step": 809 - }, - { - "epoch": 0.21508231545406267, - "grad_norm": 1.7498059645077653, - "learning_rate": 9.910322093590177e-06, - "loss": 0.1035, - "step": 810 - }, - { - "epoch": 0.21534784917684546, - "grad_norm": 0.5768858949644357, - "learning_rate": 9.90990769032852e-06, - "loss": 0.0255, - "step": 811 - }, - { - "epoch": 0.21561338289962825, - "grad_norm": 0.17436803776687265, - "learning_rate": 9.909492340494567e-06, - "loss": 0.0064, - "step": 812 - }, - { - "epoch": 0.21587891662241104, - "grad_norm": 1.2847308699648239, - "learning_rate": 9.909076044168393e-06, - "loss": 0.0476, - "step": 813 - }, - { - "epoch": 0.21614445034519383, - "grad_norm": 0.20641696265719725, - "learning_rate": 9.908658801430256e-06, - "loss": 0.0115, - "step": 814 - }, - { - "epoch": 0.21640998406797662, - "grad_norm": 0.4594930508016625, - "learning_rate": 9.908240612360593e-06, - "loss": 0.0205, - "step": 815 - }, - { - "epoch": 0.21667551779075941, - "grad_norm": 1.6460520494176336, - "learning_rate": 9.907821477040028e-06, - "loss": 0.0391, - "step": 816 - }, - { - "epoch": 0.21694105151354223, - "grad_norm": 0.006952372103528881, - "learning_rate": 9.907401395549364e-06, - "loss": 0.0, - "step": 817 - }, - { - "epoch": 0.21720658523632502, - "grad_norm": 1.625155011661868, - "learning_rate": 9.90698036796959e-06, - "loss": 0.2344, - "step": 818 - }, - { - "epoch": 0.2174721189591078, - "grad_norm": 0.0352752424800895, - "learning_rate": 9.906558394381872e-06, - "loss": 0.0017, - "step": 819 - }, - { - "epoch": 0.2177376526818906, - "grad_norm": 0.20879590616740903, - "learning_rate": 9.906135474867562e-06, - "loss": 0.0103, - "step": 820 - }, - { - "epoch": 0.2180031864046734, - "grad_norm": 1.6599267611839752, - "learning_rate": 9.905711609508198e-06, - "loss": 0.2676, - "step": 821 - }, - { - "epoch": 0.21826872012745618, - "grad_norm": 3.9796641629384975, - "learning_rate": 9.90528679838549e-06, - "loss": 0.1553, - "step": 822 - }, - { - "epoch": 0.21853425385023897, - "grad_norm": 0.0648341014830216, - "learning_rate": 9.904861041581341e-06, - "loss": 0.0035, - "step": 823 - }, - { - "epoch": 0.21879978757302176, - "grad_norm": 11.724369911160045, - "learning_rate": 9.904434339177831e-06, - "loss": 0.4199, - "step": 824 - }, - { - "epoch": 0.21906532129580455, - "grad_norm": 1.7629568442469772, - "learning_rate": 9.904006691257224e-06, - "loss": 0.2559, - "step": 825 - }, - { - "epoch": 0.21933085501858737, - "grad_norm": 0.10426293175973483, - "learning_rate": 9.903578097901963e-06, - "loss": 0.0064, - "step": 826 - }, - { - "epoch": 
0.21959638874137016, - "grad_norm": 1.4924171036944354, - "learning_rate": 9.903148559194677e-06, - "loss": 0.1641, - "step": 827 - }, - { - "epoch": 0.21986192246415295, - "grad_norm": 1.3050509840328823, - "learning_rate": 9.902718075218176e-06, - "loss": 0.1934, - "step": 828 - }, - { - "epoch": 0.22012745618693574, - "grad_norm": 1.235993047459142, - "learning_rate": 9.902286646055453e-06, - "loss": 0.0576, - "step": 829 - }, - { - "epoch": 0.22039298990971853, - "grad_norm": 0.24248121114396165, - "learning_rate": 9.90185427178968e-06, - "loss": 0.0165, - "step": 830 - }, - { - "epoch": 0.22065852363250132, - "grad_norm": 4.175312997922109, - "learning_rate": 9.901420952504217e-06, - "loss": 0.0576, - "step": 831 - }, - { - "epoch": 0.22092405735528411, - "grad_norm": 3.2184596570988093, - "learning_rate": 9.9009866882826e-06, - "loss": 0.1279, - "step": 832 - }, - { - "epoch": 0.2211895910780669, - "grad_norm": 1.0077714273038476, - "learning_rate": 9.900551479208553e-06, - "loss": 0.0576, - "step": 833 - }, - { - "epoch": 0.2214551248008497, - "grad_norm": 0.5299304251634451, - "learning_rate": 9.900115325365975e-06, - "loss": 0.0432, - "step": 834 - }, - { - "epoch": 0.2217206585236325, - "grad_norm": 0.25144714721412126, - "learning_rate": 9.899678226838956e-06, - "loss": 0.0206, - "step": 835 - }, - { - "epoch": 0.2219861922464153, - "grad_norm": 1.4383846378372553, - "learning_rate": 9.89924018371176e-06, - "loss": 0.1191, - "step": 836 - }, - { - "epoch": 0.2222517259691981, - "grad_norm": 0.6128666274034679, - "learning_rate": 9.898801196068839e-06, - "loss": 0.0391, - "step": 837 - }, - { - "epoch": 0.22251725969198088, - "grad_norm": 1.6083587605392458, - "learning_rate": 9.898361263994823e-06, - "loss": 0.1035, - "step": 838 - }, - { - "epoch": 0.22278279341476367, - "grad_norm": 0.33105381078233614, - "learning_rate": 9.897920387574525e-06, - "loss": 0.0286, - "step": 839 - }, - { - "epoch": 0.22304832713754646, - "grad_norm": 0.581750205311232, - "learning_rate": 9.897478566892942e-06, - "loss": 0.0317, - "step": 840 - }, - { - "epoch": 0.22331386086032926, - "grad_norm": 1.103725743553766, - "learning_rate": 9.897035802035253e-06, - "loss": 0.0354, - "step": 841 - }, - { - "epoch": 0.22357939458311205, - "grad_norm": 0.41623629278157465, - "learning_rate": 9.896592093086816e-06, - "loss": 0.0354, - "step": 842 - }, - { - "epoch": 0.22384492830589484, - "grad_norm": 0.22870378430920638, - "learning_rate": 9.896147440133173e-06, - "loss": 0.002, - "step": 843 - }, - { - "epoch": 0.22411046202867765, - "grad_norm": 0.026554099075059814, - "learning_rate": 9.89570184326005e-06, - "loss": 0.0005, - "step": 844 - }, - { - "epoch": 0.22437599575146044, - "grad_norm": 1.0737697123349998, - "learning_rate": 9.895255302553349e-06, - "loss": 0.1641, - "step": 845 - }, - { - "epoch": 0.22464152947424323, - "grad_norm": 1.05755659361386, - "learning_rate": 9.89480781809916e-06, - "loss": 0.0688, - "step": 846 - }, - { - "epoch": 0.22490706319702602, - "grad_norm": 0.8318386749828283, - "learning_rate": 9.894359389983753e-06, - "loss": 0.1191, - "step": 847 - }, - { - "epoch": 0.22517259691980882, - "grad_norm": 0.38706813856524885, - "learning_rate": 9.89391001829358e-06, - "loss": 0.0354, - "step": 848 - }, - { - "epoch": 0.2254381306425916, - "grad_norm": 0.06799335395718269, - "learning_rate": 9.893459703115272e-06, - "loss": 0.0017, - "step": 849 - }, - { - "epoch": 0.2257036643653744, - "grad_norm": 0.36794444497590895, - "learning_rate": 9.893008444535647e-06, - "loss": 0.0093, 
- "step": 850 - }, - { - "epoch": 0.22596919808815719, - "grad_norm": 0.5439922261966197, - "learning_rate": 9.892556242641702e-06, - "loss": 0.0117, - "step": 851 - }, - { - "epoch": 0.22623473181093998, - "grad_norm": 0.8380785010711241, - "learning_rate": 9.892103097520615e-06, - "loss": 0.063, - "step": 852 - }, - { - "epoch": 0.2265002655337228, - "grad_norm": 0.5431138700099414, - "learning_rate": 9.89164900925975e-06, - "loss": 0.0288, - "step": 853 - }, - { - "epoch": 0.22676579925650558, - "grad_norm": 0.8146895584075347, - "learning_rate": 9.891193977946645e-06, - "loss": 0.1113, - "step": 854 - }, - { - "epoch": 0.22703133297928837, - "grad_norm": 0.6831359882459529, - "learning_rate": 9.890738003669029e-06, - "loss": 0.0688, - "step": 855 - }, - { - "epoch": 0.22729686670207117, - "grad_norm": 0.4121057526788246, - "learning_rate": 9.890281086514806e-06, - "loss": 0.0391, - "step": 856 - }, - { - "epoch": 0.22756240042485396, - "grad_norm": 0.7258123968019558, - "learning_rate": 9.889823226572066e-06, - "loss": 0.0752, - "step": 857 - }, - { - "epoch": 0.22782793414763675, - "grad_norm": 0.8294544573466394, - "learning_rate": 9.889364423929076e-06, - "loss": 0.1035, - "step": 858 - }, - { - "epoch": 0.22809346787041954, - "grad_norm": 1.051757101289739, - "learning_rate": 9.888904678674292e-06, - "loss": 0.1113, - "step": 859 - }, - { - "epoch": 0.22835900159320233, - "grad_norm": 1.3061340829563401, - "learning_rate": 9.888443990896344e-06, - "loss": 0.1113, - "step": 860 - }, - { - "epoch": 0.22862453531598512, - "grad_norm": 0.8993459080630722, - "learning_rate": 9.887982360684049e-06, - "loss": 0.0889, - "step": 861 - }, - { - "epoch": 0.22889006903876793, - "grad_norm": 0.7347323587084027, - "learning_rate": 9.887519788126404e-06, - "loss": 0.0752, - "step": 862 - }, - { - "epoch": 0.22915560276155073, - "grad_norm": 0.5477598732831455, - "learning_rate": 9.887056273312584e-06, - "loss": 0.0354, - "step": 863 - }, - { - "epoch": 0.22942113648433352, - "grad_norm": 0.35585991259844624, - "learning_rate": 9.886591816331953e-06, - "loss": 0.0233, - "step": 864 - }, - { - "epoch": 0.2296866702071163, - "grad_norm": 1.3171584886462089, - "learning_rate": 9.886126417274054e-06, - "loss": 0.1191, - "step": 865 - }, - { - "epoch": 0.2299522039298991, - "grad_norm": 0.5959273314376112, - "learning_rate": 9.885660076228606e-06, - "loss": 0.0354, - "step": 866 - }, - { - "epoch": 0.2302177376526819, - "grad_norm": 0.5887513008564544, - "learning_rate": 9.885192793285516e-06, - "loss": 0.063, - "step": 867 - }, - { - "epoch": 0.23048327137546468, - "grad_norm": 1.2261572998536514, - "learning_rate": 9.884724568534873e-06, - "loss": 0.1191, - "step": 868 - }, - { - "epoch": 0.23074880509824747, - "grad_norm": 3.6206803166086385, - "learning_rate": 9.884255402066943e-06, - "loss": 0.2891, - "step": 869 - }, - { - "epoch": 0.23101433882103026, - "grad_norm": 0.8824142077915467, - "learning_rate": 9.883785293972175e-06, - "loss": 0.0957, - "step": 870 - }, - { - "epoch": 0.23127987254381308, - "grad_norm": 0.46423245513429523, - "learning_rate": 9.8833142443412e-06, - "loss": 0.0393, - "step": 871 - }, - { - "epoch": 0.23154540626659587, - "grad_norm": 0.5138918912335925, - "learning_rate": 9.882842253264835e-06, - "loss": 0.0525, - "step": 872 - }, - { - "epoch": 0.23181093998937866, - "grad_norm": 0.5843357342302304, - "learning_rate": 9.882369320834068e-06, - "loss": 0.0693, - "step": 873 - }, - { - "epoch": 0.23207647371216145, - "grad_norm": 0.2520466827681795, - "learning_rate": 
9.88189544714008e-06, - "loss": 0.0074, - "step": 874 - }, - { - "epoch": 0.23234200743494424, - "grad_norm": 0.5794422421534507, - "learning_rate": 9.881420632274226e-06, - "loss": 0.0693, - "step": 875 - }, - { - "epoch": 0.23260754115772703, - "grad_norm": 0.44568917664701574, - "learning_rate": 9.880944876328045e-06, - "loss": 0.0354, - "step": 876 - }, - { - "epoch": 0.23287307488050982, - "grad_norm": 0.5250220184195629, - "learning_rate": 9.88046817939326e-06, - "loss": 0.0579, - "step": 877 - }, - { - "epoch": 0.2331386086032926, - "grad_norm": 0.5550066871107131, - "learning_rate": 9.879990541561766e-06, - "loss": 0.063, - "step": 878 - }, - { - "epoch": 0.2334041423260754, - "grad_norm": 0.4307639707627933, - "learning_rate": 9.879511962925651e-06, - "loss": 0.0476, - "step": 879 - }, - { - "epoch": 0.23366967604885822, - "grad_norm": 2.7902947643287472, - "learning_rate": 9.879032443577179e-06, - "loss": 0.2344, - "step": 880 - }, - { - "epoch": 0.233935209771641, - "grad_norm": 0.14563410670518806, - "learning_rate": 9.878551983608794e-06, - "loss": 0.0036, - "step": 881 - }, - { - "epoch": 0.2342007434944238, - "grad_norm": 0.8774501103628786, - "learning_rate": 9.878070583113123e-06, - "loss": 0.0957, - "step": 882 - }, - { - "epoch": 0.2344662772172066, - "grad_norm": 0.5560581059426071, - "learning_rate": 9.877588242182976e-06, - "loss": 0.063, - "step": 883 - }, - { - "epoch": 0.23473181093998938, - "grad_norm": 3.7081276425034457, - "learning_rate": 9.877104960911341e-06, - "loss": 0.3301, - "step": 884 - }, - { - "epoch": 0.23499734466277217, - "grad_norm": 0.22236979379444108, - "learning_rate": 9.87662073939139e-06, - "loss": 0.0074, - "step": 885 - }, - { - "epoch": 0.23526287838555496, - "grad_norm": 0.4092243618958261, - "learning_rate": 9.876135577716475e-06, - "loss": 0.0259, - "step": 886 - }, - { - "epoch": 0.23552841210833775, - "grad_norm": 0.37933414096989954, - "learning_rate": 9.875649475980129e-06, - "loss": 0.0432, - "step": 887 - }, - { - "epoch": 0.23579394583112057, - "grad_norm": 0.5651151674620623, - "learning_rate": 9.875162434276066e-06, - "loss": 0.0579, - "step": 888 - }, - { - "epoch": 0.23605947955390336, - "grad_norm": 0.5162559290907474, - "learning_rate": 9.874674452698184e-06, - "loss": 0.0479, - "step": 889 - }, - { - "epoch": 0.23632501327668615, - "grad_norm": 0.637236975984389, - "learning_rate": 9.874185531340561e-06, - "loss": 0.0889, - "step": 890 - }, - { - "epoch": 0.23659054699946894, - "grad_norm": 0.37992930731240715, - "learning_rate": 9.87369567029745e-06, - "loss": 0.0432, - "step": 891 - }, - { - "epoch": 0.23685608072225173, - "grad_norm": 0.4304890878920463, - "learning_rate": 9.873204869663296e-06, - "loss": 0.0479, - "step": 892 - }, - { - "epoch": 0.23712161444503452, - "grad_norm": 0.15235504129186367, - "learning_rate": 9.872713129532717e-06, - "loss": 0.0052, - "step": 893 - }, - { - "epoch": 0.2373871481678173, - "grad_norm": 0.6165775215284957, - "learning_rate": 9.872220450000514e-06, - "loss": 0.0393, - "step": 894 - }, - { - "epoch": 0.2376526818906001, - "grad_norm": 0.11125726497429037, - "learning_rate": 9.87172683116167e-06, - "loss": 0.0018, - "step": 895 - }, - { - "epoch": 0.2379182156133829, - "grad_norm": 0.3081309691181636, - "learning_rate": 9.871232273111351e-06, - "loss": 0.0317, - "step": 896 - }, - { - "epoch": 0.2381837493361657, - "grad_norm": 0.428579523646676, - "learning_rate": 9.8707367759449e-06, - "loss": 0.0479, - "step": 897 - }, - { - "epoch": 0.2384492830589485, - "grad_norm": 
1.2174720808088506, - "learning_rate": 9.870240339757844e-06, - "loss": 0.0752, - "step": 898 - }, - { - "epoch": 0.2387148167817313, - "grad_norm": 0.3515404509638987, - "learning_rate": 9.869742964645889e-06, - "loss": 0.0393, - "step": 899 - }, - { - "epoch": 0.23898035050451408, - "grad_norm": 0.36157621557162617, - "learning_rate": 9.869244650704924e-06, - "loss": 0.0393, - "step": 900 - }, - { - "epoch": 0.23924588422729687, - "grad_norm": 0.4552592244911392, - "learning_rate": 9.868745398031018e-06, - "loss": 0.0479, - "step": 901 - }, - { - "epoch": 0.23951141795007966, - "grad_norm": 0.7564817410807659, - "learning_rate": 9.86824520672042e-06, - "loss": 0.1191, - "step": 902 - }, - { - "epoch": 0.23977695167286245, - "grad_norm": 0.3519836929818125, - "learning_rate": 9.867744076869564e-06, - "loss": 0.0354, - "step": 903 - }, - { - "epoch": 0.24004248539564524, - "grad_norm": 0.5911859305106711, - "learning_rate": 9.867242008575056e-06, - "loss": 0.0579, - "step": 904 - }, - { - "epoch": 0.24030801911842803, - "grad_norm": 0.7584559784767734, - "learning_rate": 9.866739001933695e-06, - "loss": 0.1113, - "step": 905 - }, - { - "epoch": 0.24057355284121085, - "grad_norm": 0.7691613201005841, - "learning_rate": 9.866235057042453e-06, - "loss": 0.1035, - "step": 906 - }, - { - "epoch": 0.24083908656399364, - "grad_norm": 0.7497833122031072, - "learning_rate": 9.865730173998482e-06, - "loss": 0.1191, - "step": 907 - }, - { - "epoch": 0.24110462028677643, - "grad_norm": 0.754572113206245, - "learning_rate": 9.86522435289912e-06, - "loss": 0.1279, - "step": 908 - }, - { - "epoch": 0.24137015400955922, - "grad_norm": 0.8672227298778474, - "learning_rate": 9.864717593841884e-06, - "loss": 0.0752, - "step": 909 - }, - { - "epoch": 0.241635687732342, - "grad_norm": 0.4009604044104567, - "learning_rate": 9.864209896924468e-06, - "loss": 0.0393, - "step": 910 - }, - { - "epoch": 0.2419012214551248, - "grad_norm": 0.2966195431778511, - "learning_rate": 9.863701262244753e-06, - "loss": 0.032, - "step": 911 - }, - { - "epoch": 0.2421667551779076, - "grad_norm": 0.5999702384639466, - "learning_rate": 9.863191689900796e-06, - "loss": 0.063, - "step": 912 - }, - { - "epoch": 0.24243228890069038, - "grad_norm": 0.5132953676012183, - "learning_rate": 9.862681179990838e-06, - "loss": 0.0231, - "step": 913 - }, - { - "epoch": 0.24269782262347317, - "grad_norm": 0.6668918417364565, - "learning_rate": 9.862169732613298e-06, - "loss": 0.0525, - "step": 914 - }, - { - "epoch": 0.242963356346256, - "grad_norm": 0.6465213387617673, - "learning_rate": 9.861657347866778e-06, - "loss": 0.0957, - "step": 915 - }, - { - "epoch": 0.24322889006903878, - "grad_norm": 0.2980696299642875, - "learning_rate": 9.861144025850058e-06, - "loss": 0.0317, - "step": 916 - }, - { - "epoch": 0.24349442379182157, - "grad_norm": 0.5561195146487935, - "learning_rate": 9.860629766662103e-06, - "loss": 0.0354, - "step": 917 - }, - { - "epoch": 0.24375995751460436, - "grad_norm": 0.8511473475660397, - "learning_rate": 9.860114570402055e-06, - "loss": 0.0354, - "step": 918 - }, - { - "epoch": 0.24402549123738715, - "grad_norm": 1.0193129893064767, - "learning_rate": 9.859598437169236e-06, - "loss": 0.082, - "step": 919 - }, - { - "epoch": 0.24429102496016994, - "grad_norm": 0.38753475869451304, - "learning_rate": 9.859081367063154e-06, - "loss": 0.0432, - "step": 920 - }, - { - "epoch": 0.24455655868295273, - "grad_norm": 1.2376062406421997, - "learning_rate": 9.858563360183491e-06, - "loss": 0.0815, - "step": 921 - }, - { - "epoch": 
0.24482209240573552, - "grad_norm": 0.2614059783802485, - "learning_rate": 9.858044416630113e-06, - "loss": 0.0288, - "step": 922 - }, - { - "epoch": 0.2450876261285183, - "grad_norm": 0.7298755606796804, - "learning_rate": 9.857524536503067e-06, - "loss": 0.1191, - "step": 923 - }, - { - "epoch": 0.24535315985130113, - "grad_norm": 0.2655675767167442, - "learning_rate": 9.85700371990258e-06, - "loss": 0.0286, - "step": 924 - }, - { - "epoch": 0.24561869357408392, - "grad_norm": 0.6867605169602318, - "learning_rate": 9.85648196692906e-06, - "loss": 0.063, - "step": 925 - }, - { - "epoch": 0.2458842272968667, - "grad_norm": 0.7154487582123846, - "learning_rate": 9.855959277683092e-06, - "loss": 0.1113, - "step": 926 - }, - { - "epoch": 0.2461497610196495, - "grad_norm": 0.7662936924895251, - "learning_rate": 9.855435652265446e-06, - "loss": 0.0889, - "step": 927 - }, - { - "epoch": 0.2464152947424323, - "grad_norm": 0.9128535852883102, - "learning_rate": 9.854911090777071e-06, - "loss": 0.0815, - "step": 928 - }, - { - "epoch": 0.24668082846521508, - "grad_norm": 0.6423625754170631, - "learning_rate": 9.854385593319096e-06, - "loss": 0.082, - "step": 929 - }, - { - "epoch": 0.24694636218799787, - "grad_norm": 0.3506830449134971, - "learning_rate": 9.853859159992831e-06, - "loss": 0.0393, - "step": 930 - }, - { - "epoch": 0.24721189591078066, - "grad_norm": 0.6686174900920513, - "learning_rate": 9.853331790899765e-06, - "loss": 0.0957, - "step": 931 - }, - { - "epoch": 0.24747742963356345, - "grad_norm": 0.5803696610157401, - "learning_rate": 9.852803486141571e-06, - "loss": 0.0479, - "step": 932 - }, - { - "epoch": 0.24774296335634627, - "grad_norm": 0.6367144973315528, - "learning_rate": 9.852274245820097e-06, - "loss": 0.0889, - "step": 933 - }, - { - "epoch": 0.24800849707912906, - "grad_norm": 0.6019312344886419, - "learning_rate": 9.851744070037376e-06, - "loss": 0.082, - "step": 934 - }, - { - "epoch": 0.24827403080191185, - "grad_norm": 0.40743643859287837, - "learning_rate": 9.851212958895621e-06, - "loss": 0.032, - "step": 935 - }, - { - "epoch": 0.24853956452469464, - "grad_norm": 0.5172725409243906, - "learning_rate": 9.85068091249722e-06, - "loss": 0.063, - "step": 936 - }, - { - "epoch": 0.24880509824747743, - "grad_norm": 0.4756469334653875, - "learning_rate": 9.850147930944748e-06, - "loss": 0.0525, - "step": 937 - }, - { - "epoch": 0.24907063197026022, - "grad_norm": 0.4365306093001885, - "learning_rate": 9.849614014340959e-06, - "loss": 0.0432, - "step": 938 - }, - { - "epoch": 0.249336165693043, - "grad_norm": 0.6047425723994772, - "learning_rate": 9.849079162788782e-06, - "loss": 0.0752, - "step": 939 - }, - { - "epoch": 0.2496016994158258, - "grad_norm": 0.5680420397578201, - "learning_rate": 9.848543376391333e-06, - "loss": 0.0693, - "step": 940 - }, - { - "epoch": 0.2498672331386086, - "grad_norm": 0.8494400098616096, - "learning_rate": 9.848006655251904e-06, - "loss": 0.0393, - "step": 941 - }, - { - "epoch": 0.2501327668613914, - "grad_norm": 0.20707760476213186, - "learning_rate": 9.847468999473967e-06, - "loss": 0.0074, - "step": 942 - }, - { - "epoch": 0.25039830058417417, - "grad_norm": 0.5599934007032675, - "learning_rate": 9.846930409161179e-06, - "loss": 0.063, - "step": 943 - }, - { - "epoch": 0.250663834306957, - "grad_norm": 0.5738129646742892, - "learning_rate": 9.846390884417374e-06, - "loss": 0.0693, - "step": 944 - }, - { - "epoch": 0.25092936802973975, - "grad_norm": 0.3121394791058085, - "learning_rate": 9.845850425346563e-06, - "loss": 0.0093, - 
"step": 945 - }, - { - "epoch": 0.25119490175252257, - "grad_norm": 0.9012275305666327, - "learning_rate": 9.845309032052941e-06, - "loss": 0.1035, - "step": 946 - }, - { - "epoch": 0.2514604354753054, - "grad_norm": 0.8132551945926066, - "learning_rate": 9.844766704640883e-06, - "loss": 0.1035, - "step": 947 - }, - { - "epoch": 0.25172596919808815, - "grad_norm": 0.5059511213268292, - "learning_rate": 9.844223443214942e-06, - "loss": 0.0576, - "step": 948 - }, - { - "epoch": 0.25199150292087097, - "grad_norm": 0.5004469864929803, - "learning_rate": 9.843679247879856e-06, - "loss": 0.0579, - "step": 949 - }, - { - "epoch": 0.25225703664365373, - "grad_norm": 0.5325050038377581, - "learning_rate": 9.843134118740535e-06, - "loss": 0.063, - "step": 950 - }, - { - "epoch": 0.25252257036643655, - "grad_norm": 0.0018393961535569554, - "learning_rate": 9.842588055902076e-06, - "loss": 0.0, - "step": 951 - }, - { - "epoch": 0.2527881040892193, - "grad_norm": 0.29281234077102586, - "learning_rate": 9.842041059469752e-06, - "loss": 0.0117, - "step": 952 - }, - { - "epoch": 0.25305363781200213, - "grad_norm": 4.655974030022523, - "learning_rate": 9.841493129549018e-06, - "loss": 0.4082, - "step": 953 - }, - { - "epoch": 0.2533191715347849, - "grad_norm": 0.345197837145536, - "learning_rate": 9.840944266245511e-06, - "loss": 0.0354, - "step": 954 - }, - { - "epoch": 0.2535847052575677, - "grad_norm": 0.5150522007650085, - "learning_rate": 9.840394469665043e-06, - "loss": 0.032, - "step": 955 - }, - { - "epoch": 0.25385023898035053, - "grad_norm": 4.355092056512744, - "learning_rate": 9.839843739913606e-06, - "loss": 0.3438, - "step": 956 - }, - { - "epoch": 0.2541157727031333, - "grad_norm": 0.5533036225395148, - "learning_rate": 9.839292077097378e-06, - "loss": 0.063, - "step": 957 - }, - { - "epoch": 0.2543813064259161, - "grad_norm": 0.47245181913716905, - "learning_rate": 9.838739481322711e-06, - "loss": 0.032, - "step": 958 - }, - { - "epoch": 0.25464684014869887, - "grad_norm": 0.015777567489265145, - "learning_rate": 9.838185952696139e-06, - "loss": 0.0004, - "step": 959 - }, - { - "epoch": 0.2549123738714817, - "grad_norm": 0.3070632948499519, - "learning_rate": 9.837631491324379e-06, - "loss": 0.0317, - "step": 960 - }, - { - "epoch": 0.25517790759426445, - "grad_norm": 0.6417501675077917, - "learning_rate": 9.83707609731432e-06, - "loss": 0.0525, - "step": 961 - }, - { - "epoch": 0.25544344131704727, - "grad_norm": 1.103140251013234, - "learning_rate": 9.836519770773038e-06, - "loss": 0.0889, - "step": 962 - }, - { - "epoch": 0.25570897503983003, - "grad_norm": 0.36859782996784346, - "learning_rate": 9.835962511807786e-06, - "loss": 0.0391, - "step": 963 - }, - { - "epoch": 0.25597450876261285, - "grad_norm": 0.3995475012233262, - "learning_rate": 9.835404320525997e-06, - "loss": 0.0432, - "step": 964 - }, - { - "epoch": 0.25624004248539567, - "grad_norm": 0.6669653652071353, - "learning_rate": 9.834845197035282e-06, - "loss": 0.0752, - "step": 965 - }, - { - "epoch": 0.25650557620817843, - "grad_norm": 0.9463153272300644, - "learning_rate": 9.834285141443436e-06, - "loss": 0.082, - "step": 966 - }, - { - "epoch": 0.25677110993096125, - "grad_norm": 0.7472061020733514, - "learning_rate": 9.833724153858428e-06, - "loss": 0.0479, - "step": 967 - }, - { - "epoch": 0.257036643653744, - "grad_norm": 0.5964140946194706, - "learning_rate": 9.833162234388413e-06, - "loss": 0.0525, - "step": 968 - }, - { - "epoch": 0.25730217737652683, - "grad_norm": 0.69941976438153, - "learning_rate": 
9.832599383141722e-06, - "loss": 0.0957, - "step": 969 - }, - { - "epoch": 0.2575677110993096, - "grad_norm": 0.6938979357435278, - "learning_rate": 9.832035600226863e-06, - "loss": 0.0957, - "step": 970 - }, - { - "epoch": 0.2578332448220924, - "grad_norm": 1.6505906761311266, - "learning_rate": 9.83147088575253e-06, - "loss": 0.0957, - "step": 971 - }, - { - "epoch": 0.2580987785448752, - "grad_norm": 0.2571270449075492, - "learning_rate": 9.830905239827592e-06, - "loss": 0.0258, - "step": 972 - }, - { - "epoch": 0.258364312267658, - "grad_norm": 0.17362195702763106, - "learning_rate": 9.830338662561098e-06, - "loss": 0.0059, - "step": 973 - }, - { - "epoch": 0.2586298459904408, - "grad_norm": 0.3252416871542705, - "learning_rate": 9.82977115406228e-06, - "loss": 0.0354, - "step": 974 - }, - { - "epoch": 0.2588953797132236, - "grad_norm": 0.7560895172752308, - "learning_rate": 9.829202714440544e-06, - "loss": 0.0752, - "step": 975 - }, - { - "epoch": 0.2591609134360064, - "grad_norm": 0.8515843549515478, - "learning_rate": 9.828633343805482e-06, - "loss": 0.032, - "step": 976 - }, - { - "epoch": 0.25942644715878915, - "grad_norm": 0.06839867591689461, - "learning_rate": 9.828063042266858e-06, - "loss": 0.0018, - "step": 977 - }, - { - "epoch": 0.25969198088157197, - "grad_norm": 0.2973877835283162, - "learning_rate": 9.827491809934621e-06, - "loss": 0.0317, - "step": 978 - }, - { - "epoch": 0.25995751460435473, - "grad_norm": 0.050237992088855266, - "learning_rate": 9.826919646918899e-06, - "loss": 0.0014, - "step": 979 - }, - { - "epoch": 0.26022304832713755, - "grad_norm": 0.6244940971022646, - "learning_rate": 9.826346553329997e-06, - "loss": 0.063, - "step": 980 - }, - { - "epoch": 0.2604885820499203, - "grad_norm": 0.3832845757482919, - "learning_rate": 9.825772529278402e-06, - "loss": 0.0393, - "step": 981 - }, - { - "epoch": 0.26075411577270313, - "grad_norm": 0.5607756019076436, - "learning_rate": 9.825197574874775e-06, - "loss": 0.0288, - "step": 982 - }, - { - "epoch": 0.26101964949548595, - "grad_norm": 0.3614253694825283, - "learning_rate": 9.824621690229966e-06, - "loss": 0.0354, - "step": 983 - }, - { - "epoch": 0.2612851832182687, - "grad_norm": 1.7457333721108772, - "learning_rate": 9.824044875454995e-06, - "loss": 0.0815, - "step": 984 - }, - { - "epoch": 0.26155071694105153, - "grad_norm": 0.39656420534134673, - "learning_rate": 9.823467130661068e-06, - "loss": 0.0354, - "step": 985 - }, - { - "epoch": 0.2618162506638343, - "grad_norm": 0.4441491434154354, - "learning_rate": 9.822888455959564e-06, - "loss": 0.0391, - "step": 986 - }, - { - "epoch": 0.2620817843866171, - "grad_norm": 0.9091935280621182, - "learning_rate": 9.822308851462046e-06, - "loss": 0.063, - "step": 987 - }, - { - "epoch": 0.2623473181093999, - "grad_norm": 0.2809293161710395, - "learning_rate": 9.821728317280257e-06, - "loss": 0.0258, - "step": 988 - }, - { - "epoch": 0.2626128518321827, - "grad_norm": 0.25690503502749795, - "learning_rate": 9.821146853526116e-06, - "loss": 0.0231, - "step": 989 - }, - { - "epoch": 0.26287838555496545, - "grad_norm": 0.9504240651916706, - "learning_rate": 9.820564460311719e-06, - "loss": 0.1035, - "step": 990 - }, - { - "epoch": 0.2631439192777483, - "grad_norm": 1.062595886433007, - "learning_rate": 9.81998113774935e-06, - "loss": 0.1553, - "step": 991 - }, - { - "epoch": 0.2634094530005311, - "grad_norm": 0.08306616186641885, - "learning_rate": 9.819396885951462e-06, - "loss": 0.0022, - "step": 992 - }, - { - "epoch": 0.26367498672331385, - "grad_norm": 
0.00883581144917326, - "learning_rate": 9.818811705030696e-06, - "loss": 0.0002, - "step": 993 - }, - { - "epoch": 0.26394052044609667, - "grad_norm": 0.1998801649088699, - "learning_rate": 9.818225595099864e-06, - "loss": 0.0165, - "step": 994 - }, - { - "epoch": 0.26420605416887943, - "grad_norm": 0.8818331545317252, - "learning_rate": 9.817638556271964e-06, - "loss": 0.1367, - "step": 995 - }, - { - "epoch": 0.26447158789166225, - "grad_norm": 1.6642224007144948, - "learning_rate": 9.81705058866017e-06, - "loss": 0.0752, - "step": 996 - }, - { - "epoch": 0.264737121614445, - "grad_norm": 0.719955269890806, - "learning_rate": 9.816461692377833e-06, - "loss": 0.0317, - "step": 997 - }, - { - "epoch": 0.26500265533722783, - "grad_norm": 0.7209406456988616, - "learning_rate": 9.815871867538486e-06, - "loss": 0.0476, - "step": 998 - }, - { - "epoch": 0.2652681890600106, - "grad_norm": 0.814061894388583, - "learning_rate": 9.815281114255841e-06, - "loss": 0.063, - "step": 999 - }, - { - "epoch": 0.2655337227827934, - "grad_norm": 0.9966166060448677, - "learning_rate": 9.81468943264379e-06, - "loss": 0.1035, - "step": 1000 - }, - { - "epoch": 0.26579925650557623, - "grad_norm": 0.22419755071513356, - "learning_rate": 9.814096822816398e-06, - "loss": 0.0061, - "step": 1001 - }, - { - "epoch": 0.266064790228359, - "grad_norm": 0.2754600582043281, - "learning_rate": 9.813503284887916e-06, - "loss": 0.0258, - "step": 1002 - }, - { - "epoch": 0.2663303239511418, - "grad_norm": 1.1239681806680273, - "learning_rate": 9.812908818972771e-06, - "loss": 0.0889, - "step": 1003 - }, - { - "epoch": 0.2665958576739246, - "grad_norm": 0.5803488339467218, - "learning_rate": 9.81231342518557e-06, - "loss": 0.0476, - "step": 1004 - }, - { - "epoch": 0.2668613913967074, - "grad_norm": 0.35519796165827866, - "learning_rate": 9.811717103641096e-06, - "loss": 0.0286, - "step": 1005 - }, - { - "epoch": 0.26712692511949016, - "grad_norm": 2.462660544438473, - "learning_rate": 9.811119854454313e-06, - "loss": 0.0957, - "step": 1006 - }, - { - "epoch": 0.267392458842273, - "grad_norm": 0.0859060653931223, - "learning_rate": 9.810521677740363e-06, - "loss": 0.0022, - "step": 1007 - }, - { - "epoch": 0.26765799256505574, - "grad_norm": 0.21988261467431752, - "learning_rate": 9.80992257361457e-06, - "loss": 0.0206, - "step": 1008 - }, - { - "epoch": 0.26792352628783855, - "grad_norm": 0.37935970444692796, - "learning_rate": 9.809322542192432e-06, - "loss": 0.0317, - "step": 1009 - }, - { - "epoch": 0.26818906001062137, - "grad_norm": 0.3156766850538133, - "learning_rate": 9.808721583589628e-06, - "loss": 0.0286, - "step": 1010 - }, - { - "epoch": 0.26845459373340413, - "grad_norm": 0.17021294848589957, - "learning_rate": 9.808119697922016e-06, - "loss": 0.0146, - "step": 1011 - }, - { - "epoch": 0.26872012745618695, - "grad_norm": 1.3132137393142103, - "learning_rate": 9.807516885305634e-06, - "loss": 0.1641, - "step": 1012 - }, - { - "epoch": 0.2689856611789697, - "grad_norm": 0.14707242483307284, - "learning_rate": 9.806913145856697e-06, - "loss": 0.0115, - "step": 1013 - }, - { - "epoch": 0.26925119490175253, - "grad_norm": 3.511760120759758, - "learning_rate": 9.806308479691595e-06, - "loss": 0.2061, - "step": 1014 - }, - { - "epoch": 0.2695167286245353, - "grad_norm": 0.8061004291701489, - "learning_rate": 9.805702886926904e-06, - "loss": 0.0261, - "step": 1015 - }, - { - "epoch": 0.2697822623473181, - "grad_norm": 0.2234690978519963, - "learning_rate": 9.805096367679375e-06, - "loss": 0.0184, - "step": 1016 - }, - { 
- "epoch": 0.2700477960701009, - "grad_norm": 0.44975353193446077, - "learning_rate": 9.804488922065937e-06, - "loss": 0.0258, - "step": 1017 - }, - { - "epoch": 0.2703133297928837, - "grad_norm": 0.6109060230396508, - "learning_rate": 9.8038805502037e-06, - "loss": 0.0354, - "step": 1018 - }, - { - "epoch": 0.2705788635156665, - "grad_norm": 0.19828294757841264, - "learning_rate": 9.803271252209947e-06, - "loss": 0.0184, - "step": 1019 - }, - { - "epoch": 0.2708443972384493, - "grad_norm": 0.25772979651426625, - "learning_rate": 9.802661028202147e-06, - "loss": 0.0165, - "step": 1020 - }, - { - "epoch": 0.2711099309612321, - "grad_norm": 1.1839479936613346, - "learning_rate": 9.802049878297942e-06, - "loss": 0.0525, - "step": 1021 - }, - { - "epoch": 0.27137546468401486, - "grad_norm": 0.31430038721241105, - "learning_rate": 9.801437802615156e-06, - "loss": 0.0082, - "step": 1022 - }, - { - "epoch": 0.2716409984067977, - "grad_norm": 1.1023674078125263, - "learning_rate": 9.80082480127179e-06, - "loss": 0.1191, - "step": 1023 - }, - { - "epoch": 0.27190653212958044, - "grad_norm": 0.22162338231371304, - "learning_rate": 9.800210874386022e-06, - "loss": 0.0117, - "step": 1024 - }, - { - "epoch": 0.27217206585236325, - "grad_norm": 0.020666537436477367, - "learning_rate": 9.79959602207621e-06, - "loss": 0.0004, - "step": 1025 - }, - { - "epoch": 0.272437599575146, - "grad_norm": 0.1640864385317412, - "learning_rate": 9.798980244460892e-06, - "loss": 0.0131, - "step": 1026 - }, - { - "epoch": 0.27270313329792883, - "grad_norm": 1.33096980689375, - "learning_rate": 9.798363541658782e-06, - "loss": 0.1455, - "step": 1027 - }, - { - "epoch": 0.27296866702071165, - "grad_norm": 0.0030561000010345305, - "learning_rate": 9.797745913788772e-06, - "loss": 0.0001, - "step": 1028 - }, - { - "epoch": 0.2732342007434944, - "grad_norm": 0.3353151513139151, - "learning_rate": 9.797127360969933e-06, - "loss": 0.0093, - "step": 1029 - }, - { - "epoch": 0.27349973446627723, - "grad_norm": 0.24279565510695517, - "learning_rate": 9.796507883321519e-06, - "loss": 0.0165, - "step": 1030 - }, - { - "epoch": 0.27376526818906, - "grad_norm": 0.6409923619811838, - "learning_rate": 9.795887480962951e-06, - "loss": 0.0206, - "step": 1031 - }, - { - "epoch": 0.2740308019118428, - "grad_norm": 0.1865018425314411, - "learning_rate": 9.79526615401384e-06, - "loss": 0.0146, - "step": 1032 - }, - { - "epoch": 0.2742963356346256, - "grad_norm": 0.5691428064000503, - "learning_rate": 9.79464390259397e-06, - "loss": 0.0354, - "step": 1033 - }, - { - "epoch": 0.2745618693574084, - "grad_norm": 1.0718553071053665, - "learning_rate": 9.794020726823303e-06, - "loss": 0.1934, - "step": 1034 - }, - { - "epoch": 0.27482740308019116, - "grad_norm": 1.338016801862817, - "learning_rate": 9.79339662682198e-06, - "loss": 0.1279, - "step": 1035 - }, - { - "epoch": 0.275092936802974, - "grad_norm": 0.3434764844856958, - "learning_rate": 9.79277160271032e-06, - "loss": 0.0229, - "step": 1036 - }, - { - "epoch": 0.2753584705257568, - "grad_norm": 0.9536560542927063, - "learning_rate": 9.79214565460882e-06, - "loss": 0.1367, - "step": 1037 - }, - { - "epoch": 0.27562400424853956, - "grad_norm": 0.2777647682007733, - "learning_rate": 9.791518782638157e-06, - "loss": 0.0229, - "step": 1038 - }, - { - "epoch": 0.2758895379713224, - "grad_norm": 2.8846935746827778, - "learning_rate": 9.790890986919184e-06, - "loss": 0.1035, - "step": 1039 - }, - { - "epoch": 0.27615507169410514, - "grad_norm": 0.9622843736990357, - "learning_rate": 
9.79026226757293e-06, - "loss": 0.1455, - "step": 1040 - }, - { - "epoch": 0.27642060541688795, - "grad_norm": 1.3517743387903762, - "learning_rate": 9.789632624720608e-06, - "loss": 0.1191, - "step": 1041 - }, - { - "epoch": 0.2766861391396707, - "grad_norm": 0.3315039166067069, - "learning_rate": 9.789002058483604e-06, - "loss": 0.0286, - "step": 1042 - }, - { - "epoch": 0.27695167286245354, - "grad_norm": 0.0228123965731073, - "learning_rate": 9.788370568983483e-06, - "loss": 0.0005, - "step": 1043 - }, - { - "epoch": 0.2772172065852363, - "grad_norm": 1.4788814320601762, - "learning_rate": 9.787738156341992e-06, - "loss": 0.0393, - "step": 1044 - }, - { - "epoch": 0.2774827403080191, - "grad_norm": 1.146212270628032, - "learning_rate": 9.78710482068105e-06, - "loss": 0.0649, - "step": 1045 - }, - { - "epoch": 0.27774827403080193, - "grad_norm": 0.5981452393985338, - "learning_rate": 9.786470562122758e-06, - "loss": 0.0391, - "step": 1046 - }, - { - "epoch": 0.2780138077535847, - "grad_norm": 0.10195539381598936, - "learning_rate": 9.78583538078939e-06, - "loss": 0.0026, - "step": 1047 - }, - { - "epoch": 0.2782793414763675, - "grad_norm": 0.3185752679243096, - "learning_rate": 9.785199276803408e-06, - "loss": 0.0317, - "step": 1048 - }, - { - "epoch": 0.2785448751991503, - "grad_norm": 0.3967640673554989, - "learning_rate": 9.78456225028744e-06, - "loss": 0.0432, - "step": 1049 - }, - { - "epoch": 0.2788104089219331, - "grad_norm": 0.7810124894163271, - "learning_rate": 9.783924301364297e-06, - "loss": 0.0184, - "step": 1050 - }, - { - "epoch": 0.27907594264471586, - "grad_norm": 0.08506478322426231, - "learning_rate": 9.783285430156972e-06, - "loss": 0.0022, - "step": 1051 - }, - { - "epoch": 0.2793414763674987, - "grad_norm": 0.7008776117228477, - "learning_rate": 9.782645636788628e-06, - "loss": 0.0476, - "step": 1052 - }, - { - "epoch": 0.27960701009028144, - "grad_norm": 0.3314725833848813, - "learning_rate": 9.782004921382612e-06, - "loss": 0.0354, - "step": 1053 - }, - { - "epoch": 0.27987254381306426, - "grad_norm": 0.03864368830561217, - "learning_rate": 9.781363284062446e-06, - "loss": 0.0008, - "step": 1054 - }, - { - "epoch": 0.2801380775358471, - "grad_norm": 0.5162549754187832, - "learning_rate": 9.78072072495183e-06, - "loss": 0.0479, - "step": 1055 - }, - { - "epoch": 0.28040361125862984, - "grad_norm": 0.5385016039999986, - "learning_rate": 9.780077244174642e-06, - "loss": 0.0184, - "step": 1056 - }, - { - "epoch": 0.28066914498141265, - "grad_norm": 0.39671754689268834, - "learning_rate": 9.779432841854937e-06, - "loss": 0.0391, - "step": 1057 - }, - { - "epoch": 0.2809346787041954, - "grad_norm": 0.3533088509471452, - "learning_rate": 9.778787518116948e-06, - "loss": 0.0354, - "step": 1058 - }, - { - "epoch": 0.28120021242697824, - "grad_norm": 0.8001393279845374, - "learning_rate": 9.778141273085085e-06, - "loss": 0.0752, - "step": 1059 - }, - { - "epoch": 0.281465746149761, - "grad_norm": 0.9390525477977365, - "learning_rate": 9.777494106883938e-06, - "loss": 0.063, - "step": 1060 - }, - { - "epoch": 0.2817312798725438, - "grad_norm": 0.09034297328274829, - "learning_rate": 9.776846019638273e-06, - "loss": 0.0015, - "step": 1061 - }, - { - "epoch": 0.28199681359532663, - "grad_norm": 0.3397054585962954, - "learning_rate": 9.776197011473034e-06, - "loss": 0.0073, - "step": 1062 - }, - { - "epoch": 0.2822623473181094, - "grad_norm": 0.8269003857531309, - "learning_rate": 9.775547082513339e-06, - "loss": 0.0391, - "step": 1063 - }, - { - "epoch": 0.2825278810408922, - 
"grad_norm": 11.321097522244969, - "learning_rate": 9.77489623288449e-06, - "loss": 0.3203, - "step": 1064 - }, - { - "epoch": 0.282793414763675, - "grad_norm": 0.3961581646443348, - "learning_rate": 9.774244462711962e-06, - "loss": 0.0354, - "step": 1065 - }, - { - "epoch": 0.2830589484864578, - "grad_norm": 0.9979585458391305, - "learning_rate": 9.773591772121409e-06, - "loss": 0.0815, - "step": 1066 - }, - { - "epoch": 0.28332448220924056, - "grad_norm": 0.24274949520810776, - "learning_rate": 9.77293816123866e-06, - "loss": 0.0051, - "step": 1067 - }, - { - "epoch": 0.2835900159320234, - "grad_norm": 5.344035497507486, - "learning_rate": 9.772283630189727e-06, - "loss": 0.2773, - "step": 1068 - }, - { - "epoch": 0.28385554965480614, - "grad_norm": 0.45994201443551747, - "learning_rate": 9.771628179100793e-06, - "loss": 0.0391, - "step": 1069 - }, - { - "epoch": 0.28412108337758896, - "grad_norm": 1.055834204920163, - "learning_rate": 9.770971808098221e-06, - "loss": 0.0525, - "step": 1070 - }, - { - "epoch": 0.2843866171003718, - "grad_norm": 0.5618926736078067, - "learning_rate": 9.770314517308554e-06, - "loss": 0.0317, - "step": 1071 - }, - { - "epoch": 0.28465215082315454, - "grad_norm": 0.24186740542409346, - "learning_rate": 9.76965630685851e-06, - "loss": 0.0206, - "step": 1072 - }, - { - "epoch": 0.28491768454593736, - "grad_norm": 0.9072361060971639, - "learning_rate": 9.768997176874982e-06, - "loss": 0.1191, - "step": 1073 - }, - { - "epoch": 0.2851832182687201, - "grad_norm": 1.1218511212461664, - "learning_rate": 9.768337127485044e-06, - "loss": 0.0576, - "step": 1074 - }, - { - "epoch": 0.28544875199150294, - "grad_norm": 0.2496818724866189, - "learning_rate": 9.767676158815944e-06, - "loss": 0.0073, - "step": 1075 - }, - { - "epoch": 0.2857142857142857, - "grad_norm": 1.5823043624366155, - "learning_rate": 9.76701427099511e-06, - "loss": 0.0688, - "step": 1076 - }, - { - "epoch": 0.2859798194370685, - "grad_norm": 2.327890956163168, - "learning_rate": 9.766351464150146e-06, - "loss": 0.1738, - "step": 1077 - }, - { - "epoch": 0.2862453531598513, - "grad_norm": 1.8266555516390197, - "learning_rate": 9.765687738408834e-06, - "loss": 0.0957, - "step": 1078 - }, - { - "epoch": 0.2865108868826341, - "grad_norm": 0.21974129050224078, - "learning_rate": 9.765023093899133e-06, - "loss": 0.0184, - "step": 1079 - }, - { - "epoch": 0.2867764206054169, - "grad_norm": 1.213711313563219, - "learning_rate": 9.764357530749178e-06, - "loss": 0.0576, - "step": 1080 - }, - { - "epoch": 0.2870419543281997, - "grad_norm": 2.075553937576423, - "learning_rate": 9.76369104908728e-06, - "loss": 0.0957, - "step": 1081 - }, - { - "epoch": 0.2873074880509825, - "grad_norm": 0.927208051761271, - "learning_rate": 9.76302364904193e-06, - "loss": 0.1455, - "step": 1082 - }, - { - "epoch": 0.28757302177376526, - "grad_norm": 0.7356955614292838, - "learning_rate": 9.762355330741797e-06, - "loss": 0.0231, - "step": 1083 - }, - { - "epoch": 0.2878385554965481, - "grad_norm": 0.7816526361217149, - "learning_rate": 9.761686094315721e-06, - "loss": 0.0432, - "step": 1084 - }, - { - "epoch": 0.28810408921933084, - "grad_norm": 1.7402446527684008, - "learning_rate": 9.761015939892727e-06, - "loss": 0.0957, - "step": 1085 - }, - { - "epoch": 0.28836962294211366, - "grad_norm": 0.05762993692685398, - "learning_rate": 9.760344867602008e-06, - "loss": 0.0003, - "step": 1086 - }, - { - "epoch": 0.2886351566648964, - "grad_norm": 0.024188141344319784, - "learning_rate": 9.759672877572943e-06, - "loss": 0.0005, - 
"step": 1087 - }, - { - "epoch": 0.28890069038767924, - "grad_norm": 0.13702148519441235, - "learning_rate": 9.758999969935083e-06, - "loss": 0.0017, - "step": 1088 - }, - { - "epoch": 0.28916622411046206, - "grad_norm": 0.23185659329270913, - "learning_rate": 9.758326144818155e-06, - "loss": 0.0206, - "step": 1089 - }, - { - "epoch": 0.2894317578332448, - "grad_norm": 2.020274635548338, - "learning_rate": 9.757651402352067e-06, - "loss": 0.1191, - "step": 1090 - }, - { - "epoch": 0.28969729155602764, - "grad_norm": 0.23791843579850122, - "learning_rate": 9.756975742666899e-06, - "loss": 0.0231, - "step": 1091 - }, - { - "epoch": 0.2899628252788104, - "grad_norm": 1.5926155722260418, - "learning_rate": 9.756299165892911e-06, - "loss": 0.0576, - "step": 1092 - }, - { - "epoch": 0.2902283590015932, - "grad_norm": 0.29759291019807677, - "learning_rate": 9.755621672160538e-06, - "loss": 0.0258, - "step": 1093 - }, - { - "epoch": 0.290493892724376, - "grad_norm": 0.3257843081640135, - "learning_rate": 9.754943261600396e-06, - "loss": 0.0206, - "step": 1094 - }, - { - "epoch": 0.2907594264471588, - "grad_norm": 0.24127331860074858, - "learning_rate": 9.754263934343272e-06, - "loss": 0.0206, - "step": 1095 - }, - { - "epoch": 0.29102496016994156, - "grad_norm": 0.05613263082076969, - "learning_rate": 9.753583690520132e-06, - "loss": 0.0007, - "step": 1096 - }, - { - "epoch": 0.2912904938927244, - "grad_norm": 0.15571605076216163, - "learning_rate": 9.75290253026212e-06, - "loss": 0.0131, - "step": 1097 - }, - { - "epoch": 0.2915560276155072, - "grad_norm": 0.9548153892509748, - "learning_rate": 9.752220453700556e-06, - "loss": 0.1455, - "step": 1098 - }, - { - "epoch": 0.29182156133828996, - "grad_norm": 0.03386757581041895, - "learning_rate": 9.751537460966935e-06, - "loss": 0.0008, - "step": 1099 - }, - { - "epoch": 0.2920870950610728, - "grad_norm": 1.0372337414427726, - "learning_rate": 9.750853552192931e-06, - "loss": 0.0889, - "step": 1100 - }, - { - "epoch": 0.29235262878385554, - "grad_norm": 0.7281567499028863, - "learning_rate": 9.750168727510394e-06, - "loss": 0.0286, - "step": 1101 - }, - { - "epoch": 0.29261816250663836, - "grad_norm": 0.6380207840024593, - "learning_rate": 9.749482987051349e-06, - "loss": 0.0391, - "step": 1102 - }, - { - "epoch": 0.2928836962294211, - "grad_norm": 0.9729212672078682, - "learning_rate": 9.748796330947998e-06, - "loss": 0.0889, - "step": 1103 - }, - { - "epoch": 0.29314922995220394, - "grad_norm": 1.0018172347011092, - "learning_rate": 9.748108759332722e-06, - "loss": 0.0815, - "step": 1104 - }, - { - "epoch": 0.2934147636749867, - "grad_norm": 0.004923265696314747, - "learning_rate": 9.747420272338078e-06, - "loss": 0.0001, - "step": 1105 - }, - { - "epoch": 0.2936802973977695, - "grad_norm": 0.022021697905026522, - "learning_rate": 9.746730870096794e-06, - "loss": 0.0005, - "step": 1106 - }, - { - "epoch": 0.29394583112055234, - "grad_norm": 1.6710028936610164, - "learning_rate": 9.74604055274178e-06, - "loss": 0.0815, - "step": 1107 - }, - { - "epoch": 0.2942113648433351, - "grad_norm": 1.3634452754585142, - "learning_rate": 9.745349320406123e-06, - "loss": 0.0432, - "step": 1108 - }, - { - "epoch": 0.2944768985661179, - "grad_norm": 0.8146412207177869, - "learning_rate": 9.744657173223082e-06, - "loss": 0.1113, - "step": 1109 - }, - { - "epoch": 0.2947424322889007, - "grad_norm": 1.2206960698761407, - "learning_rate": 9.743964111326098e-06, - "loss": 0.0688, - "step": 1110 - }, - { - "epoch": 0.2950079660116835, - "grad_norm": 
0.042821624435204764, - "learning_rate": 9.743270134848782e-06, - "loss": 0.0004, - "step": 1111 - }, - { - "epoch": 0.29527349973446626, - "grad_norm": 0.6103965509662032, - "learning_rate": 9.742575243924925e-06, - "loss": 0.0206, - "step": 1112 - }, - { - "epoch": 0.2955390334572491, - "grad_norm": 0.31275659015640817, - "learning_rate": 9.741879438688495e-06, - "loss": 0.0317, - "step": 1113 - }, - { - "epoch": 0.29580456718003184, - "grad_norm": 0.7391124276417567, - "learning_rate": 9.741182719273636e-06, - "loss": 0.0815, - "step": 1114 - }, - { - "epoch": 0.29607010090281466, - "grad_norm": 1.8465376752624503, - "learning_rate": 9.740485085814665e-06, - "loss": 0.1113, - "step": 1115 - }, - { - "epoch": 0.2963356346255975, - "grad_norm": 0.8365414433585058, - "learning_rate": 9.739786538446076e-06, - "loss": 0.0576, - "step": 1116 - }, - { - "epoch": 0.29660116834838024, - "grad_norm": 0.7185562716195945, - "learning_rate": 9.739087077302547e-06, - "loss": 0.063, - "step": 1117 - }, - { - "epoch": 0.29686670207116306, - "grad_norm": 0.24881622042316806, - "learning_rate": 9.738386702518918e-06, - "loss": 0.0082, - "step": 1118 - }, - { - "epoch": 0.2971322357939458, - "grad_norm": 0.82204521512429, - "learning_rate": 9.737685414230218e-06, - "loss": 0.0476, - "step": 1119 - }, - { - "epoch": 0.29739776951672864, - "grad_norm": 0.4442702937110363, - "learning_rate": 9.736983212571646e-06, - "loss": 0.0476, - "step": 1120 - }, - { - "epoch": 0.2976633032395114, - "grad_norm": 0.017044609666170246, - "learning_rate": 9.73628009767858e-06, - "loss": 0.0004, - "step": 1121 - }, - { - "epoch": 0.2979288369622942, - "grad_norm": 1.2421240055318807, - "learning_rate": 9.73557606968657e-06, - "loss": 0.1035, - "step": 1122 - }, - { - "epoch": 0.298194370685077, - "grad_norm": 0.048110722657898286, - "learning_rate": 9.734871128731342e-06, - "loss": 0.0006, - "step": 1123 - }, - { - "epoch": 0.2984599044078598, - "grad_norm": 4.6600239173016185, - "learning_rate": 9.734165274948806e-06, - "loss": 0.5469, - "step": 1124 - }, - { - "epoch": 0.2987254381306426, - "grad_norm": 0.7244351120756174, - "learning_rate": 9.733458508475038e-06, - "loss": 0.0576, - "step": 1125 - }, - { - "epoch": 0.2989909718534254, - "grad_norm": 0.004283320379219228, - "learning_rate": 9.732750829446295e-06, - "loss": 0.0001, - "step": 1126 - }, - { - "epoch": 0.2992565055762082, - "grad_norm": 0.7629355667225856, - "learning_rate": 9.73204223799901e-06, - "loss": 0.1191, - "step": 1127 - }, - { - "epoch": 0.29952203929899096, - "grad_norm": 0.6208152245082953, - "learning_rate": 9.731332734269791e-06, - "loss": 0.0479, - "step": 1128 - }, - { - "epoch": 0.2997875730217738, - "grad_norm": 0.6038756411851064, - "learning_rate": 9.730622318395422e-06, - "loss": 0.0576, - "step": 1129 - }, - { - "epoch": 0.30005310674455654, - "grad_norm": 0.6098024815769895, - "learning_rate": 9.729910990512862e-06, - "loss": 0.0752, - "step": 1130 - }, - { - "epoch": 0.30031864046733936, - "grad_norm": 0.6672028744949045, - "learning_rate": 9.729198750759248e-06, - "loss": 0.0688, - "step": 1131 - }, - { - "epoch": 0.3005841741901221, - "grad_norm": 0.42173926069039175, - "learning_rate": 9.72848559927189e-06, - "loss": 0.0432, - "step": 1132 - }, - { - "epoch": 0.30084970791290494, - "grad_norm": 0.4224982111516302, - "learning_rate": 9.727771536188275e-06, - "loss": 0.0476, - "step": 1133 - }, - { - "epoch": 0.30111524163568776, - "grad_norm": 0.818705810028351, - "learning_rate": 9.727056561646067e-06, - "loss": 0.0815, - "step": 
1134 - }, - { - "epoch": 0.3013807753584705, - "grad_norm": 0.4673281241576781, - "learning_rate": 9.726340675783105e-06, - "loss": 0.0391, - "step": 1135 - }, - { - "epoch": 0.30164630908125334, - "grad_norm": 0.6576605686901854, - "learning_rate": 9.725623878737402e-06, - "loss": 0.0752, - "step": 1136 - }, - { - "epoch": 0.3019118428040361, - "grad_norm": 0.5547502104537158, - "learning_rate": 9.724906170647151e-06, - "loss": 0.0479, - "step": 1137 - }, - { - "epoch": 0.3021773765268189, - "grad_norm": 0.35579660396826157, - "learning_rate": 9.724187551650712e-06, - "loss": 0.0132, - "step": 1138 - }, - { - "epoch": 0.3024429102496017, - "grad_norm": 0.4736433372724878, - "learning_rate": 9.723468021886632e-06, - "loss": 0.0525, - "step": 1139 - }, - { - "epoch": 0.3027084439723845, - "grad_norm": 0.02789314147339348, - "learning_rate": 9.722747581493625e-06, - "loss": 0.0007, - "step": 1140 - }, - { - "epoch": 0.30297397769516726, - "grad_norm": 0.21925197694495466, - "learning_rate": 9.722026230610584e-06, - "loss": 0.0206, - "step": 1141 - }, - { - "epoch": 0.3032395114179501, - "grad_norm": 0.010099703111864153, - "learning_rate": 9.721303969376576e-06, - "loss": 0.0003, - "step": 1142 - }, - { - "epoch": 0.3035050451407329, - "grad_norm": 0.31933908172577696, - "learning_rate": 9.720580797930845e-06, - "loss": 0.0354, - "step": 1143 - }, - { - "epoch": 0.30377057886351566, - "grad_norm": 1.1192764070316166, - "learning_rate": 9.719856716412813e-06, - "loss": 0.0889, - "step": 1144 - }, - { - "epoch": 0.3040361125862985, - "grad_norm": 0.6684300524852204, - "learning_rate": 9.71913172496207e-06, - "loss": 0.1113, - "step": 1145 - }, - { - "epoch": 0.30430164630908124, - "grad_norm": 0.29373443475884653, - "learning_rate": 9.718405823718386e-06, - "loss": 0.0317, - "step": 1146 - }, - { - "epoch": 0.30456718003186406, - "grad_norm": 0.3658446950988331, - "learning_rate": 9.71767901282171e-06, - "loss": 0.0432, - "step": 1147 - }, - { - "epoch": 0.3048327137546468, - "grad_norm": 0.567931719494275, - "learning_rate": 9.716951292412158e-06, - "loss": 0.0432, - "step": 1148 - }, - { - "epoch": 0.30509824747742964, - "grad_norm": 0.06610736970007457, - "learning_rate": 9.71622266263003e-06, - "loss": 0.0016, - "step": 1149 - }, - { - "epoch": 0.3053637812002124, - "grad_norm": 0.24553686436218464, - "learning_rate": 9.715493123615793e-06, - "loss": 0.0258, - "step": 1150 - }, - { - "epoch": 0.3056293149229952, - "grad_norm": 0.3986171599233714, - "learning_rate": 9.714762675510098e-06, - "loss": 0.0432, - "step": 1151 - }, - { - "epoch": 0.30589484864577804, - "grad_norm": 0.03893798946143774, - "learning_rate": 9.714031318453763e-06, - "loss": 0.001, - "step": 1152 - }, - { - "epoch": 0.3061603823685608, - "grad_norm": 0.30020768346470134, - "learning_rate": 9.713299052587788e-06, - "loss": 0.0066, - "step": 1153 - }, - { - "epoch": 0.3064259160913436, - "grad_norm": 0.7987344718538759, - "learning_rate": 9.712565878053344e-06, - "loss": 0.1191, - "step": 1154 - }, - { - "epoch": 0.3066914498141264, - "grad_norm": 0.2734737664568203, - "learning_rate": 9.711831794991777e-06, - "loss": 0.0286, - "step": 1155 - }, - { - "epoch": 0.3069569835369092, - "grad_norm": 0.9407886341613316, - "learning_rate": 9.711096803544612e-06, - "loss": 0.0206, - "step": 1156 - }, - { - "epoch": 0.30722251725969196, - "grad_norm": 0.2662920174573857, - "learning_rate": 9.710360903853545e-06, - "loss": 0.0286, - "step": 1157 - }, - { - "epoch": 0.3074880509824748, - "grad_norm": 0.40813353628480753, - 
"learning_rate": 9.70962409606045e-06, - "loss": 0.0432, - "step": 1158 - }, - { - "epoch": 0.30775358470525754, - "grad_norm": 0.33115218272803004, - "learning_rate": 9.708886380307373e-06, - "loss": 0.0286, - "step": 1159 - }, - { - "epoch": 0.30801911842804036, - "grad_norm": 2.752464726652913, - "learning_rate": 9.70814775673654e-06, - "loss": 0.1113, - "step": 1160 - }, - { - "epoch": 0.3082846521508232, - "grad_norm": 0.21100244178718114, - "learning_rate": 9.707408225490343e-06, - "loss": 0.0206, - "step": 1161 - }, - { - "epoch": 0.30855018587360594, - "grad_norm": 0.273597747914562, - "learning_rate": 9.706667786711362e-06, - "loss": 0.0286, - "step": 1162 - }, - { - "epoch": 0.30881571959638876, - "grad_norm": 0.5455974875060108, - "learning_rate": 9.705926440542342e-06, - "loss": 0.0476, - "step": 1163 - }, - { - "epoch": 0.3090812533191715, - "grad_norm": 0.04996096845556894, - "learning_rate": 9.705184187126205e-06, - "loss": 0.001, - "step": 1164 - }, - { - "epoch": 0.30934678704195434, - "grad_norm": 0.7661826955072533, - "learning_rate": 9.704441026606047e-06, - "loss": 0.0957, - "step": 1165 - }, - { - "epoch": 0.3096123207647371, - "grad_norm": 0.22898596466557175, - "learning_rate": 9.703696959125144e-06, - "loss": 0.0038, - "step": 1166 - }, - { - "epoch": 0.3098778544875199, - "grad_norm": 1.0310557558318094, - "learning_rate": 9.702951984826943e-06, - "loss": 0.1113, - "step": 1167 - }, - { - "epoch": 0.3101433882103027, - "grad_norm": 0.9506415865468816, - "learning_rate": 9.702206103855065e-06, - "loss": 0.1367, - "step": 1168 - }, - { - "epoch": 0.3104089219330855, - "grad_norm": 0.9519356526243661, - "learning_rate": 9.701459316353309e-06, - "loss": 0.0752, - "step": 1169 - }, - { - "epoch": 0.3106744556558683, - "grad_norm": 0.4403895621363803, - "learning_rate": 9.700711622465645e-06, - "loss": 0.0391, - "step": 1170 - }, - { - "epoch": 0.3109399893786511, - "grad_norm": 0.8734691695278628, - "learning_rate": 9.699963022336219e-06, - "loss": 0.0815, - "step": 1171 - }, - { - "epoch": 0.3112055231014339, - "grad_norm": 0.03692544587443828, - "learning_rate": 9.69921351610935e-06, - "loss": 0.001, - "step": 1172 - }, - { - "epoch": 0.31147105682421666, - "grad_norm": 0.38902223564103433, - "learning_rate": 9.698463103929542e-06, - "loss": 0.0354, - "step": 1173 - }, - { - "epoch": 0.3117365905469995, - "grad_norm": 0.4322873123614092, - "learning_rate": 9.697711785941459e-06, - "loss": 0.0391, - "step": 1174 - }, - { - "epoch": 0.31200212426978224, - "grad_norm": 1.04700194984681, - "learning_rate": 9.696959562289949e-06, - "loss": 0.1035, - "step": 1175 - }, - { - "epoch": 0.31226765799256506, - "grad_norm": 0.6455068902710267, - "learning_rate": 9.69620643312003e-06, - "loss": 0.0525, - "step": 1176 - }, - { - "epoch": 0.3125331917153478, - "grad_norm": 0.19469026077885143, - "learning_rate": 9.695452398576897e-06, - "loss": 0.0206, - "step": 1177 - }, - { - "epoch": 0.31279872543813064, - "grad_norm": 0.552300258388761, - "learning_rate": 9.69469745880592e-06, - "loss": 0.0576, - "step": 1178 - }, - { - "epoch": 0.31306425916091346, - "grad_norm": 0.15378094703192938, - "learning_rate": 9.693941613952642e-06, - "loss": 0.0146, - "step": 1179 - }, - { - "epoch": 0.3133297928836962, - "grad_norm": 0.7149303778962087, - "learning_rate": 9.693184864162781e-06, - "loss": 0.0525, - "step": 1180 - }, - { - "epoch": 0.31359532660647904, - "grad_norm": 12.606742773671819, - "learning_rate": 9.692427209582227e-06, - "loss": 0.3516, - "step": 1181 - }, - { - "epoch": 
0.3138608603292618, - "grad_norm": 0.02382454479686282, - "learning_rate": 9.691668650357052e-06, - "loss": 0.0005, - "step": 1182 - }, - { - "epoch": 0.3141263940520446, - "grad_norm": 0.6509559366250702, - "learning_rate": 9.690909186633493e-06, - "loss": 0.063, - "step": 1183 - }, - { - "epoch": 0.3143919277748274, - "grad_norm": 0.22407883750706376, - "learning_rate": 9.690148818557966e-06, - "loss": 0.0231, - "step": 1184 - }, - { - "epoch": 0.3146574614976102, - "grad_norm": 1.194851766940795, - "learning_rate": 9.689387546277062e-06, - "loss": 0.0752, - "step": 1185 - }, - { - "epoch": 0.31492299522039296, - "grad_norm": 1.5889517299989575, - "learning_rate": 9.688625369937548e-06, - "loss": 0.1113, - "step": 1186 - }, - { - "epoch": 0.3151885289431758, - "grad_norm": 0.7528605787401368, - "learning_rate": 9.687862289686356e-06, - "loss": 0.1035, - "step": 1187 - }, - { - "epoch": 0.3154540626659586, - "grad_norm": 0.10046386842043101, - "learning_rate": 9.687098305670606e-06, - "loss": 0.0025, - "step": 1188 - }, - { - "epoch": 0.31571959638874136, - "grad_norm": 0.744431616154801, - "learning_rate": 9.68633341803758e-06, - "loss": 0.1113, - "step": 1189 - }, - { - "epoch": 0.3159851301115242, - "grad_norm": 0.3171470340086223, - "learning_rate": 9.685567626934742e-06, - "loss": 0.0354, - "step": 1190 - }, - { - "epoch": 0.31625066383430694, - "grad_norm": 0.24491006741762908, - "learning_rate": 9.684800932509726e-06, - "loss": 0.0258, - "step": 1191 - }, - { - "epoch": 0.31651619755708976, - "grad_norm": 0.057338171719011885, - "learning_rate": 9.684033334910342e-06, - "loss": 0.0014, - "step": 1192 - }, - { - "epoch": 0.3167817312798725, - "grad_norm": 0.2935840183769047, - "learning_rate": 9.683264834284575e-06, - "loss": 0.0258, - "step": 1193 - }, - { - "epoch": 0.31704726500265534, - "grad_norm": 0.7590487718984419, - "learning_rate": 9.682495430780581e-06, - "loss": 0.1455, - "step": 1194 - }, - { - "epoch": 0.3173127987254381, - "grad_norm": 1.1684076403623338, - "learning_rate": 9.681725124546695e-06, - "loss": 0.0693, - "step": 1195 - }, - { - "epoch": 0.3175783324482209, - "grad_norm": 1.6438208494368356, - "learning_rate": 9.680953915731418e-06, - "loss": 0.082, - "step": 1196 - }, - { - "epoch": 0.31784386617100374, - "grad_norm": 0.8695124690281735, - "learning_rate": 9.680181804483435e-06, - "loss": 0.0479, - "step": 1197 - }, - { - "epoch": 0.3181093998937865, - "grad_norm": 0.20326281659079895, - "learning_rate": 9.679408790951597e-06, - "loss": 0.0231, - "step": 1198 - }, - { - "epoch": 0.3183749336165693, - "grad_norm": 0.6573685494777957, - "learning_rate": 9.678634875284932e-06, - "loss": 0.0187, - "step": 1199 - }, - { - "epoch": 0.3186404673393521, - "grad_norm": 0.7687488235704806, - "learning_rate": 9.677860057632642e-06, - "loss": 0.1455, - "step": 1200 - }, - { - "epoch": 0.3189060010621349, - "grad_norm": 0.12038614362749626, - "learning_rate": 9.677084338144105e-06, - "loss": 0.0117, - "step": 1201 - }, - { - "epoch": 0.31917153478491767, - "grad_norm": 0.22290803237319895, - "learning_rate": 9.676307716968866e-06, - "loss": 0.0258, - "step": 1202 - }, - { - "epoch": 0.3194370685077005, - "grad_norm": 0.08514537747421477, - "learning_rate": 9.675530194256652e-06, - "loss": 0.0018, - "step": 1203 - }, - { - "epoch": 0.31970260223048325, - "grad_norm": 0.5579141610152237, - "learning_rate": 9.674751770157362e-06, - "loss": 0.0233, - "step": 1204 - }, - { - "epoch": 0.31996813595326606, - "grad_norm": 0.312483949561754, - "learning_rate": 
9.673972444821063e-06, - "loss": 0.0354, - "step": 1205 - }, - { - "epoch": 0.3202336696760489, - "grad_norm": 0.22701164555191825, - "learning_rate": 9.673192218398e-06, - "loss": 0.0258, - "step": 1206 - }, - { - "epoch": 0.32049920339883164, - "grad_norm": 0.1487192071650038, - "learning_rate": 9.672411091038592e-06, - "loss": 0.0146, - "step": 1207 - }, - { - "epoch": 0.32076473712161446, - "grad_norm": 1.3761300654988238, - "learning_rate": 9.671629062893433e-06, - "loss": 0.1279, - "step": 1208 - }, - { - "epoch": 0.3210302708443972, - "grad_norm": 0.1704844264079489, - "learning_rate": 9.670846134113286e-06, - "loss": 0.0184, - "step": 1209 - }, - { - "epoch": 0.32129580456718004, - "grad_norm": 0.19819201079023002, - "learning_rate": 9.670062304849093e-06, - "loss": 0.0047, - "step": 1210 - }, - { - "epoch": 0.3215613382899628, - "grad_norm": 0.1838454976972731, - "learning_rate": 9.669277575251965e-06, - "loss": 0.0052, - "step": 1211 - }, - { - "epoch": 0.3218268720127456, - "grad_norm": 0.6558129007222472, - "learning_rate": 9.668491945473191e-06, - "loss": 0.0148, - "step": 1212 - }, - { - "epoch": 0.3220924057355284, - "grad_norm": 0.34799526026312055, - "learning_rate": 9.667705415664228e-06, - "loss": 0.0393, - "step": 1213 - }, - { - "epoch": 0.3223579394583112, - "grad_norm": 0.5171512089272212, - "learning_rate": 9.666917985976712e-06, - "loss": 0.0148, - "step": 1214 - }, - { - "epoch": 0.322623473181094, - "grad_norm": 0.2062886511175874, - "learning_rate": 9.66612965656245e-06, - "loss": 0.0231, - "step": 1215 - }, - { - "epoch": 0.3228890069038768, - "grad_norm": 0.7738586973681455, - "learning_rate": 9.665340427573422e-06, - "loss": 0.1641, - "step": 1216 - }, - { - "epoch": 0.3231545406266596, - "grad_norm": 0.03671678604671888, - "learning_rate": 9.664550299161782e-06, - "loss": 0.0008, - "step": 1217 - }, - { - "epoch": 0.32342007434944237, - "grad_norm": 0.19085387586302657, - "learning_rate": 9.663759271479858e-06, - "loss": 0.0206, - "step": 1218 - }, - { - "epoch": 0.3236856080722252, - "grad_norm": 3.034553850378646, - "learning_rate": 9.66296734468015e-06, - "loss": 0.1553, - "step": 1219 - }, - { - "epoch": 0.32395114179500795, - "grad_norm": 0.026411465752670175, - "learning_rate": 9.662174518915332e-06, - "loss": 0.0006, - "step": 1220 - }, - { - "epoch": 0.32421667551779076, - "grad_norm": 0.08856791563455327, - "learning_rate": 9.661380794338252e-06, - "loss": 0.002, - "step": 1221 - }, - { - "epoch": 0.3244822092405735, - "grad_norm": 0.04449604335631291, - "learning_rate": 9.660586171101931e-06, - "loss": 0.0008, - "step": 1222 - }, - { - "epoch": 0.32474774296335635, - "grad_norm": 0.8025875942279423, - "learning_rate": 9.659790649359563e-06, - "loss": 0.1738, - "step": 1223 - }, - { - "epoch": 0.32501327668613916, - "grad_norm": 0.13222478587189693, - "learning_rate": 9.658994229264514e-06, - "loss": 0.0131, - "step": 1224 - }, - { - "epoch": 0.3252788104089219, - "grad_norm": 0.6260229647329109, - "learning_rate": 9.658196910970327e-06, - "loss": 0.0889, - "step": 1225 - }, - { - "epoch": 0.32554434413170474, - "grad_norm": 0.7658967348933747, - "learning_rate": 9.657398694630713e-06, - "loss": 0.1553, - "step": 1226 - }, - { - "epoch": 0.3258098778544875, - "grad_norm": 0.48947788594768415, - "learning_rate": 9.65659958039956e-06, - "loss": 0.0288, - "step": 1227 - }, - { - "epoch": 0.3260754115772703, - "grad_norm": 0.8585792462640025, - "learning_rate": 9.655799568430926e-06, - "loss": 0.0688, - "step": 1228 - }, - { - "epoch": 
0.3263409453000531, - "grad_norm": 0.6527231816101399, - "learning_rate": 9.654998658879046e-06, - "loss": 0.1113, - "step": 1229 - }, - { - "epoch": 0.3266064790228359, - "grad_norm": 0.6582417207082769, - "learning_rate": 9.654196851898325e-06, - "loss": 0.1191, - "step": 1230 - }, - { - "epoch": 0.32687201274561867, - "grad_norm": 0.6645575253832666, - "learning_rate": 9.653394147643343e-06, - "loss": 0.1113, - "step": 1231 - }, - { - "epoch": 0.3271375464684015, - "grad_norm": 0.27026297493457185, - "learning_rate": 9.65259054626885e-06, - "loss": 0.0317, - "step": 1232 - }, - { - "epoch": 0.3274030801911843, - "grad_norm": 0.6383288218728779, - "learning_rate": 9.651786047929772e-06, - "loss": 0.1035, - "step": 1233 - }, - { - "epoch": 0.32766861391396707, - "grad_norm": 0.011355216588088562, - "learning_rate": 9.650980652781208e-06, - "loss": 0.0003, - "step": 1234 - }, - { - "epoch": 0.3279341476367499, - "grad_norm": 0.5036691216178738, - "learning_rate": 9.650174360978427e-06, - "loss": 0.0525, - "step": 1235 - }, - { - "epoch": 0.32819968135953265, - "grad_norm": 0.48956110736136377, - "learning_rate": 9.649367172676873e-06, - "loss": 0.0317, - "step": 1236 - }, - { - "epoch": 0.32846521508231546, - "grad_norm": 0.6193502355457858, - "learning_rate": 9.648559088032162e-06, - "loss": 0.0957, - "step": 1237 - }, - { - "epoch": 0.3287307488050982, - "grad_norm": 0.7256539061385159, - "learning_rate": 9.647750107200082e-06, - "loss": 0.0889, - "step": 1238 - }, - { - "epoch": 0.32899628252788105, - "grad_norm": 0.5224109219073716, - "learning_rate": 9.646940230336599e-06, - "loss": 0.0045, - "step": 1239 - }, - { - "epoch": 0.3292618162506638, - "grad_norm": 0.2037818062231921, - "learning_rate": 9.646129457597844e-06, - "loss": 0.0023, - "step": 1240 - }, - { - "epoch": 0.3295273499734466, - "grad_norm": 0.4607652592533831, - "learning_rate": 9.645317789140125e-06, - "loss": 0.0579, - "step": 1241 - }, - { - "epoch": 0.32979288369622944, - "grad_norm": 0.9063414384859513, - "learning_rate": 9.644505225119922e-06, - "loss": 0.1113, - "step": 1242 - }, - { - "epoch": 0.3300584174190122, - "grad_norm": 0.2770486401552903, - "learning_rate": 9.64369176569389e-06, - "loss": 0.0187, - "step": 1243 - }, - { - "epoch": 0.330323951141795, - "grad_norm": 0.4514415661962752, - "learning_rate": 9.642877411018852e-06, - "loss": 0.0525, - "step": 1244 - }, - { - "epoch": 0.3305894848645778, - "grad_norm": 0.4388178911161381, - "learning_rate": 9.642062161251807e-06, - "loss": 0.0525, - "step": 1245 - }, - { - "epoch": 0.3308550185873606, - "grad_norm": 0.5887566574525436, - "learning_rate": 9.641246016549925e-06, - "loss": 0.0579, - "step": 1246 - }, - { - "epoch": 0.33112055231014337, - "grad_norm": 2.5823916037440813, - "learning_rate": 9.64042897707055e-06, - "loss": 0.2344, - "step": 1247 - }, - { - "epoch": 0.3313860860329262, - "grad_norm": 0.3156225772793237, - "learning_rate": 9.639611042971198e-06, - "loss": 0.0258, - "step": 1248 - }, - { - "epoch": 0.33165161975570895, - "grad_norm": 0.4480395609995155, - "learning_rate": 9.638792214409556e-06, - "loss": 0.0479, - "step": 1249 - }, - { - "epoch": 0.33191715347849177, - "grad_norm": 0.7209237690266596, - "learning_rate": 9.637972491543482e-06, - "loss": 0.0957, - "step": 1250 - }, - { - "epoch": 0.3321826872012746, - "grad_norm": 0.790962238735122, - "learning_rate": 9.637151874531014e-06, - "loss": 0.1035, - "step": 1251 - }, - { - "epoch": 0.33244822092405735, - "grad_norm": 0.26847764212042974, - "learning_rate": 
9.636330363530352e-06, - "loss": 0.0206, - "step": 1252 - }, - { - "epoch": 0.33271375464684017, - "grad_norm": 0.13095952869121744, - "learning_rate": 9.63550795869988e-06, - "loss": 0.0042, - "step": 1253 - }, - { - "epoch": 0.33297928836962293, - "grad_norm": 0.4485281612188027, - "learning_rate": 9.63468466019814e-06, - "loss": 0.0479, - "step": 1254 - }, - { - "epoch": 0.33324482209240575, - "grad_norm": 6.678236601224771, - "learning_rate": 9.633860468183862e-06, - "loss": 0.6133, - "step": 1255 - }, - { - "epoch": 0.3335103558151885, - "grad_norm": 0.03640018770094486, - "learning_rate": 9.633035382815936e-06, - "loss": 0.0009, - "step": 1256 - }, - { - "epoch": 0.3337758895379713, - "grad_norm": 0.20995200600370745, - "learning_rate": 9.63220940425343e-06, - "loss": 0.0147, - "step": 1257 - }, - { - "epoch": 0.3340414232607541, - "grad_norm": 0.7744165649910679, - "learning_rate": 9.631382532655583e-06, - "loss": 0.1035, - "step": 1258 - }, - { - "epoch": 0.3343069569835369, - "grad_norm": 0.8521274871394714, - "learning_rate": 9.630554768181806e-06, - "loss": 0.1279, - "step": 1259 - }, - { - "epoch": 0.3345724907063197, - "grad_norm": 0.013869617394598415, - "learning_rate": 9.62972611099168e-06, - "loss": 0.0004, - "step": 1260 - }, - { - "epoch": 0.3348380244291025, - "grad_norm": 0.7984701653609472, - "learning_rate": 9.628896561244965e-06, - "loss": 0.1201, - "step": 1261 - }, - { - "epoch": 0.3351035581518853, - "grad_norm": 0.18442932803572457, - "learning_rate": 9.628066119101583e-06, - "loss": 0.0093, - "step": 1262 - }, - { - "epoch": 0.33536909187466807, - "grad_norm": 0.7728685034480455, - "learning_rate": 9.627234784721637e-06, - "loss": 0.1113, - "step": 1263 - }, - { - "epoch": 0.3356346255974509, - "grad_norm": 0.28335521981673617, - "learning_rate": 9.6264025582654e-06, - "loss": 0.0233, - "step": 1264 - }, - { - "epoch": 0.33590015932023365, - "grad_norm": 0.399075752307739, - "learning_rate": 9.62556943989331e-06, - "loss": 0.0479, - "step": 1265 - }, - { - "epoch": 0.33616569304301647, - "grad_norm": 0.12658474094636998, - "learning_rate": 9.624735429765988e-06, - "loss": 0.0041, - "step": 1266 - }, - { - "epoch": 0.33643122676579923, - "grad_norm": 0.5280828189652471, - "learning_rate": 9.623900528044216e-06, - "loss": 0.0752, - "step": 1267 - }, - { - "epoch": 0.33669676048858205, - "grad_norm": 0.24243489448797254, - "learning_rate": 9.623064734888959e-06, - "loss": 0.0066, - "step": 1268 - }, - { - "epoch": 0.33696229421136487, - "grad_norm": 0.4700632898354242, - "learning_rate": 9.622228050461345e-06, - "loss": 0.0693, - "step": 1269 - }, - { - "epoch": 0.33722782793414763, - "grad_norm": 0.41440456235100415, - "learning_rate": 9.621390474922675e-06, - "loss": 0.0579, - "step": 1270 - }, - { - "epoch": 0.33749336165693045, - "grad_norm": 0.7223449992115132, - "learning_rate": 9.620552008434426e-06, - "loss": 0.0752, - "step": 1271 - }, - { - "epoch": 0.3377588953797132, - "grad_norm": 0.4596889344479549, - "learning_rate": 9.619712651158243e-06, - "loss": 0.0693, - "step": 1272 - }, - { - "epoch": 0.338024429102496, - "grad_norm": 0.4441012257660843, - "learning_rate": 9.618872403255947e-06, - "loss": 0.063, - "step": 1273 - }, - { - "epoch": 0.3382899628252788, - "grad_norm": 0.1553935211134242, - "learning_rate": 9.618031264889529e-06, - "loss": 0.0066, - "step": 1274 - }, - { - "epoch": 0.3385554965480616, - "grad_norm": 0.42387440934012344, - "learning_rate": 9.617189236221143e-06, - "loss": 0.0579, - "step": 1275 - }, - { - "epoch": 
0.33882103027084437, - "grad_norm": 0.47243312888409494, - "learning_rate": 9.61634631741313e-06, - "loss": 0.0752, - "step": 1276 - }, - { - "epoch": 0.3390865639936272, - "grad_norm": 0.06774040615620845, - "learning_rate": 9.61550250862799e-06, - "loss": 0.0018, - "step": 1277 - }, - { - "epoch": 0.33935209771641, - "grad_norm": 0.38367232223776365, - "learning_rate": 9.614657810028402e-06, - "loss": 0.0525, - "step": 1278 - }, - { - "epoch": 0.33961763143919277, - "grad_norm": 0.3392200068088174, - "learning_rate": 9.613812221777212e-06, - "loss": 0.0131, - "step": 1279 - }, - { - "epoch": 0.3398831651619756, - "grad_norm": 0.42002324638804434, - "learning_rate": 9.612965744037441e-06, - "loss": 0.0479, - "step": 1280 - }, - { - "epoch": 0.34014869888475835, - "grad_norm": 0.5274772223331019, - "learning_rate": 9.61211837697228e-06, - "loss": 0.0693, - "step": 1281 - }, - { - "epoch": 0.34041423260754117, - "grad_norm": 0.35860555791803894, - "learning_rate": 9.61127012074509e-06, - "loss": 0.0479, - "step": 1282 - }, - { - "epoch": 0.34067976633032393, - "grad_norm": 0.5038903692242176, - "learning_rate": 9.610420975519407e-06, - "loss": 0.0752, - "step": 1283 - }, - { - "epoch": 0.34094530005310675, - "grad_norm": 0.4033059759112738, - "learning_rate": 9.609570941458935e-06, - "loss": 0.0579, - "step": 1284 - }, - { - "epoch": 0.3412108337758895, - "grad_norm": 0.5660112789357162, - "learning_rate": 9.60872001872755e-06, - "loss": 0.0752, - "step": 1285 - }, - { - "epoch": 0.34147636749867233, - "grad_norm": 0.3263429089982916, - "learning_rate": 9.607868207489297e-06, - "loss": 0.0393, - "step": 1286 - }, - { - "epoch": 0.34174190122145515, - "grad_norm": 0.37153713665523197, - "learning_rate": 9.607015507908401e-06, - "loss": 0.0479, - "step": 1287 - }, - { - "epoch": 0.3420074349442379, - "grad_norm": 0.4755797521549427, - "learning_rate": 9.606161920149251e-06, - "loss": 0.0579, - "step": 1288 - }, - { - "epoch": 0.3422729686670207, - "grad_norm": 0.5767413183549783, - "learning_rate": 9.605307444376407e-06, - "loss": 0.063, - "step": 1289 - }, - { - "epoch": 0.3425385023898035, - "grad_norm": 0.06779770472600782, - "learning_rate": 9.604452080754601e-06, - "loss": 0.0017, - "step": 1290 - }, - { - "epoch": 0.3428040361125863, - "grad_norm": 0.5248146878342517, - "learning_rate": 9.603595829448737e-06, - "loss": 0.0815, - "step": 1291 - }, - { - "epoch": 0.34306956983536907, - "grad_norm": 3.5626821708062617, - "learning_rate": 9.602738690623895e-06, - "loss": 0.377, - "step": 1292 - }, - { - "epoch": 0.3433351035581519, - "grad_norm": 0.5707292470276522, - "learning_rate": 9.601880664445313e-06, - "loss": 0.0957, - "step": 1293 - }, - { - "epoch": 0.34360063728093465, - "grad_norm": 0.41750308884117965, - "learning_rate": 9.601021751078416e-06, - "loss": 0.0393, - "step": 1294 - }, - { - "epoch": 0.34386617100371747, - "grad_norm": 0.5159271503322392, - "learning_rate": 9.60016195068879e-06, - "loss": 0.063, - "step": 1295 - }, - { - "epoch": 0.3441317047265003, - "grad_norm": 0.5216179998375932, - "learning_rate": 9.599301263442194e-06, - "loss": 0.0688, - "step": 1296 - }, - { - "epoch": 0.34439723844928305, - "grad_norm": 0.30090609879974806, - "learning_rate": 9.598439689504556e-06, - "loss": 0.0391, - "step": 1297 - }, - { - "epoch": 0.34466277217206587, - "grad_norm": 0.3707745206401245, - "learning_rate": 9.597577229041981e-06, - "loss": 0.0525, - "step": 1298 - }, - { - "epoch": 0.34492830589484863, - "grad_norm": 2.103947714882032, - "learning_rate": 
9.596713882220739e-06, - "loss": 0.1035, - "step": 1299 - }, - { - "epoch": 0.34519383961763145, - "grad_norm": 0.5141149862836718, - "learning_rate": 9.595849649207274e-06, - "loss": 0.0693, - "step": 1300 - }, - { - "epoch": 0.3454593733404142, - "grad_norm": 0.44396106651225886, - "learning_rate": 9.594984530168198e-06, - "loss": 0.0525, - "step": 1301 - }, - { - "epoch": 0.34572490706319703, - "grad_norm": 0.3455247284911814, - "learning_rate": 9.594118525270299e-06, - "loss": 0.0479, - "step": 1302 - }, - { - "epoch": 0.34599044078597985, - "grad_norm": 0.7585784664106814, - "learning_rate": 9.593251634680531e-06, - "loss": 0.0889, - "step": 1303 - }, - { - "epoch": 0.3462559745087626, - "grad_norm": 0.36123832598406336, - "learning_rate": 9.59238385856602e-06, - "loss": 0.0479, - "step": 1304 - }, - { - "epoch": 0.3465215082315454, - "grad_norm": 0.5097193438244858, - "learning_rate": 9.591515197094064e-06, - "loss": 0.082, - "step": 1305 - }, - { - "epoch": 0.3467870419543282, - "grad_norm": 0.2653818151867503, - "learning_rate": 9.59064565043213e-06, - "loss": 0.0354, - "step": 1306 - }, - { - "epoch": 0.347052575677111, - "grad_norm": 0.5864846744145519, - "learning_rate": 9.589775218747854e-06, - "loss": 0.0889, - "step": 1307 - }, - { - "epoch": 0.34731810939989377, - "grad_norm": 0.3781614336308014, - "learning_rate": 9.58890390220905e-06, - "loss": 0.0131, - "step": 1308 - }, - { - "epoch": 0.3475836431226766, - "grad_norm": 0.30092084572433875, - "learning_rate": 9.588031700983693e-06, - "loss": 0.0393, - "step": 1309 - }, - { - "epoch": 0.34784917684545935, - "grad_norm": 0.35904973427780035, - "learning_rate": 9.587158615239935e-06, - "loss": 0.0354, - "step": 1310 - }, - { - "epoch": 0.34811471056824217, - "grad_norm": 0.5600811056316272, - "learning_rate": 9.586284645146096e-06, - "loss": 0.0579, - "step": 1311 - }, - { - "epoch": 0.348380244291025, - "grad_norm": 0.05426252506203742, - "learning_rate": 9.58540979087067e-06, - "loss": 0.0015, - "step": 1312 - }, - { - "epoch": 0.34864577801380775, - "grad_norm": 0.5870410341108383, - "learning_rate": 9.584534052582314e-06, - "loss": 0.1035, - "step": 1313 - }, - { - "epoch": 0.34891131173659057, - "grad_norm": 0.6294228311893199, - "learning_rate": 9.583657430449862e-06, - "loss": 0.082, - "step": 1314 - }, - { - "epoch": 0.34917684545937333, - "grad_norm": 0.4895276277368698, - "learning_rate": 9.582779924642317e-06, - "loss": 0.0184, - "step": 1315 - }, - { - "epoch": 0.34944237918215615, - "grad_norm": 0.5733825038864722, - "learning_rate": 9.581901535328853e-06, - "loss": 0.082, - "step": 1316 - }, - { - "epoch": 0.3497079129049389, - "grad_norm": 0.32782198798301165, - "learning_rate": 9.58102226267881e-06, - "loss": 0.0146, - "step": 1317 - }, - { - "epoch": 0.34997344662772173, - "grad_norm": 0.590823967005489, - "learning_rate": 9.580142106861704e-06, - "loss": 0.0693, - "step": 1318 - }, - { - "epoch": 0.3502389803505045, - "grad_norm": 0.28663624606998783, - "learning_rate": 9.579261068047217e-06, - "loss": 0.0147, - "step": 1319 - }, - { - "epoch": 0.3505045140732873, - "grad_norm": 0.07002167741569795, - "learning_rate": 9.578379146405202e-06, - "loss": 0.002, - "step": 1320 - }, - { - "epoch": 0.35077004779607013, - "grad_norm": 0.3823299154936616, - "learning_rate": 9.577496342105686e-06, - "loss": 0.0476, - "step": 1321 - }, - { - "epoch": 0.3510355815188529, - "grad_norm": 0.41555476819705756, - "learning_rate": 9.576612655318863e-06, - "loss": 0.0184, - "step": 1322 - }, - { - "epoch": 0.3513011152416357, 
- "grad_norm": 0.6565947270509365, - "learning_rate": 9.575728086215093e-06, - "loss": 0.0479, - "step": 1323 - }, - { - "epoch": 0.35156664896441847, - "grad_norm": 0.28826966637792456, - "learning_rate": 9.574842634964917e-06, - "loss": 0.0354, - "step": 1324 - }, - { - "epoch": 0.3518321826872013, - "grad_norm": 0.47440584122085944, - "learning_rate": 9.573956301739033e-06, - "loss": 0.0576, - "step": 1325 - }, - { - "epoch": 0.35209771640998405, - "grad_norm": 0.013825970291321273, - "learning_rate": 9.573069086708322e-06, - "loss": 0.0003, - "step": 1326 - }, - { - "epoch": 0.35236325013276687, - "grad_norm": 0.308417343496391, - "learning_rate": 9.572180990043822e-06, - "loss": 0.0393, - "step": 1327 - }, - { - "epoch": 0.35262878385554963, - "grad_norm": 0.023751366893711805, - "learning_rate": 9.571292011916753e-06, - "loss": 0.0006, - "step": 1328 - }, - { - "epoch": 0.35289431757833245, - "grad_norm": 0.6910550885091158, - "learning_rate": 9.570402152498498e-06, - "loss": 0.0688, - "step": 1329 - }, - { - "epoch": 0.35315985130111527, - "grad_norm": 0.5584726753157215, - "learning_rate": 9.56951141196061e-06, - "loss": 0.063, - "step": 1330 - }, - { - "epoch": 0.35342538502389803, - "grad_norm": 0.3771730171037822, - "learning_rate": 9.568619790474815e-06, - "loss": 0.0432, - "step": 1331 - }, - { - "epoch": 0.35369091874668085, - "grad_norm": 0.006598573947351961, - "learning_rate": 9.567727288213005e-06, - "loss": 0.0001, - "step": 1332 - }, - { - "epoch": 0.3539564524694636, - "grad_norm": 0.24569141953841403, - "learning_rate": 9.566833905347245e-06, - "loss": 0.0286, - "step": 1333 - }, - { - "epoch": 0.35422198619224643, - "grad_norm": 0.31994068602155107, - "learning_rate": 9.56593964204977e-06, - "loss": 0.0354, - "step": 1334 - }, - { - "epoch": 0.3544875199150292, - "grad_norm": 0.48426314470272797, - "learning_rate": 9.565044498492984e-06, - "loss": 0.0476, - "step": 1335 - }, - { - "epoch": 0.354753053637812, - "grad_norm": 0.4284293731700466, - "learning_rate": 9.564148474849455e-06, - "loss": 0.0476, - "step": 1336 - }, - { - "epoch": 0.3550185873605948, - "grad_norm": 0.5223639882465799, - "learning_rate": 9.563251571291932e-06, - "loss": 0.0432, - "step": 1337 - }, - { - "epoch": 0.3552841210833776, - "grad_norm": 0.3682885994989024, - "learning_rate": 9.562353787993321e-06, - "loss": 0.0432, - "step": 1338 - }, - { - "epoch": 0.3555496548061604, - "grad_norm": 0.7276856913645094, - "learning_rate": 9.56145512512671e-06, - "loss": 0.0957, - "step": 1339 - }, - { - "epoch": 0.35581518852894317, - "grad_norm": 0.28021489029743807, - "learning_rate": 9.56055558286535e-06, - "loss": 0.0317, - "step": 1340 - }, - { - "epoch": 0.356080722251726, - "grad_norm": 0.01832087245829523, - "learning_rate": 9.559655161382658e-06, - "loss": 0.0004, - "step": 1341 - }, - { - "epoch": 0.35634625597450875, - "grad_norm": 0.914955086500517, - "learning_rate": 9.558753860852228e-06, - "loss": 0.0752, - "step": 1342 - }, - { - "epoch": 0.35661178969729157, - "grad_norm": 0.20459946602158102, - "learning_rate": 9.557851681447818e-06, - "loss": 0.0206, - "step": 1343 - }, - { - "epoch": 0.35687732342007433, - "grad_norm": 0.27576350826669555, - "learning_rate": 9.556948623343358e-06, - "loss": 0.0286, - "step": 1344 - }, - { - "epoch": 0.35714285714285715, - "grad_norm": 0.2761915334889806, - "learning_rate": 9.55604468671295e-06, - "loss": 0.0255, - "step": 1345 - }, - { - "epoch": 0.3574083908656399, - "grad_norm": 0.8233226661583315, - "learning_rate": 9.555139871730859e-06, - 
"loss": 0.0354, - "step": 1346 - }, - { - "epoch": 0.35767392458842273, - "grad_norm": 0.1926615669215923, - "learning_rate": 9.554234178571524e-06, - "loss": 0.0165, - "step": 1347 - }, - { - "epoch": 0.35793945831120555, - "grad_norm": 0.3098753295444041, - "learning_rate": 9.55332760740955e-06, - "loss": 0.0286, - "step": 1348 - }, - { - "epoch": 0.3582049920339883, - "grad_norm": 3.431526479191698, - "learning_rate": 9.552420158419715e-06, - "loss": 0.1191, - "step": 1349 - }, - { - "epoch": 0.35847052575677113, - "grad_norm": 0.0625932742877244, - "learning_rate": 9.551511831776966e-06, - "loss": 0.001, - "step": 1350 - }, - { - "epoch": 0.3587360594795539, - "grad_norm": 1.1478320124700105, - "learning_rate": 9.550602627656415e-06, - "loss": 0.0889, - "step": 1351 - }, - { - "epoch": 0.3590015932023367, - "grad_norm": 1.003238754359967, - "learning_rate": 9.549692546233348e-06, - "loss": 0.0688, - "step": 1352 - }, - { - "epoch": 0.3592671269251195, - "grad_norm": 0.25770729335411, - "learning_rate": 9.548781587683214e-06, - "loss": 0.0061, - "step": 1353 - }, - { - "epoch": 0.3595326606479023, - "grad_norm": 1.55958556462164, - "learning_rate": 9.54786975218164e-06, - "loss": 0.0688, - "step": 1354 - }, - { - "epoch": 0.35979819437068505, - "grad_norm": 1.2409731636678565, - "learning_rate": 9.546957039904414e-06, - "loss": 0.0688, - "step": 1355 - }, - { - "epoch": 0.36006372809346787, - "grad_norm": 0.032263196311481473, - "learning_rate": 9.546043451027498e-06, - "loss": 0.0007, - "step": 1356 - }, - { - "epoch": 0.3603292618162507, - "grad_norm": 0.4590699936620603, - "learning_rate": 9.54512898572702e-06, - "loss": 0.0205, - "step": 1357 - }, - { - "epoch": 0.36059479553903345, - "grad_norm": 6.157784939523085, - "learning_rate": 9.544213644179278e-06, - "loss": 0.4668, - "step": 1358 - }, - { - "epoch": 0.36086032926181627, - "grad_norm": 1.0445567828018743, - "learning_rate": 9.54329742656074e-06, - "loss": 0.1035, - "step": 1359 - }, - { - "epoch": 0.36112586298459903, - "grad_norm": 0.900975787190914, - "learning_rate": 9.54238033304804e-06, - "loss": 0.0525, - "step": 1360 - }, - { - "epoch": 0.36139139670738185, - "grad_norm": 2.1445461499571827, - "learning_rate": 9.541462363817986e-06, - "loss": 0.1279, - "step": 1361 - }, - { - "epoch": 0.3616569304301646, - "grad_norm": 2.6058708901982754, - "learning_rate": 9.540543519047547e-06, - "loss": 0.0889, - "step": 1362 - }, - { - "epoch": 0.36192246415294743, - "grad_norm": 0.30837506165256673, - "learning_rate": 9.53962379891387e-06, - "loss": 0.0255, - "step": 1363 - }, - { - "epoch": 0.3621879978757302, - "grad_norm": 0.6781227526654185, - "learning_rate": 9.538703203594266e-06, - "loss": 0.0476, - "step": 1364 - }, - { - "epoch": 0.362453531598513, - "grad_norm": 0.0014988796322645392, - "learning_rate": 9.53778173326621e-06, - "loss": 0.0, - "step": 1365 - }, - { - "epoch": 0.36271906532129583, - "grad_norm": 0.4848181673027792, - "learning_rate": 9.536859388107359e-06, - "loss": 0.0354, - "step": 1366 - }, - { - "epoch": 0.3629845990440786, - "grad_norm": 0.30798774656542016, - "learning_rate": 9.535936168295521e-06, - "loss": 0.0255, - "step": 1367 - }, - { - "epoch": 0.3632501327668614, - "grad_norm": 1.1103162493296537, - "learning_rate": 9.535012074008688e-06, - "loss": 0.1191, - "step": 1368 - }, - { - "epoch": 0.3635156664896442, - "grad_norm": 1.115968824289187, - "learning_rate": 9.53408710542501e-06, - "loss": 0.1934, - "step": 1369 - }, - { - "epoch": 0.363781200212427, - "grad_norm": 0.1558534145654595, - 
"learning_rate": 9.533161262722815e-06, - "loss": 0.0131, - "step": 1370 - }, - { - "epoch": 0.36404673393520975, - "grad_norm": 0.042070066792334465, - "learning_rate": 9.532234546080592e-06, - "loss": 0.0008, - "step": 1371 - }, - { - "epoch": 0.3643122676579926, - "grad_norm": 0.05020993746733037, - "learning_rate": 9.531306955677003e-06, - "loss": 0.0012, - "step": 1372 - }, - { - "epoch": 0.36457780138077533, - "grad_norm": 0.3478946473030989, - "learning_rate": 9.530378491690872e-06, - "loss": 0.0255, - "step": 1373 - }, - { - "epoch": 0.36484333510355815, - "grad_norm": 0.532671213666101, - "learning_rate": 9.5294491543012e-06, - "loss": 0.0286, - "step": 1374 - }, - { - "epoch": 0.36510886882634097, - "grad_norm": 0.0104645404151327, - "learning_rate": 9.52851894368715e-06, - "loss": 0.0002, - "step": 1375 - }, - { - "epoch": 0.36537440254912373, - "grad_norm": 0.6407476894876308, - "learning_rate": 9.527587860028055e-06, - "loss": 0.0165, - "step": 1376 - }, - { - "epoch": 0.36563993627190655, - "grad_norm": 1.683239750297336, - "learning_rate": 9.526655903503423e-06, - "loss": 0.0688, - "step": 1377 - }, - { - "epoch": 0.3659054699946893, - "grad_norm": 0.1956603056666467, - "learning_rate": 9.525723074292916e-06, - "loss": 0.0184, - "step": 1378 - }, - { - "epoch": 0.36617100371747213, - "grad_norm": 0.6108332993871155, - "learning_rate": 9.524789372576377e-06, - "loss": 0.0317, - "step": 1379 - }, - { - "epoch": 0.3664365374402549, - "grad_norm": 0.03952181334754172, - "learning_rate": 9.523854798533814e-06, - "loss": 0.0012, - "step": 1380 - }, - { - "epoch": 0.3667020711630377, - "grad_norm": 0.18831659774689863, - "learning_rate": 9.522919352345398e-06, - "loss": 0.0024, - "step": 1381 - }, - { - "epoch": 0.3669676048858205, - "grad_norm": 0.8786083910112925, - "learning_rate": 9.521983034191472e-06, - "loss": 0.0432, - "step": 1382 - }, - { - "epoch": 0.3672331386086033, - "grad_norm": 0.1730242308325762, - "learning_rate": 9.521045844252552e-06, - "loss": 0.0146, - "step": 1383 - }, - { - "epoch": 0.3674986723313861, - "grad_norm": 1.2876116546269887, - "learning_rate": 9.520107782709314e-06, - "loss": 0.0815, - "step": 1384 - }, - { - "epoch": 0.3677642060541689, - "grad_norm": 1.3082256972483228, - "learning_rate": 9.519168849742603e-06, - "loss": 0.0752, - "step": 1385 - }, - { - "epoch": 0.3680297397769517, - "grad_norm": 0.9335061419191978, - "learning_rate": 9.518229045533438e-06, - "loss": 0.0576, - "step": 1386 - }, - { - "epoch": 0.36829527349973445, - "grad_norm": 2.446950303059504, - "learning_rate": 9.517288370263e-06, - "loss": 0.2246, - "step": 1387 - }, - { - "epoch": 0.3685608072225173, - "grad_norm": 0.6333100630168161, - "learning_rate": 9.51634682411264e-06, - "loss": 0.0123, - "step": 1388 - }, - { - "epoch": 0.36882634094530004, - "grad_norm": 1.1503551358840165, - "learning_rate": 9.51540440726388e-06, - "loss": 0.1035, - "step": 1389 - }, - { - "epoch": 0.36909187466808285, - "grad_norm": 0.3449715903261567, - "learning_rate": 9.514461119898401e-06, - "loss": 0.0092, - "step": 1390 - }, - { - "epoch": 0.3693574083908656, - "grad_norm": 0.23906913907246705, - "learning_rate": 9.513516962198064e-06, - "loss": 0.0229, - "step": 1391 - }, - { - "epoch": 0.36962294211364843, - "grad_norm": 0.4848006955734177, - "learning_rate": 9.512571934344887e-06, - "loss": 0.0117, - "step": 1392 - }, - { - "epoch": 0.36988847583643125, - "grad_norm": 0.9490909420873752, - "learning_rate": 9.511626036521064e-06, - "loss": 0.0889, - "step": 1393 - }, - { - "epoch": 
0.370154009559214, - "grad_norm": 3.92147853906124, - "learning_rate": 9.510679268908949e-06, - "loss": 0.1934, - "step": 1394 - }, - { - "epoch": 0.37041954328199683, - "grad_norm": 2.8735279348285796, - "learning_rate": 9.509731631691071e-06, - "loss": 0.2129, - "step": 1395 - }, - { - "epoch": 0.3706850770047796, - "grad_norm": 0.2436877641705072, - "learning_rate": 9.50878312505012e-06, - "loss": 0.0205, - "step": 1396 - }, - { - "epoch": 0.3709506107275624, - "grad_norm": 1.3855718841062967, - "learning_rate": 9.50783374916896e-06, - "loss": 0.063, - "step": 1397 - }, - { - "epoch": 0.3712161444503452, - "grad_norm": 0.39133070889265453, - "learning_rate": 9.506883504230618e-06, - "loss": 0.0354, - "step": 1398 - }, - { - "epoch": 0.371481678173128, - "grad_norm": 0.6442273431688003, - "learning_rate": 9.50593239041829e-06, - "loss": 0.0476, - "step": 1399 - }, - { - "epoch": 0.37174721189591076, - "grad_norm": 0.3654458756752601, - "learning_rate": 9.504980407915338e-06, - "loss": 0.0354, - "step": 1400 - }, - { - "epoch": 0.3720127456186936, - "grad_norm": 0.8586712209152337, - "learning_rate": 9.504027556905297e-06, - "loss": 0.1279, - "step": 1401 - }, - { - "epoch": 0.3722782793414764, - "grad_norm": 0.6773527175343205, - "learning_rate": 9.503073837571862e-06, - "loss": 0.0576, - "step": 1402 - }, - { - "epoch": 0.37254381306425915, - "grad_norm": 0.4186954158863423, - "learning_rate": 9.5021192500989e-06, - "loss": 0.0352, - "step": 1403 - }, - { - "epoch": 0.372809346787042, - "grad_norm": 1.9132653529788708, - "learning_rate": 9.501163794670445e-06, - "loss": 0.1738, - "step": 1404 - }, - { - "epoch": 0.37307488050982474, - "grad_norm": 2.2952198425920582, - "learning_rate": 9.500207471470695e-06, - "loss": 0.0815, - "step": 1405 - }, - { - "epoch": 0.37334041423260755, - "grad_norm": 0.40586858759644967, - "learning_rate": 9.49925028068402e-06, - "loss": 0.0165, - "step": 1406 - }, - { - "epoch": 0.3736059479553903, - "grad_norm": 0.18480641707554119, - "learning_rate": 9.498292222494955e-06, - "loss": 0.0035, - "step": 1407 - }, - { - "epoch": 0.37387148167817313, - "grad_norm": 0.7772486998466412, - "learning_rate": 9.497333297088202e-06, - "loss": 0.1191, - "step": 1408 - }, - { - "epoch": 0.3741370154009559, - "grad_norm": 0.2965999353034397, - "learning_rate": 9.496373504648632e-06, - "loss": 0.0286, - "step": 1409 - }, - { - "epoch": 0.3744025491237387, - "grad_norm": 2.294123343793471, - "learning_rate": 9.495412845361279e-06, - "loss": 0.1934, - "step": 1410 - }, - { - "epoch": 0.37466808284652153, - "grad_norm": 0.0026701934360799534, - "learning_rate": 9.494451319411347e-06, - "loss": 0.0001, - "step": 1411 - }, - { - "epoch": 0.3749336165693043, - "grad_norm": 1.4911265196857602, - "learning_rate": 9.493488926984208e-06, - "loss": 0.063, - "step": 1412 - }, - { - "epoch": 0.3751991502920871, - "grad_norm": 0.9596610809296374, - "learning_rate": 9.4925256682654e-06, - "loss": 0.0752, - "step": 1413 - }, - { - "epoch": 0.3754646840148699, - "grad_norm": 0.8806164654939721, - "learning_rate": 9.491561543440627e-06, - "loss": 0.0391, - "step": 1414 - }, - { - "epoch": 0.3757302177376527, - "grad_norm": 0.7690458874265063, - "learning_rate": 9.490596552695762e-06, - "loss": 0.1191, - "step": 1415 - }, - { - "epoch": 0.37599575146043546, - "grad_norm": 0.6872344076762994, - "learning_rate": 9.489630696216842e-06, - "loss": 0.0576, - "step": 1416 - }, - { - "epoch": 0.3762612851832183, - "grad_norm": 0.48727643854025365, - "learning_rate": 9.488663974190077e-06, - 
"loss": 0.0354, - "step": 1417 - }, - { - "epoch": 0.37652681890600104, - "grad_norm": 0.7530220920460418, - "learning_rate": 9.487696386801835e-06, - "loss": 0.0752, - "step": 1418 - }, - { - "epoch": 0.37679235262878386, - "grad_norm": 0.29673241857480187, - "learning_rate": 9.486727934238656e-06, - "loss": 0.0079, - "step": 1419 - }, - { - "epoch": 0.3770578863515667, - "grad_norm": 0.3529062224624686, - "learning_rate": 9.485758616687248e-06, - "loss": 0.0093, - "step": 1420 - }, - { - "epoch": 0.37732342007434944, - "grad_norm": 0.09031390787732817, - "learning_rate": 9.484788434334484e-06, - "loss": 0.0028, - "step": 1421 - }, - { - "epoch": 0.37758895379713225, - "grad_norm": 0.6189002563525746, - "learning_rate": 9.483817387367403e-06, - "loss": 0.0889, - "step": 1422 - }, - { - "epoch": 0.377854487519915, - "grad_norm": 0.8659301327268721, - "learning_rate": 9.482845475973212e-06, - "loss": 0.1113, - "step": 1423 - }, - { - "epoch": 0.37812002124269783, - "grad_norm": 0.025135957238805916, - "learning_rate": 9.481872700339282e-06, - "loss": 0.0007, - "step": 1424 - }, - { - "epoch": 0.3783855549654806, - "grad_norm": 0.20693260579313302, - "learning_rate": 9.480899060653154e-06, - "loss": 0.0105, - "step": 1425 - }, - { - "epoch": 0.3786510886882634, - "grad_norm": 3.127258712138675, - "learning_rate": 9.479924557102536e-06, - "loss": 0.2344, - "step": 1426 - }, - { - "epoch": 0.3789166224110462, - "grad_norm": 0.20096836647101268, - "learning_rate": 9.478949189875299e-06, - "loss": 0.004, - "step": 1427 - }, - { - "epoch": 0.379182156133829, - "grad_norm": 1.2506042183343944, - "learning_rate": 9.47797295915948e-06, - "loss": 0.1113, - "step": 1428 - }, - { - "epoch": 0.3794476898566118, - "grad_norm": 0.6735848636167625, - "learning_rate": 9.47699586514329e-06, - "loss": 0.0688, - "step": 1429 - }, - { - "epoch": 0.3797132235793946, - "grad_norm": 0.8729493010310873, - "learning_rate": 9.476017908015098e-06, - "loss": 0.0815, - "step": 1430 - }, - { - "epoch": 0.3799787573021774, - "grad_norm": 0.19365690895180718, - "learning_rate": 9.475039087963443e-06, - "loss": 0.0041, - "step": 1431 - }, - { - "epoch": 0.38024429102496016, - "grad_norm": 0.5438261709069115, - "learning_rate": 9.47405940517703e-06, - "loss": 0.063, - "step": 1432 - }, - { - "epoch": 0.380509824747743, - "grad_norm": 0.32752487349022863, - "learning_rate": 9.47307885984473e-06, - "loss": 0.0432, - "step": 1433 - }, - { - "epoch": 0.38077535847052574, - "grad_norm": 0.45526266402944837, - "learning_rate": 9.47209745215558e-06, - "loss": 0.032, - "step": 1434 - }, - { - "epoch": 0.38104089219330856, - "grad_norm": 1.0213144942650498, - "learning_rate": 9.471115182298786e-06, - "loss": 0.1035, - "step": 1435 - }, - { - "epoch": 0.3813064259160913, - "grad_norm": 0.4413563467345107, - "learning_rate": 9.470132050463716e-06, - "loss": 0.0354, - "step": 1436 - }, - { - "epoch": 0.38157195963887414, - "grad_norm": 0.0613716953521828, - "learning_rate": 9.469148056839907e-06, - "loss": 0.002, - "step": 1437 - }, - { - "epoch": 0.38183749336165695, - "grad_norm": 0.8153858333079927, - "learning_rate": 9.468163201617063e-06, - "loss": 0.082, - "step": 1438 - }, - { - "epoch": 0.3821030270844397, - "grad_norm": 0.5563381804784219, - "learning_rate": 9.467177484985048e-06, - "loss": 0.063, - "step": 1439 - }, - { - "epoch": 0.38236856080722254, - "grad_norm": 0.4245368835018735, - "learning_rate": 9.466190907133901e-06, - "loss": 0.0479, - "step": 1440 - }, - { - "epoch": 0.3826340945300053, - "grad_norm": 
0.33400027625309914, - "learning_rate": 9.46520346825382e-06, - "loss": 0.0432, - "step": 1441 - }, - { - "epoch": 0.3828996282527881, - "grad_norm": 0.14631192270559776, - "learning_rate": 9.464215168535175e-06, - "loss": 0.0045, - "step": 1442 - }, - { - "epoch": 0.3831651619755709, - "grad_norm": 0.4620703244763112, - "learning_rate": 9.463226008168493e-06, - "loss": 0.0432, - "step": 1443 - }, - { - "epoch": 0.3834306956983537, - "grad_norm": 0.6625505596890234, - "learning_rate": 9.462235987344478e-06, - "loss": 0.0579, - "step": 1444 - }, - { - "epoch": 0.38369622942113646, - "grad_norm": 0.24691441798104835, - "learning_rate": 9.461245106253991e-06, - "loss": 0.0105, - "step": 1445 - }, - { - "epoch": 0.3839617631439193, - "grad_norm": 0.6337463070104612, - "learning_rate": 9.460253365088063e-06, - "loss": 0.0752, - "step": 1446 - }, - { - "epoch": 0.3842272968667021, - "grad_norm": 0.08857326498123558, - "learning_rate": 9.45926076403789e-06, - "loss": 0.0028, - "step": 1447 - }, - { - "epoch": 0.38449283058948486, - "grad_norm": 0.614060872702447, - "learning_rate": 9.458267303294833e-06, - "loss": 0.0957, - "step": 1448 - }, - { - "epoch": 0.3847583643122677, - "grad_norm": 0.34706341100621624, - "learning_rate": 9.457272983050421e-06, - "loss": 0.0432, - "step": 1449 - }, - { - "epoch": 0.38502389803505044, - "grad_norm": 0.5244496312054608, - "learning_rate": 9.456277803496345e-06, - "loss": 0.082, - "step": 1450 - }, - { - "epoch": 0.38528943175783326, - "grad_norm": 0.04965359509992197, - "learning_rate": 9.455281764824467e-06, - "loss": 0.0015, - "step": 1451 - }, - { - "epoch": 0.385554965480616, - "grad_norm": 0.4205660220211263, - "learning_rate": 9.45428486722681e-06, - "loss": 0.0286, - "step": 1452 - }, - { - "epoch": 0.38582049920339884, - "grad_norm": 0.5607004309206753, - "learning_rate": 9.453287110895564e-06, - "loss": 0.063, - "step": 1453 - }, - { - "epoch": 0.3860860329261816, - "grad_norm": 0.47148899218207774, - "learning_rate": 9.452288496023085e-06, - "loss": 0.0576, - "step": 1454 - }, - { - "epoch": 0.3863515666489644, - "grad_norm": 0.46004679938747756, - "learning_rate": 9.451289022801894e-06, - "loss": 0.0579, - "step": 1455 - }, - { - "epoch": 0.38661710037174724, - "grad_norm": 0.4292369995431107, - "learning_rate": 9.450288691424676e-06, - "loss": 0.0317, - "step": 1456 - }, - { - "epoch": 0.38688263409453, - "grad_norm": 0.9283721518542083, - "learning_rate": 9.449287502084284e-06, - "loss": 0.0957, - "step": 1457 - }, - { - "epoch": 0.3871481678173128, - "grad_norm": 1.199281966245374, - "learning_rate": 9.448285454973739e-06, - "loss": 0.063, - "step": 1458 - }, - { - "epoch": 0.3874137015400956, - "grad_norm": 1.5176795239281788, - "learning_rate": 9.447282550286217e-06, - "loss": 0.1191, - "step": 1459 - }, - { - "epoch": 0.3876792352628784, - "grad_norm": 0.574751228105574, - "learning_rate": 9.446278788215074e-06, - "loss": 0.0889, - "step": 1460 - }, - { - "epoch": 0.38794476898566116, - "grad_norm": 0.05157646996776102, - "learning_rate": 9.445274168953818e-06, - "loss": 0.0012, - "step": 1461 - }, - { - "epoch": 0.388210302708444, - "grad_norm": 0.7857880559484768, - "learning_rate": 9.444268692696129e-06, - "loss": 0.082, - "step": 1462 - }, - { - "epoch": 0.38847583643122674, - "grad_norm": 0.3007386860700484, - "learning_rate": 9.44326235963585e-06, - "loss": 0.0393, - "step": 1463 - }, - { - "epoch": 0.38874137015400956, - "grad_norm": 0.3055671787388409, - "learning_rate": 9.442255169966995e-06, - "loss": 0.0354, - "step": 1464 - }, - 
{ - "epoch": 0.3890069038767924, - "grad_norm": 0.5568087984102646, - "learning_rate": 9.441247123883733e-06, - "loss": 0.0889, - "step": 1465 - }, - { - "epoch": 0.38927243759957514, - "grad_norm": 0.5895505413874539, - "learning_rate": 9.440238221580404e-06, - "loss": 0.1035, - "step": 1466 - }, - { - "epoch": 0.38953797132235796, - "grad_norm": 0.5616300389774914, - "learning_rate": 9.439228463251515e-06, - "loss": 0.0957, - "step": 1467 - }, - { - "epoch": 0.3898035050451407, - "grad_norm": 0.33063941694631, - "learning_rate": 9.438217849091732e-06, - "loss": 0.0393, - "step": 1468 - }, - { - "epoch": 0.39006903876792354, - "grad_norm": 0.2674438568163559, - "learning_rate": 9.437206379295895e-06, - "loss": 0.0354, - "step": 1469 - }, - { - "epoch": 0.3903345724907063, - "grad_norm": 0.7808160049243492, - "learning_rate": 9.436194054058998e-06, - "loss": 0.0752, - "step": 1470 - }, - { - "epoch": 0.3906001062134891, - "grad_norm": 0.5509891118998486, - "learning_rate": 9.435180873576208e-06, - "loss": 0.0957, - "step": 1471 - }, - { - "epoch": 0.3908656399362719, - "grad_norm": 0.6425294720673329, - "learning_rate": 9.434166838042854e-06, - "loss": 0.0889, - "step": 1472 - }, - { - "epoch": 0.3911311736590547, - "grad_norm": 1.8808615893246776, - "learning_rate": 9.433151947654428e-06, - "loss": 0.1279, - "step": 1473 - }, - { - "epoch": 0.3913967073818375, - "grad_norm": 0.6663733492038063, - "learning_rate": 9.432136202606594e-06, - "loss": 0.0184, - "step": 1474 - }, - { - "epoch": 0.3916622411046203, - "grad_norm": 0.5208712307214396, - "learning_rate": 9.431119603095169e-06, - "loss": 0.0889, - "step": 1475 - }, - { - "epoch": 0.3919277748274031, - "grad_norm": 0.2713853966362412, - "learning_rate": 9.430102149316147e-06, - "loss": 0.0354, - "step": 1476 - }, - { - "epoch": 0.39219330855018586, - "grad_norm": 0.5349973334569837, - "learning_rate": 9.429083841465679e-06, - "loss": 0.0957, - "step": 1477 - }, - { - "epoch": 0.3924588422729687, - "grad_norm": 0.1267971200504465, - "learning_rate": 9.428064679740081e-06, - "loss": 0.0025, - "step": 1478 - }, - { - "epoch": 0.39272437599575144, - "grad_norm": 0.5108936337689303, - "learning_rate": 9.427044664335837e-06, - "loss": 0.0815, - "step": 1479 - }, - { - "epoch": 0.39298990971853426, - "grad_norm": 0.33305136147342523, - "learning_rate": 9.426023795449595e-06, - "loss": 0.0479, - "step": 1480 - }, - { - "epoch": 0.393255443441317, - "grad_norm": 0.2984668226042693, - "learning_rate": 9.425002073278165e-06, - "loss": 0.0432, - "step": 1481 - }, - { - "epoch": 0.39352097716409984, - "grad_norm": 0.33394165373144796, - "learning_rate": 9.423979498018521e-06, - "loss": 0.0432, - "step": 1482 - }, - { - "epoch": 0.39378651088688266, - "grad_norm": 0.49293937391002435, - "learning_rate": 9.422956069867808e-06, - "loss": 0.0889, - "step": 1483 - }, - { - "epoch": 0.3940520446096654, - "grad_norm": 0.5210948201626876, - "learning_rate": 9.421931789023329e-06, - "loss": 0.0579, - "step": 1484 - }, - { - "epoch": 0.39431757833244824, - "grad_norm": 0.41304821586273616, - "learning_rate": 9.420906655682553e-06, - "loss": 0.0432, - "step": 1485 - }, - { - "epoch": 0.394583112055231, - "grad_norm": 0.46657739059291675, - "learning_rate": 9.41988067004311e-06, - "loss": 0.0354, - "step": 1486 - }, - { - "epoch": 0.3948486457780138, - "grad_norm": 0.026843605916078473, - "learning_rate": 9.418853832302806e-06, - "loss": 0.0007, - "step": 1487 - }, - { - "epoch": 0.3951141795007966, - "grad_norm": 0.2666054121708235, - "learning_rate": 
9.417826142659596e-06, - "loss": 0.0103, - "step": 1488 - }, - { - "epoch": 0.3953797132235794, - "grad_norm": 0.3517104605858895, - "learning_rate": 9.416797601311611e-06, - "loss": 0.032, - "step": 1489 - }, - { - "epoch": 0.39564524694636216, - "grad_norm": 0.448265208584516, - "learning_rate": 9.415768208457139e-06, - "loss": 0.0693, - "step": 1490 - }, - { - "epoch": 0.395910780669145, - "grad_norm": 0.4432887319257767, - "learning_rate": 9.414737964294636e-06, - "loss": 0.063, - "step": 1491 - }, - { - "epoch": 0.3961763143919278, - "grad_norm": 0.15615182853960044, - "learning_rate": 9.41370686902272e-06, - "loss": 0.0057, - "step": 1492 - }, - { - "epoch": 0.39644184811471056, - "grad_norm": 0.40174854437847995, - "learning_rate": 9.412674922840175e-06, - "loss": 0.0432, - "step": 1493 - }, - { - "epoch": 0.3967073818374934, - "grad_norm": 0.5334231337886949, - "learning_rate": 9.411642125945945e-06, - "loss": 0.0752, - "step": 1494 - }, - { - "epoch": 0.39697291556027614, - "grad_norm": 0.39350262505624917, - "learning_rate": 9.410608478539146e-06, - "loss": 0.0525, - "step": 1495 - }, - { - "epoch": 0.39723844928305896, - "grad_norm": 0.5104750407099554, - "learning_rate": 9.409573980819052e-06, - "loss": 0.0432, - "step": 1496 - }, - { - "epoch": 0.3975039830058417, - "grad_norm": 0.018301303401147544, - "learning_rate": 9.408538632985098e-06, - "loss": 0.0005, - "step": 1497 - }, - { - "epoch": 0.39776951672862454, - "grad_norm": 2.925231221732229, - "learning_rate": 9.407502435236889e-06, - "loss": 0.0184, - "step": 1498 - }, - { - "epoch": 0.3980350504514073, - "grad_norm": 0.43976667471652925, - "learning_rate": 9.406465387774194e-06, - "loss": 0.032, - "step": 1499 - }, - { - "epoch": 0.3983005841741901, - "grad_norm": 0.004447437986864806, - "learning_rate": 9.405427490796941e-06, - "loss": 0.0001, - "step": 1500 - }, - { - "epoch": 0.39856611789697294, - "grad_norm": 0.22189544659424998, - "learning_rate": 9.404388744505225e-06, - "loss": 0.0147, - "step": 1501 - }, - { - "epoch": 0.3988316516197557, - "grad_norm": 0.5323862127877603, - "learning_rate": 9.403349149099304e-06, - "loss": 0.0354, - "step": 1502 - }, - { - "epoch": 0.3990971853425385, - "grad_norm": 0.7824556156322893, - "learning_rate": 9.4023087047796e-06, - "loss": 0.1035, - "step": 1503 - }, - { - "epoch": 0.3993627190653213, - "grad_norm": 0.537555019938912, - "learning_rate": 9.401267411746697e-06, - "loss": 0.0752, - "step": 1504 - }, - { - "epoch": 0.3996282527881041, - "grad_norm": 1.2599751022767143, - "learning_rate": 9.400225270201347e-06, - "loss": 0.1279, - "step": 1505 - }, - { - "epoch": 0.39989378651088686, - "grad_norm": 0.39943457727783255, - "learning_rate": 9.399182280344458e-06, - "loss": 0.0286, - "step": 1506 - }, - { - "epoch": 0.4001593202336697, - "grad_norm": 0.3951237763375591, - "learning_rate": 9.398138442377112e-06, - "loss": 0.0354, - "step": 1507 - }, - { - "epoch": 0.40042485395645244, - "grad_norm": 0.3348310768931224, - "learning_rate": 9.397093756500544e-06, - "loss": 0.0258, - "step": 1508 - }, - { - "epoch": 0.40069038767923526, - "grad_norm": 0.6183702558458398, - "learning_rate": 9.396048222916159e-06, - "loss": 0.0889, - "step": 1509 - }, - { - "epoch": 0.4009559214020181, - "grad_norm": 0.7389099287695277, - "learning_rate": 9.395001841825523e-06, - "loss": 0.0957, - "step": 1510 - }, - { - "epoch": 0.40122145512480084, - "grad_norm": 0.5072802900638348, - "learning_rate": 9.393954613430365e-06, - "loss": 0.0752, - "step": 1511 - }, - { - "epoch": 
0.40148698884758366, - "grad_norm": 0.6807209999699108, - "learning_rate": 9.392906537932582e-06, - "loss": 0.0957, - "step": 1512 - }, - { - "epoch": 0.4017525225703664, - "grad_norm": 0.4704376229884201, - "learning_rate": 9.391857615534226e-06, - "loss": 0.0286, - "step": 1513 - }, - { - "epoch": 0.40201805629314924, - "grad_norm": 0.028093132307861573, - "learning_rate": 9.39080784643752e-06, - "loss": 0.0006, - "step": 1514 - }, - { - "epoch": 0.402283590015932, - "grad_norm": 0.5084581122752261, - "learning_rate": 9.389757230844845e-06, - "loss": 0.0688, - "step": 1515 - }, - { - "epoch": 0.4025491237387148, - "grad_norm": 0.006193390834703029, - "learning_rate": 9.388705768958748e-06, - "loss": 0.0001, - "step": 1516 - }, - { - "epoch": 0.4028146574614976, - "grad_norm": 0.46353104622161806, - "learning_rate": 9.38765346098194e-06, - "loss": 0.0525, - "step": 1517 - }, - { - "epoch": 0.4030801911842804, - "grad_norm": 0.49616262621142954, - "learning_rate": 9.386600307117293e-06, - "loss": 0.0393, - "step": 1518 - }, - { - "epoch": 0.4033457249070632, - "grad_norm": 0.0815583003071057, - "learning_rate": 9.38554630756784e-06, - "loss": 0.0022, - "step": 1519 - }, - { - "epoch": 0.403611258629846, - "grad_norm": 0.48994190511403873, - "learning_rate": 9.384491462536783e-06, - "loss": 0.0354, - "step": 1520 - }, - { - "epoch": 0.4038767923526288, - "grad_norm": 0.11251221678106954, - "learning_rate": 9.383435772227481e-06, - "loss": 0.004, - "step": 1521 - }, - { - "epoch": 0.40414232607541156, - "grad_norm": 0.5397658046795061, - "learning_rate": 9.382379236843459e-06, - "loss": 0.0688, - "step": 1522 - }, - { - "epoch": 0.4044078597981944, - "grad_norm": 0.09364212325756312, - "learning_rate": 9.381321856588407e-06, - "loss": 0.0032, - "step": 1523 - }, - { - "epoch": 0.40467339352097714, - "grad_norm": 0.9230122195682332, - "learning_rate": 9.380263631666175e-06, - "loss": 0.0815, - "step": 1524 - }, - { - "epoch": 0.40493892724375996, - "grad_norm": 0.9314503563752995, - "learning_rate": 9.379204562280775e-06, - "loss": 0.0889, - "step": 1525 - }, - { - "epoch": 0.4052044609665427, - "grad_norm": 0.60796669683741, - "learning_rate": 9.378144648636382e-06, - "loss": 0.0688, - "step": 1526 - }, - { - "epoch": 0.40546999468932554, - "grad_norm": 0.5554197315940151, - "learning_rate": 9.377083890937337e-06, - "loss": 0.0576, - "step": 1527 - }, - { - "epoch": 0.40573552841210836, - "grad_norm": 0.12196493290704465, - "learning_rate": 9.37602228938814e-06, - "loss": 0.0028, - "step": 1528 - }, - { - "epoch": 0.4060010621348911, - "grad_norm": 0.3126139063656892, - "learning_rate": 9.374959844193456e-06, - "loss": 0.0354, - "step": 1529 - }, - { - "epoch": 0.40626659585767394, - "grad_norm": 0.011854084112567173, - "learning_rate": 9.373896555558113e-06, - "loss": 0.0003, - "step": 1530 - }, - { - "epoch": 0.4065321295804567, - "grad_norm": 3.841192190633747, - "learning_rate": 9.372832423687099e-06, - "loss": 0.2031, - "step": 1531 - }, - { - "epoch": 0.4067976633032395, - "grad_norm": 0.6503926893484147, - "learning_rate": 9.371767448785566e-06, - "loss": 0.0184, - "step": 1532 - }, - { - "epoch": 0.4070631970260223, - "grad_norm": 0.8960971685517782, - "learning_rate": 9.37070163105883e-06, - "loss": 0.0815, - "step": 1533 - }, - { - "epoch": 0.4073287307488051, - "grad_norm": 0.011994467119951549, - "learning_rate": 9.369634970712365e-06, - "loss": 0.0003, - "step": 1534 - }, - { - "epoch": 0.4075942644715879, - "grad_norm": 0.00513544623691197, - "learning_rate": 
9.368567467951815e-06, - "loss": 0.0001, - "step": 1535 - }, - { - "epoch": 0.4078597981943707, - "grad_norm": 0.043827008510423704, - "learning_rate": 9.367499122982979e-06, - "loss": 0.0009, - "step": 1536 - }, - { - "epoch": 0.4081253319171535, - "grad_norm": 0.46712106544993076, - "learning_rate": 9.366429936011822e-06, - "loss": 0.0476, - "step": 1537 - }, - { - "epoch": 0.40839086563993626, - "grad_norm": 0.07965123738499298, - "learning_rate": 9.365359907244469e-06, - "loss": 0.0019, - "step": 1538 - }, - { - "epoch": 0.4086563993627191, - "grad_norm": 0.5002938341280985, - "learning_rate": 9.364289036887214e-06, - "loss": 0.0476, - "step": 1539 - }, - { - "epoch": 0.40892193308550184, - "grad_norm": 0.5932213624042697, - "learning_rate": 9.3632173251465e-06, - "loss": 0.0286, - "step": 1540 - }, - { - "epoch": 0.40918746680828466, - "grad_norm": 0.01294158924164034, - "learning_rate": 9.362144772228949e-06, - "loss": 0.0004, - "step": 1541 - }, - { - "epoch": 0.4094530005310674, - "grad_norm": 0.6159693393016078, - "learning_rate": 9.361071378341332e-06, - "loss": 0.0231, - "step": 1542 - }, - { - "epoch": 0.40971853425385024, - "grad_norm": 0.09689935660402307, - "learning_rate": 9.359997143690586e-06, - "loss": 0.0019, - "step": 1543 - }, - { - "epoch": 0.40998406797663306, - "grad_norm": 0.3554406490463387, - "learning_rate": 9.358922068483813e-06, - "loss": 0.0391, - "step": 1544 - }, - { - "epoch": 0.4102496016994158, - "grad_norm": 4.665948195663397, - "learning_rate": 9.357846152928275e-06, - "loss": 0.3535, - "step": 1545 - }, - { - "epoch": 0.41051513542219864, - "grad_norm": 0.6643692186727801, - "learning_rate": 9.356769397231391e-06, - "loss": 0.0206, - "step": 1546 - }, - { - "epoch": 0.4107806691449814, - "grad_norm": 0.3417631602945585, - "learning_rate": 9.355691801600754e-06, - "loss": 0.0354, - "step": 1547 - }, - { - "epoch": 0.4110462028677642, - "grad_norm": 0.04476331486140047, - "learning_rate": 9.354613366244108e-06, - "loss": 0.0006, - "step": 1548 - }, - { - "epoch": 0.411311736590547, - "grad_norm": 0.6284446073342773, - "learning_rate": 9.35353409136936e-06, - "loss": 0.0815, - "step": 1549 - }, - { - "epoch": 0.4115772703133298, - "grad_norm": 0.3822928656858541, - "learning_rate": 9.352453977184588e-06, - "loss": 0.0391, - "step": 1550 - }, - { - "epoch": 0.41184280403611256, - "grad_norm": 0.6046689508647543, - "learning_rate": 9.35137302389802e-06, - "loss": 0.1035, - "step": 1551 - }, - { - "epoch": 0.4121083377588954, - "grad_norm": 0.00728328734429902, - "learning_rate": 9.350291231718051e-06, - "loss": 0.0001, - "step": 1552 - }, - { - "epoch": 0.4123738714816782, - "grad_norm": 0.41563519871352395, - "learning_rate": 9.349208600853242e-06, - "loss": 0.0131, - "step": 1553 - }, - { - "epoch": 0.41263940520446096, - "grad_norm": 0.8926182157973549, - "learning_rate": 9.348125131512309e-06, - "loss": 0.0688, - "step": 1554 - }, - { - "epoch": 0.4129049389272438, - "grad_norm": 0.682749649537968, - "learning_rate": 9.34704082390413e-06, - "loss": 0.0889, - "step": 1555 - }, - { - "epoch": 0.41317047265002654, - "grad_norm": 0.004689924901466316, - "learning_rate": 9.34595567823775e-06, - "loss": 0.0001, - "step": 1556 - }, - { - "epoch": 0.41343600637280936, - "grad_norm": 0.6858612644220904, - "learning_rate": 9.344869694722372e-06, - "loss": 0.0889, - "step": 1557 - }, - { - "epoch": 0.4137015400955921, - "grad_norm": 0.4520811232153325, - "learning_rate": 9.343782873567358e-06, - "loss": 0.0391, - "step": 1558 - }, - { - "epoch": 
0.41396707381837494, - "grad_norm": 0.3326484721722159, - "learning_rate": 9.342695214982236e-06, - "loss": 0.0354, - "step": 1559 - }, - { - "epoch": 0.4142326075411577, - "grad_norm": 0.3853243104104061, - "learning_rate": 9.341606719176695e-06, - "loss": 0.0432, - "step": 1560 - }, - { - "epoch": 0.4144981412639405, - "grad_norm": 0.01786226839288024, - "learning_rate": 9.340517386360584e-06, - "loss": 0.0004, - "step": 1561 - }, - { - "epoch": 0.41476367498672334, - "grad_norm": 0.014836540197822963, - "learning_rate": 9.33942721674391e-06, - "loss": 0.0003, - "step": 1562 - }, - { - "epoch": 0.4150292087095061, - "grad_norm": 0.6491829250530613, - "learning_rate": 9.33833621053685e-06, - "loss": 0.063, - "step": 1563 - }, - { - "epoch": 0.4152947424322889, - "grad_norm": 0.0008502111840714258, - "learning_rate": 9.337244367949733e-06, - "loss": 0.0, - "step": 1564 - }, - { - "epoch": 0.4155602761550717, - "grad_norm": 0.4693791006428915, - "learning_rate": 9.336151689193055e-06, - "loss": 0.0476, - "step": 1565 - }, - { - "epoch": 0.4158258098778545, - "grad_norm": 0.638806505853796, - "learning_rate": 9.335058174477472e-06, - "loss": 0.0525, - "step": 1566 - }, - { - "epoch": 0.41609134360063726, - "grad_norm": 3.7713130277807108, - "learning_rate": 9.333963824013798e-06, - "loss": 0.2246, - "step": 1567 - }, - { - "epoch": 0.4163568773234201, - "grad_norm": 0.004494212269452434, - "learning_rate": 9.332868638013016e-06, - "loss": 0.0001, - "step": 1568 - }, - { - "epoch": 0.41662241104620285, - "grad_norm": 0.20743155158952067, - "learning_rate": 9.331772616686262e-06, - "loss": 0.0051, - "step": 1569 - }, - { - "epoch": 0.41688794476898566, - "grad_norm": 0.6985128357597653, - "learning_rate": 9.330675760244834e-06, - "loss": 0.0432, - "step": 1570 - }, - { - "epoch": 0.4171534784917685, - "grad_norm": 0.9089238790009025, - "learning_rate": 9.329578068900195e-06, - "loss": 0.063, - "step": 1571 - }, - { - "epoch": 0.41741901221455124, - "grad_norm": 2.064976993600294, - "learning_rate": 9.328479542863968e-06, - "loss": 0.1455, - "step": 1572 - }, - { - "epoch": 0.41768454593733406, - "grad_norm": 0.4081315774110765, - "learning_rate": 9.327380182347937e-06, - "loss": 0.0354, - "step": 1573 - }, - { - "epoch": 0.4179500796601168, - "grad_norm": 0.34553567048811684, - "learning_rate": 9.32627998756404e-06, - "loss": 0.0354, - "step": 1574 - }, - { - "epoch": 0.41821561338289964, - "grad_norm": 0.74457240073377, - "learning_rate": 9.325178958724387e-06, - "loss": 0.0476, - "step": 1575 - }, - { - "epoch": 0.4184811471056824, - "grad_norm": 0.02077584538656709, - "learning_rate": 9.32407709604124e-06, - "loss": 0.0005, - "step": 1576 - }, - { - "epoch": 0.4187466808284652, - "grad_norm": 1.5760043575275893, - "learning_rate": 9.322974399727029e-06, - "loss": 0.1191, - "step": 1577 - }, - { - "epoch": 0.419012214551248, - "grad_norm": 0.08166506720345244, - "learning_rate": 9.321870869994336e-06, - "loss": 0.0013, - "step": 1578 - }, - { - "epoch": 0.4192777482740308, - "grad_norm": 0.9348396362017879, - "learning_rate": 9.320766507055912e-06, - "loss": 0.0688, - "step": 1579 - }, - { - "epoch": 0.4195432819968136, - "grad_norm": 0.27331732617626087, - "learning_rate": 9.319661311124664e-06, - "loss": 0.0317, - "step": 1580 - }, - { - "epoch": 0.4198088157195964, - "grad_norm": 5.750235838182297, - "learning_rate": 9.318555282413662e-06, - "loss": 0.3809, - "step": 1581 - }, - { - "epoch": 0.4200743494423792, - "grad_norm": 0.6368103985830037, - "learning_rate": 9.317448421136134e-06, - 
"loss": 0.0476, - "step": 1582 - }, - { - "epoch": 0.42033988316516196, - "grad_norm": 0.2827804927558854, - "learning_rate": 9.316340727505469e-06, - "loss": 0.0317, - "step": 1583 - }, - { - "epoch": 0.4206054168879448, - "grad_norm": 0.30307381764984753, - "learning_rate": 9.315232201735217e-06, - "loss": 0.0064, - "step": 1584 - }, - { - "epoch": 0.42087095061072755, - "grad_norm": 0.21252100392930384, - "learning_rate": 9.314122844039094e-06, - "loss": 0.0231, - "step": 1585 - }, - { - "epoch": 0.42113648433351036, - "grad_norm": 1.2522860606912725, - "learning_rate": 9.313012654630963e-06, - "loss": 0.1035, - "step": 1586 - }, - { - "epoch": 0.4214020180562931, - "grad_norm": 0.022875048396212985, - "learning_rate": 9.311901633724861e-06, - "loss": 0.0006, - "step": 1587 - }, - { - "epoch": 0.42166755177907594, - "grad_norm": 0.9338167878973481, - "learning_rate": 9.310789781534978e-06, - "loss": 0.0286, - "step": 1588 - }, - { - "epoch": 0.42193308550185876, - "grad_norm": 0.6025789069506893, - "learning_rate": 9.309677098275667e-06, - "loss": 0.0476, - "step": 1589 - }, - { - "epoch": 0.4221986192246415, - "grad_norm": 0.15935459440059752, - "learning_rate": 9.308563584161439e-06, - "loss": 0.0146, - "step": 1590 - }, - { - "epoch": 0.42246415294742434, - "grad_norm": 0.09990679440548342, - "learning_rate": 9.307449239406967e-06, - "loss": 0.0021, - "step": 1591 - }, - { - "epoch": 0.4227296866702071, - "grad_norm": 0.4308256760120562, - "learning_rate": 9.306334064227082e-06, - "loss": 0.0206, - "step": 1592 - }, - { - "epoch": 0.4229952203929899, - "grad_norm": 0.17557875823654812, - "learning_rate": 9.305218058836778e-06, - "loss": 0.0165, - "step": 1593 - }, - { - "epoch": 0.4232607541157727, - "grad_norm": 0.8469708090699332, - "learning_rate": 9.304101223451209e-06, - "loss": 0.0391, - "step": 1594 - }, - { - "epoch": 0.4235262878385555, - "grad_norm": 0.9742488100637656, - "learning_rate": 9.302983558285686e-06, - "loss": 0.1738, - "step": 1595 - }, - { - "epoch": 0.42379182156133827, - "grad_norm": 0.3807711068184408, - "learning_rate": 9.301865063555681e-06, - "loss": 0.0255, - "step": 1596 - }, - { - "epoch": 0.4240573552841211, - "grad_norm": 0.1865814006843838, - "learning_rate": 9.30074573947683e-06, - "loss": 0.0146, - "step": 1597 - }, - { - "epoch": 0.4243228890069039, - "grad_norm": 0.9795085871771619, - "learning_rate": 9.299625586264918e-06, - "loss": 0.1455, - "step": 1598 - }, - { - "epoch": 0.42458842272968667, - "grad_norm": 0.8224575743387007, - "learning_rate": 9.298504604135906e-06, - "loss": 0.1641, - "step": 1599 - }, - { - "epoch": 0.4248539564524695, - "grad_norm": 0.08459635905989443, - "learning_rate": 9.297382793305902e-06, - "loss": 0.0017, - "step": 1600 - }, - { - "epoch": 0.42511949017525225, - "grad_norm": 0.9247975416587106, - "learning_rate": 9.29626015399118e-06, - "loss": 0.0525, - "step": 1601 - }, - { - "epoch": 0.42538502389803506, - "grad_norm": 0.9030669338926891, - "learning_rate": 9.295136686408165e-06, - "loss": 0.0815, - "step": 1602 - }, - { - "epoch": 0.4256505576208178, - "grad_norm": 0.176488322331327, - "learning_rate": 9.294012390773457e-06, - "loss": 0.0184, - "step": 1603 - }, - { - "epoch": 0.42591609134360064, - "grad_norm": 0.7575042176122994, - "learning_rate": 9.292887267303803e-06, - "loss": 0.1113, - "step": 1604 - }, - { - "epoch": 0.4261816250663834, - "grad_norm": 2.8198240780302513, - "learning_rate": 9.291761316216115e-06, - "loss": 0.0889, - "step": 1605 - }, - { - "epoch": 0.4264471587891662, - "grad_norm": 
0.80505821260424, - "learning_rate": 9.290634537727459e-06, - "loss": 0.0889, - "step": 1606 - }, - { - "epoch": 0.42671269251194904, - "grad_norm": 0.4004451010986326, - "learning_rate": 9.28950693205507e-06, - "loss": 0.0092, - "step": 1607 - }, - { - "epoch": 0.4269782262347318, - "grad_norm": 0.05174174947474149, - "learning_rate": 9.288378499416334e-06, - "loss": 0.0013, - "step": 1608 - }, - { - "epoch": 0.4272437599575146, - "grad_norm": 0.2673693573498579, - "learning_rate": 9.287249240028799e-06, - "loss": 0.0258, - "step": 1609 - }, - { - "epoch": 0.4275092936802974, - "grad_norm": 0.20068759525709715, - "learning_rate": 9.286119154110173e-06, - "loss": 0.0051, - "step": 1610 - }, - { - "epoch": 0.4277748274030802, - "grad_norm": 0.2052076895631866, - "learning_rate": 9.284988241878326e-06, - "loss": 0.0206, - "step": 1611 - }, - { - "epoch": 0.42804036112586297, - "grad_norm": 0.1826457677627445, - "learning_rate": 9.283856503551283e-06, - "loss": 0.0165, - "step": 1612 - }, - { - "epoch": 0.4283058948486458, - "grad_norm": 0.016102516202645443, - "learning_rate": 9.282723939347227e-06, - "loss": 0.0003, - "step": 1613 - }, - { - "epoch": 0.42857142857142855, - "grad_norm": 0.06556414219099314, - "learning_rate": 9.281590549484508e-06, - "loss": 0.0015, - "step": 1614 - }, - { - "epoch": 0.42883696229421137, - "grad_norm": 0.6492833856406688, - "learning_rate": 9.280456334181626e-06, - "loss": 0.1035, - "step": 1615 - }, - { - "epoch": 0.4291024960169942, - "grad_norm": 0.26676720638479, - "learning_rate": 9.279321293657248e-06, - "loss": 0.0255, - "step": 1616 - }, - { - "epoch": 0.42936802973977695, - "grad_norm": 1.0528365853705146, - "learning_rate": 9.278185428130194e-06, - "loss": 0.0476, - "step": 1617 - }, - { - "epoch": 0.42963356346255976, - "grad_norm": 0.300356633549314, - "learning_rate": 9.277048737819445e-06, - "loss": 0.0255, - "step": 1618 - }, - { - "epoch": 0.4298990971853425, - "grad_norm": 0.28681150202389294, - "learning_rate": 9.275911222944143e-06, - "loss": 0.0317, - "step": 1619 - }, - { - "epoch": 0.43016463090812534, - "grad_norm": 0.41351911300181254, - "learning_rate": 9.274772883723587e-06, - "loss": 0.0317, - "step": 1620 - }, - { - "epoch": 0.4304301646309081, - "grad_norm": 0.3435709971804024, - "learning_rate": 9.273633720377235e-06, - "loss": 0.0354, - "step": 1621 - }, - { - "epoch": 0.4306956983536909, - "grad_norm": 0.006280834272583653, - "learning_rate": 9.272493733124706e-06, - "loss": 0.0002, - "step": 1622 - }, - { - "epoch": 0.4309612320764737, - "grad_norm": 0.31013270569482426, - "learning_rate": 9.271352922185772e-06, - "loss": 0.0286, - "step": 1623 - }, - { - "epoch": 0.4312267657992565, - "grad_norm": 0.003193536264216083, - "learning_rate": 9.270211287780374e-06, - "loss": 0.0001, - "step": 1624 - }, - { - "epoch": 0.4314922995220393, - "grad_norm": 1.2572420728217566, - "learning_rate": 9.269068830128599e-06, - "loss": 0.0815, - "step": 1625 - }, - { - "epoch": 0.4317578332448221, - "grad_norm": 0.16424497737231764, - "learning_rate": 9.267925549450703e-06, - "loss": 0.0092, - "step": 1626 - }, - { - "epoch": 0.4320233669676049, - "grad_norm": 0.0051496845292951345, - "learning_rate": 9.266781445967098e-06, - "loss": 0.0001, - "step": 1627 - }, - { - "epoch": 0.43228890069038767, - "grad_norm": 0.4131524580991801, - "learning_rate": 9.26563651989835e-06, - "loss": 0.0255, - "step": 1628 - }, - { - "epoch": 0.4325544344131705, - "grad_norm": 0.005905247669898074, - "learning_rate": 9.264490771465191e-06, - "loss": 0.0001, - 
"step": 1629 - }, - { - "epoch": 0.43281996813595325, - "grad_norm": 0.11239806675648503, - "learning_rate": 9.263344200888507e-06, - "loss": 0.005, - "step": 1630 - }, - { - "epoch": 0.43308550185873607, - "grad_norm": 1.9861947240668674, - "learning_rate": 9.26219680838934e-06, - "loss": 0.1279, - "step": 1631 - }, - { - "epoch": 0.43335103558151883, - "grad_norm": 0.1940044319795506, - "learning_rate": 9.2610485941889e-06, - "loss": 0.0131, - "step": 1632 - }, - { - "epoch": 0.43361656930430165, - "grad_norm": 0.2272907921942844, - "learning_rate": 9.259899558508543e-06, - "loss": 0.0205, - "step": 1633 - }, - { - "epoch": 0.43388210302708446, - "grad_norm": 0.4199007338504671, - "learning_rate": 9.258749701569793e-06, - "loss": 0.0092, - "step": 1634 - }, - { - "epoch": 0.4341476367498672, - "grad_norm": 0.5230506299864369, - "learning_rate": 9.257599023594326e-06, - "loss": 0.0352, - "step": 1635 - }, - { - "epoch": 0.43441317047265005, - "grad_norm": 1.7928341347867058, - "learning_rate": 9.256447524803983e-06, - "loss": 0.2129, - "step": 1636 - }, - { - "epoch": 0.4346787041954328, - "grad_norm": 1.0771613409897511, - "learning_rate": 9.255295205420757e-06, - "loss": 0.1035, - "step": 1637 - }, - { - "epoch": 0.4349442379182156, - "grad_norm": 0.23823363260187153, - "learning_rate": 9.254142065666802e-06, - "loss": 0.0229, - "step": 1638 - }, - { - "epoch": 0.4352097716409984, - "grad_norm": 1.7503348124699936, - "learning_rate": 9.252988105764428e-06, - "loss": 0.2129, - "step": 1639 - }, - { - "epoch": 0.4354753053637812, - "grad_norm": 0.002717147032072083, - "learning_rate": 9.251833325936108e-06, - "loss": 0.0001, - "step": 1640 - }, - { - "epoch": 0.43574083908656397, - "grad_norm": 0.43583660633350435, - "learning_rate": 9.25067772640447e-06, - "loss": 0.0229, - "step": 1641 - }, - { - "epoch": 0.4360063728093468, - "grad_norm": 0.01550132302821912, - "learning_rate": 9.249521307392294e-06, - "loss": 0.0003, - "step": 1642 - }, - { - "epoch": 0.4362719065321296, - "grad_norm": 0.21256823089610236, - "learning_rate": 9.248364069122531e-06, - "loss": 0.0146, - "step": 1643 - }, - { - "epoch": 0.43653744025491237, - "grad_norm": 0.2227015636760905, - "learning_rate": 9.247206011818281e-06, - "loss": 0.0229, - "step": 1644 - }, - { - "epoch": 0.4368029739776952, - "grad_norm": 0.3421213770839244, - "learning_rate": 9.246047135702802e-06, - "loss": 0.0286, - "step": 1645 - }, - { - "epoch": 0.43706850770047795, - "grad_norm": 0.2175367153596953, - "learning_rate": 9.244887440999515e-06, - "loss": 0.0056, - "step": 1646 - }, - { - "epoch": 0.43733404142326077, - "grad_norm": 0.2561588514828441, - "learning_rate": 9.24372692793199e-06, - "loss": 0.0255, - "step": 1647 - }, - { - "epoch": 0.43759957514604353, - "grad_norm": 0.5116398168106552, - "learning_rate": 9.242565596723965e-06, - "loss": 0.0103, - "step": 1648 - }, - { - "epoch": 0.43786510886882635, - "grad_norm": 0.004941866451653809, - "learning_rate": 9.241403447599329e-06, - "loss": 0.0001, - "step": 1649 - }, - { - "epoch": 0.4381306425916091, - "grad_norm": 0.07087433757924969, - "learning_rate": 9.24024048078213e-06, - "loss": 0.0015, - "step": 1650 - }, - { - "epoch": 0.4383961763143919, - "grad_norm": 0.33106687803013696, - "learning_rate": 9.239076696496577e-06, - "loss": 0.0354, - "step": 1651 - }, - { - "epoch": 0.43866171003717475, - "grad_norm": 0.0641176743240912, - "learning_rate": 9.237912094967031e-06, - "loss": 0.0012, - "step": 1652 - }, - { - "epoch": 0.4389272437599575, - "grad_norm": 
0.30760602554788097, - "learning_rate": 9.236746676418016e-06, - "loss": 0.0255, - "step": 1653 - }, - { - "epoch": 0.4391927774827403, - "grad_norm": 0.1564547333112549, - "learning_rate": 9.235580441074208e-06, - "loss": 0.0129, - "step": 1654 - }, - { - "epoch": 0.4394583112055231, - "grad_norm": 1.3722315058379413, - "learning_rate": 9.234413389160446e-06, - "loss": 0.1191, - "step": 1655 - }, - { - "epoch": 0.4397238449283059, - "grad_norm": 1.2711619824610763, - "learning_rate": 9.233245520901723e-06, - "loss": 0.0815, - "step": 1656 - }, - { - "epoch": 0.43998937865108867, - "grad_norm": 0.8074581064566986, - "learning_rate": 9.23207683652319e-06, - "loss": 0.0317, - "step": 1657 - }, - { - "epoch": 0.4402549123738715, - "grad_norm": 0.18847549880372136, - "learning_rate": 9.230907336250155e-06, - "loss": 0.0165, - "step": 1658 - }, - { - "epoch": 0.44052044609665425, - "grad_norm": 0.3653202272308303, - "learning_rate": 9.229737020308084e-06, - "loss": 0.0092, - "step": 1659 - }, - { - "epoch": 0.44078597981943707, - "grad_norm": 5.255587093506221, - "learning_rate": 9.228565888922602e-06, - "loss": 0.3984, - "step": 1660 - }, - { - "epoch": 0.4410515135422199, - "grad_norm": 0.3264226155423392, - "learning_rate": 9.227393942319487e-06, - "loss": 0.0255, - "step": 1661 - }, - { - "epoch": 0.44131704726500265, - "grad_norm": 0.34863206865018187, - "learning_rate": 9.226221180724677e-06, - "loss": 0.0205, - "step": 1662 - }, - { - "epoch": 0.44158258098778547, - "grad_norm": 0.28323093506885416, - "learning_rate": 9.225047604364268e-06, - "loss": 0.0229, - "step": 1663 - }, - { - "epoch": 0.44184811471056823, - "grad_norm": 0.17631014929000818, - "learning_rate": 9.223873213464511e-06, - "loss": 0.0115, - "step": 1664 - }, - { - "epoch": 0.44211364843335105, - "grad_norm": 1.099027808353995, - "learning_rate": 9.222698008251814e-06, - "loss": 0.0432, - "step": 1665 - }, - { - "epoch": 0.4423791821561338, - "grad_norm": 0.10350955443306387, - "learning_rate": 9.221521988952742e-06, - "loss": 0.0021, - "step": 1666 - }, - { - "epoch": 0.44264471587891663, - "grad_norm": 0.5117982387799549, - "learning_rate": 9.22034515579402e-06, - "loss": 0.0317, - "step": 1667 - }, - { - "epoch": 0.4429102496016994, - "grad_norm": 0.17219084419699793, - "learning_rate": 9.219167509002526e-06, - "loss": 0.0092, - "step": 1668 - }, - { - "epoch": 0.4431757833244822, - "grad_norm": 1.6574596891900364, - "learning_rate": 9.217989048805297e-06, - "loss": 0.0957, - "step": 1669 - }, - { - "epoch": 0.443441317047265, - "grad_norm": 1.3273618450366584, - "learning_rate": 9.216809775429524e-06, - "loss": 0.1836, - "step": 1670 - }, - { - "epoch": 0.4437068507700478, - "grad_norm": 1.7850762788827483, - "learning_rate": 9.215629689102561e-06, - "loss": 0.1113, - "step": 1671 - }, - { - "epoch": 0.4439723844928306, - "grad_norm": 0.0017107214111774336, - "learning_rate": 9.214448790051913e-06, - "loss": 0.0, - "step": 1672 - }, - { - "epoch": 0.44423791821561337, - "grad_norm": 0.28213652008092643, - "learning_rate": 9.213267078505243e-06, - "loss": 0.0072, - "step": 1673 - }, - { - "epoch": 0.4445034519383962, - "grad_norm": 0.1888622989632113, - "learning_rate": 9.21208455469037e-06, - "loss": 0.0164, - "step": 1674 - }, - { - "epoch": 0.44476898566117895, - "grad_norm": 2.354678143727365, - "learning_rate": 9.210901218835273e-06, - "loss": 0.1367, - "step": 1675 - }, - { - "epoch": 0.44503451938396177, - "grad_norm": 0.1314357064594671, - "learning_rate": 9.209717071168083e-06, - "loss": 0.0092, - "step": 
1676 - }, - { - "epoch": 0.44530005310674453, - "grad_norm": 2.3597528685967464, - "learning_rate": 9.208532111917092e-06, - "loss": 0.0957, - "step": 1677 - }, - { - "epoch": 0.44556558682952735, - "grad_norm": 0.015474073285452805, - "learning_rate": 9.207346341310744e-06, - "loss": 0.0003, - "step": 1678 - }, - { - "epoch": 0.44583112055231017, - "grad_norm": 1.151474305722994, - "learning_rate": 9.20615975957764e-06, - "loss": 0.0476, - "step": 1679 - }, - { - "epoch": 0.44609665427509293, - "grad_norm": 1.5488215701473362, - "learning_rate": 9.204972366946546e-06, - "loss": 0.2246, - "step": 1680 - }, - { - "epoch": 0.44636218799787575, - "grad_norm": 0.0018658514561948273, - "learning_rate": 9.203784163646368e-06, - "loss": 0.0, - "step": 1681 - }, - { - "epoch": 0.4466277217206585, - "grad_norm": 0.006199377876020424, - "learning_rate": 9.202595149906185e-06, - "loss": 0.0002, - "step": 1682 - }, - { - "epoch": 0.44689325544344133, - "grad_norm": 0.0004928732427429256, - "learning_rate": 9.201405325955222e-06, - "loss": 0.0, - "step": 1683 - }, - { - "epoch": 0.4471587891662241, - "grad_norm": 1.3116595608924697, - "learning_rate": 9.200214692022862e-06, - "loss": 0.1367, - "step": 1684 - }, - { - "epoch": 0.4474243228890069, - "grad_norm": 0.27607253424721656, - "learning_rate": 9.199023248338648e-06, - "loss": 0.0205, - "step": 1685 - }, - { - "epoch": 0.44768985661178967, - "grad_norm": 0.8567058953910064, - "learning_rate": 9.197830995132275e-06, - "loss": 0.0206, - "step": 1686 - }, - { - "epoch": 0.4479553903345725, - "grad_norm": 3.4163189556120814, - "learning_rate": 9.196637932633593e-06, - "loss": 0.1553, - "step": 1687 - }, - { - "epoch": 0.4482209240573553, - "grad_norm": 0.5354290085411026, - "learning_rate": 9.195444061072612e-06, - "loss": 0.0352, - "step": 1688 - }, - { - "epoch": 0.44848645778013807, - "grad_norm": 0.24446326873599736, - "learning_rate": 9.194249380679498e-06, - "loss": 0.0205, - "step": 1689 - }, - { - "epoch": 0.4487519915029209, - "grad_norm": 2.035095641007781, - "learning_rate": 9.19305389168457e-06, - "loss": 0.0688, - "step": 1690 - }, - { - "epoch": 0.44901752522570365, - "grad_norm": 0.0008471435364572292, - "learning_rate": 9.191857594318304e-06, - "loss": 0.0, - "step": 1691 - }, - { - "epoch": 0.44928305894848647, - "grad_norm": 3.829454052397207, - "learning_rate": 9.190660488811332e-06, - "loss": 0.5664, - "step": 1692 - }, - { - "epoch": 0.44954859267126923, - "grad_norm": 1.1644189483281735, - "learning_rate": 9.189462575394443e-06, - "loss": 0.1113, - "step": 1693 - }, - { - "epoch": 0.44981412639405205, - "grad_norm": 0.6076990567042442, - "learning_rate": 9.188263854298578e-06, - "loss": 0.0476, - "step": 1694 - }, - { - "epoch": 0.4500796601168348, - "grad_norm": 0.22821643461061178, - "learning_rate": 9.187064325754838e-06, - "loss": 0.0229, - "step": 1695 - }, - { - "epoch": 0.45034519383961763, - "grad_norm": 0.517920311097021, - "learning_rate": 9.185863989994479e-06, - "loss": 0.0432, - "step": 1696 - }, - { - "epoch": 0.45061072756240045, - "grad_norm": 1.6939168341092024, - "learning_rate": 9.18466284724891e-06, - "loss": 0.1035, - "step": 1697 - }, - { - "epoch": 0.4508762612851832, - "grad_norm": 0.2848964703780022, - "learning_rate": 9.183460897749697e-06, - "loss": 0.0229, - "step": 1698 - }, - { - "epoch": 0.45114179500796603, - "grad_norm": 0.7200198808392976, - "learning_rate": 9.182258141728563e-06, - "loss": 0.1455, - "step": 1699 - }, - { - "epoch": 0.4514073287307488, - "grad_norm": 0.6382191231542441, - 
"learning_rate": 9.181054579417384e-06, - "loss": 0.0525, - "step": 1700 - }, - { - "epoch": 0.4516728624535316, - "grad_norm": 0.7739371440317542, - "learning_rate": 9.179850211048193e-06, - "loss": 0.1279, - "step": 1701 - }, - { - "epoch": 0.45193839617631437, - "grad_norm": 0.05514363697004764, - "learning_rate": 9.178645036853177e-06, - "loss": 0.0017, - "step": 1702 - }, - { - "epoch": 0.4522039298990972, - "grad_norm": 0.7474045741866328, - "learning_rate": 9.177439057064684e-06, - "loss": 0.1279, - "step": 1703 - }, - { - "epoch": 0.45246946362187995, - "grad_norm": 0.25830195125237543, - "learning_rate": 9.176232271915206e-06, - "loss": 0.0255, - "step": 1704 - }, - { - "epoch": 0.45273499734466277, - "grad_norm": 0.08723998970372462, - "learning_rate": 9.175024681637401e-06, - "loss": 0.0031, - "step": 1705 - }, - { - "epoch": 0.4530005310674456, - "grad_norm": 0.7724681106268992, - "learning_rate": 9.173816286464077e-06, - "loss": 0.0576, - "step": 1706 - }, - { - "epoch": 0.45326606479022835, - "grad_norm": 0.2962473622329823, - "learning_rate": 9.172607086628197e-06, - "loss": 0.0286, - "step": 1707 - }, - { - "epoch": 0.45353159851301117, - "grad_norm": 0.5255810866203752, - "learning_rate": 9.171397082362885e-06, - "loss": 0.0286, - "step": 1708 - }, - { - "epoch": 0.45379713223579393, - "grad_norm": 0.527170789292327, - "learning_rate": 9.170186273901411e-06, - "loss": 0.0286, - "step": 1709 - }, - { - "epoch": 0.45406266595857675, - "grad_norm": 0.38467745191339814, - "learning_rate": 9.168974661477206e-06, - "loss": 0.0206, - "step": 1710 - }, - { - "epoch": 0.4543281996813595, - "grad_norm": 0.29047817939552817, - "learning_rate": 9.167762245323855e-06, - "loss": 0.0354, - "step": 1711 - }, - { - "epoch": 0.45459373340414233, - "grad_norm": 0.834962595717288, - "learning_rate": 9.166549025675098e-06, - "loss": 0.0576, - "step": 1712 - }, - { - "epoch": 0.4548592671269251, - "grad_norm": 0.8325104633372797, - "learning_rate": 9.165335002764828e-06, - "loss": 0.0889, - "step": 1713 - }, - { - "epoch": 0.4551248008497079, - "grad_norm": 0.29406829163167375, - "learning_rate": 9.164120176827097e-06, - "loss": 0.0317, - "step": 1714 - }, - { - "epoch": 0.45539033457249073, - "grad_norm": 0.8674015109840324, - "learning_rate": 9.162904548096103e-06, - "loss": 0.1191, - "step": 1715 - }, - { - "epoch": 0.4556558682952735, - "grad_norm": 0.2980629365474458, - "learning_rate": 9.16168811680621e-06, - "loss": 0.0354, - "step": 1716 - }, - { - "epoch": 0.4559214020180563, - "grad_norm": 0.236925754930231, - "learning_rate": 9.160470883191934e-06, - "loss": 0.0286, - "step": 1717 - }, - { - "epoch": 0.4561869357408391, - "grad_norm": 0.21775609808125498, - "learning_rate": 9.159252847487939e-06, - "loss": 0.0255, - "step": 1718 - }, - { - "epoch": 0.4564524694636219, - "grad_norm": 0.05357758947992938, - "learning_rate": 9.158034009929046e-06, - "loss": 0.0019, - "step": 1719 - }, - { - "epoch": 0.45671800318640465, - "grad_norm": 0.7295630149525557, - "learning_rate": 9.156814370750238e-06, - "loss": 0.0525, - "step": 1720 - }, - { - "epoch": 0.45698353690918747, - "grad_norm": 0.06168367786760944, - "learning_rate": 9.155593930186644e-06, - "loss": 0.0024, - "step": 1721 - }, - { - "epoch": 0.45724907063197023, - "grad_norm": 1.351984217373161, - "learning_rate": 9.15437268847355e-06, - "loss": 0.0815, - "step": 1722 - }, - { - "epoch": 0.45751460435475305, - "grad_norm": 0.10497376351174942, - "learning_rate": 9.153150645846399e-06, - "loss": 0.0036, - "step": 1723 - }, - { - 
"epoch": 0.45778013807753587, - "grad_norm": 0.18119641858710658, - "learning_rate": 9.151927802540783e-06, - "loss": 0.0045, - "step": 1724 - }, - { - "epoch": 0.45804567180031863, - "grad_norm": 0.9837813441272035, - "learning_rate": 9.150704158792456e-06, - "loss": 0.0688, - "step": 1725 - }, - { - "epoch": 0.45831120552310145, - "grad_norm": 0.8532756218779173, - "learning_rate": 9.14947971483732e-06, - "loss": 0.0576, - "step": 1726 - }, - { - "epoch": 0.4585767392458842, - "grad_norm": 0.6798270406461934, - "learning_rate": 9.148254470911434e-06, - "loss": 0.1113, - "step": 1727 - }, - { - "epoch": 0.45884227296866703, - "grad_norm": 0.33188951195468896, - "learning_rate": 9.14702842725101e-06, - "loss": 0.0317, - "step": 1728 - }, - { - "epoch": 0.4591078066914498, - "grad_norm": 0.9547380036001121, - "learning_rate": 9.145801584092415e-06, - "loss": 0.0688, - "step": 1729 - }, - { - "epoch": 0.4593733404142326, - "grad_norm": 1.121208129214251, - "learning_rate": 9.144573941672169e-06, - "loss": 0.0317, - "step": 1730 - }, - { - "epoch": 0.4596388741370154, - "grad_norm": 0.018532306902938646, - "learning_rate": 9.143345500226948e-06, - "loss": 0.0006, - "step": 1731 - }, - { - "epoch": 0.4599044078597982, - "grad_norm": 0.6486549833157388, - "learning_rate": 9.14211625999358e-06, - "loss": 0.1279, - "step": 1732 - }, - { - "epoch": 0.460169941582581, - "grad_norm": 0.2387504469567946, - "learning_rate": 9.14088622120905e-06, - "loss": 0.0082, - "step": 1733 - }, - { - "epoch": 0.4604354753053638, - "grad_norm": 2.1639487181238826, - "learning_rate": 9.139655384110494e-06, - "loss": 0.1641, - "step": 1734 - }, - { - "epoch": 0.4607010090281466, - "grad_norm": 0.6344741659386001, - "learning_rate": 9.1384237489352e-06, - "loss": 0.1113, - "step": 1735 - }, - { - "epoch": 0.46096654275092935, - "grad_norm": 0.6929723214303033, - "learning_rate": 9.13719131592062e-06, - "loss": 0.0889, - "step": 1736 - }, - { - "epoch": 0.46123207647371217, - "grad_norm": 0.38365443204668837, - "learning_rate": 9.135958085304345e-06, - "loss": 0.0432, - "step": 1737 - }, - { - "epoch": 0.46149761019649493, - "grad_norm": 0.4578266373131249, - "learning_rate": 9.134724057324132e-06, - "loss": 0.0476, - "step": 1738 - }, - { - "epoch": 0.46176314391927775, - "grad_norm": 0.3533084463962813, - "learning_rate": 9.133489232217882e-06, - "loss": 0.0103, - "step": 1739 - }, - { - "epoch": 0.4620286776420605, - "grad_norm": 0.3101849952354096, - "learning_rate": 9.13225361022366e-06, - "loss": 0.0391, - "step": 1740 - }, - { - "epoch": 0.46229421136484333, - "grad_norm": 1.3062414453586326, - "learning_rate": 9.131017191579679e-06, - "loss": 0.1035, - "step": 1741 - }, - { - "epoch": 0.46255974508762615, - "grad_norm": 0.28116099076278234, - "learning_rate": 9.129779976524303e-06, - "loss": 0.0354, - "step": 1742 - }, - { - "epoch": 0.4628252788104089, - "grad_norm": 0.31019250951818894, - "learning_rate": 9.128541965296051e-06, - "loss": 0.0391, - "step": 1743 - }, - { - "epoch": 0.46309081253319173, - "grad_norm": 1.032116360292696, - "learning_rate": 9.127303158133602e-06, - "loss": 0.0752, - "step": 1744 - }, - { - "epoch": 0.4633563462559745, - "grad_norm": 0.6052388800690558, - "learning_rate": 9.126063555275782e-06, - "loss": 0.0576, - "step": 1745 - }, - { - "epoch": 0.4636218799787573, - "grad_norm": 0.04310564970851126, - "learning_rate": 9.12482315696157e-06, - "loss": 0.0015, - "step": 1746 - }, - { - "epoch": 0.4638874137015401, - "grad_norm": 0.7380757773397368, - "learning_rate": 
9.123581963430102e-06, - "loss": 0.063, - "step": 1747 - }, - { - "epoch": 0.4641529474243229, - "grad_norm": 0.8495163211366225, - "learning_rate": 9.122339974920664e-06, - "loss": 0.0688, - "step": 1748 - }, - { - "epoch": 0.46441848114710566, - "grad_norm": 0.17953121426797672, - "learning_rate": 9.121097191672697e-06, - "loss": 0.0082, - "step": 1749 - }, - { - "epoch": 0.4646840148698885, - "grad_norm": 0.6531547283200894, - "learning_rate": 9.119853613925796e-06, - "loss": 0.1367, - "step": 1750 - }, - { - "epoch": 0.4649495485926713, - "grad_norm": 1.3480427367731387, - "learning_rate": 9.118609241919707e-06, - "loss": 0.0688, - "step": 1751 - }, - { - "epoch": 0.46521508231545405, - "grad_norm": 0.5732194141376209, - "learning_rate": 9.11736407589433e-06, - "loss": 0.1191, - "step": 1752 - }, - { - "epoch": 0.46548061603823687, - "grad_norm": 0.615762764256231, - "learning_rate": 9.11611811608972e-06, - "loss": 0.1113, - "step": 1753 - }, - { - "epoch": 0.46574614976101963, - "grad_norm": 0.7635883205751591, - "learning_rate": 9.11487136274608e-06, - "loss": 0.0693, - "step": 1754 - }, - { - "epoch": 0.46601168348380245, - "grad_norm": 0.5733271972141396, - "learning_rate": 9.113623816103775e-06, - "loss": 0.0889, - "step": 1755 - }, - { - "epoch": 0.4662772172065852, - "grad_norm": 0.23390235066147239, - "learning_rate": 9.112375476403313e-06, - "loss": 0.0317, - "step": 1756 - }, - { - "epoch": 0.46654275092936803, - "grad_norm": 0.627686461965525, - "learning_rate": 9.111126343885359e-06, - "loss": 0.0752, - "step": 1757 - }, - { - "epoch": 0.4668082846521508, - "grad_norm": 1.1290944290448022, - "learning_rate": 9.109876418790733e-06, - "loss": 0.1113, - "step": 1758 - }, - { - "epoch": 0.4670738183749336, - "grad_norm": 0.5093788174002621, - "learning_rate": 9.108625701360403e-06, - "loss": 0.0957, - "step": 1759 - }, - { - "epoch": 0.46733935209771643, - "grad_norm": 0.5396898654005425, - "learning_rate": 9.107374191835496e-06, - "loss": 0.0752, - "step": 1760 - }, - { - "epoch": 0.4676048858204992, - "grad_norm": 0.31226491832256825, - "learning_rate": 9.106121890457289e-06, - "loss": 0.0432, - "step": 1761 - }, - { - "epoch": 0.467870419543282, - "grad_norm": 0.5714355485781667, - "learning_rate": 9.104868797467206e-06, - "loss": 0.0286, - "step": 1762 - }, - { - "epoch": 0.4681359532660648, - "grad_norm": 0.2957638989705724, - "learning_rate": 9.103614913106832e-06, - "loss": 0.0432, - "step": 1763 - }, - { - "epoch": 0.4684014869888476, - "grad_norm": 0.292588618562859, - "learning_rate": 9.1023602376179e-06, - "loss": 0.0432, - "step": 1764 - }, - { - "epoch": 0.46866702071163036, - "grad_norm": 0.33751587589467996, - "learning_rate": 9.101104771242297e-06, - "loss": 0.0525, - "step": 1765 - }, - { - "epoch": 0.4689325544344132, - "grad_norm": 0.394806774742472, - "learning_rate": 9.099848514222063e-06, - "loss": 0.0579, - "step": 1766 - }, - { - "epoch": 0.469198088157196, - "grad_norm": 0.30284921769372897, - "learning_rate": 9.098591466799388e-06, - "loss": 0.0479, - "step": 1767 - }, - { - "epoch": 0.46946362187997875, - "grad_norm": 0.06033720701590172, - "learning_rate": 9.097333629216618e-06, - "loss": 0.0019, - "step": 1768 - }, - { - "epoch": 0.46972915560276157, - "grad_norm": 0.6433340242166607, - "learning_rate": 9.096075001716245e-06, - "loss": 0.082, - "step": 1769 - }, - { - "epoch": 0.46999468932554433, - "grad_norm": 0.47188366036526186, - "learning_rate": 9.094815584540922e-06, - "loss": 0.063, - "step": 1770 - }, - { - "epoch": 0.47026022304832715, - 
"grad_norm": 0.48271838829532376, - "learning_rate": 9.093555377933449e-06, - "loss": 0.0957, - "step": 1771 - }, - { - "epoch": 0.4705257567711099, - "grad_norm": 0.47768086513551467, - "learning_rate": 9.092294382136778e-06, - "loss": 0.0889, - "step": 1772 - }, - { - "epoch": 0.47079129049389273, - "grad_norm": 0.28143225047541554, - "learning_rate": 9.091032597394012e-06, - "loss": 0.0432, - "step": 1773 - }, - { - "epoch": 0.4710568242166755, - "grad_norm": 0.3048274878976099, - "learning_rate": 9.089770023948413e-06, - "loss": 0.0132, - "step": 1774 - }, - { - "epoch": 0.4713223579394583, - "grad_norm": 0.4934823224302044, - "learning_rate": 9.088506662043387e-06, - "loss": 0.0688, - "step": 1775 - }, - { - "epoch": 0.47158789166224113, - "grad_norm": 0.4824507932270506, - "learning_rate": 9.087242511922497e-06, - "loss": 0.0889, - "step": 1776 - }, - { - "epoch": 0.4718534253850239, - "grad_norm": 2.06956586384691, - "learning_rate": 9.085977573829453e-06, - "loss": 0.1367, - "step": 1777 - }, - { - "epoch": 0.4721189591078067, - "grad_norm": 0.16024574451614348, - "learning_rate": 9.084711848008122e-06, - "loss": 0.0065, - "step": 1778 - }, - { - "epoch": 0.4723844928305895, - "grad_norm": 0.392020268202222, - "learning_rate": 9.083445334702525e-06, - "loss": 0.0317, - "step": 1779 - }, - { - "epoch": 0.4726500265533723, - "grad_norm": 0.2638470201441458, - "learning_rate": 9.082178034156823e-06, - "loss": 0.0391, - "step": 1780 - }, - { - "epoch": 0.47291556027615506, - "grad_norm": 0.35572403768500227, - "learning_rate": 9.080909946615344e-06, - "loss": 0.0525, - "step": 1781 - }, - { - "epoch": 0.4731810939989379, - "grad_norm": 0.1322872189240058, - "learning_rate": 9.079641072322555e-06, - "loss": 0.0045, - "step": 1782 - }, - { - "epoch": 0.47344662772172064, - "grad_norm": 0.4683993996526506, - "learning_rate": 9.078371411523085e-06, - "loss": 0.0889, - "step": 1783 - }, - { - "epoch": 0.47371216144450345, - "grad_norm": 0.05846428329547589, - "learning_rate": 9.077100964461705e-06, - "loss": 0.0017, - "step": 1784 - }, - { - "epoch": 0.4739776951672863, - "grad_norm": 0.4950840613938997, - "learning_rate": 9.075829731383342e-06, - "loss": 0.082, - "step": 1785 - }, - { - "epoch": 0.47424322889006904, - "grad_norm": 0.409989728413392, - "learning_rate": 9.07455771253308e-06, - "loss": 0.0525, - "step": 1786 - }, - { - "epoch": 0.47450876261285185, - "grad_norm": 0.540891443973615, - "learning_rate": 9.073284908156145e-06, - "loss": 0.0476, - "step": 1787 - }, - { - "epoch": 0.4747742963356346, - "grad_norm": 0.013696281162261802, - "learning_rate": 9.07201131849792e-06, - "loss": 0.0004, - "step": 1788 - }, - { - "epoch": 0.47503983005841743, - "grad_norm": 0.23943299662174408, - "learning_rate": 9.070736943803937e-06, - "loss": 0.0104, - "step": 1789 - }, - { - "epoch": 0.4753053637812002, - "grad_norm": 0.4895048851505992, - "learning_rate": 9.069461784319882e-06, - "loss": 0.0432, - "step": 1790 - }, - { - "epoch": 0.475570897503983, - "grad_norm": 0.10004712225856449, - "learning_rate": 9.068185840291588e-06, - "loss": 0.0025, - "step": 1791 - }, - { - "epoch": 0.4758364312267658, - "grad_norm": 0.159183886065426, - "learning_rate": 9.066909111965046e-06, - "loss": 0.0041, - "step": 1792 - }, - { - "epoch": 0.4761019649495486, - "grad_norm": 0.5751229116192297, - "learning_rate": 9.065631599586392e-06, - "loss": 0.0752, - "step": 1793 - }, - { - "epoch": 0.4763674986723314, - "grad_norm": 0.2988995897362944, - "learning_rate": 9.064353303401912e-06, - "loss": 0.0432, - 
"step": 1794 - }, - { - "epoch": 0.4766330323951142, - "grad_norm": 0.42552366259459906, - "learning_rate": 9.063074223658052e-06, - "loss": 0.0693, - "step": 1795 - }, - { - "epoch": 0.476898566117897, - "grad_norm": 1.4837334028705988, - "learning_rate": 9.061794360601401e-06, - "loss": 0.1113, - "step": 1796 - }, - { - "epoch": 0.47716409984067976, - "grad_norm": 0.6192662336206393, - "learning_rate": 9.060513714478701e-06, - "loss": 0.0752, - "step": 1797 - }, - { - "epoch": 0.4774296335634626, - "grad_norm": 0.321229007267076, - "learning_rate": 9.059232285536848e-06, - "loss": 0.0476, - "step": 1798 - }, - { - "epoch": 0.47769516728624534, - "grad_norm": 0.2890108799839838, - "learning_rate": 9.057950074022884e-06, - "loss": 0.0432, - "step": 1799 - }, - { - "epoch": 0.47796070100902815, - "grad_norm": 0.07149010496675735, - "learning_rate": 9.056667080184004e-06, - "loss": 0.0025, - "step": 1800 - }, - { - "epoch": 0.4782262347318109, - "grad_norm": 0.39389094255878604, - "learning_rate": 9.055383304267555e-06, - "loss": 0.0258, - "step": 1801 - }, - { - "epoch": 0.47849176845459374, - "grad_norm": 0.33439676094633747, - "learning_rate": 9.054098746521033e-06, - "loss": 0.0479, - "step": 1802 - }, - { - "epoch": 0.47875730217737655, - "grad_norm": 0.4026126537880819, - "learning_rate": 9.052813407192087e-06, - "loss": 0.0525, - "step": 1803 - }, - { - "epoch": 0.4790228359001593, - "grad_norm": 0.13383065541461073, - "learning_rate": 9.051527286528514e-06, - "loss": 0.004, - "step": 1804 - }, - { - "epoch": 0.47928836962294213, - "grad_norm": 0.45913557201091415, - "learning_rate": 9.050240384778265e-06, - "loss": 0.0525, - "step": 1805 - }, - { - "epoch": 0.4795539033457249, - "grad_norm": 0.32449255612491684, - "learning_rate": 9.048952702189437e-06, - "loss": 0.0131, - "step": 1806 - }, - { - "epoch": 0.4798194370685077, - "grad_norm": 0.23468227496683847, - "learning_rate": 9.047664239010281e-06, - "loss": 0.0317, - "step": 1807 - }, - { - "epoch": 0.4800849707912905, - "grad_norm": 0.4626085648207346, - "learning_rate": 9.046374995489197e-06, - "loss": 0.0576, - "step": 1808 - }, - { - "epoch": 0.4803505045140733, - "grad_norm": 0.2436457613182265, - "learning_rate": 9.045084971874738e-06, - "loss": 0.0354, - "step": 1809 - }, - { - "epoch": 0.48061603823685606, - "grad_norm": 0.6559558699245994, - "learning_rate": 9.043794168415603e-06, - "loss": 0.0206, - "step": 1810 - }, - { - "epoch": 0.4808815719596389, - "grad_norm": 1.0632918592533553, - "learning_rate": 9.042502585360644e-06, - "loss": 0.0815, - "step": 1811 - }, - { - "epoch": 0.4811471056824217, - "grad_norm": 0.24478371121705456, - "learning_rate": 9.041210222958863e-06, - "loss": 0.0286, - "step": 1812 - }, - { - "epoch": 0.48141263940520446, - "grad_norm": 1.4444380543418034, - "learning_rate": 9.039917081459415e-06, - "loss": 0.0986, - "step": 1813 - }, - { - "epoch": 0.4816781731279873, - "grad_norm": 0.7045925020545539, - "learning_rate": 9.038623161111597e-06, - "loss": 0.1035, - "step": 1814 - }, - { - "epoch": 0.48194370685077004, - "grad_norm": 0.6197386448287133, - "learning_rate": 9.037328462164866e-06, - "loss": 0.1113, - "step": 1815 - }, - { - "epoch": 0.48220924057355286, - "grad_norm": 0.1857729974346812, - "learning_rate": 9.036032984868825e-06, - "loss": 0.0206, - "step": 1816 - }, - { - "epoch": 0.4824747742963356, - "grad_norm": 0.24507656947520706, - "learning_rate": 9.034736729473223e-06, - "loss": 0.0286, - "step": 1817 - }, - { - "epoch": 0.48274030801911844, - "grad_norm": 
0.18927032479525302, - "learning_rate": 9.033439696227966e-06, - "loss": 0.0258, - "step": 1818 - }, - { - "epoch": 0.4830058417419012, - "grad_norm": 0.06063697384987017, - "learning_rate": 9.032141885383106e-06, - "loss": 0.0017, - "step": 1819 - }, - { - "epoch": 0.483271375464684, - "grad_norm": 0.22908240400686025, - "learning_rate": 9.030843297188842e-06, - "loss": 0.0286, - "step": 1820 - }, - { - "epoch": 0.48353690918746683, - "grad_norm": 0.029382370073099087, - "learning_rate": 9.029543931895533e-06, - "loss": 0.0007, - "step": 1821 - }, - { - "epoch": 0.4838024429102496, - "grad_norm": 4.222452192211459, - "learning_rate": 9.028243789753676e-06, - "loss": 0.2129, - "step": 1822 - }, - { - "epoch": 0.4840679766330324, - "grad_norm": 0.2060789874561621, - "learning_rate": 9.026942871013927e-06, - "loss": 0.0057, - "step": 1823 - }, - { - "epoch": 0.4843335103558152, - "grad_norm": 0.6395860695177051, - "learning_rate": 9.025641175927084e-06, - "loss": 0.0525, - "step": 1824 - }, - { - "epoch": 0.484599044078598, - "grad_norm": 0.6417176946489609, - "learning_rate": 9.024338704744102e-06, - "loss": 0.0476, - "step": 1825 - }, - { - "epoch": 0.48486457780138076, - "grad_norm": 0.1480376723553451, - "learning_rate": 9.02303545771608e-06, - "loss": 0.0184, - "step": 1826 - }, - { - "epoch": 0.4851301115241636, - "grad_norm": 1.0502927779842128, - "learning_rate": 9.021731435094267e-06, - "loss": 0.1367, - "step": 1827 - }, - { - "epoch": 0.48539564524694634, - "grad_norm": 0.4100889619332376, - "learning_rate": 9.020426637130069e-06, - "loss": 0.0184, - "step": 1828 - }, - { - "epoch": 0.48566117896972916, - "grad_norm": 1.263025388192655, - "learning_rate": 9.01912106407503e-06, - "loss": 0.1035, - "step": 1829 - }, - { - "epoch": 0.485926712692512, - "grad_norm": 0.6821914603573469, - "learning_rate": 9.01781471618085e-06, - "loss": 0.1641, - "step": 1830 - }, - { - "epoch": 0.48619224641529474, - "grad_norm": 0.1830730022933736, - "learning_rate": 9.016507593699379e-06, - "loss": 0.0231, - "step": 1831 - }, - { - "epoch": 0.48645778013807756, - "grad_norm": 0.1500688928562703, - "learning_rate": 9.015199696882614e-06, - "loss": 0.0184, - "step": 1832 - }, - { - "epoch": 0.4867233138608603, - "grad_norm": 0.19372694562652476, - "learning_rate": 9.013891025982704e-06, - "loss": 0.0231, - "step": 1833 - }, - { - "epoch": 0.48698884758364314, - "grad_norm": 0.7571475485333456, - "learning_rate": 9.01258158125194e-06, - "loss": 0.1279, - "step": 1834 - }, - { - "epoch": 0.4872543813064259, - "grad_norm": 0.6523517733514138, - "learning_rate": 9.011271362942774e-06, - "loss": 0.1279, - "step": 1835 - }, - { - "epoch": 0.4875199150292087, - "grad_norm": 0.2681899167683771, - "learning_rate": 9.009960371307798e-06, - "loss": 0.0286, - "step": 1836 - }, - { - "epoch": 0.4877854487519915, - "grad_norm": 0.7058528797659482, - "learning_rate": 9.008648606599755e-06, - "loss": 0.1455, - "step": 1837 - }, - { - "epoch": 0.4880509824747743, - "grad_norm": 0.12442443775061221, - "learning_rate": 9.007336069071537e-06, - "loss": 0.0039, - "step": 1838 - }, - { - "epoch": 0.4883165161975571, - "grad_norm": 0.20140444385699593, - "learning_rate": 9.00602275897619e-06, - "loss": 0.0286, - "step": 1839 - }, - { - "epoch": 0.4885820499203399, - "grad_norm": 0.5373289060422746, - "learning_rate": 9.0047086765669e-06, - "loss": 0.1113, - "step": 1840 - }, - { - "epoch": 0.4888475836431227, - "grad_norm": 0.705010285755384, - "learning_rate": 9.003393822097005e-06, - "loss": 0.0576, - "step": 1841 - }, - 
{ - "epoch": 0.48911311736590546, - "grad_norm": 0.20375285524703615, - "learning_rate": 9.002078195820002e-06, - "loss": 0.0286, - "step": 1842 - }, - { - "epoch": 0.4893786510886883, - "grad_norm": 0.2973123126251027, - "learning_rate": 9.000761797989521e-06, - "loss": 0.0391, - "step": 1843 - }, - { - "epoch": 0.48964418481147104, - "grad_norm": 0.5261967824579288, - "learning_rate": 8.99944462885935e-06, - "loss": 0.0391, - "step": 1844 - }, - { - "epoch": 0.48990971853425386, - "grad_norm": 0.641071008582391, - "learning_rate": 8.998126688683423e-06, - "loss": 0.0957, - "step": 1845 - }, - { - "epoch": 0.4901752522570366, - "grad_norm": 2.8556542247006456, - "learning_rate": 8.996807977715824e-06, - "loss": 0.1836, - "step": 1846 - }, - { - "epoch": 0.49044078597981944, - "grad_norm": 0.21588840632781164, - "learning_rate": 8.995488496210785e-06, - "loss": 0.0317, - "step": 1847 - }, - { - "epoch": 0.49070631970260226, - "grad_norm": 0.5200930577904636, - "learning_rate": 8.994168244422687e-06, - "loss": 0.0957, - "step": 1848 - }, - { - "epoch": 0.490971853425385, - "grad_norm": 0.5946617117384666, - "learning_rate": 8.992847222606059e-06, - "loss": 0.0815, - "step": 1849 - }, - { - "epoch": 0.49123738714816784, - "grad_norm": 0.5329049241163124, - "learning_rate": 8.991525431015575e-06, - "loss": 0.1035, - "step": 1850 - }, - { - "epoch": 0.4915029208709506, - "grad_norm": 0.13421039859666778, - "learning_rate": 8.990202869906067e-06, - "loss": 0.0038, - "step": 1851 - }, - { - "epoch": 0.4917684545937334, - "grad_norm": 0.21205384524729126, - "learning_rate": 8.988879539532502e-06, - "loss": 0.0035, - "step": 1852 - }, - { - "epoch": 0.4920339883165162, - "grad_norm": 0.3961762407408791, - "learning_rate": 8.987555440150009e-06, - "loss": 0.0576, - "step": 1853 - }, - { - "epoch": 0.492299522039299, - "grad_norm": 0.5124722957876611, - "learning_rate": 8.986230572013856e-06, - "loss": 0.0688, - "step": 1854 - }, - { - "epoch": 0.49256505576208176, - "grad_norm": 0.31830392157316406, - "learning_rate": 8.98490493537946e-06, - "loss": 0.0476, - "step": 1855 - }, - { - "epoch": 0.4928305894848646, - "grad_norm": 0.4475338780438215, - "learning_rate": 8.983578530502391e-06, - "loss": 0.0688, - "step": 1856 - }, - { - "epoch": 0.4930961232076474, - "grad_norm": 0.5131627119793176, - "learning_rate": 8.982251357638366e-06, - "loss": 0.0432, - "step": 1857 - }, - { - "epoch": 0.49336165693043016, - "grad_norm": 0.03746990293964691, - "learning_rate": 8.980923417043241e-06, - "loss": 0.001, - "step": 1858 - }, - { - "epoch": 0.493627190653213, - "grad_norm": 0.37701188868972424, - "learning_rate": 8.979594708973034e-06, - "loss": 0.0576, - "step": 1859 - }, - { - "epoch": 0.49389272437599574, - "grad_norm": 0.3246063141561153, - "learning_rate": 8.978265233683903e-06, - "loss": 0.0476, - "step": 1860 - }, - { - "epoch": 0.49415825809877856, - "grad_norm": 0.2529206787160113, - "learning_rate": 8.976934991432153e-06, - "loss": 0.0117, - "step": 1861 - }, - { - "epoch": 0.4944237918215613, - "grad_norm": 0.4707397561581439, - "learning_rate": 8.97560398247424e-06, - "loss": 0.0479, - "step": 1862 - }, - { - "epoch": 0.49468932554434414, - "grad_norm": 0.35709255896991166, - "learning_rate": 8.974272207066767e-06, - "loss": 0.0525, - "step": 1863 - }, - { - "epoch": 0.4949548592671269, - "grad_norm": 0.35165939869522234, - "learning_rate": 8.972939665466485e-06, - "loss": 0.0525, - "step": 1864 - }, - { - "epoch": 0.4952203929899097, - "grad_norm": 0.44447870911726006, - "learning_rate": 
8.971606357930293e-06, - "loss": 0.0576, - "step": 1865 - }, - { - "epoch": 0.49548592671269254, - "grad_norm": 0.01898523611296749, - "learning_rate": 8.970272284715234e-06, - "loss": 0.0005, - "step": 1866 - }, - { - "epoch": 0.4957514604354753, - "grad_norm": 0.4586388691281053, - "learning_rate": 8.968937446078505e-06, - "loss": 0.0688, - "step": 1867 - }, - { - "epoch": 0.4960169941582581, - "grad_norm": 0.35720555993159125, - "learning_rate": 8.967601842277445e-06, - "loss": 0.0525, - "step": 1868 - }, - { - "epoch": 0.4962825278810409, - "grad_norm": 0.36644379984601816, - "learning_rate": 8.966265473569545e-06, - "loss": 0.0525, - "step": 1869 - }, - { - "epoch": 0.4965480616038237, - "grad_norm": 0.011010370117200359, - "learning_rate": 8.964928340212436e-06, - "loss": 0.0003, - "step": 1870 - }, - { - "epoch": 0.49681359532660646, - "grad_norm": 0.5993583089811542, - "learning_rate": 8.963590442463907e-06, - "loss": 0.082, - "step": 1871 - }, - { - "epoch": 0.4970791290493893, - "grad_norm": 0.4181666595587938, - "learning_rate": 8.962251780581888e-06, - "loss": 0.0354, - "step": 1872 - }, - { - "epoch": 0.49734466277217204, - "grad_norm": 0.4221668256111686, - "learning_rate": 8.960912354824455e-06, - "loss": 0.0393, - "step": 1873 - }, - { - "epoch": 0.49761019649495486, - "grad_norm": 0.3977116516433932, - "learning_rate": 8.959572165449834e-06, - "loss": 0.0576, - "step": 1874 - }, - { - "epoch": 0.4978757302177377, - "grad_norm": 0.010735836440220152, - "learning_rate": 8.9582312127164e-06, - "loss": 0.0003, - "step": 1875 - }, - { - "epoch": 0.49814126394052044, - "grad_norm": 0.27808107918242736, - "learning_rate": 8.95688949688267e-06, - "loss": 0.0354, - "step": 1876 - }, - { - "epoch": 0.49840679766330326, - "grad_norm": 0.0016704416964712005, - "learning_rate": 8.955547018207311e-06, - "loss": 0.0, - "step": 1877 - }, - { - "epoch": 0.498672331386086, - "grad_norm": 0.5686188793893922, - "learning_rate": 8.954203776949141e-06, - "loss": 0.0476, - "step": 1878 - }, - { - "epoch": 0.49893786510886884, - "grad_norm": 0.39056710727137667, - "learning_rate": 8.952859773367117e-06, - "loss": 0.0525, - "step": 1879 - }, - { - "epoch": 0.4992033988316516, - "grad_norm": 0.512461057922991, - "learning_rate": 8.951515007720348e-06, - "loss": 0.0525, - "step": 1880 - }, - { - "epoch": 0.4994689325544344, - "grad_norm": 0.07275683481894793, - "learning_rate": 8.950169480268089e-06, - "loss": 0.0022, - "step": 1881 - }, - { - "epoch": 0.4997344662772172, - "grad_norm": 0.5906991505102981, - "learning_rate": 8.948823191269742e-06, - "loss": 0.063, - "step": 1882 - }, - { - "epoch": 0.5, - "grad_norm": 0.6168155161242778, - "learning_rate": 8.947476140984856e-06, - "loss": 0.0688, - "step": 1883 - }, - { - "epoch": 0.5002655337227828, - "grad_norm": 0.5204598871312067, - "learning_rate": 8.946128329673126e-06, - "loss": 0.0752, - "step": 1884 - }, - { - "epoch": 0.5005310674455656, - "grad_norm": 1.8571138477453206, - "learning_rate": 8.944779757594397e-06, - "loss": 0.2129, - "step": 1885 - }, - { - "epoch": 0.5007966011683483, - "grad_norm": 0.49090367613182434, - "learning_rate": 8.943430425008653e-06, - "loss": 0.063, - "step": 1886 - }, - { - "epoch": 0.5010621348911312, - "grad_norm": 0.2501068284298111, - "learning_rate": 8.942080332176031e-06, - "loss": 0.0286, - "step": 1887 - }, - { - "epoch": 0.501327668613914, - "grad_norm": 0.19234786774639814, - "learning_rate": 8.940729479356814e-06, - "loss": 0.0073, - "step": 1888 - }, - { - "epoch": 0.5015932023366968, - 
"grad_norm": 1.5237924486409677, - "learning_rate": 8.93937786681143e-06, - "loss": 0.0889, - "step": 1889 - }, - { - "epoch": 0.5018587360594795, - "grad_norm": 0.5459929255095108, - "learning_rate": 8.938025494800454e-06, - "loss": 0.063, - "step": 1890 - }, - { - "epoch": 0.5021242697822623, - "grad_norm": 0.25938046401016135, - "learning_rate": 8.936672363584607e-06, - "loss": 0.0354, - "step": 1891 - }, - { - "epoch": 0.5023898035050451, - "grad_norm": 0.32388794224012313, - "learning_rate": 8.935318473424756e-06, - "loss": 0.0432, - "step": 1892 - }, - { - "epoch": 0.502655337227828, - "grad_norm": 0.526908944612307, - "learning_rate": 8.933963824581919e-06, - "loss": 0.0693, - "step": 1893 - }, - { - "epoch": 0.5029208709506108, - "grad_norm": 0.27900639395149873, - "learning_rate": 8.932608417317253e-06, - "loss": 0.0092, - "step": 1894 - }, - { - "epoch": 0.5031864046733935, - "grad_norm": 0.3445447813472986, - "learning_rate": 8.931252251892061e-06, - "loss": 0.0354, - "step": 1895 - }, - { - "epoch": 0.5034519383961763, - "grad_norm": 0.44933449962511285, - "learning_rate": 8.929895328567804e-06, - "loss": 0.0476, - "step": 1896 - }, - { - "epoch": 0.5037174721189591, - "grad_norm": 0.03865608508633661, - "learning_rate": 8.928537647606074e-06, - "loss": 0.001, - "step": 1897 - }, - { - "epoch": 0.5039830058417419, - "grad_norm": 0.20536476890220312, - "learning_rate": 8.92717920926862e-06, - "loss": 0.0258, - "step": 1898 - }, - { - "epoch": 0.5042485395645246, - "grad_norm": 0.2603825837594336, - "learning_rate": 8.92582001381733e-06, - "loss": 0.0317, - "step": 1899 - }, - { - "epoch": 0.5045140732873075, - "grad_norm": 0.664354927635177, - "learning_rate": 8.924460061514241e-06, - "loss": 0.1553, - "step": 1900 - }, - { - "epoch": 0.5047796070100903, - "grad_norm": 0.1417729671988901, - "learning_rate": 8.92309935262154e-06, - "loss": 0.0165, - "step": 1901 - }, - { - "epoch": 0.5050451407328731, - "grad_norm": 0.7573282597398386, - "learning_rate": 8.92173788740155e-06, - "loss": 0.0391, - "step": 1902 - }, - { - "epoch": 0.5053106744556559, - "grad_norm": 0.6651934597634159, - "learning_rate": 8.920375666116749e-06, - "loss": 0.1367, - "step": 1903 - }, - { - "epoch": 0.5055762081784386, - "grad_norm": 0.630368828497892, - "learning_rate": 8.919012689029756e-06, - "loss": 0.1279, - "step": 1904 - }, - { - "epoch": 0.5058417419012214, - "grad_norm": 0.011800849809816667, - "learning_rate": 8.917648956403338e-06, - "loss": 0.0003, - "step": 1905 - }, - { - "epoch": 0.5061072756240043, - "grad_norm": 0.1916335585910718, - "learning_rate": 8.916284468500404e-06, - "loss": 0.0231, - "step": 1906 - }, - { - "epoch": 0.5063728093467871, - "grad_norm": 0.1990089576514881, - "learning_rate": 8.914919225584014e-06, - "loss": 0.0258, - "step": 1907 - }, - { - "epoch": 0.5066383430695698, - "grad_norm": 0.42788748968525386, - "learning_rate": 8.913553227917366e-06, - "loss": 0.0165, - "step": 1908 - }, - { - "epoch": 0.5069038767923526, - "grad_norm": 0.6043691154555516, - "learning_rate": 8.912186475763815e-06, - "loss": 0.1113, - "step": 1909 - }, - { - "epoch": 0.5071694105151354, - "grad_norm": 0.20100736554436516, - "learning_rate": 8.910818969386852e-06, - "loss": 0.0258, - "step": 1910 - }, - { - "epoch": 0.5074349442379182, - "grad_norm": 0.2668002216539039, - "learning_rate": 8.909450709050115e-06, - "loss": 0.0067, - "step": 1911 - }, - { - "epoch": 0.5077004779607011, - "grad_norm": 0.25391486889680465, - "learning_rate": 8.908081695017388e-06, - "loss": 0.0286, - "step": 
1912 - }, - { - "epoch": 0.5079660116834838, - "grad_norm": 0.10326740021263013, - "learning_rate": 8.906711927552601e-06, - "loss": 0.0022, - "step": 1913 - }, - { - "epoch": 0.5082315454062666, - "grad_norm": 0.7183992803108435, - "learning_rate": 8.905341406919832e-06, - "loss": 0.1455, - "step": 1914 - }, - { - "epoch": 0.5084970791290494, - "grad_norm": 0.24497092264223638, - "learning_rate": 8.903970133383297e-06, - "loss": 0.0286, - "step": 1915 - }, - { - "epoch": 0.5087626128518322, - "grad_norm": 0.5045822122027199, - "learning_rate": 8.902598107207364e-06, - "loss": 0.0476, - "step": 1916 - }, - { - "epoch": 0.5090281465746149, - "grad_norm": 0.21870695038083438, - "learning_rate": 8.901225328656543e-06, - "loss": 0.0258, - "step": 1917 - }, - { - "epoch": 0.5092936802973977, - "grad_norm": 0.006806934116766017, - "learning_rate": 8.89985179799549e-06, - "loss": 0.0002, - "step": 1918 - }, - { - "epoch": 0.5095592140201806, - "grad_norm": 0.06217815771623346, - "learning_rate": 8.898477515489003e-06, - "loss": 0.0013, - "step": 1919 - }, - { - "epoch": 0.5098247477429634, - "grad_norm": 0.15502279422567414, - "learning_rate": 8.897102481402031e-06, - "loss": 0.0036, - "step": 1920 - }, - { - "epoch": 0.5100902814657462, - "grad_norm": 0.2254494341774315, - "learning_rate": 8.895726695999663e-06, - "loss": 0.0286, - "step": 1921 - }, - { - "epoch": 0.5103558151885289, - "grad_norm": 4.048223290258338, - "learning_rate": 8.894350159547132e-06, - "loss": 0.2559, - "step": 1922 - }, - { - "epoch": 0.5106213489113117, - "grad_norm": 0.2224313022582158, - "learning_rate": 8.892972872309821e-06, - "loss": 0.0258, - "step": 1923 - }, - { - "epoch": 0.5108868826340945, - "grad_norm": 0.2649153994192695, - "learning_rate": 8.891594834553252e-06, - "loss": 0.0317, - "step": 1924 - }, - { - "epoch": 0.5111524163568774, - "grad_norm": 0.32381344159451636, - "learning_rate": 8.890216046543099e-06, - "loss": 0.0354, - "step": 1925 - }, - { - "epoch": 0.5114179500796601, - "grad_norm": 0.18911967304759786, - "learning_rate": 8.888836508545172e-06, - "loss": 0.0258, - "step": 1926 - }, - { - "epoch": 0.5116834838024429, - "grad_norm": 0.15794943866572278, - "learning_rate": 8.887456220825429e-06, - "loss": 0.0206, - "step": 1927 - }, - { - "epoch": 0.5119490175252257, - "grad_norm": 0.8298787508237971, - "learning_rate": 8.886075183649976e-06, - "loss": 0.0889, - "step": 1928 - }, - { - "epoch": 0.5122145512480085, - "grad_norm": 0.14098262352173152, - "learning_rate": 8.884693397285058e-06, - "loss": 0.0045, - "step": 1929 - }, - { - "epoch": 0.5124800849707913, - "grad_norm": 0.5112261546913817, - "learning_rate": 8.883310861997073e-06, - "loss": 0.0391, - "step": 1930 - }, - { - "epoch": 0.512745618693574, - "grad_norm": 0.0710276758168645, - "learning_rate": 8.881927578052549e-06, - "loss": 0.0014, - "step": 1931 - }, - { - "epoch": 0.5130111524163569, - "grad_norm": 0.7156148636827978, - "learning_rate": 8.880543545718173e-06, - "loss": 0.1191, - "step": 1932 - }, - { - "epoch": 0.5132766861391397, - "grad_norm": 0.7498314694324334, - "learning_rate": 8.879158765260767e-06, - "loss": 0.1279, - "step": 1933 - }, - { - "epoch": 0.5135422198619225, - "grad_norm": 0.16462904141113316, - "learning_rate": 8.877773236947302e-06, - "loss": 0.0206, - "step": 1934 - }, - { - "epoch": 0.5138077535847052, - "grad_norm": 0.8601863622602933, - "learning_rate": 8.876386961044892e-06, - "loss": 0.0752, - "step": 1935 - }, - { - "epoch": 0.514073287307488, - "grad_norm": 0.6362870475286894, - 
"learning_rate": 8.874999937820793e-06, - "loss": 0.1279, - "step": 1936 - }, - { - "epoch": 0.5143388210302708, - "grad_norm": 0.1624202878388607, - "learning_rate": 8.873612167542408e-06, - "loss": 0.0206, - "step": 1937 - }, - { - "epoch": 0.5146043547530537, - "grad_norm": 0.15596786935599946, - "learning_rate": 8.87222365047728e-06, - "loss": 0.0206, - "step": 1938 - }, - { - "epoch": 0.5148698884758365, - "grad_norm": 1.0578529242706238, - "learning_rate": 8.870834386893101e-06, - "loss": 0.0815, - "step": 1939 - }, - { - "epoch": 0.5151354221986192, - "grad_norm": 0.32309548985685693, - "learning_rate": 8.869444377057706e-06, - "loss": 0.0105, - "step": 1940 - }, - { - "epoch": 0.515400955921402, - "grad_norm": 0.6279202187077859, - "learning_rate": 8.868053621239072e-06, - "loss": 0.1279, - "step": 1941 - }, - { - "epoch": 0.5156664896441848, - "grad_norm": 0.29499114303717844, - "learning_rate": 8.866662119705317e-06, - "loss": 0.0391, - "step": 1942 - }, - { - "epoch": 0.5159320233669676, - "grad_norm": 0.22730083243000213, - "learning_rate": 8.865269872724708e-06, - "loss": 0.0286, - "step": 1943 - }, - { - "epoch": 0.5161975570897503, - "grad_norm": 0.8126774785685559, - "learning_rate": 8.863876880565656e-06, - "loss": 0.0286, - "step": 1944 - }, - { - "epoch": 0.5164630908125332, - "grad_norm": 0.5189987862432444, - "learning_rate": 8.862483143496713e-06, - "loss": 0.0286, - "step": 1945 - }, - { - "epoch": 0.516728624535316, - "grad_norm": 0.3079203367541235, - "learning_rate": 8.86108866178657e-06, - "loss": 0.0354, - "step": 1946 - }, - { - "epoch": 0.5169941582580988, - "grad_norm": 0.30146182888816936, - "learning_rate": 8.859693435704073e-06, - "loss": 0.0391, - "step": 1947 - }, - { - "epoch": 0.5172596919808816, - "grad_norm": 0.18766405767921446, - "learning_rate": 8.858297465518201e-06, - "loss": 0.0258, - "step": 1948 - }, - { - "epoch": 0.5175252257036643, - "grad_norm": 0.48345252694955393, - "learning_rate": 8.856900751498083e-06, - "loss": 0.0476, - "step": 1949 - }, - { - "epoch": 0.5177907594264471, - "grad_norm": 0.28999764767448305, - "learning_rate": 8.855503293912987e-06, - "loss": 0.0354, - "step": 1950 - }, - { - "epoch": 0.51805629314923, - "grad_norm": 0.301468091100063, - "learning_rate": 8.854105093032328e-06, - "loss": 0.0354, - "step": 1951 - }, - { - "epoch": 0.5183218268720128, - "grad_norm": 0.22535390470370426, - "learning_rate": 8.852706149125662e-06, - "loss": 0.0286, - "step": 1952 - }, - { - "epoch": 0.5185873605947955, - "grad_norm": 0.2907903784108349, - "learning_rate": 8.851306462462689e-06, - "loss": 0.0317, - "step": 1953 - }, - { - "epoch": 0.5188528943175783, - "grad_norm": 0.6566340180383853, - "learning_rate": 8.849906033313251e-06, - "loss": 0.0525, - "step": 1954 - }, - { - "epoch": 0.5191184280403611, - "grad_norm": 0.3414604558845798, - "learning_rate": 8.848504861947335e-06, - "loss": 0.0354, - "step": 1955 - }, - { - "epoch": 0.5193839617631439, - "grad_norm": 0.15095251988649963, - "learning_rate": 8.847102948635074e-06, - "loss": 0.0184, - "step": 1956 - }, - { - "epoch": 0.5196494954859268, - "grad_norm": 0.8850305174499467, - "learning_rate": 8.845700293646733e-06, - "loss": 0.063, - "step": 1957 - }, - { - "epoch": 0.5199150292087095, - "grad_norm": 0.18094733973006374, - "learning_rate": 8.844296897252733e-06, - "loss": 0.0039, - "step": 1958 - }, - { - "epoch": 0.5201805629314923, - "grad_norm": 0.0024453736726058376, - "learning_rate": 8.84289275972363e-06, - "loss": 0.0001, - "step": 1959 - }, - { - "epoch": 
0.5204460966542751, - "grad_norm": 4.799487348660996, - "learning_rate": 8.841487881330126e-06, - "loss": 0.2891, - "step": 1960 - }, - { - "epoch": 0.5207116303770579, - "grad_norm": 0.00898819327183765, - "learning_rate": 8.840082262343064e-06, - "loss": 0.0002, - "step": 1961 - }, - { - "epoch": 0.5209771640998406, - "grad_norm": 0.11036124994527378, - "learning_rate": 8.838675903033432e-06, - "loss": 0.0117, - "step": 1962 - }, - { - "epoch": 0.5212426978226234, - "grad_norm": 0.17194701223020112, - "learning_rate": 8.837268803672358e-06, - "loss": 0.0184, - "step": 1963 - }, - { - "epoch": 0.5215082315454063, - "grad_norm": 0.19802383357405098, - "learning_rate": 8.835860964531115e-06, - "loss": 0.0206, - "step": 1964 - }, - { - "epoch": 0.5217737652681891, - "grad_norm": 0.11041893858226348, - "learning_rate": 8.834452385881121e-06, - "loss": 0.0117, - "step": 1965 - }, - { - "epoch": 0.5220392989909719, - "grad_norm": 0.6156090855902048, - "learning_rate": 8.833043067993926e-06, - "loss": 0.0233, - "step": 1966 - }, - { - "epoch": 0.5223048327137546, - "grad_norm": 0.14201533586884124, - "learning_rate": 8.831633011141237e-06, - "loss": 0.0146, - "step": 1967 - }, - { - "epoch": 0.5225703664365374, - "grad_norm": 0.09288200859356922, - "learning_rate": 8.83022221559489e-06, - "loss": 0.0092, - "step": 1968 - }, - { - "epoch": 0.5228359001593202, - "grad_norm": 0.9615613402120565, - "learning_rate": 8.828810681626876e-06, - "loss": 0.1455, - "step": 1969 - }, - { - "epoch": 0.5231014338821031, - "grad_norm": 1.3056914957728305, - "learning_rate": 8.827398409509318e-06, - "loss": 0.0476, - "step": 1970 - }, - { - "epoch": 0.5233669676048858, - "grad_norm": 0.9123728894775064, - "learning_rate": 8.825985399514488e-06, - "loss": 0.1738, - "step": 1971 - }, - { - "epoch": 0.5236325013276686, - "grad_norm": 0.5116325107103561, - "learning_rate": 8.824571651914796e-06, - "loss": 0.0354, - "step": 1972 - }, - { - "epoch": 0.5238980350504514, - "grad_norm": 1.0931368397378376, - "learning_rate": 8.823157166982794e-06, - "loss": 0.0889, - "step": 1973 - }, - { - "epoch": 0.5241635687732342, - "grad_norm": 1.5038247940641931, - "learning_rate": 8.821741944991182e-06, - "loss": 0.1191, - "step": 1974 - }, - { - "epoch": 0.524429102496017, - "grad_norm": 2.1410167922056185, - "learning_rate": 8.820325986212796e-06, - "loss": 0.0957, - "step": 1975 - }, - { - "epoch": 0.5246946362187997, - "grad_norm": 1.4124271244242217, - "learning_rate": 8.818909290920617e-06, - "loss": 0.063, - "step": 1976 - }, - { - "epoch": 0.5249601699415826, - "grad_norm": 0.044914449959208055, - "learning_rate": 8.817491859387765e-06, - "loss": 0.001, - "step": 1977 - }, - { - "epoch": 0.5252257036643654, - "grad_norm": 0.7192623272990704, - "learning_rate": 8.816073691887506e-06, - "loss": 0.1455, - "step": 1978 - }, - { - "epoch": 0.5254912373871482, - "grad_norm": 0.4995422114884727, - "learning_rate": 8.814654788693246e-06, - "loss": 0.0391, - "step": 1979 - }, - { - "epoch": 0.5257567711099309, - "grad_norm": 0.2947556592020414, - "learning_rate": 8.813235150078532e-06, - "loss": 0.0286, - "step": 1980 - }, - { - "epoch": 0.5260223048327137, - "grad_norm": 0.2865413479084726, - "learning_rate": 8.811814776317056e-06, - "loss": 0.0255, - "step": 1981 - }, - { - "epoch": 0.5262878385554965, - "grad_norm": 2.2341427898489776, - "learning_rate": 8.810393667682645e-06, - "loss": 0.0815, - "step": 1982 - }, - { - "epoch": 0.5265533722782794, - "grad_norm": 0.1433749071821932, - "learning_rate": 8.808971824449276e-06, - 
"loss": 0.0165, - "step": 1983 - }, - { - "epoch": 0.5268189060010622, - "grad_norm": 1.1251534229014521, - "learning_rate": 8.80754924689106e-06, - "loss": 0.0815, - "step": 1984 - }, - { - "epoch": 0.5270844397238449, - "grad_norm": 0.9046870543343221, - "learning_rate": 8.806125935282258e-06, - "loss": 0.0231, - "step": 1985 - }, - { - "epoch": 0.5273499734466277, - "grad_norm": 2.59520506547441, - "learning_rate": 8.804701889897261e-06, - "loss": 0.1641, - "step": 1986 - }, - { - "epoch": 0.5276155071694105, - "grad_norm": 1.1343653355959855, - "learning_rate": 8.803277111010613e-06, - "loss": 0.0688, - "step": 1987 - }, - { - "epoch": 0.5278810408921933, - "grad_norm": 1.0935958000724277, - "learning_rate": 8.801851598896996e-06, - "loss": 0.0293, - "step": 1988 - }, - { - "epoch": 0.528146574614976, - "grad_norm": 0.13718069103720956, - "learning_rate": 8.800425353831227e-06, - "loss": 0.0165, - "step": 1989 - }, - { - "epoch": 0.5284121083377589, - "grad_norm": 0.1382767894506031, - "learning_rate": 8.798998376088273e-06, - "loss": 0.0165, - "step": 1990 - }, - { - "epoch": 0.5286776420605417, - "grad_norm": 0.6992769860574649, - "learning_rate": 8.797570665943236e-06, - "loss": 0.1553, - "step": 1991 - }, - { - "epoch": 0.5289431757833245, - "grad_norm": 0.7959070902740847, - "learning_rate": 8.796142223671362e-06, - "loss": 0.1279, - "step": 1992 - }, - { - "epoch": 0.5292087095061073, - "grad_norm": 0.003383773068786995, - "learning_rate": 8.79471304954804e-06, - "loss": 0.0001, - "step": 1993 - }, - { - "epoch": 0.52947424322889, - "grad_norm": 1.6911270754687433, - "learning_rate": 8.793283143848796e-06, - "loss": 0.1035, - "step": 1994 - }, - { - "epoch": 0.5297397769516728, - "grad_norm": 0.25896993348139163, - "learning_rate": 8.791852506849301e-06, - "loss": 0.0258, - "step": 1995 - }, - { - "epoch": 0.5300053106744557, - "grad_norm": 0.1990072374637002, - "learning_rate": 8.790421138825362e-06, - "loss": 0.0229, - "step": 1996 - }, - { - "epoch": 0.5302708443972385, - "grad_norm": 0.12393114254257008, - "learning_rate": 8.788989040052932e-06, - "loss": 0.0146, - "step": 1997 - }, - { - "epoch": 0.5305363781200212, - "grad_norm": 0.1350871742151799, - "learning_rate": 8.787556210808101e-06, - "loss": 0.0165, - "step": 1998 - }, - { - "epoch": 0.530801911842804, - "grad_norm": 0.13938411153478383, - "learning_rate": 8.786122651367104e-06, - "loss": 0.0184, - "step": 1999 - }, - { - "epoch": 0.5310674455655868, - "grad_norm": 1.1760763074160063, - "learning_rate": 8.784688362006311e-06, - "loss": 0.1191, - "step": 2000 - } - ], - "logging_steps": 1.0, - "max_steps": 7532, - "num_input_tokens_seen": 0, - "num_train_epochs": 2, - "save_steps": 2000, - "stateful_callbacks": { - "TrainerControl": { - "args": { - "should_epoch_stop": false, - "should_evaluate": false, - "should_log": false, - "should_save": true, - "should_training_stop": false - }, - "attributes": {} - } - }, - "total_flos": 381199319040000.0, - "train_batch_size": 1, - "trial_name": null, - "trial_params": null -}