{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 11757,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0025516713447307987,
      "grad_norm": 67.57911682128906,
      "learning_rate": 5.102040816326531e-08,
      "loss": 4.2312,
      "step": 10
    },
    {
      "epoch": 0.0051033426894615975,
      "grad_norm": 66.49089813232422,
      "learning_rate": 1.0204081632653062e-07,
      "loss": 4.2064,
      "step": 20
    },
    {
      "epoch": 0.007655014034192396,
      "grad_norm": 63.74226379394531,
      "learning_rate": 1.5306122448979592e-07,
      "loss": 4.1169,
      "step": 30
    },
    {
      "epoch": 0.010206685378923195,
      "grad_norm": 67.60637664794922,
      "learning_rate": 2.0408163265306124e-07,
      "loss": 3.9093,
      "step": 40
    },
    {
      "epoch": 0.012758356723653993,
      "grad_norm": 58.32143783569336,
      "learning_rate": 2.551020408163265e-07,
      "loss": 3.2288,
      "step": 50
    },
    {
      "epoch": 0.015310028068384792,
      "grad_norm": 44.05619430541992,
      "learning_rate": 3.0612244897959183e-07,
      "loss": 2.2825,
      "step": 60
    },
    {
      "epoch": 0.017861699413115592,
      "grad_norm": 28.302391052246094,
      "learning_rate": 3.5714285714285716e-07,
      "loss": 1.3651,
      "step": 70
    },
    {
      "epoch": 0.02041337075784639,
      "grad_norm": 23.326255798339844,
      "learning_rate": 4.081632653061225e-07,
      "loss": 0.8761,
      "step": 80
    },
    {
      "epoch": 0.022965042102577188,
      "grad_norm": 25.92929458618164,
      "learning_rate": 4.5918367346938775e-07,
      "loss": 0.5584,
      "step": 90
    },
    {
      "epoch": 0.025516713447307986,
      "grad_norm": 18.867393493652344,
      "learning_rate": 5.10204081632653e-07,
      "loss": 0.4095,
      "step": 100
    },
    {
      "epoch": 0.028068384792038787,
      "grad_norm": 26.22024917602539,
      "learning_rate": 5.612244897959184e-07,
      "loss": 0.321,
      "step": 110
    },
    {
      "epoch": 0.030620056136769585,
      "grad_norm": 17.609697341918945,
      "learning_rate": 6.122448979591837e-07,
      "loss": 0.2597,
      "step": 120
    },
    {
      "epoch": 0.03317172748150038,
      "grad_norm": 13.896285057067871,
      "learning_rate": 6.63265306122449e-07,
      "loss": 0.2442,
      "step": 130
    },
    {
      "epoch": 0.035723398826231184,
      "grad_norm": 17.147130966186523,
      "learning_rate": 7.142857142857143e-07,
      "loss": 0.2128,
      "step": 140
    },
    {
      "epoch": 0.03827507017096198,
      "grad_norm": 16.05860137939453,
      "learning_rate": 7.653061224489796e-07,
      "loss": 0.2197,
      "step": 150
    },
    {
      "epoch": 0.04082674151569278,
      "grad_norm": 11.800621032714844,
      "learning_rate": 8.16326530612245e-07,
      "loss": 0.1864,
      "step": 160
    },
    {
      "epoch": 0.04337841286042358,
      "grad_norm": 15.409507751464844,
      "learning_rate": 8.673469387755102e-07,
      "loss": 0.183,
      "step": 170
    },
    {
      "epoch": 0.045930084205154376,
      "grad_norm": 20.847375869750977,
      "learning_rate": 9.183673469387755e-07,
      "loss": 0.1753,
      "step": 180
    },
    {
      "epoch": 0.048481755549885173,
      "grad_norm": 22.60802459716797,
      "learning_rate": 9.69387755102041e-07,
      "loss": 0.1571,
      "step": 190
    },
    {
      "epoch": 0.05103342689461597,
      "grad_norm": 14.373199462890625,
      "learning_rate": 1.020408163265306e-06,
      "loss": 0.1538,
      "step": 200
    },
    {
      "epoch": 0.05358509823934677,
      "grad_norm": 10.351411819458008,
      "learning_rate": 1.0714285714285716e-06,
      "loss": 0.1412,
      "step": 210
    },
    {
      "epoch": 0.056136769584077574,
      "grad_norm": 18.28008270263672,
      "learning_rate": 1.122448979591837e-06,
      "loss": 0.1428,
      "step": 220
    },
    {
      "epoch": 0.05868844092880837,
      "grad_norm": 10.837668418884277,
      "learning_rate": 1.173469387755102e-06,
      "loss": 0.1383,
      "step": 230
    },
    {
      "epoch": 0.06124011227353917,
      "grad_norm": 14.021323204040527,
      "learning_rate": 1.2244897959183673e-06,
      "loss": 0.1261,
      "step": 240
    },
    {
      "epoch": 0.06379178361826997,
      "grad_norm": 20.093788146972656,
      "learning_rate": 1.2755102040816327e-06,
      "loss": 0.1283,
      "step": 250
    },
    {
      "epoch": 0.06634345496300076,
      "grad_norm": 12.026954650878906,
      "learning_rate": 1.326530612244898e-06,
      "loss": 0.1357,
      "step": 260
    },
    {
      "epoch": 0.06889512630773156,
      "grad_norm": 15.393950462341309,
      "learning_rate": 1.3775510204081633e-06,
      "loss": 0.1188,
      "step": 270
    },
    {
      "epoch": 0.07144679765246237,
      "grad_norm": 9.086231231689453,
      "learning_rate": 1.4285714285714286e-06,
      "loss": 0.1205,
      "step": 280
    },
    {
      "epoch": 0.07399846899719316,
      "grad_norm": 14.759052276611328,
      "learning_rate": 1.479591836734694e-06,
      "loss": 0.1095,
      "step": 290
    },
    {
      "epoch": 0.07655014034192396,
      "grad_norm": 16.71165657043457,
      "learning_rate": 1.5306122448979593e-06,
      "loss": 0.1091,
      "step": 300
    },
    {
      "epoch": 0.07910181168665475,
      "grad_norm": 9.48653507232666,
      "learning_rate": 1.5816326530612246e-06,
      "loss": 0.112,
      "step": 310
    },
    {
      "epoch": 0.08165348303138556,
      "grad_norm": 13.855453491210938,
      "learning_rate": 1.63265306122449e-06,
      "loss": 0.1131,
      "step": 320
    },
    {
      "epoch": 0.08420515437611635,
      "grad_norm": 8.177651405334473,
      "learning_rate": 1.683673469387755e-06,
      "loss": 0.1034,
      "step": 330
    },
    {
      "epoch": 0.08675682572084716,
      "grad_norm": 10.370412826538086,
      "learning_rate": 1.7346938775510204e-06,
      "loss": 0.1044,
      "step": 340
    },
    {
      "epoch": 0.08930849706557796,
      "grad_norm": 8.438699722290039,
      "learning_rate": 1.7857142857142857e-06,
      "loss": 0.0935,
      "step": 350
    },
    {
      "epoch": 0.09186016841030875,
      "grad_norm": 8.10465145111084,
      "learning_rate": 1.836734693877551e-06,
      "loss": 0.09,
      "step": 360
    },
    {
      "epoch": 0.09441183975503956,
      "grad_norm": 8.28003215789795,
      "learning_rate": 1.8877551020408165e-06,
      "loss": 0.0926,
      "step": 370
    },
    {
      "epoch": 0.09696351109977035,
      "grad_norm": 8.460153579711914,
      "learning_rate": 1.938775510204082e-06,
      "loss": 0.0999,
      "step": 380
    },
    {
      "epoch": 0.09951518244450115,
      "grad_norm": 8.236766815185547,
      "learning_rate": 1.989795918367347e-06,
      "loss": 0.0859,
      "step": 390
    },
    {
      "epoch": 0.10206685378923194,
      "grad_norm": 8.402081489562988,
      "learning_rate": 2.040816326530612e-06,
      "loss": 0.0878,
      "step": 400
    },
    {
      "epoch": 0.10461852513396275,
      "grad_norm": 24.110572814941406,
      "learning_rate": 2.0918367346938776e-06,
      "loss": 0.0936,
      "step": 410
    },
    {
      "epoch": 0.10717019647869354,
      "grad_norm": 7.079232215881348,
      "learning_rate": 2.142857142857143e-06,
      "loss": 0.0881,
      "step": 420
    },
    {
      "epoch": 0.10972186782342434,
      "grad_norm": 9.083630561828613,
      "learning_rate": 2.1938775510204083e-06,
      "loss": 0.0874,
      "step": 430
    },
    {
      "epoch": 0.11227353916815515,
      "grad_norm": 7.967429161071777,
      "learning_rate": 2.244897959183674e-06,
      "loss": 0.0932,
      "step": 440
    },
    {
      "epoch": 0.11482521051288594,
      "grad_norm": 9.130484580993652,
      "learning_rate": 2.295918367346939e-06,
      "loss": 0.0862,
      "step": 450
    },
    {
      "epoch": 0.11737688185761674,
      "grad_norm": 6.145804405212402,
      "learning_rate": 2.346938775510204e-06,
      "loss": 0.0906,
      "step": 460
    },
    {
      "epoch": 0.11992855320234753,
      "grad_norm": 9.372196197509766,
      "learning_rate": 2.3979591836734696e-06,
      "loss": 0.0964,
      "step": 470
    },
    {
      "epoch": 0.12248022454707834,
      "grad_norm": 11.447351455688477,
      "learning_rate": 2.4489795918367347e-06,
      "loss": 0.0798,
      "step": 480
    },
    {
      "epoch": 0.12503189589180913,
      "grad_norm": 8.37991714477539,
      "learning_rate": 2.5e-06,
      "loss": 0.0919,
      "step": 490
    },
    {
      "epoch": 0.12758356723653994,
      "grad_norm": 6.319784164428711,
      "learning_rate": 2.5510204081632653e-06,
      "loss": 0.0794,
      "step": 500
    },
    {
      "epoch": 0.13013523858127074,
      "grad_norm": 5.678624629974365,
      "learning_rate": 2.6020408163265304e-06,
      "loss": 0.0728,
      "step": 510
    },
    {
      "epoch": 0.13268690992600152,
      "grad_norm": 7.462113857269287,
      "learning_rate": 2.653061224489796e-06,
      "loss": 0.0754,
      "step": 520
    },
    {
      "epoch": 0.13523858127073232,
      "grad_norm": 4.428163528442383,
      "learning_rate": 2.7040816326530615e-06,
      "loss": 0.0873,
      "step": 530
    },
    {
      "epoch": 0.13779025261546313,
      "grad_norm": 6.491887092590332,
      "learning_rate": 2.7551020408163266e-06,
      "loss": 0.071,
      "step": 540
    },
    {
      "epoch": 0.14034192396019393,
      "grad_norm": 5.226602554321289,
      "learning_rate": 2.806122448979592e-06,
      "loss": 0.0737,
      "step": 550
    },
    {
      "epoch": 0.14289359530492474,
      "grad_norm": 5.6451191902160645,
      "learning_rate": 2.8571428571428573e-06,
      "loss": 0.0802,
      "step": 560
    },
    {
      "epoch": 0.1454452666496555,
      "grad_norm": 6.9333600997924805,
      "learning_rate": 2.9081632653061224e-06,
      "loss": 0.0773,
      "step": 570
    },
    {
      "epoch": 0.14799693799438632,
      "grad_norm": 5.240703582763672,
      "learning_rate": 2.959183673469388e-06,
      "loss": 0.0748,
      "step": 580
    },
    {
      "epoch": 0.15054860933911712,
      "grad_norm": 7.25963020324707,
      "learning_rate": 3.010204081632653e-06,
      "loss": 0.0755,
      "step": 590
    },
    {
      "epoch": 0.15310028068384793,
      "grad_norm": 6.730240821838379,
      "learning_rate": 3.0612244897959185e-06,
      "loss": 0.0724,
      "step": 600
    },
    {
      "epoch": 0.15565195202857873,
      "grad_norm": 5.880996227264404,
      "learning_rate": 3.1122448979591837e-06,
      "loss": 0.0774,
      "step": 610
    },
    {
      "epoch": 0.1582036233733095,
      "grad_norm": 3.661736488342285,
      "learning_rate": 3.163265306122449e-06,
      "loss": 0.0739,
      "step": 620
    },
    {
      "epoch": 0.16075529471804031,
      "grad_norm": 6.331421375274658,
      "learning_rate": 3.2142857142857143e-06,
      "loss": 0.0741,
      "step": 630
    },
    {
      "epoch": 0.16330696606277112,
      "grad_norm": 3.5448763370513916,
      "learning_rate": 3.26530612244898e-06,
      "loss": 0.0733,
      "step": 640
    },
    {
      "epoch": 0.16585863740750192,
      "grad_norm": 4.7668023109436035,
      "learning_rate": 3.316326530612245e-06,
      "loss": 0.0649,
      "step": 650
    },
    {
      "epoch": 0.1684103087522327,
      "grad_norm": 4.21407413482666,
      "learning_rate": 3.36734693877551e-06,
      "loss": 0.071,
      "step": 660
    },
    {
      "epoch": 0.1709619800969635,
      "grad_norm": 6.080636501312256,
      "learning_rate": 3.4183673469387756e-06,
      "loss": 0.0753,
      "step": 670
    },
    {
      "epoch": 0.1735136514416943,
      "grad_norm": 3.409601926803589,
      "learning_rate": 3.4693877551020407e-06,
      "loss": 0.0755,
      "step": 680
    },
    {
      "epoch": 0.17606532278642512,
      "grad_norm": 4.414758682250977,
      "learning_rate": 3.5204081632653062e-06,
      "loss": 0.069,
      "step": 690
    },
    {
      "epoch": 0.17861699413115592,
      "grad_norm": 6.209197044372559,
      "learning_rate": 3.5714285714285714e-06,
      "loss": 0.0862,
      "step": 700
    },
    {
      "epoch": 0.1811686654758867,
      "grad_norm": 7.26833963394165,
      "learning_rate": 3.6224489795918365e-06,
      "loss": 0.0699,
      "step": 710
    },
    {
      "epoch": 0.1837203368206175,
      "grad_norm": 4.285154342651367,
      "learning_rate": 3.673469387755102e-06,
      "loss": 0.0655,
      "step": 720
    },
    {
      "epoch": 0.1862720081653483,
      "grad_norm": 3.7479612827301025,
      "learning_rate": 3.724489795918367e-06,
      "loss": 0.0605,
      "step": 730
    },
    {
      "epoch": 0.1888236795100791,
      "grad_norm": 4.82859468460083,
      "learning_rate": 3.775510204081633e-06,
      "loss": 0.074,
      "step": 740
    },
    {
      "epoch": 0.1913753508548099,
      "grad_norm": 4.887997150421143,
      "learning_rate": 3.826530612244898e-06,
      "loss": 0.0738,
      "step": 750
    },
    {
      "epoch": 0.1939270221995407,
      "grad_norm": 6.674945831298828,
      "learning_rate": 3.877551020408164e-06,
      "loss": 0.0584,
      "step": 760
    },
    {
      "epoch": 0.1964786935442715,
      "grad_norm": 4.140785217285156,
      "learning_rate": 3.928571428571429e-06,
      "loss": 0.0608,
      "step": 770
    },
    {
      "epoch": 0.1990303648890023,
      "grad_norm": 5.594793319702148,
      "learning_rate": 3.979591836734694e-06,
      "loss": 0.066,
      "step": 780
    },
    {
      "epoch": 0.2015820362337331,
      "grad_norm": 8.363929748535156,
      "learning_rate": 4.030612244897959e-06,
      "loss": 0.0752,
      "step": 790
    },
    {
      "epoch": 0.20413370757846389,
      "grad_norm": 4.264303684234619,
      "learning_rate": 4.081632653061224e-06,
      "loss": 0.0662,
      "step": 800
    },
    {
      "epoch": 0.2066853789231947,
      "grad_norm": 3.919865131378174,
      "learning_rate": 4.13265306122449e-06,
      "loss": 0.0707,
      "step": 810
    },
    {
      "epoch": 0.2092370502679255,
      "grad_norm": 3.484290838241577,
      "learning_rate": 4.183673469387755e-06,
      "loss": 0.0671,
      "step": 820
    },
    {
      "epoch": 0.2117887216126563,
      "grad_norm": 4.169510841369629,
      "learning_rate": 4.23469387755102e-06,
      "loss": 0.0642,
      "step": 830
    },
    {
      "epoch": 0.21434039295738708,
      "grad_norm": 2.740579843521118,
      "learning_rate": 4.285714285714286e-06,
      "loss": 0.0588,
      "step": 840
    },
    {
      "epoch": 0.21689206430211788,
      "grad_norm": 3.434375047683716,
      "learning_rate": 4.3367346938775506e-06,
      "loss": 0.0537,
      "step": 850
    },
    {
      "epoch": 0.2194437356468487,
      "grad_norm": 4.923778533935547,
      "learning_rate": 4.3877551020408165e-06,
      "loss": 0.0702,
      "step": 860
    },
    {
      "epoch": 0.2219954069915795,
      "grad_norm": 3.2125160694122314,
      "learning_rate": 4.438775510204082e-06,
      "loss": 0.0618,
      "step": 870
    },
    {
      "epoch": 0.2245470783363103,
      "grad_norm": 3.649038076400757,
      "learning_rate": 4.489795918367348e-06,
      "loss": 0.0641,
      "step": 880
    },
    {
      "epoch": 0.22709874968104107,
      "grad_norm": 2.1178414821624756,
      "learning_rate": 4.540816326530613e-06,
      "loss": 0.0544,
      "step": 890
    },
    {
      "epoch": 0.22965042102577188,
      "grad_norm": 5.1373467445373535,
      "learning_rate": 4.591836734693878e-06,
      "loss": 0.0574,
      "step": 900
    },
    {
      "epoch": 0.23220209237050268,
      "grad_norm": 3.6157491207122803,
      "learning_rate": 4.642857142857143e-06,
      "loss": 0.0617,
      "step": 910
    },
    {
      "epoch": 0.2347537637152335,
      "grad_norm": 4.86325216293335,
      "learning_rate": 4.693877551020408e-06,
      "loss": 0.055,
      "step": 920
    },
    {
      "epoch": 0.23730543505996426,
      "grad_norm": 2.5493531227111816,
      "learning_rate": 4.744897959183674e-06,
      "loss": 0.0628,
      "step": 930
    },
    {
      "epoch": 0.23985710640469507,
      "grad_norm": 2.850733518600464,
      "learning_rate": 4.795918367346939e-06,
      "loss": 0.0621,
      "step": 940
    },
    {
      "epoch": 0.24240877774942587,
      "grad_norm": 3.4511430263519287,
      "learning_rate": 4.846938775510204e-06,
      "loss": 0.0574,
      "step": 950
    },
    {
      "epoch": 0.24496044909415668,
      "grad_norm": 3.961998224258423,
      "learning_rate": 4.897959183673469e-06,
      "loss": 0.0522,
      "step": 960
    },
    {
      "epoch": 0.24751212043888748,
      "grad_norm": 3.7193942070007324,
      "learning_rate": 4.9489795918367345e-06,
      "loss": 0.059,
      "step": 970
    },
    {
      "epoch": 0.25006379178361826,
      "grad_norm": 2.4739599227905273,
      "learning_rate": 5e-06,
      "loss": 0.062,
      "step": 980
    },
    {
      "epoch": 0.2526154631283491,
      "grad_norm": 5.526200771331787,
      "learning_rate": 5.0510204081632655e-06,
      "loss": 0.0613,
      "step": 990
    },
    {
      "epoch": 0.25516713447307987,
      "grad_norm": 2.617518186569214,
      "learning_rate": 5.102040816326531e-06,
      "loss": 0.0605,
      "step": 1000
    },
    {
      "epoch": 0.25771880581781065,
      "grad_norm": 2.276796340942383,
      "learning_rate": 5.153061224489796e-06,
      "loss": 0.0509,
      "step": 1010
    },
    {
      "epoch": 0.2602704771625415,
      "grad_norm": 1.4227871894836426,
      "learning_rate": 5.204081632653061e-06,
      "loss": 0.0562,
      "step": 1020
    },
    {
      "epoch": 0.26282214850727226,
      "grad_norm": 4.203145503997803,
      "learning_rate": 5.255102040816327e-06,
      "loss": 0.0588,
      "step": 1030
    },
    {
      "epoch": 0.26537381985200303,
      "grad_norm": 2.96553111076355,
      "learning_rate": 5.306122448979592e-06,
      "loss": 0.0578,
      "step": 1040
    },
    {
      "epoch": 0.26792549119673387,
      "grad_norm": 2.9146676063537598,
      "learning_rate": 5.357142857142858e-06,
      "loss": 0.0616,
      "step": 1050
    },
    {
      "epoch": 0.27047716254146464,
      "grad_norm": 2.5181937217712402,
      "learning_rate": 5.408163265306123e-06,
      "loss": 0.057,
      "step": 1060
    },
    {
      "epoch": 0.2730288338861955,
      "grad_norm": 2.5414464473724365,
      "learning_rate": 5.459183673469387e-06,
      "loss": 0.0567,
      "step": 1070
    },
    {
      "epoch": 0.27558050523092625,
      "grad_norm": 1.901694893836975,
      "learning_rate": 5.510204081632653e-06,
      "loss": 0.0515,
      "step": 1080
    },
    {
      "epoch": 0.27813217657565703,
      "grad_norm": 3.047863245010376,
      "learning_rate": 5.561224489795918e-06,
      "loss": 0.052,
      "step": 1090
    },
    {
      "epoch": 0.28068384792038786,
      "grad_norm": 3.3792757987976074,
      "learning_rate": 5.612244897959184e-06,
      "loss": 0.065,
      "step": 1100
    },
    {
      "epoch": 0.28323551926511864,
      "grad_norm": 4.105442047119141,
      "learning_rate": 5.663265306122449e-06,
      "loss": 0.0537,
      "step": 1110
    },
    {
      "epoch": 0.2857871906098495,
      "grad_norm": 4.40492582321167,
      "learning_rate": 5.7142857142857145e-06,
      "loss": 0.0529,
      "step": 1120
    },
    {
      "epoch": 0.28833886195458025,
      "grad_norm": 3.5336432456970215,
      "learning_rate": 5.76530612244898e-06,
      "loss": 0.0497,
      "step": 1130
    },
    {
      "epoch": 0.290890533299311,
      "grad_norm": 3.989068031311035,
      "learning_rate": 5.816326530612245e-06,
      "loss": 0.0575,
      "step": 1140
    },
    {
      "epoch": 0.29344220464404186,
      "grad_norm": 1.8923063278198242,
      "learning_rate": 5.867346938775511e-06,
      "loss": 0.0495,
      "step": 1150
    },
    {
      "epoch": 0.29599387598877264,
      "grad_norm": 3.7669458389282227,
      "learning_rate": 5.918367346938776e-06,
      "loss": 0.0489,
      "step": 1160
    },
    {
      "epoch": 0.29854554733350347,
      "grad_norm": 2.3980700969696045,
      "learning_rate": 5.969387755102041e-06,
      "loss": 0.0523,
      "step": 1170
    },
    {
      "epoch": 0.30109721867823425,
      "grad_norm": 1.5095144510269165,
      "learning_rate": 5.999997884283333e-06,
      "loss": 0.0514,
      "step": 1180
    },
    {
      "epoch": 0.303648890022965,
      "grad_norm": 3.0456111431121826,
      "learning_rate": 5.9999740825051035e-06,
      "loss": 0.0577,
      "step": 1190
    },
    {
      "epoch": 0.30620056136769586,
      "grad_norm": 2.5419769287109375,
      "learning_rate": 5.999923834513332e-06,
      "loss": 0.0508,
      "step": 1200
    },
    {
      "epoch": 0.30875223271242663,
      "grad_norm": 4.042941093444824,
      "learning_rate": 5.999847140750981e-06,
      "loss": 0.0578,
      "step": 1210
    },
    {
      "epoch": 0.31130390405715747,
      "grad_norm": 3.173715591430664,
      "learning_rate": 5.9997440018941426e-06,
      "loss": 0.0586,
      "step": 1220
    },
    {
      "epoch": 0.31385557540188824,
      "grad_norm": 2.7003302574157715,
      "learning_rate": 5.9996144188520335e-06,
      "loss": 0.0571,
      "step": 1230
    },
    {
      "epoch": 0.316407246746619,
      "grad_norm": 3.698715925216675,
      "learning_rate": 5.999458392766994e-06,
      "loss": 0.0481,
      "step": 1240
    },
    {
      "epoch": 0.31895891809134985,
      "grad_norm": 2.736983060836792,
      "learning_rate": 5.999275925014465e-06,
      "loss": 0.0511,
      "step": 1250
    },
    {
      "epoch": 0.32151058943608063,
      "grad_norm": 1.4954791069030762,
      "learning_rate": 5.999067017202991e-06,
      "loss": 0.0496,
      "step": 1260
    },
    {
      "epoch": 0.3240622607808114,
      "grad_norm": 2.0082359313964844,
      "learning_rate": 5.998831671174193e-06,
      "loss": 0.0486,
      "step": 1270
    },
    {
      "epoch": 0.32661393212554224,
      "grad_norm": 7.479063510894775,
      "learning_rate": 5.998569889002759e-06,
      "loss": 0.0614,
      "step": 1280
    },
    {
      "epoch": 0.329165603470273,
      "grad_norm": 2.5666940212249756,
      "learning_rate": 5.998281672996425e-06,
      "loss": 0.0474,
      "step": 1290
    },
    {
      "epoch": 0.33171727481500385,
      "grad_norm": 3.8249595165252686,
      "learning_rate": 5.997967025695955e-06,
      "loss": 0.0461,
      "step": 1300
    },
    {
      "epoch": 0.3342689461597346,
      "grad_norm": 2.665925979614258,
      "learning_rate": 5.997625949875115e-06,
      "loss": 0.0486,
      "step": 1310
    },
    {
      "epoch": 0.3368206175044654,
      "grad_norm": 1.3909707069396973,
      "learning_rate": 5.997258448540651e-06,
      "loss": 0.0546,
      "step": 1320
    },
    {
      "epoch": 0.33937228884919624,
      "grad_norm": 2.5048632621765137,
      "learning_rate": 5.996864524932266e-06,
      "loss": 0.0462,
      "step": 1330
    },
    {
      "epoch": 0.341923960193927,
      "grad_norm": 2.33251690864563,
      "learning_rate": 5.996444182522586e-06,
      "loss": 0.0521,
      "step": 1340
    },
    {
      "epoch": 0.34447563153865784,
      "grad_norm": 1.7945542335510254,
      "learning_rate": 5.99599742501713e-06,
      "loss": 0.0474,
      "step": 1350
    },
    {
      "epoch": 0.3470273028833886,
      "grad_norm": 2.4074184894561768,
      "learning_rate": 5.995524256354281e-06,
      "loss": 0.0483,
      "step": 1360
    },
    {
      "epoch": 0.3495789742281194,
      "grad_norm": 2.026684522628784,
      "learning_rate": 5.995024680705248e-06,
      "loss": 0.0492,
      "step": 1370
    },
    {
      "epoch": 0.35213064557285023,
      "grad_norm": 2.6286892890930176,
      "learning_rate": 5.994498702474031e-06,
      "loss": 0.0468,
      "step": 1380
    },
    {
      "epoch": 0.354682316917581,
      "grad_norm": 1.3679580688476562,
      "learning_rate": 5.993946326297379e-06,
      "loss": 0.0472,
      "step": 1390
    },
    {
      "epoch": 0.35723398826231184,
      "grad_norm": 2.495452642440796,
      "learning_rate": 5.9933675570447556e-06,
      "loss": 0.0449,
      "step": 1400
    },
    {
      "epoch": 0.3597856596070426,
      "grad_norm": 2.3190741539001465,
      "learning_rate": 5.99276239981829e-06,
      "loss": 0.0481,
      "step": 1410
    },
    {
      "epoch": 0.3623373309517734,
      "grad_norm": 1.6283559799194336,
      "learning_rate": 5.9921308599527325e-06,
      "loss": 0.0546,
      "step": 1420
    },
    {
      "epoch": 0.36488900229650423,
      "grad_norm": 1.187363624572754,
      "learning_rate": 5.991472943015412e-06,
      "loss": 0.0431,
      "step": 1430
    },
    {
      "epoch": 0.367440673641235,
      "grad_norm": 1.6949318647384644,
      "learning_rate": 5.990788654806182e-06,
      "loss": 0.0477,
      "step": 1440
    },
    {
      "epoch": 0.3699923449859658,
      "grad_norm": 1.8516268730163574,
      "learning_rate": 5.9900780013573714e-06,
      "loss": 0.0414,
      "step": 1450
    },
    {
      "epoch": 0.3725440163306966,
      "grad_norm": 2.0654797554016113,
      "learning_rate": 5.989340988933732e-06,
      "loss": 0.0384,
      "step": 1460
    },
    {
      "epoch": 0.3750956876754274,
      "grad_norm": 1.335979700088501,
      "learning_rate": 5.988577624032384e-06,
      "loss": 0.0405,
      "step": 1470
    },
    {
      "epoch": 0.3776473590201582,
      "grad_norm": 3.470012664794922,
      "learning_rate": 5.987787913382754e-06,
      "loss": 0.0464,
      "step": 1480
    },
    {
      "epoch": 0.380199030364889,
      "grad_norm": 1.7210215330123901,
      "learning_rate": 5.9869718639465234e-06,
      "loss": 0.051,
      "step": 1490
    },
    {
      "epoch": 0.3827507017096198,
      "grad_norm": 2.3212034702301025,
      "learning_rate": 5.986129482917558e-06,
      "loss": 0.0474,
      "step": 1500
    },
    {
      "epoch": 0.3853023730543506,
      "grad_norm": 3.0615479946136475,
      "learning_rate": 5.985260777721852e-06,
      "loss": 0.044,
      "step": 1510
    },
    {
      "epoch": 0.3878540443990814,
      "grad_norm": 1.6388686895370483,
      "learning_rate": 5.9843657560174604e-06,
      "loss": 0.046,
      "step": 1520
    },
    {
      "epoch": 0.3904057157438122,
      "grad_norm": 2.6303327083587646,
      "learning_rate": 5.983444425694428e-06,
      "loss": 0.0435,
      "step": 1530
    },
    {
      "epoch": 0.392957387088543,
      "grad_norm": 1.6358684301376343,
      "learning_rate": 5.982496794874726e-06,
      "loss": 0.0382,
      "step": 1540
    },
    {
      "epoch": 0.3955090584332738,
      "grad_norm": 1.609343409538269,
      "learning_rate": 5.981522871912175e-06,
      "loss": 0.0411,
      "step": 1550
    },
    {
      "epoch": 0.3980607297780046,
      "grad_norm": 2.244525194168091,
      "learning_rate": 5.980522665392373e-06,
      "loss": 0.0473,
      "step": 1560
    },
    {
      "epoch": 0.4006124011227354,
      "grad_norm": 3.230689525604248,
      "learning_rate": 5.979496184132622e-06,
      "loss": 0.0452,
      "step": 1570
    },
    {
      "epoch": 0.4031640724674662,
      "grad_norm": 3.2919723987579346,
      "learning_rate": 5.978443437181849e-06,
      "loss": 0.0449,
      "step": 1580
    },
    {
      "epoch": 0.405715743812197,
      "grad_norm": 1.8723994493484497,
      "learning_rate": 5.977364433820522e-06,
      "loss": 0.0408,
      "step": 1590
    },
    {
      "epoch": 0.40826741515692777,
      "grad_norm": 3.2039082050323486,
      "learning_rate": 5.976259183560576e-06,
      "loss": 0.0465,
      "step": 1600
    },
    {
      "epoch": 0.4108190865016586,
      "grad_norm": 1.4034531116485596,
      "learning_rate": 5.975127696145324e-06,
      "loss": 0.0417,
      "step": 1610
    },
    {
      "epoch": 0.4133707578463894,
      "grad_norm": 2.3331546783447266,
      "learning_rate": 5.97396998154937e-06,
      "loss": 0.0418,
      "step": 1620
    },
    {
      "epoch": 0.41592242919112016,
      "grad_norm": 1.5517454147338867,
      "learning_rate": 5.972786049978524e-06,
      "loss": 0.0428,
      "step": 1630
    },
    {
      "epoch": 0.418474100535851,
      "grad_norm": 3.171250104904175,
      "learning_rate": 5.971575911869713e-06,
      "loss": 0.0408,
      "step": 1640
    },
    {
      "epoch": 0.42102577188058177,
      "grad_norm": 2.458007574081421,
      "learning_rate": 5.9703395778908865e-06,
      "loss": 0.0472,
      "step": 1650
    },
    {
      "epoch": 0.4235774432253126,
      "grad_norm": 1.4991718530654907,
      "learning_rate": 5.969077058940923e-06,
      "loss": 0.0408,
      "step": 1660
    },
    {
      "epoch": 0.4261291145700434,
      "grad_norm": 2.1240978240966797,
      "learning_rate": 5.967788366149531e-06,
      "loss": 0.0414,
      "step": 1670
    },
    {
      "epoch": 0.42868078591477415,
      "grad_norm": 3.0107381343841553,
      "learning_rate": 5.96647351087716e-06,
      "loss": 0.04,
      "step": 1680
    },
    {
      "epoch": 0.431232457259505,
      "grad_norm": 2.087351083755493,
      "learning_rate": 5.965132504714891e-06,
      "loss": 0.0412,
      "step": 1690
    },
    {
      "epoch": 0.43378412860423576,
      "grad_norm": 1.0149552822113037,
      "learning_rate": 5.963765359484337e-06,
      "loss": 0.0424,
      "step": 1700
    },
    {
      "epoch": 0.4363357999489666,
      "grad_norm": 2.0345070362091064,
      "learning_rate": 5.962372087237539e-06,
      "loss": 0.035,
      "step": 1710
    },
    {
      "epoch": 0.4388874712936974,
      "grad_norm": 2.1437127590179443,
      "learning_rate": 5.9609527002568646e-06,
      "loss": 0.0358,
      "step": 1720
    },
    {
      "epoch": 0.44143914263842815,
      "grad_norm": 2.7861342430114746,
      "learning_rate": 5.9595072110548906e-06,
      "loss": 0.0403,
      "step": 1730
    },
    {
      "epoch": 0.443990813983159,
      "grad_norm": 1.211660385131836,
      "learning_rate": 5.958035632374299e-06,
      "loss": 0.043,
      "step": 1740
    },
    {
      "epoch": 0.44654248532788976,
      "grad_norm": 3.840953826904297,
      "learning_rate": 5.956537977187764e-06,
      "loss": 0.0393,
      "step": 1750
    },
    {
      "epoch": 0.4490941566726206,
      "grad_norm": 1.265952706336975,
      "learning_rate": 5.955014258697835e-06,
      "loss": 0.043,
      "step": 1760
    },
    {
      "epoch": 0.45164582801735137,
      "grad_norm": 4.032707214355469,
      "learning_rate": 5.953464490336823e-06,
      "loss": 0.0323,
      "step": 1770
    },
    {
      "epoch": 0.45419749936208215,
      "grad_norm": 1.410612940788269,
      "learning_rate": 5.951888685766683e-06,
      "loss": 0.0381,
      "step": 1780
    },
    {
      "epoch": 0.456749170706813,
      "grad_norm": 1.7868704795837402,
      "learning_rate": 5.950286858878886e-06,
      "loss": 0.0417,
      "step": 1790
    },
    {
      "epoch": 0.45930084205154376,
      "grad_norm": 3.2607359886169434,
      "learning_rate": 5.948659023794309e-06,
      "loss": 0.0388,
      "step": 1800
    },
    {
      "epoch": 0.46185251339627453,
      "grad_norm": 1.940967321395874,
      "learning_rate": 5.9470051948631e-06,
      "loss": 0.0407,
      "step": 1810
    },
    {
      "epoch": 0.46440418474100537,
      "grad_norm": 1.7820950746536255,
      "learning_rate": 5.945325386664556e-06,
      "loss": 0.0351,
      "step": 1820
    },
    {
      "epoch": 0.46695585608573614,
      "grad_norm": 3.1679279804229736,
      "learning_rate": 5.943619614006993e-06,
      "loss": 0.0401,
      "step": 1830
    },
    {
      "epoch": 0.469507527430467,
      "grad_norm": 0.912773609161377,
      "learning_rate": 5.941887891927618e-06,
      "loss": 0.0311,
      "step": 1840
    },
    {
      "epoch": 0.47205919877519775,
      "grad_norm": 2.1303391456604004,
      "learning_rate": 5.9401302356923925e-06,
      "loss": 0.0398,
      "step": 1850
    },
    {
      "epoch": 0.47461087011992853,
      "grad_norm": 1.0795629024505615,
      "learning_rate": 5.938346660795902e-06,
      "loss": 0.0384,
      "step": 1860
    },
    {
      "epoch": 0.47716254146465936,
      "grad_norm": 1.5061334371566772,
      "learning_rate": 5.936537182961215e-06,
      "loss": 0.032,
      "step": 1870
    },
    {
      "epoch": 0.47971421280939014,
      "grad_norm": 1.0948927402496338,
      "learning_rate": 5.93470181813975e-06,
      "loss": 0.0378,
      "step": 1880
    },
    {
      "epoch": 0.48226588415412097,
      "grad_norm": 2.728391647338867,
      "learning_rate": 5.9328405825111296e-06,
      "loss": 0.0351,
      "step": 1890
    },
    {
      "epoch": 0.48481755549885175,
      "grad_norm": 2.6559503078460693,
      "learning_rate": 5.93095349248304e-06,
      "loss": 0.0321,
      "step": 1900
    },
    {
      "epoch": 0.4873692268435825,
      "grad_norm": 0.8435110449790955,
      "learning_rate": 5.929040564691087e-06,
      "loss": 0.0371,
      "step": 1910
    },
    {
      "epoch": 0.48992089818831336,
      "grad_norm": 1.813321828842163,
      "learning_rate": 5.92710181599865e-06,
      "loss": 0.0374,
      "step": 1920
    },
    {
      "epoch": 0.49247256953304414,
      "grad_norm": 1.7775193452835083,
      "learning_rate": 5.925137263496729e-06,
      "loss": 0.0367,
      "step": 1930
    },
    {
      "epoch": 0.49502424087777497,
      "grad_norm": 1.2854493856430054,
      "learning_rate": 5.9231469245038e-06,
      "loss": 0.0415,
      "step": 1940
    },
    {
      "epoch": 0.49757591222250575,
      "grad_norm": 1.0430928468704224,
      "learning_rate": 5.921130816565655e-06,
      "loss": 0.0412,
      "step": 1950
    },
    {
      "epoch": 0.5001275835672365,
      "grad_norm": 1.0425087213516235,
      "learning_rate": 5.919088957455258e-06,
      "loss": 0.0394,
      "step": 1960
    },
    {
      "epoch": 0.5026792549119673,
      "grad_norm": 1.738433837890625,
      "learning_rate": 5.917021365172577e-06,
      "loss": 0.0314,
      "step": 1970
    },
    {
      "epoch": 0.5052309262566982,
      "grad_norm": 0.9304326176643372,
      "learning_rate": 5.914928057944431e-06,
      "loss": 0.0393,
      "step": 1980
    },
    {
      "epoch": 0.507782597601429,
      "grad_norm": 2.4667181968688965,
      "learning_rate": 5.912809054224332e-06,
      "loss": 0.0375,
      "step": 1990
    },
    {
      "epoch": 0.5103342689461597,
      "grad_norm": 1.0782585144042969,
      "learning_rate": 5.910664372692313e-06,
      "loss": 0.0336,
      "step": 2000
    },
    {
      "epoch": 0.5128859402908905,
      "grad_norm": 1.5189335346221924,
      "learning_rate": 5.9084940322547754e-06,
      "loss": 0.0366,
      "step": 2010
    },
    {
      "epoch": 0.5154376116356213,
      "grad_norm": 2.4441661834716797,
      "learning_rate": 5.9062980520443115e-06,
      "loss": 0.0341,
      "step": 2020
    },
    {
      "epoch": 0.5179892829803522,
      "grad_norm": 1.2969788312911987,
      "learning_rate": 5.904076451419543e-06,
      "loss": 0.0309,
      "step": 2030
    },
    {
      "epoch": 0.520540954325083,
      "grad_norm": 1.3563958406448364,
      "learning_rate": 5.901829249964948e-06,
      "loss": 0.0276,
      "step": 2040
    },
    {
      "epoch": 0.5230926256698137,
      "grad_norm": 2.4599099159240723,
      "learning_rate": 5.899556467490687e-06,
      "loss": 0.0328,
      "step": 2050
    },
    {
      "epoch": 0.5256442970145445,
      "grad_norm": 2.01973557472229,
      "learning_rate": 5.897258124032429e-06,
      "loss": 0.038,
      "step": 2060
    },
    {
      "epoch": 0.5281959683592753,
      "grad_norm": 1.541785717010498,
      "learning_rate": 5.894934239851178e-06,
      "loss": 0.0342,
      "step": 2070
    },
    {
      "epoch": 0.5307476397040061,
      "grad_norm": 1.1081935167312622,
      "learning_rate": 5.892584835433089e-06,
      "loss": 0.033,
      "step": 2080
    },
    {
      "epoch": 0.533299311048737,
      "grad_norm": 1.496882677078247,
      "learning_rate": 5.890209931489292e-06,
      "loss": 0.029,
      "step": 2090
    },
    {
      "epoch": 0.5358509823934677,
      "grad_norm": 12.85782241821289,
      "learning_rate": 5.887809548955706e-06,
      "loss": 0.0404,
      "step": 2100
    },
    {
      "epoch": 0.5384026537381985,
      "grad_norm": 1.9677023887634277,
      "learning_rate": 5.885383708992856e-06,
      "loss": 0.0346,
      "step": 2110
    },
    {
      "epoch": 0.5409543250829293,
      "grad_norm": 1.9975990056991577,
      "learning_rate": 5.88293243298569e-06,
      "loss": 0.0364,
      "step": 2120
    },
    {
      "epoch": 0.5435059964276601,
      "grad_norm": 1.0146771669387817,
      "learning_rate": 5.880455742543382e-06,
      "loss": 0.0303,
      "step": 2130
    },
    {
      "epoch": 0.546057667772391,
      "grad_norm": 2.213881731033325,
      "learning_rate": 5.877953659499151e-06,
      "loss": 0.0319,
      "step": 2140
    },
    {
      "epoch": 0.5486093391171217,
      "grad_norm": 1.3682197332382202,
      "learning_rate": 5.875426205910057e-06,
      "loss": 0.0343,
      "step": 2150
    },
    {
      "epoch": 0.5511610104618525,
      "grad_norm": 1.53465735912323,
      "learning_rate": 5.872873404056825e-06,
      "loss": 0.0316,
      "step": 2160
    },
    {
      "epoch": 0.5537126818065833,
      "grad_norm": 1.061879277229309,
      "learning_rate": 5.870295276443624e-06,
      "loss": 0.0317,
      "step": 2170
    },
    {
      "epoch": 0.5562643531513141,
      "grad_norm": 1.099205493927002,
      "learning_rate": 5.867691845797893e-06,
      "loss": 0.0312,
      "step": 2180
    },
    {
      "epoch": 0.558816024496045,
      "grad_norm": 0.7490290999412537,
      "learning_rate": 5.8650631350701205e-06,
      "loss": 0.0269,
      "step": 2190
    },
    {
      "epoch": 0.5613676958407757,
      "grad_norm": 1.189103603363037,
      "learning_rate": 5.862409167433658e-06,
      "loss": 0.0312,
      "step": 2200
    },
    {
      "epoch": 0.5639193671855065,
      "grad_norm": 1.3587863445281982,
      "learning_rate": 5.859729966284505e-06,
      "loss": 0.0323,
      "step": 2210
    },
    {
      "epoch": 0.5664710385302373,
      "grad_norm": 0.6706628203392029,
      "learning_rate": 5.857025555241108e-06,
      "loss": 0.0296,
      "step": 2220
    },
    {
      "epoch": 0.5690227098749681,
      "grad_norm": 7.282918930053711,
      "learning_rate": 5.85429595814415e-06,
      "loss": 0.0297,
      "step": 2230
    },
    {
      "epoch": 0.571574381219699,
      "grad_norm": 1.1752480268478394,
      "learning_rate": 5.85154119905634e-06,
      "loss": 0.0288,
      "step": 2240
    },
    {
      "epoch": 0.5741260525644297,
      "grad_norm": 1.0217607021331787,
      "learning_rate": 5.848761302262205e-06,
      "loss": 0.0309,
      "step": 2250
    },
    {
      "epoch": 0.5766777239091605,
      "grad_norm": 2.6321234703063965,
      "learning_rate": 5.845956292267872e-06,
      "loss": 0.035,
      "step": 2260
    },
    {
      "epoch": 0.5792293952538913,
      "grad_norm": 0.8494094610214233,
      "learning_rate": 5.8431261938008515e-06,
      "loss": 0.031,
      "step": 2270
    },
    {
      "epoch": 0.581781066598622,
      "grad_norm": 1.3556350469589233,
      "learning_rate": 5.840271031809821e-06,
      "loss": 0.0305,
      "step": 2280
    },
    {
      "epoch": 0.5843327379433529,
      "grad_norm": 0.7412778735160828,
      "learning_rate": 5.8373908314644075e-06,
      "loss": 0.0262,
      "step": 2290
    },
    {
      "epoch": 0.5868844092880837,
      "grad_norm": 1.4868544340133667,
      "learning_rate": 5.83448561815496e-06,
      "loss": 0.0293,
      "step": 2300
    },
    {
      "epoch": 0.5894360806328145,
      "grad_norm": 0.9419906735420227,
      "learning_rate": 5.831555417492332e-06,
      "loss": 0.0335,
      "step": 2310
    },
    {
      "epoch": 0.5919877519775453,
      "grad_norm": 1.1363625526428223,
      "learning_rate": 5.8286002553076485e-06,
      "loss": 0.0336,
      "step": 2320
    },
    {
      "epoch": 0.594539423322276,
      "grad_norm": 2.777545213699341,
      "learning_rate": 5.8256201576520865e-06,
      "loss": 0.0306,
      "step": 2330
    },
    {
      "epoch": 0.5970910946670069,
      "grad_norm": 1.014387845993042,
      "learning_rate": 5.822615150796639e-06,
      "loss": 0.0319,
      "step": 2340
    },
    {
      "epoch": 0.5996427660117377,
      "grad_norm": 1.114518404006958,
      "learning_rate": 5.8195852612318845e-06,
      "loss": 0.0329,
      "step": 2350
    },
    {
      "epoch": 0.6021944373564685,
      "grad_norm": 1.7633538246154785,
      "learning_rate": 5.816530515667758e-06,
      "loss": 0.0306,
      "step": 2360
    },
    {
      "epoch": 0.6047461087011993,
      "grad_norm": 1.5200434923171997,
      "learning_rate": 5.813450941033309e-06,
      "loss": 0.0296,
      "step": 2370
    },
    {
      "epoch": 0.60729778004593,
      "grad_norm": 0.8720308542251587,
      "learning_rate": 5.810346564476468e-06,
      "loss": 0.0282,
      "step": 2380
    },
    {
      "epoch": 0.6098494513906609,
      "grad_norm": 1.8047078847885132,
      "learning_rate": 5.807217413363805e-06,
      "loss": 0.039,
      "step": 2390
    },
    {
      "epoch": 0.6124011227353917,
      "grad_norm": 1.6201497316360474,
      "learning_rate": 5.804063515280296e-06,
      "loss": 0.0307,
      "step": 2400
    },
    {
      "epoch": 0.6149527940801225,
      "grad_norm": 1.6760756969451904,
      "learning_rate": 5.800884898029062e-06,
      "loss": 0.0254,
      "step": 2410
    },
    {
      "epoch": 0.6175044654248533,
      "grad_norm": 3.973919153213501,
      "learning_rate": 5.7976815896311476e-06,
      "loss": 0.0303,
      "step": 2420
    },
    {
      "epoch": 0.620056136769584,
      "grad_norm": 0.60660719871521,
      "learning_rate": 5.794453618325253e-06,
      "loss": 0.0292,
      "step": 2430
    },
    {
      "epoch": 0.6226078081143149,
      "grad_norm": 0.8211406469345093,
      "learning_rate": 5.791201012567498e-06,
      "loss": 0.0283,
      "step": 2440
    },
    {
      "epoch": 0.6251594794590457,
      "grad_norm": 0.6944069862365723,
      "learning_rate": 5.787923801031164e-06,
      "loss": 0.0336,
      "step": 2450
    },
    {
      "epoch": 0.6277111508037765,
      "grad_norm": 1.1745727062225342,
      "learning_rate": 5.78462201260645e-06,
      "loss": 0.0284,
      "step": 2460
    },
    {
      "epoch": 0.6302628221485073,
      "grad_norm": 1.3961211442947388,
      "learning_rate": 5.781295676400203e-06,
      "loss": 0.0327,
      "step": 2470
    },
    {
      "epoch": 0.632814493493238,
      "grad_norm": 1.1352729797363281,
      "learning_rate": 5.777944821735679e-06,
      "loss": 0.0302,
      "step": 2480
    },
    {
      "epoch": 0.6353661648379688,
      "grad_norm": 1.200098991394043,
      "learning_rate": 5.774569478152271e-06,
      "loss": 0.0287,
      "step": 2490
    },
    {
      "epoch": 0.6379178361826997,
      "grad_norm": 1.50564706325531,
      "learning_rate": 5.771169675405256e-06,
      "loss": 0.0301,
      "step": 2500
    },
    {
      "epoch": 0.6404695075274305,
      "grad_norm": 1.6297107934951782,
      "learning_rate": 5.767745443465529e-06,
      "loss": 0.0313,
      "step": 2510
    },
    {
      "epoch": 0.6430211788721613,
      "grad_norm": 0.7969597578048706,
      "learning_rate": 5.76429681251934e-06,
      "loss": 0.0281,
      "step": 2520
    },
    {
      "epoch": 0.645572850216892,
      "grad_norm": 0.9594812393188477,
      "learning_rate": 5.7608238129680294e-06,
      "loss": 0.0291,
      "step": 2530
    },
    {
      "epoch": 0.6481245215616228,
      "grad_norm": 1.628527283668518,
      "learning_rate": 5.757326475427758e-06,
      "loss": 0.0288,
      "step": 2540
    },
    {
      "epoch": 0.6506761929063537,
      "grad_norm": 1.2033168077468872,
      "learning_rate": 5.753804830729234e-06,
      "loss": 0.0271,
      "step": 2550
    },
    {
      "epoch": 0.6532278642510845,
      "grad_norm": 0.5983326435089111,
      "learning_rate": 5.750258909917453e-06,
      "loss": 0.0244,
      "step": 2560
    },
    {
      "epoch": 0.6557795355958153,
      "grad_norm": 0.9838243722915649,
      "learning_rate": 5.746688744251408e-06,
      "loss": 0.0251,
      "step": 2570
    },
    {
      "epoch": 0.658331206940546,
      "grad_norm": 1.507983922958374,
      "learning_rate": 5.743094365203827e-06,
      "loss": 0.0258,
      "step": 2580
    },
    {
      "epoch": 0.6608828782852768,
      "grad_norm": 1.403834581375122,
      "learning_rate": 5.739475804460888e-06,
      "loss": 0.0254,
      "step": 2590
    },
    {
      "epoch": 0.6634345496300077,
      "grad_norm": 1.1948083639144897,
      "learning_rate": 5.735833093921944e-06,
      "loss": 0.0291,
      "step": 2600
    },
    {
      "epoch": 0.6659862209747385,
      "grad_norm": 3.1764698028564453,
      "learning_rate": 5.732166265699239e-06,
      "loss": 0.0333,
      "step": 2610
    },
    {
      "epoch": 0.6685378923194693,
      "grad_norm": 0.9992326498031616,
      "learning_rate": 5.7284753521176264e-06,
      "loss": 0.0286,
      "step": 2620
    },
    {
      "epoch": 0.6710895636642,
      "grad_norm": 0.8567758202552795,
      "learning_rate": 5.724760385714284e-06,
      "loss": 0.0266,
      "step": 2630
    },
    {
      "epoch": 0.6736412350089308,
      "grad_norm": 1.4158856868743896,
      "learning_rate": 5.721021399238424e-06,
      "loss": 0.0277,
      "step": 2640
    },
    {
      "epoch": 0.6761929063536617,
      "grad_norm": 0.7602766156196594,
      "learning_rate": 5.717258425651013e-06,
      "loss": 0.0316,
      "step": 2650
    },
    {
      "epoch": 0.6787445776983925,
      "grad_norm": 1.0612212419509888,
      "learning_rate": 5.71347149812447e-06,
      "loss": 0.0265,
      "step": 2660
    },
    {
      "epoch": 0.6812962490431232,
      "grad_norm": 2.0236854553222656,
      "learning_rate": 5.709660650042382e-06,
      "loss": 0.03,
      "step": 2670
    },
    {
      "epoch": 0.683847920387854,
      "grad_norm": 2.617422342300415,
      "learning_rate": 5.705825914999206e-06,
      "loss": 0.0318,
      "step": 2680
    },
    {
      "epoch": 0.6863995917325848,
      "grad_norm": 0.8844764828681946,
      "learning_rate": 5.701967326799974e-06,
      "loss": 0.0244,
      "step": 2690
    },
    {
      "epoch": 0.6889512630773157,
      "grad_norm": 0.9346625804901123,
      "learning_rate": 5.698084919459997e-06,
      "loss": 0.0313,
      "step": 2700
    },
    {
      "epoch": 0.6915029344220465,
      "grad_norm": 1.3249640464782715,
      "learning_rate": 5.694178727204562e-06,
      "loss": 0.0247,
      "step": 2710
    },
    {
      "epoch": 0.6940546057667772,
      "grad_norm": 1.593597650527954,
      "learning_rate": 5.69024878446863e-06,
      "loss": 0.0294,
      "step": 2720
    },
    {
      "epoch": 0.696606277111508,
      "grad_norm": 1.0783870220184326,
      "learning_rate": 5.686295125896537e-06,
      "loss": 0.0261,
      "step": 2730
    },
    {
      "epoch": 0.6991579484562388,
      "grad_norm": 1.3403818607330322,
      "learning_rate": 5.682317786341682e-06,
      "loss": 0.0259,
      "step": 2740
    },
    {
      "epoch": 0.7017096198009697,
      "grad_norm": 1.2575613260269165,
      "learning_rate": 5.678316800866226e-06,
      "loss": 0.0247,
      "step": 2750
    },
    {
      "epoch": 0.7042612911457005,
      "grad_norm": 0.5721168518066406,
      "learning_rate": 5.674292204740779e-06,
      "loss": 0.024,
      "step": 2760
    },
    {
      "epoch": 0.7068129624904312,
      "grad_norm": 0.49415379762649536,
      "learning_rate": 5.67024403344409e-06,
      "loss": 0.0265,
      "step": 2770
    },
    {
      "epoch": 0.709364633835162,
      "grad_norm": 1.5040923357009888,
      "learning_rate": 5.666172322662735e-06,
      "loss": 0.0271,
      "step": 2780
    },
    {
      "epoch": 0.7119163051798928,
      "grad_norm": 1.4604111909866333,
      "learning_rate": 5.662077108290802e-06,
      "loss": 0.0252,
      "step": 2790
    },
    {
      "epoch": 0.7144679765246237,
      "grad_norm": 4.383986473083496,
      "learning_rate": 5.657958426429573e-06,
      "loss": 0.029,
      "step": 2800
    },
    {
      "epoch": 0.7170196478693545,
      "grad_norm": 1.1418787240982056,
      "learning_rate": 5.65381631338721e-06,
      "loss": 0.026,
      "step": 2810
    },
    {
      "epoch": 0.7195713192140852,
      "grad_norm": 1.178589105606079,
      "learning_rate": 5.64965080567843e-06,
      "loss": 0.0257,
      "step": 2820
    },
    {
      "epoch": 0.722122990558816,
      "grad_norm": 2.3111040592193604,
      "learning_rate": 5.6454619400241845e-06,
      "loss": 0.0287,
      "step": 2830
    },
    {
      "epoch": 0.7246746619035468,
      "grad_norm": 0.6986176371574402,
      "learning_rate": 5.641249753351339e-06,
      "loss": 0.0267,
      "step": 2840
    },
    {
      "epoch": 0.7272263332482776,
      "grad_norm": 0.7001646757125854,
      "learning_rate": 5.637014282792342e-06,
      "loss": 0.0249,
      "step": 2850
    },
    {
      "epoch": 0.7297780045930085,
      "grad_norm": 1.2817333936691284,
      "learning_rate": 5.632755565684905e-06,
      "loss": 0.0251,
      "step": 2860
    },
    {
      "epoch": 0.7323296759377392,
      "grad_norm": 0.9041908383369446,
      "learning_rate": 5.628473639571665e-06,
      "loss": 0.0252,
      "step": 2870
    },
    {
      "epoch": 0.73488134728247,
      "grad_norm": 2.7807722091674805,
      "learning_rate": 5.624168542199858e-06,
      "loss": 0.0255,
      "step": 2880
    },
    {
      "epoch": 0.7374330186272008,
      "grad_norm": 1.0662809610366821,
      "learning_rate": 5.619840311520989e-06,
      "loss": 0.0332,
      "step": 2890
    },
    {
      "epoch": 0.7399846899719316,
      "grad_norm": 1.3166412115097046,
      "learning_rate": 5.6154889856904894e-06,
      "loss": 0.0264,
      "step": 2900
    },
    {
      "epoch": 0.7425363613166625,
      "grad_norm": 1.0728857517242432,
      "learning_rate": 5.611114603067388e-06,
      "loss": 0.0223,
      "step": 2910
    },
    {
      "epoch": 0.7450880326613932,
      "grad_norm": 1.4507665634155273,
      "learning_rate": 5.606717202213974e-06,
      "loss": 0.0274,
      "step": 2920
    },
    {
      "epoch": 0.747639704006124,
      "grad_norm": 0.8330174088478088,
      "learning_rate": 5.602296821895444e-06,
      "loss": 0.0256,
      "step": 2930
    },
    {
      "epoch": 0.7501913753508548,
      "grad_norm": 0.7136677503585815,
      "learning_rate": 5.597853501079579e-06,
      "loss": 0.0265,
      "step": 2940
    },
    {
      "epoch": 0.7527430466955856,
      "grad_norm": 1.0832639932632446,
      "learning_rate": 5.593387278936388e-06,
      "loss": 0.0266,
      "step": 2950
    },
    {
      "epoch": 0.7552947180403164,
      "grad_norm": 0.5012922883033752,
      "learning_rate": 5.588898194837764e-06,
      "loss": 0.0271,
      "step": 2960
    },
    {
      "epoch": 0.7578463893850472,
      "grad_norm": 1.0677125453948975,
      "learning_rate": 5.584386288357142e-06,
      "loss": 0.0282,
      "step": 2970
    },
    {
      "epoch": 0.760398060729778,
      "grad_norm": 1.0686490535736084,
      "learning_rate": 5.579851599269146e-06,
      "loss": 0.0283,
      "step": 2980
    },
    {
      "epoch": 0.7629497320745088,
      "grad_norm": 0.9940263032913208,
      "learning_rate": 5.575294167549241e-06,
      "loss": 0.0281,
      "step": 2990
    },
    {
      "epoch": 0.7655014034192396,
      "grad_norm": 1.4291343688964844,
      "learning_rate": 5.5707140333733775e-06,
      "loss": 0.0267,
      "step": 3000
    },
    {
      "epoch": 0.7680530747639704,
      "grad_norm": 0.6890590786933899,
      "learning_rate": 5.566111237117639e-06,
      "loss": 0.0231,
      "step": 3010
    },
    {
      "epoch": 0.7706047461087012,
      "grad_norm": 0.8809148073196411,
      "learning_rate": 5.5614858193578896e-06,
      "loss": 0.0279,
      "step": 3020
    },
    {
      "epoch": 0.773156417453432,
      "grad_norm": 1.3962548971176147,
      "learning_rate": 5.556837820869408e-06,
      "loss": 0.0266,
      "step": 3030
    },
    {
      "epoch": 0.7757080887981628,
      "grad_norm": 0.9583940505981445,
      "learning_rate": 5.552167282626535e-06,
      "loss": 0.026,
      "step": 3040
    },
    {
      "epoch": 0.7782597601428936,
      "grad_norm": 0.7618580460548401,
      "learning_rate": 5.54747424580231e-06,
      "loss": 0.0228,
      "step": 3050
    },
    {
      "epoch": 0.7808114314876244,
      "grad_norm": 1.2238036394119263,
      "learning_rate": 5.5427587517681095e-06,
      "loss": 0.0296,
      "step": 3060
    },
    {
      "epoch": 0.7833631028323552,
      "grad_norm": 0.5979515314102173,
      "learning_rate": 5.53802084209328e-06,
      "loss": 0.0238,
      "step": 3070
    },
    {
      "epoch": 0.785914774177086,
      "grad_norm": 0.6824269890785217,
      "learning_rate": 5.533260558544772e-06,
      "loss": 0.0227,
      "step": 3080
    },
    {
      "epoch": 0.7884664455218168,
      "grad_norm": 1.1425477266311646,
      "learning_rate": 5.528477943086773e-06,
      "loss": 0.0242,
      "step": 3090
    },
    {
      "epoch": 0.7910181168665475,
      "grad_norm": 0.8494576215744019,
      "learning_rate": 5.523673037880338e-06,
      "loss": 0.0225,
      "step": 3100
    },
    {
      "epoch": 0.7935697882112784,
      "grad_norm": 1.4294337034225464,
      "learning_rate": 5.518845885283015e-06,
      "loss": 0.0227,
      "step": 3110
    },
    {
      "epoch": 0.7961214595560092,
      "grad_norm": 1.3093550205230713,
      "learning_rate": 5.513996527848475e-06,
      "loss": 0.0229,
      "step": 3120
    },
    {
      "epoch": 0.79867313090074,
      "grad_norm": 1.951343059539795,
      "learning_rate": 5.509125008326135e-06,
      "loss": 0.0277,
      "step": 3130
    },
    {
      "epoch": 0.8012248022454708,
      "grad_norm": 1.1987403631210327,
      "learning_rate": 5.50423136966078e-06,
      "loss": 0.0244,
      "step": 3140
    },
    {
      "epoch": 0.8037764735902015,
      "grad_norm": 0.8594563007354736,
      "learning_rate": 5.499315654992189e-06,
      "loss": 0.0262,
      "step": 3150
    },
    {
      "epoch": 0.8063281449349324,
      "grad_norm": 1.0187805891036987,
      "learning_rate": 5.494377907654748e-06,
      "loss": 0.0265,
      "step": 3160
    },
    {
      "epoch": 0.8088798162796632,
      "grad_norm": 0.7313008308410645,
      "learning_rate": 5.489418171177073e-06,
      "loss": 0.0225,
      "step": 3170
    },
    {
      "epoch": 0.811431487624394,
      "grad_norm": 1.7417691946029663,
      "learning_rate": 5.484436489281627e-06,
      "loss": 0.0237,
      "step": 3180
    },
    {
      "epoch": 0.8139831589691248,
      "grad_norm": 1.963599443435669,
      "learning_rate": 5.4794329058843285e-06,
      "loss": 0.0246,
      "step": 3190
    },
    {
      "epoch": 0.8165348303138555,
      "grad_norm": 1.622215986251831,
      "learning_rate": 5.474407465094171e-06,
      "loss": 0.025,
      "step": 3200
    },
    {
      "epoch": 0.8190865016585863,
      "grad_norm": 2.1439669132232666,
      "learning_rate": 5.469360211212832e-06,
      "loss": 0.023,
      "step": 3210
    },
    {
      "epoch": 0.8216381730033172,
      "grad_norm": 0.7606381177902222,
      "learning_rate": 5.464291188734278e-06,
      "loss": 0.0251,
      "step": 3220
    },
    {
      "epoch": 0.824189844348048,
      "grad_norm": 2.439307451248169,
      "learning_rate": 5.45920044234438e-06,
      "loss": 0.0238,
      "step": 3230
    },
    {
      "epoch": 0.8267415156927788,
      "grad_norm": 1.7854689359664917,
      "learning_rate": 5.454088016920514e-06,
      "loss": 0.0262,
      "step": 3240
    },
    {
      "epoch": 0.8292931870375095,
      "grad_norm": 1.1369054317474365,
      "learning_rate": 5.448953957531168e-06,
      "loss": 0.0241,
      "step": 3250
    },
    {
      "epoch": 0.8318448583822403,
      "grad_norm": 1.2106547355651855,
      "learning_rate": 5.4437983094355435e-06,
      "loss": 0.0253,
      "step": 3260
    },
    {
      "epoch": 0.8343965297269712,
      "grad_norm": 0.7193657755851746,
      "learning_rate": 5.438621118083155e-06,
      "loss": 0.022,
      "step": 3270
    },
    {
      "epoch": 0.836948201071702,
      "grad_norm": 1.0049270391464233,
      "learning_rate": 5.433422429113436e-06,
      "loss": 0.0255,
      "step": 3280
    },
    {
      "epoch": 0.8394998724164328,
      "grad_norm": 1.0903527736663818,
      "learning_rate": 5.428202288355326e-06,
      "loss": 0.0226,
      "step": 3290
    },
    {
      "epoch": 0.8420515437611635,
      "grad_norm": 1.3176894187927246,
      "learning_rate": 5.422960741826874e-06,
      "loss": 0.0242,
      "step": 3300
    },
    {
      "epoch": 0.8446032151058943,
      "grad_norm": 0.8586781024932861,
      "learning_rate": 5.4176978357348355e-06,
      "loss": 0.0219,
      "step": 3310
    },
    {
      "epoch": 0.8471548864506252,
      "grad_norm": 0.8540439605712891,
      "learning_rate": 5.412413616474256e-06,
      "loss": 0.0236,
      "step": 3320
    },
    {
      "epoch": 0.849706557795356,
      "grad_norm": 0.4369836449623108,
      "learning_rate": 5.407108130628066e-06,
      "loss": 0.0201,
      "step": 3330
    },
    {
      "epoch": 0.8522582291400868,
      "grad_norm": 1.570779800415039,
      "learning_rate": 5.401781424966676e-06,
      "loss": 0.0247,
      "step": 3340
    },
    {
      "epoch": 0.8548099004848175,
      "grad_norm": 2.3272910118103027,
      "learning_rate": 5.396433546447557e-06,
      "loss": 0.0221,
      "step": 3350
    },
    {
      "epoch": 0.8573615718295483,
      "grad_norm": 1.004865288734436,
      "learning_rate": 5.391064542214828e-06,
      "loss": 0.0222,
      "step": 3360
    },
    {
      "epoch": 0.8599132431742792,
      "grad_norm": 0.9049104452133179,
      "learning_rate": 5.385674459598841e-06,
      "loss": 0.0263,
      "step": 3370
    },
    {
      "epoch": 0.86246491451901,
      "grad_norm": 1.0357730388641357,
      "learning_rate": 5.380263346115766e-06,
      "loss": 0.0224,
      "step": 3380
    },
    {
      "epoch": 0.8650165858637408,
      "grad_norm": 2.943132162094116,
      "learning_rate": 5.374831249467171e-06,
      "loss": 0.0213,
      "step": 3390
    },
    {
      "epoch": 0.8675682572084715,
      "grad_norm": 0.7794683575630188,
| "learning_rate": 5.369378217539595e-06, | |
| "loss": 0.0261, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 0.8701199285532023, | |
| "grad_norm": 1.315697431564331, | |
| "learning_rate": 5.36390429840414e-06, | |
| "loss": 0.0243, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 0.8726715998979332, | |
| "grad_norm": 0.48824071884155273, | |
| "learning_rate": 5.358409540316032e-06, | |
| "loss": 0.0201, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 0.875223271242664, | |
| "grad_norm": 1.1559224128723145, | |
| "learning_rate": 5.3528939917142046e-06, | |
| "loss": 0.0203, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 0.8777749425873947, | |
| "grad_norm": 1.6702337265014648, | |
| "learning_rate": 5.34735770122087e-06, | |
| "loss": 0.0225, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 0.8803266139321255, | |
| "grad_norm": 0.8018677234649658, | |
| "learning_rate": 5.34180071764109e-06, | |
| "loss": 0.0243, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 0.8828782852768563, | |
| "grad_norm": 1.211368203163147, | |
| "learning_rate": 5.336223089962347e-06, | |
| "loss": 0.0238, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 0.8854299566215872, | |
| "grad_norm": 0.7445624470710754, | |
| "learning_rate": 5.33062486735411e-06, | |
| "loss": 0.0221, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 0.887981627966318, | |
| "grad_norm": 0.8386270403862, | |
| "learning_rate": 5.3250060991674e-06, | |
| "loss": 0.0201, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 0.8905332993110487, | |
| "grad_norm": 0.840062141418457, | |
| "learning_rate": 5.319366834934361e-06, | |
| "loss": 0.0227, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 0.8930849706557795, | |
| "grad_norm": 0.7337429523468018, | |
| "learning_rate": 5.313707124367819e-06, | |
| "loss": 0.0223, | |
| "step": 3500 | |
| }, | |
    {
      "epoch": 0.8956366420005103,
      "grad_norm": 1.0676528215408325,
      "learning_rate": 5.30802701736084e-06,
      "loss": 0.0231,
      "step": 3510
    },
    {
      "epoch": 0.8981883133452412,
      "grad_norm": 0.512732982635498,
      "learning_rate": 5.3023265639863e-06,
      "loss": 0.0238,
      "step": 3520
    },
    {
      "epoch": 0.900739984689972,
      "grad_norm": 1.079931378364563,
      "learning_rate": 5.296605814496435e-06,
      "loss": 0.0217,
      "step": 3530
    },
    {
      "epoch": 0.9032916560347027,
      "grad_norm": 1.2752869129180908,
      "learning_rate": 5.290864819322402e-06,
      "loss": 0.0214,
      "step": 3540
    },
    {
      "epoch": 0.9058433273794335,
      "grad_norm": 1.5410728454589844,
      "learning_rate": 5.2851036290738316e-06,
      "loss": 0.0294,
      "step": 3550
    },
    {
      "epoch": 0.9083949987241643,
      "grad_norm": 0.7965479493141174,
      "learning_rate": 5.279322294538386e-06,
      "loss": 0.0227,
      "step": 3560
    },
    {
      "epoch": 0.9109466700688952,
      "grad_norm": 0.8509993553161621,
      "learning_rate": 5.273520866681309e-06,
      "loss": 0.0214,
      "step": 3570
    },
    {
      "epoch": 0.913498341413626,
      "grad_norm": 1.2866240739822388,
      "learning_rate": 5.2676993966449736e-06,
      "loss": 0.024,
      "step": 3580
    },
    {
      "epoch": 0.9160500127583567,
      "grad_norm": 1.3114237785339355,
      "learning_rate": 5.261857935748437e-06,
      "loss": 0.018,
      "step": 3590
    },
    {
      "epoch": 0.9186016841030875,
      "grad_norm": 1.0281429290771484,
      "learning_rate": 5.255996535486987e-06,
      "loss": 0.0217,
      "step": 3600
    },
    {
      "epoch": 0.9211533554478183,
      "grad_norm": 1.3615024089813232,
      "learning_rate": 5.250115247531681e-06,
      "loss": 0.0247,
      "step": 3610
    },
    {
      "epoch": 0.9237050267925491,
      "grad_norm": 0.7838472127914429,
      "learning_rate": 5.244214123728902e-06,
      "loss": 0.0238,
      "step": 3620
    },
    {
      "epoch": 0.92625669813728,
      "grad_norm": 0.8573798537254333,
      "learning_rate": 5.238293216099891e-06,
      "loss": 0.0222,
      "step": 3630
    },
    {
      "epoch": 0.9288083694820107,
      "grad_norm": 1.4891736507415771,
      "learning_rate": 5.2323525768402965e-06,
      "loss": 0.0236,
      "step": 3640
    },
    {
      "epoch": 0.9313600408267415,
      "grad_norm": 0.6912400722503662,
      "learning_rate": 5.226392258319708e-06,
      "loss": 0.0208,
      "step": 3650
    },
    {
      "epoch": 0.9339117121714723,
      "grad_norm": 0.5795527696609497,
      "learning_rate": 5.220412313081197e-06,
      "loss": 0.0203,
      "step": 3660
    },
    {
      "epoch": 0.9364633835162031,
      "grad_norm": 1.261696457862854,
      "learning_rate": 5.214412793840857e-06,
      "loss": 0.0214,
      "step": 3670
    },
    {
      "epoch": 0.939015054860934,
      "grad_norm": 0.7527537941932678,
      "learning_rate": 5.208393753487333e-06,
      "loss": 0.0224,
      "step": 3680
    },
    {
      "epoch": 0.9415667262056647,
      "grad_norm": 0.8399683237075806,
      "learning_rate": 5.20235524508136e-06,
      "loss": 0.0243,
      "step": 3690
    },
    {
      "epoch": 0.9441183975503955,
      "grad_norm": 0.7504599094390869,
      "learning_rate": 5.196297321855291e-06,
      "loss": 0.0203,
      "step": 3700
    },
    {
      "epoch": 0.9466700688951263,
      "grad_norm": 0.9682078957557678,
      "learning_rate": 5.190220037212632e-06,
      "loss": 0.023,
      "step": 3710
    },
    {
      "epoch": 0.9492217402398571,
      "grad_norm": 0.9620463252067566,
      "learning_rate": 5.184123444727567e-06,
      "loss": 0.0217,
      "step": 3720
    },
    {
      "epoch": 0.951773411584588,
      "grad_norm": 1.114750623703003,
      "learning_rate": 5.178007598144491e-06,
      "loss": 0.0216,
      "step": 3730
    },
    {
      "epoch": 0.9543250829293187,
      "grad_norm": 0.9930930137634277,
      "learning_rate": 5.17187255137753e-06,
      "loss": 0.0179,
      "step": 3740
    },
    {
      "epoch": 0.9568767542740495,
      "grad_norm": 0.584056556224823,
      "learning_rate": 5.165718358510071e-06,
      "loss": 0.0208,
      "step": 3750
    },
    {
      "epoch": 0.9594284256187803,
      "grad_norm": 0.9347606897354126,
      "learning_rate": 5.159545073794281e-06,
      "loss": 0.0195,
      "step": 3760
    },
    {
      "epoch": 0.961980096963511,
      "grad_norm": 0.8671495318412781,
      "learning_rate": 5.153352751650634e-06,
      "loss": 0.0225,
      "step": 3770
    },
    {
      "epoch": 0.9645317683082419,
      "grad_norm": 0.8564977049827576,
      "learning_rate": 5.147141446667424e-06,
      "loss": 0.0227,
      "step": 3780
    },
    {
      "epoch": 0.9670834396529727,
      "grad_norm": 0.44186270236968994,
      "learning_rate": 5.1409112136002915e-06,
      "loss": 0.0166,
      "step": 3790
    },
    {
      "epoch": 0.9696351109977035,
      "grad_norm": 0.7479332685470581,
      "learning_rate": 5.134662107371736e-06,
      "loss": 0.0225,
      "step": 3800
    },
    {
      "epoch": 0.9721867823424343,
      "grad_norm": 0.8453100919723511,
      "learning_rate": 5.128394183070631e-06,
      "loss": 0.0175,
      "step": 3810
    },
    {
      "epoch": 0.974738453687165,
      "grad_norm": 0.7815666198730469,
      "learning_rate": 5.122107495951743e-06,
      "loss": 0.022,
      "step": 3820
    },
    {
      "epoch": 0.9772901250318959,
      "grad_norm": 1.070888876914978,
      "learning_rate": 5.115802101435242e-06,
      "loss": 0.0226,
      "step": 3830
    },
    {
      "epoch": 0.9798417963766267,
      "grad_norm": 0.7922118902206421,
      "learning_rate": 5.109478055106209e-06,
      "loss": 0.0215,
      "step": 3840
    },
    {
      "epoch": 0.9823934677213575,
      "grad_norm": 0.7540226578712463,
      "learning_rate": 5.103135412714155e-06,
      "loss": 0.0188,
      "step": 3850
    },
    {
      "epoch": 0.9849451390660883,
      "grad_norm": 0.8961939811706543,
      "learning_rate": 5.0967742301725186e-06,
      "loss": 0.0208,
      "step": 3860
    },
    {
      "epoch": 0.987496810410819,
      "grad_norm": 0.642845094203949,
      "learning_rate": 5.090394563558184e-06,
      "loss": 0.0242,
      "step": 3870
    },
    {
      "epoch": 0.9900484817555499,
      "grad_norm": 0.5724070072174072,
      "learning_rate": 5.083996469110977e-06,
      "loss": 0.0182,
      "step": 3880
    },
    {
      "epoch": 0.9926001531002807,
      "grad_norm": 0.4112820327281952,
      "learning_rate": 5.077580003233176e-06,
      "loss": 0.018,
      "step": 3890
    },
    {
      "epoch": 0.9951518244450115,
      "grad_norm": 1.535103440284729,
      "learning_rate": 5.071145222489013e-06,
      "loss": 0.0219,
      "step": 3900
    },
    {
      "epoch": 0.9977034957897423,
      "grad_norm": 1.7755980491638184,
      "learning_rate": 5.064692183604172e-06,
      "loss": 0.0243,
      "step": 3910
    },
    {
      "epoch": 1.000255167134473,
      "grad_norm": 0.8194239735603333,
      "learning_rate": 5.058220943465293e-06,
      "loss": 0.0218,
      "step": 3920
    },
    {
      "epoch": 1.002806838479204,
      "grad_norm": 1.6030933856964111,
      "learning_rate": 5.051731559119469e-06,
      "loss": 0.0244,
      "step": 3930
    },
    {
      "epoch": 1.0053585098239346,
      "grad_norm": 0.6478273272514343,
      "learning_rate": 5.045224087773742e-06,
      "loss": 0.0199,
      "step": 3940
    },
    {
      "epoch": 1.0079101811686655,
      "grad_norm": 0.4480527341365814,
      "learning_rate": 5.038698586794598e-06,
      "loss": 0.0213,
      "step": 3950
    },
    {
      "epoch": 1.0104618525133964,
      "grad_norm": 1.267435908317566,
      "learning_rate": 5.032155113707468e-06,
      "loss": 0.0196,
      "step": 3960
    },
    {
      "epoch": 1.013013523858127,
      "grad_norm": 0.7024682760238647,
      "learning_rate": 5.02559372619621e-06,
      "loss": 0.0198,
      "step": 3970
    },
    {
      "epoch": 1.015565195202858,
      "grad_norm": 0.4587201774120331,
      "learning_rate": 5.019014482102611e-06,
      "loss": 0.0171,
      "step": 3980
    },
    {
      "epoch": 1.0181168665475886,
      "grad_norm": 0.8392826318740845,
      "learning_rate": 5.012417439425867e-06,
      "loss": 0.0185,
      "step": 3990
    },
    {
      "epoch": 1.0206685378923195,
      "grad_norm": 0.8017153739929199,
      "learning_rate": 5.0058026563220825e-06,
      "loss": 0.0189,
      "step": 4000
    },
    {
      "epoch": 1.0232202092370504,
      "grad_norm": 0.6147284507751465,
      "learning_rate": 4.99917019110375e-06,
      "loss": 0.0185,
      "step": 4010
    },
    {
      "epoch": 1.025771880581781,
      "grad_norm": 0.6263673305511475,
      "learning_rate": 4.992520102239238e-06,
      "loss": 0.0177,
      "step": 4020
    },
    {
      "epoch": 1.028323551926512,
      "grad_norm": 0.7895020842552185,
      "learning_rate": 4.985852448352276e-06,
      "loss": 0.0213,
      "step": 4030
    },
    {
      "epoch": 1.0308752232712426,
      "grad_norm": 0.9371950030326843,
      "learning_rate": 4.9791672882214394e-06,
      "loss": 0.0217,
      "step": 4040
    },
    {
      "epoch": 1.0334268946159735,
      "grad_norm": 2.927732467651367,
      "learning_rate": 4.972464680779627e-06,
      "loss": 0.0219,
      "step": 4050
    },
    {
      "epoch": 1.0359785659607044,
      "grad_norm": 0.7745063304901123,
      "learning_rate": 4.965744685113544e-06,
      "loss": 0.0189,
      "step": 4060
    },
    {
      "epoch": 1.038530237305435,
      "grad_norm": 0.6315968036651611,
      "learning_rate": 4.959007360463185e-06,
      "loss": 0.0187,
      "step": 4070
    },
    {
      "epoch": 1.041081908650166,
      "grad_norm": 0.3792322874069214,
      "learning_rate": 4.952252766221303e-06,
      "loss": 0.0214,
      "step": 4080
    },
    {
      "epoch": 1.0436335799948966,
      "grad_norm": 0.6313232779502869,
      "learning_rate": 4.9454809619328935e-06,
      "loss": 0.019,
      "step": 4090
    },
    {
      "epoch": 1.0461852513396275,
      "grad_norm": 0.9274890422821045,
      "learning_rate": 4.938692007294667e-06,
      "loss": 0.0195,
      "step": 4100
    },
    {
      "epoch": 1.0487369226843581,
      "grad_norm": 0.6988605856895447,
      "learning_rate": 4.9318859621545215e-06,
      "loss": 0.0175,
      "step": 4110
    },
    {
      "epoch": 1.051288594029089,
      "grad_norm": 0.5787187814712524,
      "learning_rate": 4.925062886511017e-06,
      "loss": 0.0213,
      "step": 4120
    },
    {
      "epoch": 1.05384026537382,
      "grad_norm": 0.7282422184944153,
      "learning_rate": 4.918222840512843e-06,
      "loss": 0.0206,
      "step": 4130
    },
    {
      "epoch": 1.0563919367185506,
      "grad_norm": 0.8163778781890869,
      "learning_rate": 4.911365884458295e-06,
      "loss": 0.0193,
      "step": 4140
    },
    {
      "epoch": 1.0589436080632815,
      "grad_norm": 0.7783817052841187,
      "learning_rate": 4.904492078794736e-06,
      "loss": 0.0172,
      "step": 4150
    },
    {
      "epoch": 1.0614952794080121,
      "grad_norm": 2.5214037895202637,
      "learning_rate": 4.8976014841180664e-06,
      "loss": 0.0207,
      "step": 4160
    },
    {
      "epoch": 1.064046950752743,
      "grad_norm": 1.9668519496917725,
      "learning_rate": 4.890694161172191e-06,
      "loss": 0.0179,
      "step": 4170
    },
    {
      "epoch": 1.066598622097474,
      "grad_norm": 0.6166751980781555,
      "learning_rate": 4.883770170848479e-06,
      "loss": 0.0187,
      "step": 4180
    },
    {
      "epoch": 1.0691502934422046,
      "grad_norm": 0.41448432207107544,
      "learning_rate": 4.876829574185239e-06,
      "loss": 0.0188,
      "step": 4190
    },
    {
      "epoch": 1.0717019647869355,
      "grad_norm": 1.5775961875915527,
      "learning_rate": 4.869872432367162e-06,
      "loss": 0.0199,
      "step": 4200
    },
    {
      "epoch": 1.0742536361316661,
      "grad_norm": 0.5199787616729736,
      "learning_rate": 4.862898806724798e-06,
      "loss": 0.0169,
      "step": 4210
    },
    {
      "epoch": 1.076805307476397,
      "grad_norm": 0.7429872155189514,
      "learning_rate": 4.85590875873401e-06,
      "loss": 0.0194,
      "step": 4220
    },
    {
      "epoch": 1.079356978821128,
      "grad_norm": 0.6427246928215027,
      "learning_rate": 4.848902350015431e-06,
      "loss": 0.0182,
      "step": 4230
    },
    {
      "epoch": 1.0819086501658586,
      "grad_norm": 0.4715655446052551,
      "learning_rate": 4.8418796423339205e-06,
      "loss": 0.0171,
      "step": 4240
    },
    {
      "epoch": 1.0844603215105895,
      "grad_norm": 1.0023016929626465,
      "learning_rate": 4.834840697598024e-06,
      "loss": 0.0184,
      "step": 4250
    },
    {
      "epoch": 1.0870119928553201,
      "grad_norm": 0.6949712038040161,
      "learning_rate": 4.827785577859422e-06,
      "loss": 0.0178,
      "step": 4260
    },
    {
      "epoch": 1.089563664200051,
      "grad_norm": 1.3930619955062866,
      "learning_rate": 4.820714345312386e-06,
      "loss": 0.0191,
      "step": 4270
    },
    {
      "epoch": 1.092115335544782,
      "grad_norm": 0.597090482711792,
      "learning_rate": 4.813627062293229e-06,
      "loss": 0.0197,
      "step": 4280
    },
    {
      "epoch": 1.0946670068895126,
      "grad_norm": 0.7542505264282227,
      "learning_rate": 4.80652379127976e-06,
      "loss": 0.0194,
      "step": 4290
    },
    {
      "epoch": 1.0972186782342435,
      "grad_norm": 0.7412875890731812,
      "learning_rate": 4.799404594890724e-06,
      "loss": 0.0177,
      "step": 4300
    },
    {
      "epoch": 1.0997703495789741,
      "grad_norm": 0.8048778772354126,
      "learning_rate": 4.792269535885262e-06,
      "loss": 0.0203,
      "step": 4310
    },
    {
      "epoch": 1.102322020923705,
      "grad_norm": 0.758451521396637,
      "learning_rate": 4.7851186771623474e-06,
      "loss": 0.0198,
      "step": 4320
    },
    {
      "epoch": 1.104873692268436,
      "grad_norm": 0.5495659708976746,
      "learning_rate": 4.777952081760237e-06,
      "loss": 0.0192,
      "step": 4330
    },
    {
      "epoch": 1.1074253636131666,
      "grad_norm": 0.3236237168312073,
      "learning_rate": 4.770769812855914e-06,
      "loss": 0.0183,
      "step": 4340
    },
    {
      "epoch": 1.1099770349578975,
      "grad_norm": 0.42418259382247925,
      "learning_rate": 4.763571933764533e-06,
      "loss": 0.0184,
      "step": 4350
    },
    {
      "epoch": 1.1125287063026281,
      "grad_norm": 1.0733011960983276,
      "learning_rate": 4.756358507938854e-06,
      "loss": 0.0181,
      "step": 4360
    },
    {
      "epoch": 1.115080377647359,
      "grad_norm": 3.533398151397705,
      "learning_rate": 4.749129598968697e-06,
      "loss": 0.0168,
      "step": 4370
    },
    {
      "epoch": 1.11763204899209,
      "grad_norm": 1.1660666465759277,
      "learning_rate": 4.741885270580367e-06,
      "loss": 0.0173,
      "step": 4380
    },
    {
      "epoch": 1.1201837203368206,
      "grad_norm": 1.2423667907714844,
      "learning_rate": 4.734625586636103e-06,
      "loss": 0.0147,
      "step": 4390
    },
    {
      "epoch": 1.1227353916815515,
      "grad_norm": 1.0858911275863647,
      "learning_rate": 4.727350611133506e-06,
      "loss": 0.0182,
      "step": 4400
    },
    {
      "epoch": 1.1252870630262821,
      "grad_norm": 0.932620644569397,
      "learning_rate": 4.720060408204983e-06,
      "loss": 0.0227,
      "step": 4410
    },
    {
      "epoch": 1.127838734371013,
      "grad_norm": 1.4234601259231567,
      "learning_rate": 4.712755042117177e-06,
      "loss": 0.0195,
      "step": 4420
    },
    {
      "epoch": 1.130390405715744,
      "grad_norm": 0.46929091215133667,
      "learning_rate": 4.705434577270402e-06,
      "loss": 0.0177,
      "step": 4430
    },
    {
      "epoch": 1.1329420770604746,
      "grad_norm": 1.3635838031768799,
      "learning_rate": 4.698099078198074e-06,
      "loss": 0.0227,
      "step": 4440
    },
    {
      "epoch": 1.1354937484052054,
      "grad_norm": 0.6974967122077942,
      "learning_rate": 4.690748609566143e-06,
      "loss": 0.0172,
      "step": 4450
    },
    {
      "epoch": 1.1380454197499361,
      "grad_norm": 0.6230807900428772,
      "learning_rate": 4.683383236172523e-06,
      "loss": 0.018,
      "step": 4460
    },
    {
      "epoch": 1.140597091094667,
      "grad_norm": 0.4532192647457123,
      "learning_rate": 4.676003022946522e-06,
      "loss": 0.0157,
      "step": 4470
    },
    {
      "epoch": 1.143148762439398,
      "grad_norm": 3.6540305614471436,
      "learning_rate": 4.668608034948266e-06,
      "loss": 0.0176,
      "step": 4480
    },
    {
      "epoch": 1.1457004337841286,
      "grad_norm": 0.5863398313522339,
      "learning_rate": 4.661198337368129e-06,
      "loss": 0.0179,
      "step": 4490
    },
    {
      "epoch": 1.1482521051288594,
      "grad_norm": 1.5659875869750977,
      "learning_rate": 4.6537739955261566e-06,
      "loss": 0.0199,
      "step": 4500
    },
    {
      "epoch": 1.1508037764735901,
      "grad_norm": 1.0172702074050903,
      "learning_rate": 4.646335074871491e-06,
      "loss": 0.0184,
      "step": 4510
    },
    {
      "epoch": 1.153355447818321,
      "grad_norm": 0.26044878363609314,
      "learning_rate": 4.638881640981793e-06,
      "loss": 0.016,
      "step": 4520
    },
    {
      "epoch": 1.1559071191630519,
      "grad_norm": 1.071213722229004,
      "learning_rate": 4.631413759562666e-06,
      "loss": 0.0177,
      "step": 4530
    },
    {
      "epoch": 1.1584587905077826,
      "grad_norm": 1.1978178024291992,
      "learning_rate": 4.623931496447074e-06,
      "loss": 0.0188,
      "step": 4540
    },
    {
      "epoch": 1.1610104618525134,
      "grad_norm": 2.745671272277832,
      "learning_rate": 4.616434917594762e-06,
      "loss": 0.0233,
      "step": 4550
    },
    {
      "epoch": 1.163562133197244,
      "grad_norm": 0.8523492217063904,
      "learning_rate": 4.6089240890916766e-06,
      "loss": 0.0187,
      "step": 4560
    },
    {
      "epoch": 1.166113804541975,
      "grad_norm": 1.745078444480896,
      "learning_rate": 4.6013990771493815e-06,
      "loss": 0.0223,
      "step": 4570
    },
    {
      "epoch": 1.1686654758867059,
      "grad_norm": 0.6648458242416382,
      "learning_rate": 4.593859948104475e-06,
      "loss": 0.0185,
      "step": 4580
    },
    {
      "epoch": 1.1712171472314366,
      "grad_norm": 0.840826690196991,
      "learning_rate": 4.586306768418001e-06,
      "loss": 0.019,
      "step": 4590
    },
    {
      "epoch": 1.1737688185761674,
      "grad_norm": 1.2461105585098267,
      "learning_rate": 4.57873960467487e-06,
      "loss": 0.0175,
      "step": 4600
    },
    {
      "epoch": 1.176320489920898,
      "grad_norm": 0.7605074048042297,
      "learning_rate": 4.571158523583269e-06,
      "loss": 0.0153,
      "step": 4610
    },
    {
      "epoch": 1.178872161265629,
      "grad_norm": 0.8275895714759827,
      "learning_rate": 4.563563591974071e-06,
      "loss": 0.0175,
      "step": 4620
    },
    {
      "epoch": 1.1814238326103599,
      "grad_norm": 0.8980046510696411,
      "learning_rate": 4.555954876800248e-06,
      "loss": 0.02,
      "step": 4630
    },
    {
      "epoch": 1.1839755039550905,
      "grad_norm": 0.9885419011116028,
      "learning_rate": 4.548332445136283e-06,
      "loss": 0.017,
      "step": 4640
    },
    {
      "epoch": 1.1865271752998214,
      "grad_norm": 0.5855565667152405,
      "learning_rate": 4.540696364177575e-06,
      "loss": 0.0174,
      "step": 4650
    },
    {
      "epoch": 1.189078846644552,
      "grad_norm": 0.6320938467979431,
      "learning_rate": 4.533046701239845e-06,
      "loss": 0.0179,
      "step": 4660
    },
    {
      "epoch": 1.191630517989283,
      "grad_norm": 0.8237224817276001,
      "learning_rate": 4.525383523758553e-06,
      "loss": 0.0188,
      "step": 4670
    },
    {
      "epoch": 1.1941821893340139,
      "grad_norm": 0.49340859055519104,
      "learning_rate": 4.51770689928829e-06,
      "loss": 0.0159,
      "step": 4680
    },
    {
      "epoch": 1.1967338606787445,
      "grad_norm": 0.900672972202301,
      "learning_rate": 4.510016895502191e-06,
      "loss": 0.0167,
      "step": 4690
    },
    {
      "epoch": 1.1992855320234754,
      "grad_norm": 1.2280319929122925,
      "learning_rate": 4.502313580191334e-06,
      "loss": 0.0172,
      "step": 4700
    },
    {
      "epoch": 1.201837203368206,
      "grad_norm": 1.2630693912506104,
      "learning_rate": 4.494597021264149e-06,
      "loss": 0.0183,
      "step": 4710
    },
    {
      "epoch": 1.204388874712937,
      "grad_norm": 0.5751070380210876,
      "learning_rate": 4.48686728674581e-06,
      "loss": 0.0172,
      "step": 4720
    },
    {
      "epoch": 1.2069405460576679,
      "grad_norm": 0.8484153151512146,
      "learning_rate": 4.479124444777644e-06,
      "loss": 0.0183,
      "step": 4730
    },
    {
      "epoch": 1.2094922174023985,
      "grad_norm": 0.9844270944595337,
      "learning_rate": 4.4713685636165244e-06,
      "loss": 0.0203,
      "step": 4740
    },
    {
      "epoch": 1.2120438887471294,
      "grad_norm": 0.38548630475997925,
      "learning_rate": 4.463599711634269e-06,
      "loss": 0.019,
      "step": 4750
    },
    {
      "epoch": 1.21459556009186,
      "grad_norm": 0.5281028747558594,
      "learning_rate": 4.455817957317046e-06,
      "loss": 0.0176,
      "step": 4760
    },
    {
      "epoch": 1.217147231436591,
      "grad_norm": 0.24428775906562805,
      "learning_rate": 4.448023369264758e-06,
      "loss": 0.0187,
      "step": 4770
    },
    {
      "epoch": 1.2196989027813219,
      "grad_norm": 0.9260575771331787,
      "learning_rate": 4.440216016190445e-06,
      "loss": 0.0184,
      "step": 4780
    },
    {
      "epoch": 1.2222505741260525,
      "grad_norm": 0.6549846529960632,
      "learning_rate": 4.432395966919679e-06,
      "loss": 0.0152,
      "step": 4790
    },
    {
      "epoch": 1.2248022454707834,
      "grad_norm": 1.136384129524231,
      "learning_rate": 4.424563290389952e-06,
      "loss": 0.015,
      "step": 4800
    },
    {
      "epoch": 1.227353916815514,
      "grad_norm": 1.2056633234024048,
      "learning_rate": 4.416718055650073e-06,
      "loss": 0.0161,
      "step": 4810
    },
    {
      "epoch": 1.229905588160245,
      "grad_norm": 0.66669762134552,
      "learning_rate": 4.408860331859556e-06,
      "loss": 0.0202,
      "step": 4820
    },
    {
      "epoch": 1.2324572595049759,
      "grad_norm": 0.6749626398086548,
      "learning_rate": 4.400990188288015e-06,
      "loss": 0.0207,
      "step": 4830
    },
    {
      "epoch": 1.2350089308497065,
      "grad_norm": 2.977775812149048,
      "learning_rate": 4.393107694314547e-06,
      "loss": 0.0158,
      "step": 4840
    },
    {
      "epoch": 1.2375606021944374,
      "grad_norm": 0.4771825671195984,
      "learning_rate": 4.3852129194271264e-06,
      "loss": 0.0181,
      "step": 4850
    },
    {
      "epoch": 1.240112273539168,
      "grad_norm": 0.4319264888763428,
      "learning_rate": 4.377305933221988e-06,
      "loss": 0.0166,
      "step": 4860
    },
    {
      "epoch": 1.242663944883899,
      "grad_norm": 0.7092644572257996,
      "learning_rate": 4.369386805403016e-06,
      "loss": 0.0162,
      "step": 4870
    },
    {
      "epoch": 1.2452156162286299,
      "grad_norm": 1.278566598892212,
      "learning_rate": 4.361455605781128e-06,
      "loss": 0.0169,
      "step": 4880
    },
    {
      "epoch": 1.2477672875733605,
      "grad_norm": 0.9039450287818909,
      "learning_rate": 4.353512404273663e-06,
      "loss": 0.0193,
      "step": 4890
    },
    {
      "epoch": 1.2503189589180914,
      "grad_norm": 0.6819228529930115,
      "learning_rate": 4.3455572709037585e-06,
      "loss": 0.0171,
      "step": 4900
    },
    {
      "epoch": 1.252870630262822,
      "grad_norm": 0.428912878036499,
      "learning_rate": 4.33759027579974e-06,
      "loss": 0.0174,
      "step": 4910
    },
    {
      "epoch": 1.255422301607553,
      "grad_norm": 1.0547304153442383,
      "learning_rate": 4.329611489194497e-06,
      "loss": 0.0142,
      "step": 4920
    },
    {
      "epoch": 1.2579739729522839,
      "grad_norm": 0.7328991293907166,
      "learning_rate": 4.321620981424871e-06,
      "loss": 0.0165,
      "step": 4930
    },
    {
      "epoch": 1.2605256442970145,
      "grad_norm": 1.1251776218414307,
      "learning_rate": 4.313618822931027e-06,
      "loss": 0.0155,
      "step": 4940
    },
    {
      "epoch": 1.2630773156417454,
      "grad_norm": 0.44651368260383606,
      "learning_rate": 4.30560508425584e-06,
      "loss": 0.0161,
      "step": 4950
    },
    {
      "epoch": 1.265628986986476,
      "grad_norm": 0.5329874753952026,
      "learning_rate": 4.297579836044266e-06,
      "loss": 0.0174,
      "step": 4960
    },
    {
      "epoch": 1.268180658331207,
      "grad_norm": 0.8915277123451233,
      "learning_rate": 4.289543149042729e-06,
      "loss": 0.0179,
      "step": 4970
    },
    {
      "epoch": 1.2707323296759379,
      "grad_norm": 1.3271677494049072,
      "learning_rate": 4.281495094098486e-06,
      "loss": 0.0178,
      "step": 4980
    },
    {
      "epoch": 1.2732840010206685,
      "grad_norm": 0.7584212422370911,
      "learning_rate": 4.27343574215901e-06,
      "loss": 0.0185,
      "step": 4990
    },
    {
      "epoch": 1.2758356723653994,
      "grad_norm": 1.139729380607605,
      "learning_rate": 4.265365164271361e-06,
      "loss": 0.0187,
      "step": 5000
    },
    {
      "epoch": 1.27838734371013,
      "grad_norm": 0.5593457221984863,
      "learning_rate": 4.257283431581564e-06,
      "loss": 0.0175,
      "step": 5010
    },
    {
      "epoch": 1.280939015054861,
      "grad_norm": 0.9672866463661194,
      "learning_rate": 4.249190615333977e-06,
      "loss": 0.0173,
      "step": 5020
    },
    {
      "epoch": 1.2834906863995919,
      "grad_norm": 0.8238097429275513,
      "learning_rate": 4.241086786870664e-06,
      "loss": 0.0183,
      "step": 5030
    },
    {
      "epoch": 1.2860423577443225,
      "grad_norm": 0.30412089824676514,
      "learning_rate": 4.232972017630771e-06,
      "loss": 0.0164,
      "step": 5040
    },
    {
      "epoch": 1.2885940290890534,
      "grad_norm": 1.4792495965957642,
      "learning_rate": 4.224846379149886e-06,
      "loss": 0.0163,
      "step": 5050
    },
    {
      "epoch": 1.291145700433784,
      "grad_norm": 0.5357837677001953,
      "learning_rate": 4.216709943059421e-06,
      "loss": 0.0145,
      "step": 5060
    },
    {
      "epoch": 1.293697371778515,
      "grad_norm": 0.684822142124176,
      "learning_rate": 4.208562781085969e-06,
      "loss": 0.0191,
      "step": 5070
    },
    {
      "epoch": 1.2962490431232458,
      "grad_norm": 0.3603987693786621,
      "learning_rate": 4.200404965050679e-06,
      "loss": 0.0176,
      "step": 5080
    },
    {
      "epoch": 1.2988007144679765,
      "grad_norm": 0.723609447479248,
      "learning_rate": 4.192236566868622e-06,
      "loss": 0.0198,
      "step": 5090
    },
    {
      "epoch": 1.3013523858127074,
      "grad_norm": 0.43119895458221436,
      "learning_rate": 4.184057658548154e-06,
      "loss": 0.0155,
      "step": 5100
    },
    {
      "epoch": 1.303904057157438,
      "grad_norm": 0.3890657126903534,
      "learning_rate": 4.175868312190281e-06,
      "loss": 0.014,
      "step": 5110
    },
    {
      "epoch": 1.306455728502169,
      "grad_norm": 4.158993721008301,
      "learning_rate": 4.167668599988031e-06,
      "loss": 0.0182,
      "step": 5120
    },
    {
      "epoch": 1.3090073998468998,
      "grad_norm": 0.766471266746521,
      "learning_rate": 4.159458594225806e-06,
      "loss": 0.0157,
      "step": 5130
    },
    {
      "epoch": 1.3115590711916305,
      "grad_norm": 0.8652639389038086,
      "learning_rate": 4.1512383672787536e-06,
      "loss": 0.0131,
      "step": 5140
    },
    {
      "epoch": 1.3141107425363614,
      "grad_norm": 0.40679681301116943,
      "learning_rate": 4.143007991612126e-06,
      "loss": 0.0182,
      "step": 5150
    },
    {
      "epoch": 1.316662413881092,
      "grad_norm": 0.6223965883255005,
      "learning_rate": 4.134767539780641e-06,
      "loss": 0.0155,
      "step": 5160
    },
    {
      "epoch": 1.319214085225823,
      "grad_norm": 0.5270498394966125,
      "learning_rate": 4.126517084427841e-06,
      "loss": 0.0149,
      "step": 5170
    },
    {
      "epoch": 1.3217657565705538,
      "grad_norm": 0.554425060749054,
      "learning_rate": 4.118256698285457e-06,
      "loss": 0.0155,
      "step": 5180
    },
    {
      "epoch": 1.3243174279152845,
      "grad_norm": 0.7247564196586609,
      "learning_rate": 4.109986454172762e-06,
      "loss": 0.0162,
      "step": 5190
    },
    {
      "epoch": 1.3268690992600152,
      "grad_norm": 0.6803126335144043,
      "learning_rate": 4.101706424995937e-06,
      "loss": 0.0166,
      "step": 5200
    },
    {
      "epoch": 1.329420770604746,
      "grad_norm": 1.2059119939804077,
      "learning_rate": 4.093416683747414e-06,
      "loss": 0.0171,
      "step": 5210
    },
    {
      "epoch": 1.331972441949477,
      "grad_norm": 0.3244284689426422,
      "learning_rate": 4.08511730350525e-06,
      "loss": 0.0188,
      "step": 5220
    },
    {
      "epoch": 1.3345241132942078,
      "grad_norm": 0.6303139925003052,
      "learning_rate": 4.07680835743247e-06,
      "loss": 0.0154,
      "step": 5230
    },
    {
      "epoch": 1.3370757846389385,
      "grad_norm": 0.5484089851379395,
      "learning_rate": 4.068489918776429e-06,
      "loss": 0.0158,
      "step": 5240
    },
    {
      "epoch": 1.3396274559836692,
      "grad_norm": 0.4632198214530945,
      "learning_rate": 4.060162060868159e-06,
      "loss": 0.0155,
      "step": 5250
    },
    {
      "epoch": 1.3421791273284,
      "grad_norm": 0.6339452266693115,
      "learning_rate": 4.051824857121733e-06,
      "loss": 0.0178,
      "step": 5260
    },
    {
      "epoch": 1.344730798673131,
      "grad_norm": 0.6535425782203674,
      "learning_rate": 4.04347838103361e-06,
      "loss": 0.0167,
      "step": 5270
    },
    {
      "epoch": 1.3472824700178618,
      "grad_norm": 0.40795567631721497,
      "learning_rate": 4.0351227061819856e-06,
      "loss": 0.0159,
      "step": 5280
    },
    {
      "epoch": 1.3498341413625925,
      "grad_norm": 0.9828789830207825,
      "learning_rate": 4.026757906226153e-06,
      "loss": 0.0148,
      "step": 5290
    },
    {
      "epoch": 1.3523858127073232,
      "grad_norm": 0.3722200393676758,
      "learning_rate": 4.0183840549058445e-06,
      "loss": 0.0153,
      "step": 5300
    },
    {
      "epoch": 1.354937484052054,
      "grad_norm": 0.7231210470199585,
      "learning_rate": 4.010001226040581e-06,
      "loss": 0.0183,
      "step": 5310
    },
    {
      "epoch": 1.357489155396785,
      "grad_norm": 0.30369916558265686,
      "learning_rate": 4.001609493529033e-06,
      "loss": 0.0149,
      "step": 5320
    },
    {
      "epoch": 1.3600408267415158,
      "grad_norm": 0.5192328691482544,
      "learning_rate": 3.993208931348353e-06,
      "loss": 0.0152,
      "step": 5330
    },
    {
      "epoch": 1.3625924980862465,
      "grad_norm": 1.303830862045288,
      "learning_rate": 3.984799613553535e-06,
      "loss": 0.015,
      "step": 5340
    },
    {
      "epoch": 1.3651441694309772,
      "grad_norm": 0.4460931122303009,
      "learning_rate": 3.976381614276759e-06,
      "loss": 0.015,
      "step": 5350
    },
    {
      "epoch": 1.367695840775708,
      "grad_norm": 0.687340497970581,
      "learning_rate": 3.967955007726734e-06,
      "loss": 0.0184,
      "step": 5360
    },
    {
      "epoch": 1.370247512120439,
      "grad_norm": 0.5577728748321533,
      "learning_rate": 3.959519868188046e-06,
      "loss": 0.0154,
      "step": 5370
    },
    {
      "epoch": 1.3727991834651696,
      "grad_norm": 0.2629478871822357,
      "learning_rate": 3.9510762700205065e-06,
      "loss": 0.0143,
      "step": 5380
    },
    {
      "epoch": 1.3753508548099005,
      "grad_norm": 0.48370009660720825,
      "learning_rate": 3.942624287658492e-06,
      "loss": 0.0161,
      "step": 5390
    },
    {
      "epoch": 1.3779025261546312,
      "grad_norm": 0.7703590393066406,
      "learning_rate": 3.934163995610287e-06,
      "loss": 0.0135,
      "step": 5400
    },
    {
      "epoch": 1.380454197499362,
      "grad_norm": 0.46242403984069824,
      "learning_rate": 3.9256954684574355e-06,
      "loss": 0.0175,
      "step": 5410
    },
    {
      "epoch": 1.383005868844093,
      "grad_norm": 0.5483065843582153,
      "learning_rate": 3.917218780854075e-06,
      "loss": 0.015,
      "step": 5420
    },
    {
      "epoch": 1.3855575401888236,
      "grad_norm": 0.3543405830860138,
      "learning_rate": 3.9087340075262766e-06,
      "loss": 0.0153,
      "step": 5430
    },
    {
      "epoch": 1.3881092115335545,
      "grad_norm": 0.2908279001712799,
      "learning_rate": 3.900241223271402e-06,
      "loss": 0.0121,
      "step": 5440
    },
    {
      "epoch": 1.3906608828782852,
      "grad_norm": 0.5572682619094849,
      "learning_rate": 3.891740502957423e-06,
      "loss": 0.0151,
      "step": 5450
    },
    {
      "epoch": 1.393212554223016,
      "grad_norm": 0.6333874464035034,
      "learning_rate": 3.883231921522275e-06,
      "loss": 0.0168,
      "step": 5460
    },
    {
      "epoch": 1.395764225567747,
      "grad_norm": 2.3678176403045654,
      "learning_rate": 3.874715553973195e-06,
      "loss": 0.0146,
      "step": 5470
    },
    {
      "epoch": 1.3983158969124776,
      "grad_norm": 0.8847137093544006,
      "learning_rate": 3.866191475386055e-06,
      "loss": 0.0157,
      "step": 5480
    },
    {
      "epoch": 1.4008675682572085,
      "grad_norm": 0.9330421686172485,
      "learning_rate": 3.857659760904705e-06,
      "loss": 0.0178,
      "step": 5490
    },
    {
      "epoch": 1.4034192396019392,
      "grad_norm": 0.6955951452255249,
      "learning_rate": 3.849120485740311e-06,
      "loss": 0.0185,
      "step": 5500
    },
    {
      "epoch": 1.40597091094667,
      "grad_norm": 0.7250450849533081,
      "learning_rate": 3.840573725170686e-06,
      "loss": 0.0132,
      "step": 5510
    },
    {
      "epoch": 1.408522582291401,
      "grad_norm": 0.7257997989654541,
      "learning_rate": 3.832019554539634e-06,
      "loss": 0.0168,
      "step": 5520
    },
    {
      "epoch": 1.4110742536361316,
      "grad_norm": 1.2495430707931519,
      "learning_rate": 3.82345804925628e-06,
      "loss": 0.0166,
      "step": 5530
    },
    {
      "epoch": 1.4136259249808625,
      "grad_norm": 1.9423291683197021,
      "learning_rate": 3.814889284794409e-06,
      "loss": 0.0139,
      "step": 5540
    },
    {
      "epoch": 1.4161775963255931,
      "grad_norm": 0.7038158774375916,
      "learning_rate": 3.8063133366917976e-06,
      "loss": 0.0178,
      "step": 5550
    },
    {
      "epoch": 1.418729267670324,
      "grad_norm": 0.39670151472091675,
      "learning_rate": 3.797730280549552e-06,
      "loss": 0.0164,
      "step": 5560
    },
    {
      "epoch": 1.421280939015055,
      "grad_norm": 1.0969537496566772,
      "learning_rate": 3.7891401920314363e-06,
      "loss": 0.0166,
      "step": 5570
    },
    {
      "epoch": 1.4238326103597856,
      "grad_norm": 1.4217796325683594,
      "learning_rate": 3.7805431468632105e-06,
      "loss": 0.0176,
      "step": 5580
    },
    {
      "epoch": 1.4263842817045165,
      "grad_norm": 0.8468742370605469,
      "learning_rate": 3.77193922083196e-06,
      "loss": 0.0147,
      "step": 5590
    },
    {
      "epoch": 1.4289359530492471,
      "grad_norm": 0.4837898910045624,
      "learning_rate": 3.763328489785428e-06,
      "loss": 0.0167,
      "step": 5600
    },
    {
      "epoch": 1.431487624393978,
      "grad_norm": 0.7742527723312378,
      "learning_rate": 3.7547110296313486e-06,
      "loss": 0.0151,
      "step": 5610
    },
    {
      "epoch": 1.434039295738709,
      "grad_norm": 0.7425050735473633,
      "learning_rate": 3.7460869163367744e-06,
      "loss": 0.0137,
      "step": 5620
    },
    {
      "epoch": 1.4365909670834396,
      "grad_norm": 0.8111595511436462,
      "learning_rate": 3.7374562259274093e-06,
      "loss": 0.0153,
      "step": 5630
    },
    {
      "epoch": 1.4391426384281705,
      "grad_norm": 2.8571298122406006,
      "learning_rate": 3.728819034486939e-06,
      "loss": 0.0172,
      "step": 5640
    },
    {
      "epoch": 1.4416943097729011,
      "grad_norm": 0.4270651042461395,
      "learning_rate": 3.720175418156356e-06,
      "loss": 0.0156,
      "step": 5650
    },
    {
      "epoch": 1.444245981117632,
      "grad_norm": 0.715120792388916,
      "learning_rate": 3.711525453133295e-06,
      "loss": 0.015,
      "step": 5660
    },
    {
      "epoch": 1.446797652462363,
      "grad_norm": 0.7231752872467041,
      "learning_rate": 3.7028692156713536e-06,
      "loss": 0.0133,
      "step": 5670
    },
    {
      "epoch": 1.4493493238070936,
      "grad_norm": 2.8344600200653076,
      "learning_rate": 3.694206782079427e-06,
      "loss": 0.0147,
      "step": 5680
    },
    {
      "epoch": 1.4519009951518245,
      "grad_norm": 0.8471620678901672,
      "learning_rate": 3.68553822872103e-06,
      "loss": 0.015,
      "step": 5690
    },
    {
      "epoch": 1.4544526664965551,
      "grad_norm": 1.0441292524337769,
      "learning_rate": 3.6768636320136284e-06,
      "loss": 0.0147,
      "step": 5700
    },
    {
      "epoch": 1.457004337841286,
      "grad_norm": 0.5474273562431335,
      "learning_rate": 3.6681830684279616e-06,
      "loss": 0.0144,
      "step": 5710
    },
    {
      "epoch": 1.459556009186017,
      "grad_norm": 0.8801978826522827,
      "learning_rate": 3.6594966144873693e-06,
      "loss": 0.0145,
      "step": 5720
    },
    {
      "epoch": 1.4621076805307476,
      "grad_norm": 0.8165146708488464,
      "learning_rate": 3.6508043467671185e-06,
      "loss": 0.0178,
      "step": 5730
    },
    {
      "epoch": 1.4646593518754785,
      "grad_norm": 1.9424936771392822,
      "learning_rate": 3.642106341893728e-06,
      "loss": 0.0127,
      "step": 5740
    },
    {
      "epoch": 1.4672110232202091,
      "grad_norm": 0.49900662899017334,
      "learning_rate": 3.6334026765442917e-06,
      "loss": 0.0134,
      "step": 5750
    },
    {
      "epoch": 1.46976269456494,
      "grad_norm": 0.8361905217170715,
      "learning_rate": 3.6246934274458025e-06,
      "loss": 0.0148,
      "step": 5760
    },
    {
      "epoch": 1.472314365909671,
      "grad_norm": 2.2165048122406006,
      "learning_rate": 3.615978671374478e-06,
      "loss": 0.0158,
      "step": 5770
    },
    {
      "epoch": 1.4748660372544016,
      "grad_norm": 0.5519954562187195,
      "learning_rate": 3.6072584851550814e-06,
      "loss": 0.0163,
      "step": 5780
    },
    {
      "epoch": 1.4774177085991325,
      "grad_norm": 0.5470237135887146,
      "learning_rate": 3.598532945660246e-06,
      "loss": 0.0162,
      "step": 5790
    },
    {
      "epoch": 1.4799693799438631,
      "grad_norm": 0.6911956071853638,
      "learning_rate": 3.589802129809797e-06,
      "loss": 0.0156,
      "step": 5800
    },
    {
      "epoch": 1.482521051288594,
      "grad_norm": 0.6574975848197937,
      "learning_rate": 3.5810661145700722e-06,
      "loss": 0.0163,
      "step": 5810
    },
    {
      "epoch": 1.485072722633325,
      "grad_norm": 1.735755205154419,
      "learning_rate": 3.5723249769532454e-06,
      "loss": 0.0172,
      "step": 5820
    },
    {
      "epoch": 1.4876243939780556,
      "grad_norm": 0.5131850838661194,
      "learning_rate": 3.563578794016646e-06,
      "loss": 0.0155,
      "step": 5830
    },
    {
      "epoch": 1.4901760653227865,
      "grad_norm": 2.086827516555786,
      "learning_rate": 3.554827642862081e-06,
      "loss": 0.0157,
      "step": 5840
    },
    {
      "epoch": 1.4927277366675171,
      "grad_norm": 1.4139020442962646,
      "learning_rate": 3.546071600635154e-06,
      "loss": 0.014,
      "step": 5850
    },
    {
      "epoch": 1.495279408012248,
      "grad_norm": 0.5393047332763672,
      "learning_rate": 3.537310744524587e-06,
      "loss": 0.0177,
      "step": 5860
    },
    {
      "epoch": 1.497831079356979,
      "grad_norm": 0.30612704157829285,
      "learning_rate": 3.528545151761536e-06,
      "loss": 0.0121,
      "step": 5870
    },
    {
      "epoch": 1.5003827507017096,
      "grad_norm": 0.40969327092170715,
      "learning_rate": 3.5197748996189154e-06,
      "loss": 0.0139,
      "step": 5880
    },
    {
      "epoch": 1.5029344220464405,
      "grad_norm": 0.7444477677345276,
      "learning_rate": 3.5110000654107145e-06,
      "loss": 0.0163,
      "step": 5890
    },
    {
      "epoch": 1.5054860933911711,
      "grad_norm": 1.087607502937317,
      "learning_rate": 3.5022207264913127e-06,
      "loss": 0.0144,
      "step": 5900
    },
    {
      "epoch": 1.508037764735902,
      "grad_norm": 0.7293539047241211,
      "learning_rate": 3.493436960254803e-06,
      "loss": 0.0152,
      "step": 5910
    },
    {
      "epoch": 1.510589436080633,
      "grad_norm": 1.0831620693206787,
      "learning_rate": 3.484648844134309e-06,
      "loss": 0.0165,
      "step": 5920
    },
    {
      "epoch": 1.5131411074253636,
      "grad_norm": 0.7094727754592896,
      "learning_rate": 3.475856455601296e-06,
      "loss": 0.0139,
      "step": 5930
    },
    {
      "epoch": 1.5156927787700945,
      "grad_norm": 0.6312902569770813,
      "learning_rate": 3.4670598721648956e-06,
      "loss": 0.0154,
      "step": 5940
    },
    {
      "epoch": 1.5182444501148251,
      "grad_norm": 0.8989167213439941,
      "learning_rate": 3.4582591713712205e-06,
      "loss": 0.0161,
      "step": 5950
    },
    {
      "epoch": 1.520796121459556,
      "grad_norm": 0.4361861050128937,
      "learning_rate": 3.449454430802677e-06,
      "loss": 0.014,
      "step": 5960
    },
    {
      "epoch": 1.523347792804287,
      "grad_norm": 0.8975860476493835,
      "learning_rate": 3.4406457280772854e-06,
      "loss": 0.0121,
      "step": 5970
    },
    {
      "epoch": 1.5258994641490176,
      "grad_norm": 0.9588630795478821,
      "learning_rate": 3.431833140847996e-06,
      "loss": 0.0141,
      "step": 5980
    },
    {
      "epoch": 1.5284511354937484,
      "grad_norm": 0.75777667760849,
      "learning_rate": 3.4230167468019992e-06,
      "loss": 0.0184,
      "step": 5990
    },
    {
      "epoch": 1.5310028068384791,
      "grad_norm": 0.8160403966903687,
      "learning_rate": 3.4141966236600466e-06,
      "loss": 0.0152,
      "step": 6000
    },
    {
      "epoch": 1.53355447818321,
      "grad_norm": 0.3753710091114044,
      "learning_rate": 3.4053728491757654e-06,
      "loss": 0.0138,
      "step": 6010
    },
    {
      "epoch": 1.536106149527941,
      "grad_norm": 0.3643024265766144,
      "learning_rate": 3.3965455011349656e-06,
      "loss": 0.014,
      "step": 6020
    },
    {
      "epoch": 1.5386578208726716,
      "grad_norm": 0.9779283404350281,
      "learning_rate": 3.387714657354965e-06,
      "loss": 0.0169,
      "step": 6030
    },
    {
      "epoch": 1.5412094922174024,
      "grad_norm": 0.593742847442627,
      "learning_rate": 3.378880395683898e-06,
      "loss": 0.0158,
      "step": 6040
    },
    {
      "epoch": 1.543761163562133,
      "grad_norm": 0.9815357327461243,
      "learning_rate": 3.370042794000025e-06,
      "loss": 0.0166,
      "step": 6050
    },
    {
      "epoch": 1.546312834906864,
      "grad_norm": 0.4922243356704712,
      "learning_rate": 3.3612019302110528e-06,
      "loss": 0.0149,
      "step": 6060
    },
    {
      "epoch": 1.5488645062515949,
      "grad_norm": 0.8108282685279846,
      "learning_rate": 3.352357882253447e-06,
      "loss": 0.0171,
      "step": 6070
    },
    {
      "epoch": 1.5514161775963256,
      "grad_norm": 0.6546838283538818,
      "learning_rate": 3.343510728091742e-06,
      "loss": 0.0158,
      "step": 6080
    },
    {
      "epoch": 1.5539678489410564,
      "grad_norm": 0.35426297783851624,
      "learning_rate": 3.3346605457178506e-06,
      "loss": 0.0128,
      "step": 6090
    },
    {
      "epoch": 1.556519520285787,
      "grad_norm": 0.5806670188903809,
      "learning_rate": 3.325807413150388e-06,
      "loss": 0.0137,
      "step": 6100
    },
    {
      "epoch": 1.559071191630518,
      "grad_norm": 0.43077072501182556,
      "learning_rate": 3.3169514084339723e-06,
      "loss": 0.0143,
      "step": 6110
    },
    {
      "epoch": 1.5616228629752489,
      "grad_norm": 0.6155886650085449,
      "learning_rate": 3.30809260963854e-06,
      "loss": 0.0134,
      "step": 6120
    },
    {
      "epoch": 1.5641745343199795,
      "grad_norm": 0.400890588760376,
      "learning_rate": 3.2992310948586636e-06,
      "loss": 0.0148,
      "step": 6130
    },
    {
      "epoch": 1.5667262056647104,
      "grad_norm": 0.9763600826263428,
      "learning_rate": 3.290366942212855e-06,
      "loss": 0.012,
      "step": 6140
    },
    {
      "epoch": 1.569277877009441,
      "grad_norm": 0.8423333764076233,
      "learning_rate": 3.281500229842878e-06,
      "loss": 0.0145,
      "step": 6150
    },
    {
      "epoch": 1.571829548354172,
      "grad_norm": 0.7116320133209229,
      "learning_rate": 3.2726310359130654e-06,
      "loss": 0.0149,
      "step": 6160
    },
    {
      "epoch": 1.5743812196989029,
      "grad_norm": 0.6901907920837402,
      "learning_rate": 3.2637594386096266e-06,
      "loss": 0.0138,
      "step": 6170
    },
    {
      "epoch": 1.5769328910436335,
      "grad_norm": 0.2646922767162323,
      "learning_rate": 3.2548855161399535e-06,
      "loss": 0.0176,
      "step": 6180
    },
    {
      "epoch": 1.5794845623883644,
      "grad_norm": 1.581405758857727,
      "learning_rate": 3.24600934673194e-06,
      "loss": 0.0148,
      "step": 6190
    },
    {
      "epoch": 1.582036233733095,
      "grad_norm": 0.8435882925987244,
      "learning_rate": 3.2371310086332856e-06,
      "loss": 0.0131,
      "step": 6200
    },
    {
      "epoch": 1.584587905077826,
      "grad_norm": 0.5191860198974609,
      "learning_rate": 3.2282505801108067e-06,
      "loss": 0.0142,
      "step": 6210
    },
    {
      "epoch": 1.5871395764225569,
      "grad_norm": 0.8106646537780762,
      "learning_rate": 3.219368139449752e-06,
      "loss": 0.015,
      "step": 6220
    },
    {
      "epoch": 1.5896912477672875,
      "grad_norm": 0.6545732617378235,
      "learning_rate": 3.2104837649531035e-06,
      "loss": 0.0139,
      "step": 6230
    },
    {
      "epoch": 1.5922429191120182,
      "grad_norm": 0.6558523774147034,
      "learning_rate": 3.2015975349408924e-06,
      "loss": 0.0123,
      "step": 6240
    },
    {
      "epoch": 1.594794590456749,
      "grad_norm": 0.5409281253814697,
      "learning_rate": 3.192709527749509e-06,
      "loss": 0.0135,
      "step": 6250
    },
    {
      "epoch": 1.59734626180148,
      "grad_norm": 1.8452955484390259,
      "learning_rate": 3.183819821731009e-06,
      "loss": 0.0145,
      "step": 6260
    },
    {
      "epoch": 1.5998979331462109,
      "grad_norm": 1.8067026138305664,
      "learning_rate": 3.1749284952524226e-06,
      "loss": 0.016,
      "step": 6270
    },
    {
      "epoch": 1.6024496044909415,
      "grad_norm": 0.5791333913803101,
      "learning_rate": 3.1660356266950677e-06,
      "loss": 0.0152,
      "step": 6280
    },
    {
      "epoch": 1.6050012758356722,
      "grad_norm": 0.9250419735908508,
      "learning_rate": 3.157141294453855e-06,
      "loss": 0.0156,
      "step": 6290
    },
    {
      "epoch": 1.607552947180403,
      "grad_norm": 0.522924542427063,
      "learning_rate": 3.1482455769365977e-06,
      "loss": 0.0125,
      "step": 6300
    },
    {
      "epoch": 1.610104618525134,
      "grad_norm": 0.4562835991382599,
      "learning_rate": 3.139348552563323e-06,
      "loss": 0.0143,
      "step": 6310
    },
    {
      "epoch": 1.6126562898698649,
      "grad_norm": 0.5292758941650391,
      "learning_rate": 3.1304502997655765e-06,
      "loss": 0.0126,
      "step": 6320
    },
    {
      "epoch": 1.6152079612145955,
      "grad_norm": 0.39967814087867737,
      "learning_rate": 3.1215508969857334e-06,
      "loss": 0.0123,
      "step": 6330
    },
    {
      "epoch": 1.6177596325593262,
      "grad_norm": 0.47947466373443604,
      "learning_rate": 3.1126504226763073e-06,
      "loss": 0.014,
      "step": 6340
    },
    {
      "epoch": 1.620311303904057,
      "grad_norm": 0.7605049014091492,
      "learning_rate": 3.103748955299258e-06,
      "loss": 0.0162,
      "step": 6350
    },
    {
      "epoch": 1.622862975248788,
      "grad_norm": 0.7074354290962219,
      "learning_rate": 3.0948465733252974e-06,
      "loss": 0.0143,
      "step": 6360
    },
    {
      "epoch": 1.6254146465935189,
      "grad_norm": 1.39603853225708,
      "learning_rate": 3.0859433552332036e-06,
      "loss": 0.0154,
      "step": 6370
    },
    {
      "epoch": 1.6279663179382495,
      "grad_norm": 0.4325566291809082,
      "learning_rate": 3.0770393795091224e-06,
      "loss": 0.0166,
      "step": 6380
    },
    {
      "epoch": 1.6305179892829802,
      "grad_norm": 0.5471740365028381,
      "learning_rate": 3.0681347246458805e-06,
      "loss": 0.012,
      "step": 6390
    },
    {
      "epoch": 1.633069660627711,
      "grad_norm": 1.0500181913375854,
      "learning_rate": 3.0592294691422905e-06,
      "loss": 0.0163,
      "step": 6400
    },
    {
      "epoch": 1.635621331972442,
      "grad_norm": 0.5767816305160522,
      "learning_rate": 3.0503236915024603e-06,
      "loss": 0.0126,
      "step": 6410
    },
    {
      "epoch": 1.6381730033171729,
      "grad_norm": 1.0612276792526245,
      "learning_rate": 3.0414174702351e-06,
      "loss": 0.0137,
      "step": 6420
    },
    {
      "epoch": 1.6407246746619035,
      "grad_norm": 0.5275915265083313,
      "learning_rate": 3.0325108838528313e-06,
      "loss": 0.0126,
      "step": 6430
    },
    {
      "epoch": 1.6432763460066342,
      "grad_norm": 0.3712652027606964,
      "learning_rate": 3.0236040108714945e-06,
      "loss": 0.0134,
      "step": 6440
    },
    {
      "epoch": 1.645828017351365,
      "grad_norm": 0.516525387763977,
      "learning_rate": 3.014696929809456e-06,
      "loss": 0.0134,
      "step": 6450
    },
    {
      "epoch": 1.648379688696096,
      "grad_norm": 0.6508063077926636,
      "learning_rate": 3.005789719186917e-06,
      "loss": 0.016,
      "step": 6460
    },
    {
      "epoch": 1.6509313600408269,
      "grad_norm": 0.25411614775657654,
      "learning_rate": 2.9968824575252196e-06,
      "loss": 0.0143,
      "step": 6470
    },
    {
      "epoch": 1.6534830313855575,
      "grad_norm": 0.4073875844478607,
      "learning_rate": 2.9879752233461587e-06,
      "loss": 0.0124,
      "step": 6480
    },
    {
      "epoch": 1.6560347027302882,
      "grad_norm": 0.8705873489379883,
      "learning_rate": 2.9790680951712836e-06,
      "loss": 0.0113,
      "step": 6490
    },
    {
      "epoch": 1.658586374075019,
      "grad_norm": 0.9108631610870361,
      "learning_rate": 2.9701611515212113e-06,
      "loss": 0.0148,
      "step": 6500
    },
| { | |
| "epoch": 1.66113804541975, | |
| "grad_norm": 0.5919849872589111, | |
| "learning_rate": 2.961254470914931e-06, | |
| "loss": 0.0131, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 1.6636897167644809, | |
| "grad_norm": 0.5540559887886047, | |
| "learning_rate": 2.952348131869112e-06, | |
| "loss": 0.0127, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 1.6662413881092115, | |
| "grad_norm": 0.23948337137699127, | |
| "learning_rate": 2.9434422128974165e-06, | |
| "loss": 0.0134, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 1.6687930594539422, | |
| "grad_norm": 0.6661424040794373, | |
| "learning_rate": 2.934536792509801e-06, | |
| "loss": 0.0118, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 1.671344730798673, | |
| "grad_norm": 0.2371857613325119, | |
| "learning_rate": 2.925631949211825e-06, | |
| "loss": 0.011, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 1.673896402143404, | |
| "grad_norm": 0.9995185732841492, | |
| "learning_rate": 2.916727761503963e-06, | |
| "loss": 0.0133, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 1.6764480734881348, | |
| "grad_norm": 0.30217620730400085, | |
| "learning_rate": 2.9078243078809106e-06, | |
| "loss": 0.0133, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 1.6789997448328655, | |
| "grad_norm": 0.6044448018074036, | |
| "learning_rate": 2.8989216668308897e-06, | |
| "loss": 0.0132, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 1.6815514161775962, | |
| "grad_norm": 0.6776731610298157, | |
| "learning_rate": 2.8900199168349616e-06, | |
| "loss": 0.0149, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 1.684103087522327, | |
| "grad_norm": 0.5117316246032715, | |
| "learning_rate": 2.881119136366332e-06, | |
| "loss": 0.0146, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 1.686654758867058, | |
| "grad_norm": 0.6031244993209839, | |
| "learning_rate": 2.8722194038896565e-06, | |
| "loss": 0.0122, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 1.6892064302117888, | |
| "grad_norm": 0.5227133631706238, | |
| "learning_rate": 2.863320797860357e-06, | |
| "loss": 0.0164, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 1.6917581015565195, | |
| "grad_norm": 0.5128726959228516, | |
| "learning_rate": 2.854423396723922e-06, | |
| "loss": 0.0129, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 1.6943097729012502, | |
| "grad_norm": 0.677301824092865, | |
| "learning_rate": 2.845527278915219e-06, | |
| "loss": 0.0111, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 1.696861444245981, | |
| "grad_norm": 1.8520700931549072, | |
| "learning_rate": 2.836632522857803e-06, | |
| "loss": 0.0128, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 1.699413115590712, | |
| "grad_norm": 0.43956881761550903, | |
| "learning_rate": 2.827739206963225e-06, | |
| "loss": 0.0142, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 1.7019647869354428, | |
| "grad_norm": 1.2026915550231934, | |
| "learning_rate": 2.818847409630339e-06, | |
| "loss": 0.0122, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 1.7045164582801735, | |
| "grad_norm": 1.0047426223754883, | |
| "learning_rate": 2.809957209244611e-06, | |
| "loss": 0.0132, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 1.7070681296249042, | |
| "grad_norm": 0.4368385672569275, | |
| "learning_rate": 2.801068684177432e-06, | |
| "loss": 0.0155, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 1.709619800969635, | |
| "grad_norm": 2.1144449710845947, | |
| "learning_rate": 2.792181912785422e-06, | |
| "loss": 0.0133, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 1.712171472314366, | |
| "grad_norm": 0.46182388067245483, | |
| "learning_rate": 2.7832969734097437e-06, | |
| "loss": 0.0132, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 1.7147231436590968, | |
| "grad_norm": 1.1694209575653076, | |
| "learning_rate": 2.774413944375408e-06, | |
| "loss": 0.0126, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 1.7172748150038275, | |
| "grad_norm": 0.6728443503379822, | |
| "learning_rate": 2.765532903990586e-06, | |
| "loss": 0.0125, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 1.7198264863485582, | |
| "grad_norm": 0.3873930871486664, | |
| "learning_rate": 2.7566539305459167e-06, | |
| "loss": 0.0151, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 1.722378157693289, | |
| "grad_norm": 0.5007622838020325, | |
| "learning_rate": 2.747777102313819e-06, | |
| "loss": 0.0114, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 1.72492982903802, | |
| "grad_norm": 1.0474838018417358, | |
| "learning_rate": 2.7389024975478e-06, | |
| "loss": 0.0164, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 1.7274815003827508, | |
| "grad_norm": 0.9246062636375427, | |
| "learning_rate": 2.730030194481768e-06, | |
| "loss": 0.0138, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 1.7300331717274815, | |
| "grad_norm": 0.7463748455047607, | |
| "learning_rate": 2.721160271329339e-06, | |
| "loss": 0.0129, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 1.7325848430722122, | |
| "grad_norm": 0.39589956402778625, | |
| "learning_rate": 2.712292806283149e-06, | |
| "loss": 0.012, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 1.735136514416943, | |
| "grad_norm": 0.7912735342979431, | |
| "learning_rate": 2.703427877514164e-06, | |
| "loss": 0.0131, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 1.737688185761674, | |
| "grad_norm": 0.9773954749107361, | |
| "learning_rate": 2.694565563170992e-06, | |
| "loss": 0.0119, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 1.7402398571064048, | |
| "grad_norm": 0.45853710174560547, | |
| "learning_rate": 2.685705941379194e-06, | |
| "loss": 0.0135, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 1.7427915284511355, | |
| "grad_norm": 0.4193810522556305, | |
| "learning_rate": 2.676849090240596e-06, | |
| "loss": 0.0117, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 1.7453431997958662, | |
| "grad_norm": 0.5059391856193542, | |
| "learning_rate": 2.6679950878325967e-06, | |
| "loss": 0.0139, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 1.747894871140597, | |
| "grad_norm": 1.798842191696167, | |
| "learning_rate": 2.6591440122074846e-06, | |
| "loss": 0.013, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 1.750446542485328, | |
| "grad_norm": 0.6951261162757874, | |
| "learning_rate": 2.6502959413917437e-06, | |
| "loss": 0.0109, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 1.7529982138300588, | |
| "grad_norm": 1.0176141262054443, | |
| "learning_rate": 2.6414509533853728e-06, | |
| "loss": 0.0121, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 1.7555498851747895, | |
| "grad_norm": 1.5015063285827637, | |
| "learning_rate": 2.632609126161191e-06, | |
| "loss": 0.0099, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 1.7581015565195202, | |
| "grad_norm": 0.5768877863883972, | |
| "learning_rate": 2.6237705376641576e-06, | |
| "loss": 0.0135, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 1.760653227864251, | |
| "grad_norm": 2.051805257797241, | |
| "learning_rate": 2.6149352658106775e-06, | |
| "loss": 0.0136, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 1.763204899208982, | |
| "grad_norm": 0.3947019577026367, | |
| "learning_rate": 2.6061033884879193e-06, | |
| "loss": 0.011, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 1.7657565705537128, | |
| "grad_norm": 0.5471501350402832, | |
| "learning_rate": 2.5972749835531266e-06, | |
| "loss": 0.013, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 1.7683082418984435, | |
| "grad_norm": 0.5119963884353638, | |
| "learning_rate": 2.5884501288329326e-06, | |
| "loss": 0.0112, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 1.7708599132431742, | |
| "grad_norm": 1.8591612577438354, | |
| "learning_rate": 2.5796289021226722e-06, | |
| "loss": 0.0133, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 1.773411584587905, | |
| "grad_norm": 0.6567859053611755, | |
| "learning_rate": 2.570811381185701e-06, | |
| "loss": 0.0134, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 1.775963255932636, | |
| "grad_norm": 0.4754619002342224, | |
| "learning_rate": 2.5619976437527034e-06, | |
| "loss": 0.0135, | |
| "step": 6960 | |
| }, | |
| { | |
| "epoch": 1.7785149272773668, | |
| "grad_norm": 0.66524338722229, | |
| "learning_rate": 2.5531877675210114e-06, | |
| "loss": 0.0155, | |
| "step": 6970 | |
| }, | |
| { | |
| "epoch": 1.7810665986220975, | |
| "grad_norm": 0.5068429708480835, | |
| "learning_rate": 2.54438183015392e-06, | |
| "loss": 0.0124, | |
| "step": 6980 | |
| }, | |
| { | |
| "epoch": 1.7836182699668282, | |
| "grad_norm": 0.700384795665741, | |
| "learning_rate": 2.5355799092799975e-06, | |
| "loss": 0.0116, | |
| "step": 6990 | |
| }, | |
| { | |
| "epoch": 1.786169941311559, | |
| "grad_norm": 0.24284306168556213, | |
| "learning_rate": 2.526782082492408e-06, | |
| "loss": 0.0105, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 1.78872161265629, | |
| "grad_norm": 0.7343897223472595, | |
| "learning_rate": 2.5179884273482245e-06, | |
| "loss": 0.0135, | |
| "step": 7010 | |
| }, | |
| { | |
| "epoch": 1.7912732840010208, | |
| "grad_norm": 0.4073348641395569, | |
| "learning_rate": 2.509199021367743e-06, | |
| "loss": 0.011, | |
| "step": 7020 | |
| }, | |
| { | |
| "epoch": 1.7938249553457515, | |
| "grad_norm": 0.6841785907745361, | |
| "learning_rate": 2.5004139420338027e-06, | |
| "loss": 0.0116, | |
| "step": 7030 | |
| }, | |
| { | |
| "epoch": 1.7963766266904821, | |
| "grad_norm": 1.4202295541763306, | |
| "learning_rate": 2.491633266791101e-06, | |
| "loss": 0.0126, | |
| "step": 7040 | |
| }, | |
| { | |
| "epoch": 1.798928298035213, | |
| "grad_norm": 0.4704054296016693, | |
| "learning_rate": 2.482857073045508e-06, | |
| "loss": 0.013, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 1.801479969379944, | |
| "grad_norm": 0.650240957736969, | |
| "learning_rate": 2.4740854381633936e-06, | |
| "loss": 0.0113, | |
| "step": 7060 | |
| }, | |
| { | |
| "epoch": 1.8040316407246748, | |
| "grad_norm": 0.936140239238739, | |
| "learning_rate": 2.4653184394709347e-06, | |
| "loss": 0.0129, | |
| "step": 7070 | |
| }, | |
| { | |
| "epoch": 1.8065833120694055, | |
| "grad_norm": 0.6480603218078613, | |
| "learning_rate": 2.4565561542534372e-06, | |
| "loss": 0.0132, | |
| "step": 7080 | |
| }, | |
| { | |
| "epoch": 1.8091349834141361, | |
| "grad_norm": 0.7110511064529419, | |
| "learning_rate": 2.4477986597546585e-06, | |
| "loss": 0.0124, | |
| "step": 7090 | |
| }, | |
| { | |
| "epoch": 1.811686654758867, | |
| "grad_norm": 0.6329154372215271, | |
| "learning_rate": 2.4390460331761213e-06, | |
| "loss": 0.0114, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 1.814238326103598, | |
| "grad_norm": 0.3106624484062195, | |
| "learning_rate": 2.4302983516764366e-06, | |
| "loss": 0.0104, | |
| "step": 7110 | |
| }, | |
| { | |
| "epoch": 1.8167899974483288, | |
| "grad_norm": 0.49600428342819214, | |
| "learning_rate": 2.421555692370622e-06, | |
| "loss": 0.0139, | |
| "step": 7120 | |
| }, | |
| { | |
| "epoch": 1.8193416687930595, | |
| "grad_norm": 0.7423809766769409, | |
| "learning_rate": 2.4128181323294206e-06, | |
| "loss": 0.0114, | |
| "step": 7130 | |
| }, | |
| { | |
| "epoch": 1.8218933401377901, | |
| "grad_norm": 0.3624695837497711, | |
| "learning_rate": 2.404085748578625e-06, | |
| "loss": 0.0149, | |
| "step": 7140 | |
| }, | |
| { | |
| "epoch": 1.824445011482521, | |
| "grad_norm": 0.2720579206943512, | |
| "learning_rate": 2.3953586180983947e-06, | |
| "loss": 0.01, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 1.826996682827252, | |
| "grad_norm": 0.2844093441963196, | |
| "learning_rate": 2.3866368178225797e-06, | |
| "loss": 0.0119, | |
| "step": 7160 | |
| }, | |
| { | |
| "epoch": 1.8295483541719828, | |
| "grad_norm": 1.0419397354125977, | |
| "learning_rate": 2.3779204246380412e-06, | |
| "loss": 0.0115, | |
| "step": 7170 | |
| }, | |
| { | |
| "epoch": 1.8321000255167135, | |
| "grad_norm": 0.7186925411224365, | |
| "learning_rate": 2.3692095153839766e-06, | |
| "loss": 0.0136, | |
| "step": 7180 | |
| }, | |
| { | |
| "epoch": 1.8346516968614441, | |
| "grad_norm": 1.3992877006530762, | |
| "learning_rate": 2.360504166851238e-06, | |
| "loss": 0.0123, | |
| "step": 7190 | |
| }, | |
| { | |
| "epoch": 1.837203368206175, | |
| "grad_norm": 0.70779949426651, | |
| "learning_rate": 2.351804455781655e-06, | |
| "loss": 0.0119, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 1.839755039550906, | |
| "grad_norm": 0.6416905522346497, | |
| "learning_rate": 2.343110458867364e-06, | |
| "loss": 0.012, | |
| "step": 7210 | |
| }, | |
| { | |
| "epoch": 1.8423067108956368, | |
| "grad_norm": 0.5262870192527771, | |
| "learning_rate": 2.3344222527501267e-06, | |
| "loss": 0.0144, | |
| "step": 7220 | |
| }, | |
| { | |
| "epoch": 1.8448583822403675, | |
| "grad_norm": 0.3885171711444855, | |
| "learning_rate": 2.3257399140206547e-06, | |
| "loss": 0.0142, | |
| "step": 7230 | |
| }, | |
| { | |
| "epoch": 1.8474100535850981, | |
| "grad_norm": 0.6644590497016907, | |
| "learning_rate": 2.3170635192179396e-06, | |
| "loss": 0.0096, | |
| "step": 7240 | |
| }, | |
| { | |
| "epoch": 1.849961724929829, | |
| "grad_norm": 0.5656073689460754, | |
| "learning_rate": 2.308393144828571e-06, | |
| "loss": 0.0111, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 1.85251339627456, | |
| "grad_norm": 0.26087692379951477, | |
| "learning_rate": 2.2997288672860663e-06, | |
| "loss": 0.0101, | |
| "step": 7260 | |
| }, | |
| { | |
| "epoch": 1.8550650676192908, | |
| "grad_norm": 0.6924827098846436, | |
| "learning_rate": 2.2910707629701975e-06, | |
| "loss": 0.013, | |
| "step": 7270 | |
| }, | |
| { | |
| "epoch": 1.8576167389640215, | |
| "grad_norm": 0.7471089363098145, | |
| "learning_rate": 2.282418908206315e-06, | |
| "loss": 0.0098, | |
| "step": 7280 | |
| }, | |
| { | |
| "epoch": 1.8601684103087521, | |
| "grad_norm": 1.0280992984771729, | |
| "learning_rate": 2.2737733792646773e-06, | |
| "loss": 0.014, | |
| "step": 7290 | |
| }, | |
| { | |
| "epoch": 1.862720081653483, | |
| "grad_norm": 0.46094390749931335, | |
| "learning_rate": 2.2651342523597784e-06, | |
| "loss": 0.012, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 1.865271752998214, | |
| "grad_norm": 0.7856783866882324, | |
| "learning_rate": 2.2565016036496746e-06, | |
| "loss": 0.0103, | |
| "step": 7310 | |
| }, | |
| { | |
| "epoch": 1.8678234243429448, | |
| "grad_norm": 0.6690205931663513, | |
| "learning_rate": 2.247875509235314e-06, | |
| "loss": 0.0101, | |
| "step": 7320 | |
| }, | |
| { | |
| "epoch": 1.8703750956876755, | |
| "grad_norm": 0.8430372476577759, | |
| "learning_rate": 2.2392560451598635e-06, | |
| "loss": 0.0143, | |
| "step": 7330 | |
| }, | |
| { | |
| "epoch": 1.8729267670324061, | |
| "grad_norm": 0.931174635887146, | |
| "learning_rate": 2.2306432874080436e-06, | |
| "loss": 0.0133, | |
| "step": 7340 | |
| }, | |
| { | |
| "epoch": 1.875478438377137, | |
| "grad_norm": 0.2438133805990219, | |
| "learning_rate": 2.2220373119054524e-06, | |
| "loss": 0.0125, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 1.878030109721868, | |
| "grad_norm": 0.7844971418380737, | |
| "learning_rate": 2.2134381945179023e-06, | |
| "loss": 0.0119, | |
| "step": 7360 | |
| }, | |
| { | |
| "epoch": 1.8805817810665986, | |
| "grad_norm": 0.3950898349285126, | |
| "learning_rate": 2.204846011050746e-06, | |
| "loss": 0.0136, | |
| "step": 7370 | |
| }, | |
| { | |
| "epoch": 1.8831334524113295, | |
| "grad_norm": 0.7956571578979492, | |
| "learning_rate": 2.1962608372482105e-06, | |
| "loss": 0.0107, | |
| "step": 7380 | |
| }, | |
| { | |
| "epoch": 1.8856851237560601, | |
| "grad_norm": 0.3765330910682678, | |
| "learning_rate": 2.187682748792729e-06, | |
| "loss": 0.0094, | |
| "step": 7390 | |
| }, | |
| { | |
| "epoch": 1.888236795100791, | |
| "grad_norm": 0.5307440161705017, | |
| "learning_rate": 2.1791118213042754e-06, | |
| "loss": 0.0103, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 1.890788466445522, | |
| "grad_norm": 0.5232400298118591, | |
| "learning_rate": 2.1705481303396934e-06, | |
| "loss": 0.0122, | |
| "step": 7410 | |
| }, | |
| { | |
| "epoch": 1.8933401377902526, | |
| "grad_norm": 0.9073548913002014, | |
| "learning_rate": 2.161991751392038e-06, | |
| "loss": 0.0108, | |
| "step": 7420 | |
| }, | |
| { | |
| "epoch": 1.8958918091349835, | |
| "grad_norm": 0.33009836077690125, | |
| "learning_rate": 2.1534427598899006e-06, | |
| "loss": 0.0112, | |
| "step": 7430 | |
| }, | |
| { | |
| "epoch": 1.8984434804797141, | |
| "grad_norm": 0.528856635093689, | |
| "learning_rate": 2.1449012311967517e-06, | |
| "loss": 0.0103, | |
| "step": 7440 | |
| }, | |
| { | |
| "epoch": 1.900995151824445, | |
| "grad_norm": 0.3639775514602661, | |
| "learning_rate": 2.136367240610271e-06, | |
| "loss": 0.0105, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 1.903546823169176, | |
| "grad_norm": 0.4201386570930481, | |
| "learning_rate": 2.1278408633616884e-06, | |
| "loss": 0.0123, | |
| "step": 7460 | |
| }, | |
| { | |
| "epoch": 1.9060984945139066, | |
| "grad_norm": 0.47134822607040405, | |
| "learning_rate": 2.1193221746151174e-06, | |
| "loss": 0.0109, | |
| "step": 7470 | |
| }, | |
| { | |
| "epoch": 1.9086501658586374, | |
| "grad_norm": 1.3191474676132202, | |
| "learning_rate": 2.1108112494668945e-06, | |
| "loss": 0.0104, | |
| "step": 7480 | |
| }, | |
| { | |
| "epoch": 1.9112018372033681, | |
| "grad_norm": 0.27142035961151123, | |
| "learning_rate": 2.1023081629449156e-06, | |
| "loss": 0.0097, | |
| "step": 7490 | |
| }, | |
| { | |
| "epoch": 1.913753508548099, | |
| "grad_norm": 0.6145235300064087, | |
| "learning_rate": 2.0938129900079753e-06, | |
| "loss": 0.0104, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 1.91630517989283, | |
| "grad_norm": 1.2480336427688599, | |
| "learning_rate": 2.0853258055451077e-06, | |
| "loss": 0.0126, | |
| "step": 7510 | |
| }, | |
| { | |
| "epoch": 1.9188568512375606, | |
| "grad_norm": 1.0720759630203247, | |
| "learning_rate": 2.0768466843749215e-06, | |
| "loss": 0.0137, | |
| "step": 7520 | |
| }, | |
| { | |
| "epoch": 1.9214085225822914, | |
| "grad_norm": 0.550995409488678, | |
| "learning_rate": 2.068375701244946e-06, | |
| "loss": 0.011, | |
| "step": 7530 | |
| }, | |
| { | |
| "epoch": 1.923960193927022, | |
| "grad_norm": 0.5243539810180664, | |
| "learning_rate": 2.0599129308309695e-06, | |
| "loss": 0.0113, | |
| "step": 7540 | |
| }, | |
| { | |
| "epoch": 1.926511865271753, | |
| "grad_norm": 0.43778249621391296, | |
| "learning_rate": 2.051458447736382e-06, | |
| "loss": 0.0149, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 1.9290635366164839, | |
| "grad_norm": 0.5081802010536194, | |
| "learning_rate": 2.0430123264915144e-06, | |
| "loss": 0.011, | |
| "step": 7560 | |
| }, | |
| { | |
| "epoch": 1.9316152079612146, | |
| "grad_norm": 0.6080982089042664, | |
| "learning_rate": 2.034574641552987e-06, | |
| "loss": 0.0133, | |
| "step": 7570 | |
| }, | |
| { | |
| "epoch": 1.9341668793059454, | |
| "grad_norm": 0.5425201058387756, | |
| "learning_rate": 2.0261454673030458e-06, | |
| "loss": 0.013, | |
| "step": 7580 | |
| }, | |
| { | |
| "epoch": 1.936718550650676, | |
| "grad_norm": 0.2850346863269806, | |
| "learning_rate": 2.0177248780489145e-06, | |
| "loss": 0.0116, | |
| "step": 7590 | |
| }, | |
| { | |
| "epoch": 1.939270221995407, | |
| "grad_norm": 0.9347357749938965, | |
| "learning_rate": 2.009312948022136e-06, | |
| "loss": 0.0105, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 1.9418218933401379, | |
| "grad_norm": 0.6447896957397461, | |
| "learning_rate": 2.000909751377916e-06, | |
| "loss": 0.0111, | |
| "step": 7610 | |
| }, | |
| { | |
| "epoch": 1.9443735646848685, | |
| "grad_norm": 0.8438960313796997, | |
| "learning_rate": 1.9925153621944724e-06, | |
| "loss": 0.012, | |
| "step": 7620 | |
| }, | |
| { | |
| "epoch": 1.9469252360295994, | |
| "grad_norm": 0.48641327023506165, | |
| "learning_rate": 1.9841298544723804e-06, | |
| "loss": 0.0112, | |
| "step": 7630 | |
| }, | |
| { | |
| "epoch": 1.94947690737433, | |
| "grad_norm": 0.8980324268341064, | |
| "learning_rate": 1.9757533021339214e-06, | |
| "loss": 0.01, | |
| "step": 7640 | |
| }, | |
| { | |
| "epoch": 1.952028578719061, | |
| "grad_norm": 0.6476429104804993, | |
| "learning_rate": 1.96738577902243e-06, | |
| "loss": 0.0125, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 1.9545802500637919, | |
| "grad_norm": 0.34102582931518555, | |
| "learning_rate": 1.9590273589016455e-06, | |
| "loss": 0.0111, | |
| "step": 7660 | |
| }, | |
| { | |
| "epoch": 1.9571319214085225, | |
| "grad_norm": 0.42341795563697815, | |
| "learning_rate": 1.950678115455059e-06, | |
| "loss": 0.01, | |
| "step": 7670 | |
| }, | |
| { | |
| "epoch": 1.9596835927532534, | |
| "grad_norm": 0.6199698448181152, | |
| "learning_rate": 1.9423381222852638e-06, | |
| "loss": 0.0089, | |
| "step": 7680 | |
| }, | |
| { | |
| "epoch": 1.962235264097984, | |
| "grad_norm": 1.1667146682739258, | |
| "learning_rate": 1.93400745291331e-06, | |
| "loss": 0.0144, | |
| "step": 7690 | |
| }, | |
| { | |
| "epoch": 1.964786935442715, | |
| "grad_norm": 0.36817818880081177, | |
| "learning_rate": 1.9256861807780505e-06, | |
| "loss": 0.0111, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 1.9673386067874459, | |
| "grad_norm": 0.34932783246040344, | |
| "learning_rate": 1.9173743792354984e-06, | |
| "loss": 0.0128, | |
| "step": 7710 | |
| }, | |
| { | |
| "epoch": 1.9698902781321765, | |
| "grad_norm": 1.2853580713272095, | |
| "learning_rate": 1.9090721215581833e-06, | |
| "loss": 0.0123, | |
| "step": 7720 | |
| }, | |
| { | |
| "epoch": 1.9724419494769072, | |
| "grad_norm": 0.9818376302719116, | |
| "learning_rate": 1.9007794809344937e-06, | |
| "loss": 0.0107, | |
| "step": 7730 | |
| }, | |
| { | |
| "epoch": 1.974993620821638, | |
| "grad_norm": 0.5679308176040649, | |
| "learning_rate": 1.892496530468045e-06, | |
| "loss": 0.0111, | |
| "step": 7740 | |
| }, | |
| { | |
| "epoch": 1.977545292166369, | |
| "grad_norm": 0.4230453073978424, | |
| "learning_rate": 1.8842233431770254e-06, | |
| "loss": 0.0113, | |
| "step": 7750 | |
| }, | |
| { | |
| "epoch": 1.9800969635110999, | |
| "grad_norm": 0.7522900700569153, | |
| "learning_rate": 1.8759599919935588e-06, | |
| "loss": 0.0129, | |
| "step": 7760 | |
| }, | |
| { | |
| "epoch": 1.9826486348558305, | |
| "grad_norm": 0.9982630610466003, | |
| "learning_rate": 1.8677065497630554e-06, | |
| "loss": 0.0101, | |
| "step": 7770 | |
| }, | |
| { | |
| "epoch": 1.9852003062005612, | |
| "grad_norm": 0.9997731447219849, | |
| "learning_rate": 1.8594630892435793e-06, | |
| "loss": 0.0098, | |
| "step": 7780 | |
| }, | |
| { | |
| "epoch": 1.987751977545292, | |
| "grad_norm": 0.8870327472686768, | |
| "learning_rate": 1.851229683105195e-06, | |
| "loss": 0.0107, | |
| "step": 7790 | |
| }, | |
| { | |
| "epoch": 1.990303648890023, | |
| "grad_norm": 0.8995794057846069, | |
| "learning_rate": 1.8430064039293358e-06, | |
| "loss": 0.0109, | |
| "step": 7800 | |
| }, | |
| { | |
| "epoch": 1.9928553202347539, | |
| "grad_norm": 0.4320782721042633, | |
| "learning_rate": 1.8347933242081597e-06, | |
| "loss": 0.0124, | |
| "step": 7810 | |
| }, | |
| { | |
| "epoch": 1.9954069915794845, | |
| "grad_norm": 0.7484515309333801, | |
| "learning_rate": 1.8265905163439137e-06, | |
| "loss": 0.0104, | |
| "step": 7820 | |
| }, | |
| { | |
| "epoch": 1.9979586629242152, | |
| "grad_norm": 0.7955275177955627, | |
| "learning_rate": 1.8183980526482893e-06, | |
| "loss": 0.0126, | |
| "step": 7830 | |
| }, | |
| { | |
| "epoch": 2.000510334268946, | |
| "grad_norm": 0.43744155764579773, | |
| "learning_rate": 1.8102160053417954e-06, | |
| "loss": 0.0105, | |
| "step": 7840 | |
| }, | |
| { | |
| "epoch": 2.003062005613677, | |
| "grad_norm": 0.35822105407714844, | |
| "learning_rate": 1.8020444465531095e-06, | |
| "loss": 0.0091, | |
| "step": 7850 | |
| }, | |
| { | |
| "epoch": 2.005613676958408, | |
| "grad_norm": 0.23200540244579315, | |
| "learning_rate": 1.793883448318451e-06, | |
| "loss": 0.0105, | |
| "step": 7860 | |
| }, | |
| { | |
| "epoch": 2.0081653483031388, | |
| "grad_norm": 0.8042770028114319, | |
| "learning_rate": 1.7857330825809412e-06, | |
| "loss": 0.0094, | |
| "step": 7870 | |
| }, | |
| { | |
| "epoch": 2.010717019647869, | |
| "grad_norm": 0.8214767575263977, | |
| "learning_rate": 1.7775934211899733e-06, | |
| "loss": 0.0101, | |
| "step": 7880 | |
| }, | |
| { | |
| "epoch": 2.0132686909926, | |
| "grad_norm": 0.41219642758369446, | |
| "learning_rate": 1.7694645359005726e-06, | |
| "loss": 0.01, | |
| "step": 7890 | |
| }, | |
| { | |
| "epoch": 2.015820362337331, | |
| "grad_norm": 0.846447229385376, | |
| "learning_rate": 1.7613464983727727e-06, | |
| "loss": 0.0105, | |
| "step": 7900 | |
| }, | |
| { | |
| "epoch": 2.018372033682062, | |
| "grad_norm": 0.4125836193561554, | |
| "learning_rate": 1.753239380170976e-06, | |
| "loss": 0.0098, | |
| "step": 7910 | |
| }, | |
| { | |
| "epoch": 2.0209237050267927, | |
| "grad_norm": 0.33059266209602356, | |
| "learning_rate": 1.7451432527633247e-06, | |
| "loss": 0.01, | |
| "step": 7920 | |
| }, | |
| { | |
| "epoch": 2.023475376371523, | |
| "grad_norm": 0.26018354296684265, | |
| "learning_rate": 1.737058187521076e-06, | |
| "loss": 0.0102, | |
| "step": 7930 | |
| }, | |
| { | |
| "epoch": 2.026027047716254, | |
| "grad_norm": 0.9150725603103638, | |
| "learning_rate": 1.7289842557179647e-06, | |
| "loss": 0.0085, | |
| "step": 7940 | |
| }, | |
| { | |
| "epoch": 2.028578719060985, | |
| "grad_norm": 0.6766268610954285, | |
| "learning_rate": 1.7209215285295795e-06, | |
| "loss": 0.0076, | |
| "step": 7950 | |
| }, | |
| { | |
| "epoch": 2.031130390405716, | |
| "grad_norm": 0.7343044877052307, | |
| "learning_rate": 1.7128700770327373e-06, | |
| "loss": 0.0088, | |
| "step": 7960 | |
| }, | |
| { | |
| "epoch": 2.0336820617504467, | |
| "grad_norm": 0.3323941230773926, | |
| "learning_rate": 1.7048299722048546e-06, | |
| "loss": 0.0087, | |
| "step": 7970 | |
| }, | |
| { | |
| "epoch": 2.036233733095177, | |
| "grad_norm": 0.16270656883716583, | |
| "learning_rate": 1.6968012849233174e-06, | |
| "loss": 0.0068, | |
| "step": 7980 | |
| }, | |
| { | |
| "epoch": 2.038785404439908, | |
| "grad_norm": 0.46309563517570496, | |
| "learning_rate": 1.6887840859648656e-06, | |
| "loss": 0.0093, | |
| "step": 7990 | |
| }, | |
| { | |
| "epoch": 2.041337075784639, | |
| "grad_norm": 0.5606943368911743, | |
| "learning_rate": 1.680778446004961e-06, | |
| "loss": 0.0071, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 2.04388874712937, | |
| "grad_norm": 0.6630533337593079, | |
| "learning_rate": 1.672784435617166e-06, | |
| "loss": 0.009, | |
| "step": 8010 | |
| }, | |
| { | |
| "epoch": 2.0464404184741007, | |
| "grad_norm": 0.2969760596752167, | |
| "learning_rate": 1.6648021252725266e-06, | |
| "loss": 0.0073, | |
| "step": 8020 | |
| }, | |
| { | |
| "epoch": 2.048992089818831, | |
| "grad_norm": 0.30385419726371765, | |
| "learning_rate": 1.6568315853389458e-06, | |
| "loss": 0.0115, | |
| "step": 8030 | |
| }, | |
| { | |
| "epoch": 2.051543761163562, | |
| "grad_norm": 1.2547024488449097, | |
| "learning_rate": 1.6488728860805627e-06, | |
| "loss": 0.0089, | |
| "step": 8040 | |
| }, | |
| { | |
| "epoch": 2.054095432508293, | |
| "grad_norm": 1.2566521167755127, | |
| "learning_rate": 1.6409260976571383e-06, | |
| "loss": 0.0099, | |
| "step": 8050 | |
| }, | |
| { | |
| "epoch": 2.056647103853024, | |
| "grad_norm": 0.28552380204200745, | |
| "learning_rate": 1.6329912901234318e-06, | |
| "loss": 0.0088, | |
| "step": 8060 | |
| }, | |
| { | |
| "epoch": 2.0591987751977547, | |
| "grad_norm": 0.6307502388954163, | |
| "learning_rate": 1.6250685334285845e-06, | |
| "loss": 0.0111, | |
| "step": 8070 | |
| }, | |
| { | |
| "epoch": 2.061750446542485, | |
| "grad_norm": 0.7623242139816284, | |
| "learning_rate": 1.6171578974155057e-06, | |
| "loss": 0.0074, | |
| "step": 8080 | |
| }, | |
| { | |
| "epoch": 2.064302117887216, | |
| "grad_norm": 0.3438488841056824, | |
| "learning_rate": 1.6092594518202554e-06, | |
| "loss": 0.0094, | |
| "step": 8090 | |
| }, | |
| { | |
| "epoch": 2.066853789231947, | |
| "grad_norm": 0.6200628280639648, | |
| "learning_rate": 1.6013732662714267e-06, | |
| "loss": 0.0088, | |
| "step": 8100 | |
| }, | |
| { | |
| "epoch": 2.069405460576678, | |
| "grad_norm": 0.407311350107193, | |
| "learning_rate": 1.5934994102895367e-06, | |
| "loss": 0.0077, | |
| "step": 8110 | |
| }, | |
| { | |
| "epoch": 2.0719571319214087, | |
| "grad_norm": 0.7597397565841675, | |
| "learning_rate": 1.5856379532864109e-06, | |
| "loss": 0.0096, | |
| "step": 8120 | |
| }, | |
| { | |
| "epoch": 2.074508803266139, | |
| "grad_norm": 0.30682581663131714, | |
| "learning_rate": 1.577788964564568e-06, | |
| "loss": 0.0072, | |
| "step": 8130 | |
| }, | |
| { | |
| "epoch": 2.07706047461087, | |
| "grad_norm": 0.4774474501609802, | |
| "learning_rate": 1.5699525133166222e-06, | |
| "loss": 0.0075, | |
| "step": 8140 | |
| }, | |
| { | |
| "epoch": 2.079612145955601, | |
| "grad_norm": 0.3659365773200989, | |
| "learning_rate": 1.5621286686246547e-06, | |
| "loss": 0.0108, | |
| "step": 8150 | |
| }, | |
| { | |
| "epoch": 2.082163817300332, | |
| "grad_norm": 0.8173565864562988, | |
| "learning_rate": 1.5543174994596167e-06, | |
| "loss": 0.0096, | |
| "step": 8160 | |
| }, | |
| { | |
| "epoch": 2.0847154886450627, | |
| "grad_norm": 0.5068744421005249, | |
| "learning_rate": 1.5465190746807209e-06, | |
| "loss": 0.0101, | |
| "step": 8170 | |
| }, | |
| { | |
| "epoch": 2.087267159989793, | |
| "grad_norm": 1.1201165914535522, | |
| "learning_rate": 1.5387334630348289e-06, | |
| "loss": 0.0098, | |
| "step": 8180 | |
| }, | |
| { | |
| "epoch": 2.089818831334524, | |
| "grad_norm": 0.5641067028045654, | |
| "learning_rate": 1.5309607331558481e-06, | |
| "loss": 0.0092, | |
| "step": 8190 | |
| }, | |
| { | |
| "epoch": 2.092370502679255, | |
| "grad_norm": 0.6286213397979736, | |
| "learning_rate": 1.5232009535641325e-06, | |
| "loss": 0.0111, | |
| "step": 8200 | |
| }, | |
| { | |
| "epoch": 2.094922174023986, | |
| "grad_norm": 0.3738521337509155, | |
| "learning_rate": 1.5154541926658668e-06, | |
| "loss": 0.008, | |
| "step": 8210 | |
| }, | |
| { | |
| "epoch": 2.0974738453687163, | |
| "grad_norm": 0.9565776586532593, | |
| "learning_rate": 1.507720518752471e-06, | |
| "loss": 0.0084, | |
| "step": 8220 | |
| }, | |
| { | |
| "epoch": 2.100025516713447, | |
| "grad_norm": 0.4144324064254761, | |
| "learning_rate": 1.5000000000000007e-06, | |
| "loss": 0.008, | |
| "step": 8230 | |
| }, | |
| { | |
| "epoch": 2.102577188058178, | |
| "grad_norm": 0.4653526842594147, | |
| "learning_rate": 1.4922927044685366e-06, | |
| "loss": 0.0106, | |
| "step": 8240 | |
| }, | |
| { | |
| "epoch": 2.105128859402909, | |
| "grad_norm": 0.2316306233406067, | |
| "learning_rate": 1.4845987001015956e-06, | |
| "loss": 0.0075, | |
| "step": 8250 | |
| }, | |
| { | |
| "epoch": 2.10768053074764, | |
| "grad_norm": 0.2574253976345062, | |
| "learning_rate": 1.4769180547255246e-06, | |
| "loss": 0.01, | |
| "step": 8260 | |
| }, | |
| { | |
| "epoch": 2.1102322020923703, | |
| "grad_norm": 0.5861005187034607, | |
| "learning_rate": 1.469250836048904e-06, | |
| "loss": 0.0087, | |
| "step": 8270 | |
| }, | |
| { | |
| "epoch": 2.112783873437101, | |
| "grad_norm": 0.5676212310791016, | |
| "learning_rate": 1.4615971116619504e-06, | |
| "loss": 0.0097, | |
| "step": 8280 | |
| }, | |
| { | |
| "epoch": 2.115335544781832, | |
| "grad_norm": 0.7331516146659851, | |
| "learning_rate": 1.453956949035926e-06, | |
| "loss": 0.0073, | |
| "step": 8290 | |
| }, | |
| { | |
| "epoch": 2.117887216126563, | |
| "grad_norm": 0.6896494626998901, | |
| "learning_rate": 1.446330415522534e-06, | |
| "loss": 0.0083, | |
| "step": 8300 | |
| }, | |
| { | |
| "epoch": 2.120438887471294, | |
| "grad_norm": 0.4225230813026428, | |
| "learning_rate": 1.438717578353334e-06, | |
| "loss": 0.0081, | |
| "step": 8310 | |
| }, | |
| { | |
| "epoch": 2.1229905588160243, | |
| "grad_norm": 0.5864181518554688, | |
| "learning_rate": 1.4311185046391469e-06, | |
| "loss": 0.0083, | |
| "step": 8320 | |
| }, | |
| { | |
| "epoch": 2.125542230160755, | |
| "grad_norm": 0.4726142883300781, | |
| "learning_rate": 1.4235332613694585e-06, | |
| "loss": 0.0091, | |
| "step": 8330 | |
| }, | |
| { | |
| "epoch": 2.128093901505486, | |
| "grad_norm": 0.4282633364200592, | |
| "learning_rate": 1.4159619154118332e-06, | |
| "loss": 0.0101, | |
| "step": 8340 | |
| }, | |
| { | |
| "epoch": 2.130645572850217, | |
| "grad_norm": 0.44410398602485657, | |
| "learning_rate": 1.408404533511327e-06, | |
| "loss": 0.0102, | |
| "step": 8350 | |
| }, | |
| { | |
| "epoch": 2.133197244194948, | |
| "grad_norm": 0.5199018716812134, | |
| "learning_rate": 1.4008611822898924e-06, | |
| "loss": 0.0119, | |
| "step": 8360 | |
| }, | |
| { | |
| "epoch": 2.1357489155396783, | |
| "grad_norm": 0.37344205379486084, | |
| "learning_rate": 1.3933319282457967e-06, | |
| "loss": 0.0104, | |
| "step": 8370 | |
| }, | |
| { | |
| "epoch": 2.138300586884409, | |
| "grad_norm": 0.36821675300598145, | |
| "learning_rate": 1.3858168377530349e-06, | |
| "loss": 0.0072, | |
| "step": 8380 | |
| }, | |
| { | |
| "epoch": 2.14085225822914, | |
| "grad_norm": 0.8769086003303528, | |
| "learning_rate": 1.3783159770607406e-06, | |
| "loss": 0.0083, | |
| "step": 8390 | |
| }, | |
| { | |
| "epoch": 2.143403929573871, | |
| "grad_norm": 0.5919402837753296, | |
| "learning_rate": 1.370829412292605e-06, | |
| "loss": 0.0103, | |
| "step": 8400 | |
| }, | |
| { | |
| "epoch": 2.145955600918602, | |
| "grad_norm": 1.004891037940979, | |
| "learning_rate": 1.363357209446298e-06, | |
| "loss": 0.0107, | |
| "step": 8410 | |
| }, | |
| { | |
| "epoch": 2.1485072722633323, | |
| "grad_norm": 0.25619760155677795, | |
| "learning_rate": 1.3558994343928764e-06, | |
| "loss": 0.009, | |
| "step": 8420 | |
| }, | |
| { | |
| "epoch": 2.151058943608063, | |
| "grad_norm": 0.2878786623477936, | |
| "learning_rate": 1.348456152876214e-06, | |
| "loss": 0.008, | |
| "step": 8430 | |
| }, | |
| { | |
| "epoch": 2.153610614952794, | |
| "grad_norm": 0.6479395627975464, | |
| "learning_rate": 1.3410274305124155e-06, | |
| "loss": 0.0097, | |
| "step": 8440 | |
| }, | |
| { | |
| "epoch": 2.156162286297525, | |
| "grad_norm": 0.42222821712493896, | |
| "learning_rate": 1.3336133327892383e-06, | |
| "loss": 0.009, | |
| "step": 8450 | |
| }, | |
| { | |
| "epoch": 2.158713957642256, | |
| "grad_norm": 0.5351365804672241, | |
| "learning_rate": 1.3262139250655168e-06, | |
| "loss": 0.0114, | |
| "step": 8460 | |
| }, | |
| { | |
| "epoch": 2.1612656289869863, | |
| "grad_norm": 0.4197307825088501, | |
| "learning_rate": 1.3188292725705888e-06, | |
| "loss": 0.0077, | |
| "step": 8470 | |
| }, | |
| { | |
| "epoch": 2.163817300331717, | |
| "grad_norm": 0.7340565919876099, | |
| "learning_rate": 1.3114594404037136e-06, | |
| "loss": 0.0066, | |
| "step": 8480 | |
| }, | |
| { | |
| "epoch": 2.166368971676448, | |
| "grad_norm": 0.9122597575187683, | |
| "learning_rate": 1.3041044935335052e-06, | |
| "loss": 0.0107, | |
| "step": 8490 | |
| }, | |
| { | |
| "epoch": 2.168920643021179, | |
| "grad_norm": 0.8086641430854797, | |
| "learning_rate": 1.2967644967973563e-06, | |
| "loss": 0.0104, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 2.17147231436591, | |
| "grad_norm": 0.7990751266479492, | |
| "learning_rate": 1.2894395149008647e-06, | |
| "loss": 0.0089, | |
| "step": 8510 | |
| }, | |
| { | |
| "epoch": 2.1740239857106403, | |
| "grad_norm": 0.4715738892555237, | |
| "learning_rate": 1.2821296124172686e-06, | |
| "loss": 0.0079, | |
| "step": 8520 | |
| }, | |
| { | |
| "epoch": 2.176575657055371, | |
| "grad_norm": 0.8491727709770203, | |
| "learning_rate": 1.2748348537868711e-06, | |
| "loss": 0.0106, | |
| "step": 8530 | |
| }, | |
| { | |
| "epoch": 2.179127328400102, | |
| "grad_norm": 0.2744673788547516, | |
| "learning_rate": 1.2675553033164747e-06, | |
| "loss": 0.009, | |
| "step": 8540 | |
| }, | |
| { | |
| "epoch": 2.181678999744833, | |
| "grad_norm": 0.8372253775596619, | |
| "learning_rate": 1.2602910251788165e-06, | |
| "loss": 0.0091, | |
| "step": 8550 | |
| }, | |
| { | |
| "epoch": 2.184230671089564, | |
| "grad_norm": 0.6588044762611389, | |
| "learning_rate": 1.2530420834120021e-06, | |
| "loss": 0.0081, | |
| "step": 8560 | |
| }, | |
| { | |
| "epoch": 2.1867823424342943, | |
| "grad_norm": 0.642224133014679, | |
| "learning_rate": 1.245808541918935e-06, | |
| "loss": 0.008, | |
| "step": 8570 | |
| }, | |
| { | |
| "epoch": 2.189334013779025, | |
| "grad_norm": 0.6353108286857605, | |
| "learning_rate": 1.2385904644667618e-06, | |
| "loss": 0.0076, | |
| "step": 8580 | |
| }, | |
| { | |
| "epoch": 2.191885685123756, | |
| "grad_norm": 0.8236711025238037, | |
| "learning_rate": 1.2313879146863048e-06, | |
| "loss": 0.01, | |
| "step": 8590 | |
| }, | |
| { | |
| "epoch": 2.194437356468487, | |
| "grad_norm": 1.0461316108703613, | |
| "learning_rate": 1.2242009560715007e-06, | |
| "loss": 0.0106, | |
| "step": 8600 | |
| }, | |
| { | |
| "epoch": 2.196989027813218, | |
| "grad_norm": 0.7238239049911499, | |
| "learning_rate": 1.2170296519788455e-06, | |
| "loss": 0.0082, | |
| "step": 8610 | |
| }, | |
| { | |
| "epoch": 2.1995406991579483, | |
| "grad_norm": 0.45300090312957764, | |
| "learning_rate": 1.209874065626833e-06, | |
| "loss": 0.0079, | |
| "step": 8620 | |
| }, | |
| { | |
| "epoch": 2.202092370502679, | |
| "grad_norm": 0.46156367659568787, | |
| "learning_rate": 1.2027342600953938e-06, | |
| "loss": 0.009, | |
| "step": 8630 | |
| }, | |
| { | |
| "epoch": 2.20464404184741, | |
| "grad_norm": 0.5707991719245911, | |
| "learning_rate": 1.195610298325348e-06, | |
| "loss": 0.0068, | |
| "step": 8640 | |
| }, | |
| { | |
| "epoch": 2.207195713192141, | |
| "grad_norm": 0.6443004608154297, | |
| "learning_rate": 1.188502243117842e-06, | |
| "loss": 0.0079, | |
| "step": 8650 | |
| }, | |
| { | |
| "epoch": 2.209747384536872, | |
| "grad_norm": 0.5062205791473389, | |
| "learning_rate": 1.1814101571337979e-06, | |
| "loss": 0.0081, | |
| "step": 8660 | |
| }, | |
| { | |
| "epoch": 2.2122990558816022, | |
| "grad_norm": 0.4682200849056244, | |
| "learning_rate": 1.1743341028933635e-06, | |
| "loss": 0.0099, | |
| "step": 8670 | |
| }, | |
| { | |
| "epoch": 2.214850727226333, | |
| "grad_norm": 0.9355303049087524, | |
| "learning_rate": 1.1672741427753596e-06, | |
| "loss": 0.0103, | |
| "step": 8680 | |
| }, | |
| { | |
| "epoch": 2.217402398571064, | |
| "grad_norm": 0.9519639611244202, | |
| "learning_rate": 1.1602303390167257e-06, | |
| "loss": 0.0091, | |
| "step": 8690 | |
| }, | |
| { | |
| "epoch": 2.219954069915795, | |
| "grad_norm": 0.9006462693214417, | |
| "learning_rate": 1.1532027537119796e-06, | |
| "loss": 0.0104, | |
| "step": 8700 | |
| }, | |
| { | |
| "epoch": 2.222505741260526, | |
| "grad_norm": 0.8201411366462708, | |
| "learning_rate": 1.1461914488126636e-06, | |
| "loss": 0.0101, | |
| "step": 8710 | |
| }, | |
| { | |
| "epoch": 2.2250574126052562, | |
| "grad_norm": 0.4761882722377777, | |
| "learning_rate": 1.1391964861267974e-06, | |
| "loss": 0.0092, | |
| "step": 8720 | |
| }, | |
| { | |
| "epoch": 2.227609083949987, | |
| "grad_norm": 1.0870357751846313, | |
| "learning_rate": 1.132217927318344e-06, | |
| "loss": 0.0079, | |
| "step": 8730 | |
| }, | |
| { | |
| "epoch": 2.230160755294718, | |
| "grad_norm": 0.6677237153053284, | |
| "learning_rate": 1.1252558339066518e-06, | |
| "loss": 0.0086, | |
| "step": 8740 | |
| }, | |
| { | |
| "epoch": 2.232712426639449, | |
| "grad_norm": 0.578660786151886, | |
| "learning_rate": 1.1183102672659192e-06, | |
| "loss": 0.0094, | |
| "step": 8750 | |
| }, | |
| { | |
| "epoch": 2.23526409798418, | |
| "grad_norm": 0.4101839065551758, | |
| "learning_rate": 1.111381288624657e-06, | |
| "loss": 0.0085, | |
| "step": 8760 | |
| }, | |
| { | |
| "epoch": 2.2378157693289102, | |
| "grad_norm": 0.3004243075847626, | |
| "learning_rate": 1.1044689590651418e-06, | |
| "loss": 0.0069, | |
| "step": 8770 | |
| }, | |
| { | |
| "epoch": 2.240367440673641, | |
| "grad_norm": 1.1306524276733398, | |
| "learning_rate": 1.0975733395228784e-06, | |
| "loss": 0.0094, | |
| "step": 8780 | |
| }, | |
| { | |
| "epoch": 2.242919112018372, | |
| "grad_norm": 0.6862215399742126, | |
| "learning_rate": 1.0906944907860723e-06, | |
| "loss": 0.0079, | |
| "step": 8790 | |
| }, | |
| { | |
| "epoch": 2.245470783363103, | |
| "grad_norm": 0.5847616791725159, | |
| "learning_rate": 1.0838324734950796e-06, | |
| "loss": 0.0069, | |
| "step": 8800 | |
| }, | |
| { | |
| "epoch": 2.248022454707834, | |
| "grad_norm": 0.3303333818912506, | |
| "learning_rate": 1.0769873481418796e-06, | |
| "loss": 0.0077, | |
| "step": 8810 | |
| }, | |
| { | |
| "epoch": 2.2505741260525642, | |
| "grad_norm": 0.25424349308013916, | |
| "learning_rate": 1.0701591750695445e-06, | |
| "loss": 0.008, | |
| "step": 8820 | |
| }, | |
| { | |
| "epoch": 2.253125797397295, | |
| "grad_norm": 0.4860968589782715, | |
| "learning_rate": 1.0633480144717018e-06, | |
| "loss": 0.0083, | |
| "step": 8830 | |
| }, | |
| { | |
| "epoch": 2.255677468742026, | |
| "grad_norm": 0.9478049874305725, | |
| "learning_rate": 1.0565539263920034e-06, | |
| "loss": 0.0086, | |
| "step": 8840 | |
| }, | |
| { | |
| "epoch": 2.258229140086757, | |
| "grad_norm": 0.6831640005111694, | |
| "learning_rate": 1.0497769707236058e-06, | |
| "loss": 0.0067, | |
| "step": 8850 | |
| }, | |
| { | |
| "epoch": 2.260780811431488, | |
| "grad_norm": 0.41167324781417847, | |
| "learning_rate": 1.0430172072086287e-06, | |
| "loss": 0.0087, | |
| "step": 8860 | |
| }, | |
| { | |
| "epoch": 2.2633324827762182, | |
| "grad_norm": 0.43332257866859436, | |
| "learning_rate": 1.0362746954376356e-06, | |
| "loss": 0.0083, | |
| "step": 8870 | |
| }, | |
| { | |
| "epoch": 2.265884154120949, | |
| "grad_norm": 0.5903699994087219, | |
| "learning_rate": 1.0295494948491105e-06, | |
| "loss": 0.0084, | |
| "step": 8880 | |
| }, | |
| { | |
| "epoch": 2.26843582546568, | |
| "grad_norm": 0.8941869735717773, | |
| "learning_rate": 1.0228416647289285e-06, | |
| "loss": 0.0091, | |
| "step": 8890 | |
| }, | |
| { | |
| "epoch": 2.270987496810411, | |
| "grad_norm": 0.24812312424182892, | |
| "learning_rate": 1.0161512642098346e-06, | |
| "loss": 0.0072, | |
| "step": 8900 | |
| }, | |
| { | |
| "epoch": 2.273539168155142, | |
| "grad_norm": 1.322016954421997, | |
| "learning_rate": 1.0094783522709284e-06, | |
| "loss": 0.01, | |
| "step": 8910 | |
| }, | |
| { | |
| "epoch": 2.2760908394998722, | |
| "grad_norm": 0.4932226538658142, | |
| "learning_rate": 1.0028229877371353e-06, | |
| "loss": 0.008, | |
| "step": 8920 | |
| }, | |
| { | |
| "epoch": 2.278642510844603, | |
| "grad_norm": 1.0810902118682861, | |
| "learning_rate": 9.961852292786908e-07, | |
| "loss": 0.0103, | |
| "step": 8930 | |
| }, | |
| { | |
| "epoch": 2.281194182189334, | |
| "grad_norm": 0.3352721929550171, | |
| "learning_rate": 9.895651354106285e-07, | |
| "loss": 0.0078, | |
| "step": 8940 | |
| }, | |
| { | |
| "epoch": 2.283745853534065, | |
| "grad_norm": 0.4848918914794922, | |
| "learning_rate": 9.829627644922561e-07, | |
| "loss": 0.0081, | |
| "step": 8950 | |
| }, | |
| { | |
| "epoch": 2.286297524878796, | |
| "grad_norm": 0.585834801197052, | |
| "learning_rate": 9.763781747266479e-07, | |
| "loss": 0.0092, | |
| "step": 8960 | |
| }, | |
| { | |
| "epoch": 2.2888491962235262, | |
| "grad_norm": 0.7620885372161865, | |
| "learning_rate": 9.698114241601284e-07, | |
| "loss": 0.0084, | |
| "step": 8970 | |
| }, | |
| { | |
| "epoch": 2.291400867568257, | |
| "grad_norm": 0.7390540242195129, | |
| "learning_rate": 9.632625706817594e-07, | |
| "loss": 0.0098, | |
| "step": 8980 | |
| }, | |
| { | |
| "epoch": 2.293952538912988, | |
| "grad_norm": 0.43986302614212036, | |
| "learning_rate": 9.56731672022831e-07, | |
| "loss": 0.0075, | |
| "step": 8990 | |
| }, | |
| { | |
| "epoch": 2.296504210257719, | |
| "grad_norm": 0.8151856064796448, | |
| "learning_rate": 9.502187857563568e-07, | |
| "loss": 0.0077, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 2.29905588160245, | |
| "grad_norm": 0.7944412231445312, | |
| "learning_rate": 9.437239692965569e-07, | |
| "loss": 0.0071, | |
| "step": 9010 | |
| }, | |
| { | |
| "epoch": 2.3016075529471802, | |
| "grad_norm": 0.6182528734207153, | |
| "learning_rate": 9.372472798983622e-07, | |
| "loss": 0.0069, | |
| "step": 9020 | |
| }, | |
| { | |
| "epoch": 2.304159224291911, | |
| "grad_norm": 0.47329604625701904, | |
| "learning_rate": 9.307887746569027e-07, | |
| "loss": 0.0115, | |
| "step": 9030 | |
| }, | |
| { | |
| "epoch": 2.306710895636642, | |
| "grad_norm": 0.6696287393569946, | |
| "learning_rate": 9.243485105070069e-07, | |
| "loss": 0.0079, | |
| "step": 9040 | |
| }, | |
| { | |
| "epoch": 2.309262566981373, | |
| "grad_norm": 0.7252456545829773, | |
| "learning_rate": 9.179265442226967e-07, | |
| "loss": 0.0085, | |
| "step": 9050 | |
| }, | |
| { | |
| "epoch": 2.3118142383261038, | |
| "grad_norm": 0.405748188495636, | |
| "learning_rate": 9.115229324166945e-07, | |
| "loss": 0.0086, | |
| "step": 9060 | |
| }, | |
| { | |
| "epoch": 2.314365909670834, | |
| "grad_norm": 0.2414780706167221, | |
| "learning_rate": 9.051377315399145e-07, | |
| "loss": 0.0071, | |
| "step": 9070 | |
| }, | |
| { | |
| "epoch": 2.316917581015565, | |
| "grad_norm": 0.9178037643432617, | |
| "learning_rate": 8.98770997880972e-07, | |
| "loss": 0.008, | |
| "step": 9080 | |
| }, | |
| { | |
| "epoch": 2.319469252360296, | |
| "grad_norm": 0.2835039496421814, | |
| "learning_rate": 8.924227875656858e-07, | |
| "loss": 0.0073, | |
| "step": 9090 | |
| }, | |
| { | |
| "epoch": 2.322020923705027, | |
| "grad_norm": 1.6318212747573853, | |
| "learning_rate": 8.860931565565804e-07, | |
| "loss": 0.0078, | |
| "step": 9100 | |
| }, | |
| { | |
| "epoch": 2.3245725950497578, | |
| "grad_norm": 0.8543625473976135, | |
| "learning_rate": 8.797821606523942e-07, | |
| "loss": 0.007, | |
| "step": 9110 | |
| }, | |
| { | |
| "epoch": 2.327124266394488, | |
| "grad_norm": 0.4498523771762848, | |
| "learning_rate": 8.734898554875923e-07, | |
| "loss": 0.0083, | |
| "step": 9120 | |
| }, | |
| { | |
| "epoch": 2.329675937739219, | |
| "grad_norm": 0.36660337448120117, | |
| "learning_rate": 8.672162965318668e-07, | |
| "loss": 0.008, | |
| "step": 9130 | |
| }, | |
| { | |
| "epoch": 2.33222760908395, | |
| "grad_norm": 0.6560646891593933, | |
| "learning_rate": 8.609615390896566e-07, | |
| "loss": 0.0096, | |
| "step": 9140 | |
| }, | |
| { | |
| "epoch": 2.334779280428681, | |
| "grad_norm": 0.532968282699585, | |
| "learning_rate": 8.547256382996558e-07, | |
| "loss": 0.0095, | |
| "step": 9150 | |
| }, | |
| { | |
| "epoch": 2.3373309517734118, | |
| "grad_norm": 1.1277568340301514, | |
| "learning_rate": 8.485086491343253e-07, | |
| "loss": 0.0082, | |
| "step": 9160 | |
| }, | |
| { | |
| "epoch": 2.339882623118142, | |
| "grad_norm": 0.6013373732566833, | |
| "learning_rate": 8.42310626399415e-07, | |
| "loss": 0.0096, | |
| "step": 9170 | |
| }, | |
| { | |
| "epoch": 2.342434294462873, | |
| "grad_norm": 0.4544149339199066, | |
| "learning_rate": 8.361316247334734e-07, | |
| "loss": 0.0078, | |
| "step": 9180 | |
| }, | |
| { | |
| "epoch": 2.344985965807604, | |
| "grad_norm": 0.3007356822490692, | |
| "learning_rate": 8.299716986073698e-07, | |
| "loss": 0.0071, | |
| "step": 9190 | |
| }, | |
| { | |
| "epoch": 2.347537637152335, | |
| "grad_norm": 0.275519996881485, | |
| "learning_rate": 8.238309023238144e-07, | |
| "loss": 0.0107, | |
| "step": 9200 | |
| }, | |
| { | |
| "epoch": 2.3500893084970658, | |
| "grad_norm": 0.516834020614624, | |
| "learning_rate": 8.177092900168793e-07, | |
| "loss": 0.0098, | |
| "step": 9210 | |
| }, | |
| { | |
| "epoch": 2.352640979841796, | |
| "grad_norm": 0.6302925944328308, | |
| "learning_rate": 8.116069156515177e-07, | |
| "loss": 0.008, | |
| "step": 9220 | |
| }, | |
| { | |
| "epoch": 2.355192651186527, | |
| "grad_norm": 0.2730187475681305, | |
| "learning_rate": 8.055238330230943e-07, | |
| "loss": 0.0087, | |
| "step": 9230 | |
| }, | |
| { | |
| "epoch": 2.357744322531258, | |
| "grad_norm": 0.948367714881897, | |
| "learning_rate": 7.994600957569057e-07, | |
| "loss": 0.0084, | |
| "step": 9240 | |
| }, | |
| { | |
| "epoch": 2.360295993875989, | |
| "grad_norm": 0.4010712504386902, | |
| "learning_rate": 7.934157573077096e-07, | |
| "loss": 0.0083, | |
| "step": 9250 | |
| }, | |
| { | |
| "epoch": 2.3628476652207198, | |
| "grad_norm": 0.5648771524429321, | |
| "learning_rate": 7.873908709592559e-07, | |
| "loss": 0.0071, | |
| "step": 9260 | |
| }, | |
| { | |
| "epoch": 2.36539933656545, | |
| "grad_norm": 1.0915343761444092, | |
| "learning_rate": 7.813854898238146e-07, | |
| "loss": 0.008, | |
| "step": 9270 | |
| }, | |
| { | |
| "epoch": 2.367951007910181, | |
| "grad_norm": 0.3447641134262085, | |
| "learning_rate": 7.753996668417053e-07, | |
| "loss": 0.0057, | |
| "step": 9280 | |
| }, | |
| { | |
| "epoch": 2.370502679254912, | |
| "grad_norm": 0.5241166949272156, | |
| "learning_rate": 7.694334547808362e-07, | |
| "loss": 0.0072, | |
| "step": 9290 | |
| }, | |
| { | |
| "epoch": 2.373054350599643, | |
| "grad_norm": 0.875315248966217, | |
| "learning_rate": 7.63486906236234e-07, | |
| "loss": 0.0072, | |
| "step": 9300 | |
| }, | |
| { | |
| "epoch": 2.3756060219443738, | |
| "grad_norm": 0.6171951293945312, | |
| "learning_rate": 7.57560073629581e-07, | |
| "loss": 0.0085, | |
| "step": 9310 | |
| }, | |
| { | |
| "epoch": 2.378157693289104, | |
| "grad_norm": 0.4204012155532837, | |
| "learning_rate": 7.51653009208757e-07, | |
| "loss": 0.0057, | |
| "step": 9320 | |
| }, | |
| { | |
| "epoch": 2.380709364633835, | |
| "grad_norm": 0.4826784133911133, | |
| "learning_rate": 7.45765765047375e-07, | |
| "loss": 0.0075, | |
| "step": 9330 | |
| }, | |
| { | |
| "epoch": 2.383261035978566, | |
| "grad_norm": 0.6717602610588074, | |
| "learning_rate": 7.398983930443199e-07, | |
| "loss": 0.0076, | |
| "step": 9340 | |
| }, | |
| { | |
| "epoch": 2.385812707323297, | |
| "grad_norm": 0.3673340082168579, | |
| "learning_rate": 7.340509449232983e-07, | |
| "loss": 0.0057, | |
| "step": 9350 | |
| }, | |
| { | |
| "epoch": 2.3883643786680278, | |
| "grad_norm": 0.5914478898048401, | |
| "learning_rate": 7.282234722323755e-07, | |
| "loss": 0.0086, | |
| "step": 9360 | |
| }, | |
| { | |
| "epoch": 2.390916050012758, | |
| "grad_norm": 0.41728970408439636, | |
| "learning_rate": 7.224160263435226e-07, | |
| "loss": 0.0092, | |
| "step": 9370 | |
| }, | |
| { | |
| "epoch": 2.393467721357489, | |
| "grad_norm": 0.5362657308578491, | |
| "learning_rate": 7.166286584521685e-07, | |
| "loss": 0.0076, | |
| "step": 9380 | |
| }, | |
| { | |
| "epoch": 2.39601939270222, | |
| "grad_norm": 0.5463047027587891, | |
| "learning_rate": 7.108614195767432e-07, | |
| "loss": 0.0094, | |
| "step": 9390 | |
| }, | |
| { | |
| "epoch": 2.398571064046951, | |
| "grad_norm": 0.33496329188346863, | |
| "learning_rate": 7.051143605582279e-07, | |
| "loss": 0.0087, | |
| "step": 9400 | |
| }, | |
| { | |
| "epoch": 2.4011227353916818, | |
| "grad_norm": 1.052742838859558, | |
| "learning_rate": 6.99387532059712e-07, | |
| "loss": 0.0087, | |
| "step": 9410 | |
| }, | |
| { | |
| "epoch": 2.403674406736412, | |
| "grad_norm": 1.4576879739761353, | |
| "learning_rate": 6.936809845659409e-07, | |
| "loss": 0.0088, | |
| "step": 9420 | |
| }, | |
| { | |
| "epoch": 2.406226078081143, | |
| "grad_norm": 0.5559976696968079, | |
| "learning_rate": 6.879947683828726e-07, | |
| "loss": 0.008, | |
| "step": 9430 | |
| }, | |
| { | |
| "epoch": 2.408777749425874, | |
| "grad_norm": 0.45263102650642395, | |
| "learning_rate": 6.823289336372369e-07, | |
| "loss": 0.0083, | |
| "step": 9440 | |
| }, | |
| { | |
| "epoch": 2.411329420770605, | |
| "grad_norm": 0.7428113222122192, | |
| "learning_rate": 6.766835302760911e-07, | |
| "loss": 0.0077, | |
| "step": 9450 | |
| }, | |
| { | |
| "epoch": 2.4138810921153357, | |
| "grad_norm": 0.30585309863090515, | |
| "learning_rate": 6.710586080663781e-07, | |
| "loss": 0.0073, | |
| "step": 9460 | |
| }, | |
| { | |
| "epoch": 2.416432763460066, | |
| "grad_norm": 0.5510261654853821, | |
| "learning_rate": 6.654542165944923e-07, | |
| "loss": 0.0076, | |
| "step": 9470 | |
| }, | |
| { | |
| "epoch": 2.418984434804797, | |
| "grad_norm": 1.1228365898132324, | |
| "learning_rate": 6.598704052658379e-07, | |
| "loss": 0.0076, | |
| "step": 9480 | |
| }, | |
| { | |
| "epoch": 2.421536106149528, | |
| "grad_norm": 0.3657705783843994, | |
| "learning_rate": 6.543072233043939e-07, | |
| "loss": 0.0097, | |
| "step": 9490 | |
| }, | |
| { | |
| "epoch": 2.424087777494259, | |
| "grad_norm": 0.2254849523305893, | |
| "learning_rate": 6.487647197522862e-07, | |
| "loss": 0.0079, | |
| "step": 9500 | |
| }, | |
| { | |
| "epoch": 2.4266394488389897, | |
| "grad_norm": 0.4132424294948578, | |
| "learning_rate": 6.43242943469347e-07, | |
| "loss": 0.0086, | |
| "step": 9510 | |
| }, | |
| { | |
| "epoch": 2.42919112018372, | |
| "grad_norm": 0.5428745150566101, | |
| "learning_rate": 6.377419431326867e-07, | |
| "loss": 0.0077, | |
| "step": 9520 | |
| }, | |
| { | |
| "epoch": 2.431742791528451, | |
| "grad_norm": 0.7623319625854492, | |
| "learning_rate": 6.322617672362698e-07, | |
| "loss": 0.0066, | |
| "step": 9530 | |
| }, | |
| { | |
| "epoch": 2.434294462873182, | |
| "grad_norm": 0.4517093300819397, | |
| "learning_rate": 6.2680246409048e-07, | |
| "loss": 0.0059, | |
| "step": 9540 | |
| }, | |
| { | |
| "epoch": 2.436846134217913, | |
| "grad_norm": 0.46577808260917664, | |
| "learning_rate": 6.213640818216965e-07, | |
| "loss": 0.0104, | |
| "step": 9550 | |
| }, | |
| { | |
| "epoch": 2.4393978055626437, | |
| "grad_norm": 0.43454670906066895, | |
| "learning_rate": 6.159466683718768e-07, | |
| "loss": 0.0072, | |
| "step": 9560 | |
| }, | |
| { | |
| "epoch": 2.441949476907374, | |
| "grad_norm": 0.5216269493103027, | |
| "learning_rate": 6.105502714981227e-07, | |
| "loss": 0.0068, | |
| "step": 9570 | |
| }, | |
| { | |
| "epoch": 2.444501148252105, | |
| "grad_norm": 0.7452653050422668, | |
| "learning_rate": 6.05174938772266e-07, | |
| "loss": 0.0062, | |
| "step": 9580 | |
| }, | |
| { | |
| "epoch": 2.447052819596836, | |
| "grad_norm": 0.6804510354995728, | |
| "learning_rate": 5.998207175804498e-07, | |
| "loss": 0.0065, | |
| "step": 9590 | |
| }, | |
| { | |
| "epoch": 2.449604490941567, | |
| "grad_norm": 0.6842300891876221, | |
| "learning_rate": 5.94487655122706e-07, | |
| "loss": 0.0079, | |
| "step": 9600 | |
| }, | |
| { | |
| "epoch": 2.4521561622862977, | |
| "grad_norm": 1.2596189975738525, | |
| "learning_rate": 5.891757984125443e-07, | |
| "loss": 0.008, | |
| "step": 9610 | |
| }, | |
| { | |
| "epoch": 2.454707833631028, | |
| "grad_norm": 1.110859751701355, | |
| "learning_rate": 5.838851942765357e-07, | |
| "loss": 0.0077, | |
| "step": 9620 | |
| }, | |
| { | |
| "epoch": 2.457259504975759, | |
| "grad_norm": 0.23649120330810547, | |
| "learning_rate": 5.786158893538982e-07, | |
| "loss": 0.008, | |
| "step": 9630 | |
| }, | |
| { | |
| "epoch": 2.45981117632049, | |
| "grad_norm": 0.49982666969299316, | |
| "learning_rate": 5.733679300960864e-07, | |
| "loss": 0.0079, | |
| "step": 9640 | |
| }, | |
| { | |
| "epoch": 2.462362847665221, | |
| "grad_norm": 1.2177993059158325, | |
| "learning_rate": 5.681413627663851e-07, | |
| "loss": 0.0078, | |
| "step": 9650 | |
| }, | |
| { | |
| "epoch": 2.4649145190099517, | |
| "grad_norm": 0.22220726311206818, | |
| "learning_rate": 5.629362334394964e-07, | |
| "loss": 0.0069, | |
| "step": 9660 | |
| }, | |
| { | |
| "epoch": 2.467466190354682, | |
| "grad_norm": 0.49332597851753235, | |
| "learning_rate": 5.577525880011379e-07, | |
| "loss": 0.0077, | |
| "step": 9670 | |
| }, | |
| { | |
| "epoch": 2.470017861699413, | |
| "grad_norm": 1.0104281902313232, | |
| "learning_rate": 5.525904721476366e-07, | |
| "loss": 0.0086, | |
| "step": 9680 | |
| }, | |
| { | |
| "epoch": 2.472569533044144, | |
| "grad_norm": 0.4739457964897156, | |
| "learning_rate": 5.474499313855242e-07, | |
| "loss": 0.0066, | |
| "step": 9690 | |
| }, | |
| { | |
| "epoch": 2.475121204388875, | |
| "grad_norm": 0.38137850165367126, | |
| "learning_rate": 5.423310110311376e-07, | |
| "loss": 0.0065, | |
| "step": 9700 | |
| }, | |
| { | |
| "epoch": 2.4776728757336057, | |
| "grad_norm": 0.4624011814594269, | |
| "learning_rate": 5.372337562102222e-07, | |
| "loss": 0.009, | |
| "step": 9710 | |
| }, | |
| { | |
| "epoch": 2.480224547078336, | |
| "grad_norm": 0.46692025661468506, | |
| "learning_rate": 5.32158211857527e-07, | |
| "loss": 0.006, | |
| "step": 9720 | |
| }, | |
| { | |
| "epoch": 2.482776218423067, | |
| "grad_norm": 1.0589154958724976, | |
| "learning_rate": 5.271044227164167e-07, | |
| "loss": 0.0084, | |
| "step": 9730 | |
| }, | |
| { | |
| "epoch": 2.485327889767798, | |
| "grad_norm": 0.606287956237793, | |
| "learning_rate": 5.22072433338472e-07, | |
| "loss": 0.0072, | |
| "step": 9740 | |
| }, | |
| { | |
| "epoch": 2.487879561112529, | |
| "grad_norm": 0.5379163026809692, | |
| "learning_rate": 5.170622880830971e-07, | |
| "loss": 0.0075, | |
| "step": 9750 | |
| }, | |
| { | |
| "epoch": 2.4904312324572597, | |
| "grad_norm": 0.6357580423355103, | |
| "learning_rate": 5.120740311171315e-07, | |
| "loss": 0.0065, | |
| "step": 9760 | |
| }, | |
| { | |
| "epoch": 2.49298290380199, | |
| "grad_norm": 1.1960883140563965, | |
| "learning_rate": 5.071077064144577e-07, | |
| "loss": 0.0078, | |
| "step": 9770 | |
| }, | |
| { | |
| "epoch": 2.495534575146721, | |
| "grad_norm": 0.9033713340759277, | |
| "learning_rate": 5.021633577556143e-07, | |
| "loss": 0.007, | |
| "step": 9780 | |
| }, | |
| { | |
| "epoch": 2.498086246491452, | |
| "grad_norm": 0.6243811845779419, | |
| "learning_rate": 4.97241028727412e-07, | |
| "loss": 0.0066, | |
| "step": 9790 | |
| }, | |
| { | |
| "epoch": 2.500637917836183, | |
| "grad_norm": 0.4687057435512543, | |
| "learning_rate": 4.923407627225478e-07, | |
| "loss": 0.0076, | |
| "step": 9800 | |
| }, | |
| { | |
| "epoch": 2.5031895891809137, | |
| "grad_norm": 0.3284855782985687, | |
| "learning_rate": 4.874626029392212e-07, | |
| "loss": 0.0063, | |
| "step": 9810 | |
| }, | |
| { | |
| "epoch": 2.505741260525644, | |
| "grad_norm": 1.1885757446289062, | |
| "learning_rate": 4.826065923807561e-07, | |
| "loss": 0.0087, | |
| "step": 9820 | |
| }, | |
| { | |
| "epoch": 2.508292931870375, | |
| "grad_norm": 1.5649694204330444, | |
| "learning_rate": 4.777727738552193e-07, | |
| "loss": 0.0097, | |
| "step": 9830 | |
| }, | |
| { | |
| "epoch": 2.510844603215106, | |
| "grad_norm": 0.2750914692878723, | |
| "learning_rate": 4.7296118997504413e-07, | |
| "loss": 0.0075, | |
| "step": 9840 | |
| }, | |
| { | |
| "epoch": 2.513396274559837, | |
| "grad_norm": 0.6689438819885254, | |
| "learning_rate": 4.681718831566558e-07, | |
| "loss": 0.0093, | |
| "step": 9850 | |
| }, | |
| { | |
| "epoch": 2.5159479459045677, | |
| "grad_norm": 0.7268848419189453, | |
| "learning_rate": 4.6340489562009657e-07, | |
| "loss": 0.009, | |
| "step": 9860 | |
| }, | |
| { | |
| "epoch": 2.518499617249298, | |
| "grad_norm": 0.2928844690322876, | |
| "learning_rate": 4.5866026938865157e-07, | |
| "loss": 0.0074, | |
| "step": 9870 | |
| }, | |
| { | |
| "epoch": 2.521051288594029, | |
| "grad_norm": 0.3780820369720459, | |
| "learning_rate": 4.5393804628848333e-07, | |
| "loss": 0.0081, | |
| "step": 9880 | |
| }, | |
| { | |
| "epoch": 2.52360295993876, | |
| "grad_norm": 0.5721049308776855, | |
| "learning_rate": 4.492382679482574e-07, | |
| "loss": 0.0073, | |
| "step": 9890 | |
| }, | |
| { | |
| "epoch": 2.526154631283491, | |
| "grad_norm": 0.4491700530052185, | |
| "learning_rate": 4.445609757987782e-07, | |
| "loss": 0.0068, | |
| "step": 9900 | |
| }, | |
| { | |
| "epoch": 2.5287063026282217, | |
| "grad_norm": 0.21525029838085175, | |
| "learning_rate": 4.3990621107262476e-07, | |
| "loss": 0.0074, | |
| "step": 9910 | |
| }, | |
| { | |
| "epoch": 2.531257973972952, | |
| "grad_norm": 1.5350968837738037, | |
| "learning_rate": 4.352740148037866e-07, | |
| "loss": 0.0093, | |
| "step": 9920 | |
| }, | |
| { | |
| "epoch": 2.533809645317683, | |
| "grad_norm": 0.6345850825309753, | |
| "learning_rate": 4.3066442782729876e-07, | |
| "loss": 0.0076, | |
| "step": 9930 | |
| }, | |
| { | |
| "epoch": 2.536361316662414, | |
| "grad_norm": 0.23979954421520233, | |
| "learning_rate": 4.260774907788877e-07, | |
| "loss": 0.0074, | |
| "step": 9940 | |
| }, | |
| { | |
| "epoch": 2.538912988007145, | |
| "grad_norm": 0.5163211226463318, | |
| "learning_rate": 4.215132440946076e-07, | |
| "loss": 0.0066, | |
| "step": 9950 | |
| }, | |
| { | |
| "epoch": 2.5414646593518757, | |
| "grad_norm": 0.9633673429489136, | |
| "learning_rate": 4.1697172801048587e-07, | |
| "loss": 0.0072, | |
| "step": 9960 | |
| }, | |
| { | |
| "epoch": 2.544016330696606, | |
| "grad_norm": 0.5062496066093445, | |
| "learning_rate": 4.1245298256217114e-07, | |
| "loss": 0.0071, | |
| "step": 9970 | |
| }, | |
| { | |
| "epoch": 2.546568002041337, | |
| "grad_norm": 1.0265597105026245, | |
| "learning_rate": 4.0795704758457666e-07, | |
| "loss": 0.0084, | |
| "step": 9980 | |
| }, | |
| { | |
| "epoch": 2.549119673386068, | |
| "grad_norm": 0.6191508173942566, | |
| "learning_rate": 4.03483962711529e-07, | |
| "loss": 0.0065, | |
| "step": 9990 | |
| }, | |
| { | |
| "epoch": 2.551671344730799, | |
| "grad_norm": 0.36056914925575256, | |
| "learning_rate": 3.9903376737542316e-07, | |
| "loss": 0.0077, | |
| "step": 10000 | |
| }, | |
| { | |
| "epoch": 2.5542230160755297, | |
| "grad_norm": 0.43993762135505676, | |
| "learning_rate": 3.94606500806869e-07, | |
| "loss": 0.0076, | |
| "step": 10010 | |
| }, | |
| { | |
| "epoch": 2.55677468742026, | |
| "grad_norm": 0.5819681286811829, | |
| "learning_rate": 3.902022020343492e-07, | |
| "loss": 0.0081, | |
| "step": 10020 | |
| }, | |
| { | |
| "epoch": 2.559326358764991, | |
| "grad_norm": 0.878422737121582, | |
| "learning_rate": 3.8582090988387455e-07, | |
| "loss": 0.0078, | |
| "step": 10030 | |
| }, | |
| { | |
| "epoch": 2.561878030109722, | |
| "grad_norm": 0.5557768940925598, | |
| "learning_rate": 3.814626629786423e-07, | |
| "loss": 0.0064, | |
| "step": 10040 | |
| }, | |
| { | |
| "epoch": 2.564429701454453, | |
| "grad_norm": 0.6650263667106628, | |
| "learning_rate": 3.7712749973869264e-07, | |
| "loss": 0.0087, | |
| "step": 10050 | |
| }, | |
| { | |
| "epoch": 2.5669813727991837, | |
| "grad_norm": 1.7049756050109863, | |
| "learning_rate": 3.728154583805745e-07, | |
| "loss": 0.0091, | |
| "step": 10060 | |
| }, | |
| { | |
| "epoch": 2.569533044143914, | |
| "grad_norm": 0.9691872596740723, | |
| "learning_rate": 3.685265769170042e-07, | |
| "loss": 0.0071, | |
| "step": 10070 | |
| }, | |
| { | |
| "epoch": 2.572084715488645, | |
| "grad_norm": 0.47858548164367676, | |
| "learning_rate": 3.64260893156533e-07, | |
| "loss": 0.008, | |
| "step": 10080 | |
| }, | |
| { | |
| "epoch": 2.574636386833376, | |
| "grad_norm": 0.4813213050365448, | |
| "learning_rate": 3.600184447032141e-07, | |
| "loss": 0.0066, | |
| "step": 10090 | |
| }, | |
| { | |
| "epoch": 2.577188058178107, | |
| "grad_norm": 0.6237726807594299, | |
| "learning_rate": 3.557992689562709e-07, | |
| "loss": 0.0066, | |
| "step": 10100 | |
| }, | |
| { | |
| "epoch": 2.5797397295228377, | |
| "grad_norm": 0.46429261565208435, | |
| "learning_rate": 3.516034031097639e-07, | |
| "loss": 0.0068, | |
| "step": 10110 | |
| }, | |
| { | |
| "epoch": 2.582291400867568, | |
| "grad_norm": 0.524320125579834, | |
| "learning_rate": 3.4743088415226895e-07, | |
| "loss": 0.007, | |
| "step": 10120 | |
| }, | |
| { | |
| "epoch": 2.584843072212299, | |
| "grad_norm": 0.6312581300735474, | |
| "learning_rate": 3.432817488665456e-07, | |
| "loss": 0.0076, | |
| "step": 10130 | |
| }, | |
| { | |
| "epoch": 2.58739474355703, | |
| "grad_norm": 0.5773522257804871, | |
| "learning_rate": 3.391560338292152e-07, | |
| "loss": 0.0071, | |
| "step": 10140 | |
| }, | |
| { | |
| "epoch": 2.589946414901761, | |
| "grad_norm": 0.2685536742210388, | |
| "learning_rate": 3.3505377541043903e-07, | |
| "loss": 0.0075, | |
| "step": 10150 | |
| }, | |
| { | |
| "epoch": 2.5924980862464917, | |
| "grad_norm": 0.7439098358154297, | |
| "learning_rate": 3.309750097735983e-07, | |
| "loss": 0.0074, | |
| "step": 10160 | |
| }, | |
| { | |
| "epoch": 2.595049757591222, | |
| "grad_norm": 0.8754897713661194, | |
| "learning_rate": 3.2691977287497067e-07, | |
| "loss": 0.006, | |
| "step": 10170 | |
| }, | |
| { | |
| "epoch": 2.597601428935953, | |
| "grad_norm": 0.4401561915874481, | |
| "learning_rate": 3.2288810046342035e-07, | |
| "loss": 0.007, | |
| "step": 10180 | |
| }, | |
| { | |
| "epoch": 2.600153100280684, | |
| "grad_norm": 0.4255291521549225, | |
| "learning_rate": 3.1888002808007633e-07, | |
| "loss": 0.0061, | |
| "step": 10190 | |
| }, | |
| { | |
| "epoch": 2.602704771625415, | |
| "grad_norm": 0.1766541600227356, | |
| "learning_rate": 3.1489559105802413e-07, | |
| "loss": 0.0062, | |
| "step": 10200 | |
| }, | |
| { | |
| "epoch": 2.6052564429701457, | |
| "grad_norm": 0.962647020816803, | |
| "learning_rate": 3.1093482452199053e-07, | |
| "loss": 0.0075, | |
| "step": 10210 | |
| }, | |
| { | |
| "epoch": 2.607808114314876, | |
| "grad_norm": 0.5156567692756653, | |
| "learning_rate": 3.069977633880369e-07, | |
| "loss": 0.0065, | |
| "step": 10220 | |
| }, | |
| { | |
| "epoch": 2.610359785659607, | |
| "grad_norm": 0.6798850297927856, | |
| "learning_rate": 3.030844423632491e-07, | |
| "loss": 0.007, | |
| "step": 10230 | |
| }, | |
| { | |
| "epoch": 2.612911457004338, | |
| "grad_norm": 1.1052918434143066, | |
| "learning_rate": 2.99194895945434e-07, | |
| "loss": 0.0075, | |
| "step": 10240 | |
| }, | |
| { | |
| "epoch": 2.615463128349069, | |
| "grad_norm": 0.40075254440307617, | |
| "learning_rate": 2.9532915842281137e-07, | |
| "loss": 0.0077, | |
| "step": 10250 | |
| }, | |
| { | |
| "epoch": 2.6180147996937997, | |
| "grad_norm": 0.48784929513931274, | |
| "learning_rate": 2.914872638737169e-07, | |
| "loss": 0.0088, | |
| "step": 10260 | |
| }, | |
| { | |
| "epoch": 2.62056647103853, | |
| "grad_norm": 1.6315809488296509, | |
| "learning_rate": 2.8766924616629697e-07, | |
| "loss": 0.0062, | |
| "step": 10270 | |
| }, | |
| { | |
| "epoch": 2.623118142383261, | |
| "grad_norm": 0.5205185413360596, | |
| "learning_rate": 2.838751389582135e-07, | |
| "loss": 0.0054, | |
| "step": 10280 | |
| }, | |
| { | |
| "epoch": 2.625669813727992, | |
| "grad_norm": 0.6846718192100525, | |
| "learning_rate": 2.8010497569634475e-07, | |
| "loss": 0.0064, | |
| "step": 10290 | |
| }, | |
| { | |
| "epoch": 2.628221485072723, | |
| "grad_norm": 0.5500282645225525, | |
| "learning_rate": 2.763587896164922e-07, | |
| "loss": 0.0063, | |
| "step": 10300 | |
| }, | |
| { | |
| "epoch": 2.6307731564174537, | |
| "grad_norm": 11.970605850219727, | |
| "learning_rate": 2.726366137430867e-07, | |
| "loss": 0.0072, | |
| "step": 10310 | |
| }, | |
| { | |
| "epoch": 2.633324827762184, | |
| "grad_norm": 0.7035308480262756, | |
| "learning_rate": 2.6893848088889836e-07, | |
| "loss": 0.0063, | |
| "step": 10320 | |
| }, | |
| { | |
| "epoch": 2.635876499106915, | |
| "grad_norm": 1.363316535949707, | |
| "learning_rate": 2.65264423654745e-07, | |
| "loss": 0.0078, | |
| "step": 10330 | |
| }, | |
| { | |
| "epoch": 2.638428170451646, | |
| "grad_norm": 0.6791175007820129, | |
| "learning_rate": 2.6161447442920737e-07, | |
| "loss": 0.0066, | |
| "step": 10340 | |
| }, | |
| { | |
| "epoch": 2.640979841796377, | |
| "grad_norm": 1.1736422777175903, | |
| "learning_rate": 2.57988665388343e-07, | |
| "loss": 0.0078, | |
| "step": 10350 | |
| }, | |
| { | |
| "epoch": 2.6435315131411077, | |
| "grad_norm": 0.6160316467285156, | |
| "learning_rate": 2.543870284954012e-07, | |
| "loss": 0.0067, | |
| "step": 10360 | |
| }, | |
| { | |
| "epoch": 2.646083184485838, | |
| "grad_norm": 1.1056572198867798, | |
| "learning_rate": 2.508095955005415e-07, | |
| "loss": 0.0075, | |
| "step": 10370 | |
| }, | |
| { | |
| "epoch": 2.648634855830569, | |
| "grad_norm": 0.4946836233139038, | |
| "learning_rate": 2.472563979405561e-07, | |
| "loss": 0.0056, | |
| "step": 10380 | |
| }, | |
| { | |
| "epoch": 2.6511865271753, | |
| "grad_norm": 0.4888780117034912, | |
| "learning_rate": 2.437274671385891e-07, | |
| "loss": 0.0073, | |
| "step": 10390 | |
| }, | |
| { | |
| "epoch": 2.6537381985200303, | |
| "grad_norm": 0.7349345684051514, | |
| "learning_rate": 2.402228342038619e-07, | |
| "loss": 0.0067, | |
| "step": 10400 | |
| }, | |
| { | |
| "epoch": 2.6562898698647617, | |
| "grad_norm": 1.420432448387146, | |
| "learning_rate": 2.3674253003139888e-07, | |
| "loss": 0.0097, | |
| "step": 10410 | |
| }, | |
| { | |
| "epoch": 2.658841541209492, | |
| "grad_norm": 0.36828410625457764, | |
| "learning_rate": 2.3328658530175373e-07, | |
| "loss": 0.0052, | |
| "step": 10420 | |
| }, | |
| { | |
| "epoch": 2.661393212554223, | |
| "grad_norm": 1.2144927978515625, | |
| "learning_rate": 2.2985503048073997e-07, | |
| "loss": 0.0071, | |
| "step": 10430 | |
| }, | |
| { | |
| "epoch": 2.663944883898954, | |
| "grad_norm": 0.6407505869865417, | |
| "learning_rate": 2.264478958191638e-07, | |
| "loss": 0.0076, | |
| "step": 10440 | |
| }, | |
| { | |
| "epoch": 2.6664965552436843, | |
| "grad_norm": 0.5099070072174072, | |
| "learning_rate": 2.2306521135255398e-07, | |
| "loss": 0.0066, | |
| "step": 10450 | |
| }, | |
| { | |
| "epoch": 2.6690482265884157, | |
| "grad_norm": 0.44241103529930115, | |
| "learning_rate": 2.197070069009003e-07, | |
| "loss": 0.0078, | |
| "step": 10460 | |
| }, | |
| { | |
| "epoch": 2.671599897933146, | |
| "grad_norm": 0.49996262788772583, | |
| "learning_rate": 2.1637331206838983e-07, | |
| "loss": 0.0081, | |
| "step": 10470 | |
| }, | |
| { | |
| "epoch": 2.674151569277877, | |
| "grad_norm": 0.3228076696395874, | |
| "learning_rate": 2.130641562431449e-07, | |
| "loss": 0.006, | |
| "step": 10480 | |
| }, | |
| { | |
| "epoch": 2.676703240622608, | |
| "grad_norm": 0.9245404601097107, | |
| "learning_rate": 2.0977956859696345e-07, | |
| "loss": 0.0086, | |
| "step": 10490 | |
| }, | |
| { | |
| "epoch": 2.6792549119673383, | |
| "grad_norm": 0.51729816198349, | |
| "learning_rate": 2.0651957808506604e-07, | |
| "loss": 0.007, | |
| "step": 10500 | |
| }, | |
| { | |
| "epoch": 2.6818065833120697, | |
| "grad_norm": 0.7117488384246826, | |
| "learning_rate": 2.032842134458347e-07, | |
| "loss": 0.0063, | |
| "step": 10510 | |
| }, | |
| { | |
| "epoch": 2.6843582546568, | |
| "grad_norm": 0.7258452773094177, | |
| "learning_rate": 2.000735032005644e-07, | |
| "loss": 0.0083, | |
| "step": 10520 | |
| }, | |
| { | |
| "epoch": 2.686909926001531, | |
| "grad_norm": 0.3727307617664337, | |
| "learning_rate": 1.9688747565320964e-07, | |
| "loss": 0.0068, | |
| "step": 10530 | |
| }, | |
| { | |
| "epoch": 2.689461597346262, | |
| "grad_norm": 0.7492243647575378, | |
| "learning_rate": 1.9372615889013434e-07, | |
| "loss": 0.0059, | |
| "step": 10540 | |
| }, | |
| { | |
| "epoch": 2.6920132686909923, | |
| "grad_norm": 0.6845988035202026, | |
| "learning_rate": 1.9058958077986498e-07, | |
| "loss": 0.0065, | |
| "step": 10550 | |
| }, | |
| { | |
| "epoch": 2.6945649400357237, | |
| "grad_norm": 0.4264490604400635, | |
| "learning_rate": 1.8747776897284586e-07, | |
| "loss": 0.0074, | |
| "step": 10560 | |
| }, | |
| { | |
| "epoch": 2.697116611380454, | |
| "grad_norm": 1.5105531215667725, | |
| "learning_rate": 1.8439075090119318e-07, | |
| "loss": 0.0064, | |
| "step": 10570 | |
| }, | |
| { | |
| "epoch": 2.699668282725185, | |
| "grad_norm": 0.4452486038208008, | |
| "learning_rate": 1.8132855377845537e-07, | |
| "loss": 0.006, | |
| "step": 10580 | |
| }, | |
| { | |
| "epoch": 2.702219954069916, | |
| "grad_norm": 0.40780559182167053, | |
| "learning_rate": 1.7829120459937255e-07, | |
| "loss": 0.0068, | |
| "step": 10590 | |
| }, | |
| { | |
| "epoch": 2.7047716254146463, | |
| "grad_norm": 0.5933582782745361, | |
| "learning_rate": 1.7527873013963734e-07, | |
| "loss": 0.0061, | |
| "step": 10600 | |
| }, | |
| { | |
| "epoch": 2.7073232967593777, | |
| "grad_norm": 0.36882108449935913, | |
| "learning_rate": 1.7229115695565955e-07, | |
| "loss": 0.0054, | |
| "step": 10610 | |
| }, | |
| { | |
| "epoch": 2.709874968104108, | |
| "grad_norm": 0.6598870754241943, | |
| "learning_rate": 1.6932851138433346e-07, | |
| "loss": 0.0068, | |
| "step": 10620 | |
| }, | |
| { | |
| "epoch": 2.712426639448839, | |
| "grad_norm": 0.6586048007011414, | |
| "learning_rate": 1.6639081954280354e-07, | |
| "loss": 0.0067, | |
| "step": 10630 | |
| }, | |
| { | |
| "epoch": 2.71497831079357, | |
| "grad_norm": 1.0061739683151245, | |
| "learning_rate": 1.6347810732823521e-07, | |
| "loss": 0.0078, | |
| "step": 10640 | |
| }, | |
| { | |
| "epoch": 2.7175299821383003, | |
| "grad_norm": 0.22482961416244507, | |
| "learning_rate": 1.6059040041758765e-07, | |
| "loss": 0.0058, | |
| "step": 10650 | |
| }, | |
| { | |
| "epoch": 2.7200816534830317, | |
| "grad_norm": 0.737498939037323, | |
| "learning_rate": 1.577277242673848e-07, | |
| "loss": 0.007, | |
| "step": 10660 | |
| }, | |
| { | |
| "epoch": 2.722633324827762, | |
| "grad_norm": 0.8647279739379883, | |
| "learning_rate": 1.5489010411349237e-07, | |
| "loss": 0.0068, | |
| "step": 10670 | |
| }, | |
| { | |
| "epoch": 2.725184996172493, | |
| "grad_norm": 0.8353618383407593, | |
| "learning_rate": 1.5207756497089643e-07, | |
| "loss": 0.0073, | |
| "step": 10680 | |
| }, | |
| { | |
| "epoch": 2.727736667517224, | |
| "grad_norm": 0.6162010431289673, | |
| "learning_rate": 1.492901316334816e-07, | |
| "loss": 0.0063, | |
| "step": 10690 | |
| }, | |
| { | |
| "epoch": 2.7302883388619543, | |
| "grad_norm": 0.38633957505226135, | |
| "learning_rate": 1.465278286738122e-07, | |
| "loss": 0.0061, | |
| "step": 10700 | |
| }, | |
| { | |
| "epoch": 2.7328400102066857, | |
| "grad_norm": 0.9637838006019592, | |
| "learning_rate": 1.4379068044291744e-07, | |
| "loss": 0.0077, | |
| "step": 10710 | |
| }, | |
| { | |
| "epoch": 2.735391681551416, | |
| "grad_norm": 0.8239974975585938, | |
| "learning_rate": 1.4107871107007387e-07, | |
| "loss": 0.0069, | |
| "step": 10720 | |
| }, | |
| { | |
| "epoch": 2.737943352896147, | |
| "grad_norm": 0.8153387308120728, | |
| "learning_rate": 1.3839194446259494e-07, | |
| "loss": 0.0077, | |
| "step": 10730 | |
| }, | |
| { | |
| "epoch": 2.740495024240878, | |
| "grad_norm": 0.6699053645133972, | |
| "learning_rate": 1.3573040430561988e-07, | |
| "loss": 0.0058, | |
| "step": 10740 | |
| }, | |
| { | |
| "epoch": 2.7430466955856083, | |
| "grad_norm": 0.5199264287948608, | |
| "learning_rate": 1.330941140619051e-07, | |
| "loss": 0.0074, | |
| "step": 10750 | |
| }, | |
| { | |
| "epoch": 2.745598366930339, | |
| "grad_norm": 0.9827825427055359, | |
| "learning_rate": 1.3048309697161563e-07, | |
| "loss": 0.0048, | |
| "step": 10760 | |
| }, | |
| { | |
| "epoch": 2.74815003827507, | |
| "grad_norm": 0.7316129207611084, | |
| "learning_rate": 1.278973760521226e-07, | |
| "loss": 0.007, | |
| "step": 10770 | |
| }, | |
| { | |
| "epoch": 2.750701709619801, | |
| "grad_norm": 0.5252894759178162, | |
| "learning_rate": 1.253369740977983e-07, | |
| "loss": 0.0054, | |
| "step": 10780 | |
| }, | |
| { | |
| "epoch": 2.753253380964532, | |
| "grad_norm": 0.8061007261276245, | |
| "learning_rate": 1.228019136798174e-07, | |
| "loss": 0.0056, | |
| "step": 10790 | |
| }, | |
| { | |
| "epoch": 2.7558050523092623, | |
| "grad_norm": 0.6680382490158081, | |
| "learning_rate": 1.2029221714595518e-07, | |
| "loss": 0.0079, | |
| "step": 10800 | |
| }, | |
| { | |
| "epoch": 2.758356723653993, | |
| "grad_norm": 0.6302357316017151, | |
| "learning_rate": 1.1780790662039342e-07, | |
| "loss": 0.0066, | |
| "step": 10810 | |
| }, | |
| { | |
| "epoch": 2.760908394998724, | |
| "grad_norm": 0.35548314452171326, | |
| "learning_rate": 1.1534900400352332e-07, | |
| "loss": 0.0051, | |
| "step": 10820 | |
| }, | |
| { | |
| "epoch": 2.763460066343455, | |
| "grad_norm": 0.7780895233154297, | |
| "learning_rate": 1.1291553097175388e-07, | |
| "loss": 0.0059, | |
| "step": 10830 | |
| }, | |
| { | |
| "epoch": 2.766011737688186, | |
| "grad_norm": 0.5007578134536743, | |
| "learning_rate": 1.1050750897731877e-07, | |
| "loss": 0.0058, | |
| "step": 10840 | |
| }, | |
| { | |
| "epoch": 2.7685634090329163, | |
| "grad_norm": 0.6232514381408691, | |
| "learning_rate": 1.0812495924809007e-07, | |
| "loss": 0.0066, | |
| "step": 10850 | |
| }, | |
| { | |
| "epoch": 2.771115080377647, | |
| "grad_norm": 0.672356367111206, | |
| "learning_rate": 1.0576790278738824e-07, | |
| "loss": 0.0051, | |
| "step": 10860 | |
| }, | |
| { | |
| "epoch": 2.773666751722378, | |
| "grad_norm": 0.34914809465408325, | |
| "learning_rate": 1.0343636037379978e-07, | |
| "loss": 0.0063, | |
| "step": 10870 | |
| }, | |
| { | |
| "epoch": 2.776218423067109, | |
| "grad_norm": 1.2736538648605347, | |
| "learning_rate": 1.011303525609908e-07, | |
| "loss": 0.0059, | |
| "step": 10880 | |
| }, | |
| { | |
| "epoch": 2.77877009441184, | |
| "grad_norm": 1.2168415784835815, | |
| "learning_rate": 9.884989967752944e-08, | |
| "loss": 0.0062, | |
| "step": 10890 | |
| }, | |
| { | |
| "epoch": 2.7813217657565703, | |
| "grad_norm": 0.1484823226928711, | |
| "learning_rate": 9.659502182670311e-08, | |
| "loss": 0.0045, | |
| "step": 10900 | |
| }, | |
| { | |
| "epoch": 2.783873437101301, | |
| "grad_norm": 0.7366049289703369, | |
| "learning_rate": 9.436573888634481e-08, | |
| "loss": 0.0069, | |
| "step": 10910 | |
| }, | |
| { | |
| "epoch": 2.786425108446032, | |
| "grad_norm": 0.9276716113090515, | |
| "learning_rate": 9.216207050865478e-08, | |
| "loss": 0.0058, | |
| "step": 10920 | |
| }, | |
| { | |
| "epoch": 2.788976779790763, | |
| "grad_norm": 1.392797589302063, | |
| "learning_rate": 8.998403612002948e-08, | |
| "loss": 0.0063, | |
| "step": 10930 | |
| }, | |
| { | |
| "epoch": 2.791528451135494, | |
| "grad_norm": 0.40872761607170105, | |
| "learning_rate": 8.783165492088785e-08, | |
| "loss": 0.0064, | |
| "step": 10940 | |
| }, | |
| { | |
| "epoch": 2.7940801224802243, | |
| "grad_norm": 0.910552442073822, | |
| "learning_rate": 8.570494588550604e-08, | |
| "loss": 0.007, | |
| "step": 10950 | |
| }, | |
| { | |
| "epoch": 2.796631793824955, | |
| "grad_norm": 0.8210703730583191, | |
| "learning_rate": 8.360392776184556e-08, | |
| "loss": 0.0069, | |
| "step": 10960 | |
| }, | |
| { | |
| "epoch": 2.799183465169686, | |
| "grad_norm": 0.6619951128959656, | |
| "learning_rate": 8.152861907139176e-08, | |
| "loss": 0.0068, | |
| "step": 10970 | |
| }, | |
| { | |
| "epoch": 2.801735136514417, | |
| "grad_norm": 0.41350725293159485, | |
| "learning_rate": 7.94790381089876e-08, | |
| "loss": 0.0058, | |
| "step": 10980 | |
| }, | |
| { | |
| "epoch": 2.804286807859148, | |
| "grad_norm": 0.4920821785926819, | |
| "learning_rate": 7.74552029426745e-08, | |
| "loss": 0.0057, | |
| "step": 10990 | |
| }, | |
| { | |
| "epoch": 2.8068384792038783, | |
| "grad_norm": 0.4195524752140045, | |
| "learning_rate": 7.545713141353305e-08, | |
| "loss": 0.0048, | |
| "step": 11000 | |
| }, | |
| { | |
| "epoch": 2.809390150548609, | |
| "grad_norm": 0.3614073395729065, | |
| "learning_rate": 7.348484113552356e-08, | |
| "loss": 0.0068, | |
| "step": 11010 | |
| }, | |
| { | |
| "epoch": 2.81194182189334, | |
| "grad_norm": 0.5706660747528076, | |
| "learning_rate": 7.153834949533278e-08, | |
| "loss": 0.0062, | |
| "step": 11020 | |
| }, | |
| { | |
| "epoch": 2.814493493238071, | |
| "grad_norm": 1.5906177759170532, | |
| "learning_rate": 6.961767365222072e-08, | |
| "loss": 0.0065, | |
| "step": 11030 | |
| }, | |
| { | |
| "epoch": 2.817045164582802, | |
| "grad_norm": 0.7011880278587341, | |
| "learning_rate": 6.77228305378671e-08, | |
| "loss": 0.0065, | |
| "step": 11040 | |
| }, | |
| { | |
| "epoch": 2.8195968359275323, | |
| "grad_norm": 0.9448215961456299, | |
| "learning_rate": 6.585383685622548e-08, | |
| "loss": 0.0072, | |
| "step": 11050 | |
| }, | |
| { | |
| "epoch": 2.822148507272263, | |
| "grad_norm": 0.4587925374507904, | |
| "learning_rate": 6.401070908337403e-08, | |
| "loss": 0.0064, | |
| "step": 11060 | |
| }, | |
| { | |
| "epoch": 2.824700178616994, | |
| "grad_norm": 0.1932133436203003, | |
| "learning_rate": 6.219346346736932e-08, | |
| "loss": 0.0054, | |
| "step": 11070 | |
| }, | |
| { | |
| "epoch": 2.827251849961725, | |
| "grad_norm": 0.614255428314209, | |
| "learning_rate": 6.040211602810475e-08, | |
| "loss": 0.0055, | |
| "step": 11080 | |
| }, | |
| { | |
| "epoch": 2.829803521306456, | |
| "grad_norm": 1.1453063488006592, | |
| "learning_rate": 5.86366825571697e-08, | |
| "loss": 0.0071, | |
| "step": 11090 | |
| }, | |
| { | |
| "epoch": 2.8323551926511863, | |
| "grad_norm": 0.9590531587600708, | |
| "learning_rate": 5.6897178617708267e-08, | |
| "loss": 0.0066, | |
| "step": 11100 | |
| }, | |
| { | |
| "epoch": 2.834906863995917, | |
| "grad_norm": 0.6798762083053589, | |
| "learning_rate": 5.518361954428408e-08, | |
| "loss": 0.0065, | |
| "step": 11110 | |
| }, | |
| { | |
| "epoch": 2.837458535340648, | |
| "grad_norm": 0.6563116312026978, | |
| "learning_rate": 5.349602044274404e-08, | |
| "loss": 0.0084, | |
| "step": 11120 | |
| }, | |
| { | |
| "epoch": 2.840010206685379, | |
| "grad_norm": 0.479773610830307, | |
| "learning_rate": 5.1834396190085456e-08, | |
| "loss": 0.0066, | |
| "step": 11130 | |
| }, | |
| { | |
| "epoch": 2.84256187803011, | |
| "grad_norm": 0.5942010283470154, | |
| "learning_rate": 5.019876143432445e-08, | |
| "loss": 0.007, | |
| "step": 11140 | |
| }, | |
| { | |
| "epoch": 2.8451135493748403, | |
| "grad_norm": 1.0438846349716187, | |
| "learning_rate": 4.858913059436809e-08, | |
| "loss": 0.0077, | |
| "step": 11150 | |
| }, | |
| { | |
| "epoch": 2.847665220719571, | |
| "grad_norm": 0.9194878935813904, | |
| "learning_rate": 4.7005517859885493e-08, | |
| "loss": 0.0059, | |
| "step": 11160 | |
| }, | |
| { | |
| "epoch": 2.850216892064302, | |
| "grad_norm": 1.6355482339859009, | |
| "learning_rate": 4.544793719118423e-08, | |
| "loss": 0.007, | |
| "step": 11170 | |
| }, | |
| { | |
| "epoch": 2.852768563409033, | |
| "grad_norm": 0.43507319688796997, | |
| "learning_rate": 4.3916402319087115e-08, | |
| "loss": 0.0071, | |
| "step": 11180 | |
| }, | |
| { | |
| "epoch": 2.855320234753764, | |
| "grad_norm": 0.5874977707862854, | |
| "learning_rate": 4.241092674480962e-08, | |
| "loss": 0.0066, | |
| "step": 11190 | |
| }, | |
| { | |
| "epoch": 2.8578719060984943, | |
| "grad_norm": 0.5329188704490662, | |
| "learning_rate": 4.0931523739842326e-08, | |
| "loss": 0.0054, | |
| "step": 11200 | |
| }, | |
| { | |
| "epoch": 2.860423577443225, | |
| "grad_norm": 0.5945627093315125, | |
| "learning_rate": 3.9478206345834656e-08, | |
| "loss": 0.0072, | |
| "step": 11210 | |
| }, | |
| { | |
| "epoch": 2.862975248787956, | |
| "grad_norm": 0.42057672142982483, | |
| "learning_rate": 3.805098737447732e-08, | |
| "loss": 0.0051, | |
| "step": 11220 | |
| }, | |
| { | |
| "epoch": 2.865526920132687, | |
| "grad_norm": 0.19661393761634827, | |
| "learning_rate": 3.664987940739206e-08, | |
| "loss": 0.006, | |
| "step": 11230 | |
| }, | |
| { | |
| "epoch": 2.868078591477418, | |
| "grad_norm": 0.8557282090187073, | |
| "learning_rate": 3.5274894796018756e-08, | |
| "loss": 0.0056, | |
| "step": 11240 | |
| }, | |
| { | |
| "epoch": 2.8706302628221483, | |
| "grad_norm": 0.543337881565094, | |
| "learning_rate": 3.392604566150781e-08, | |
| "loss": 0.0047, | |
| "step": 11250 | |
| }, | |
| { | |
| "epoch": 2.873181934166879, | |
| "grad_norm": 0.9113593101501465, | |
| "learning_rate": 3.260334389461261e-08, | |
| "loss": 0.0066, | |
| "step": 11260 | |
| }, | |
| { | |
| "epoch": 2.87573360551161, | |
| "grad_norm": 0.442043274641037, | |
| "learning_rate": 3.130680115558493e-08, | |
| "loss": 0.0058, | |
| "step": 11270 | |
| }, | |
| { | |
| "epoch": 2.878285276856341, | |
| "grad_norm": 0.7696757316589355, | |
| "learning_rate": 3.003642887407232e-08, | |
| "loss": 0.0065, | |
| "step": 11280 | |
| }, | |
| { | |
| "epoch": 2.880836948201072, | |
| "grad_norm": 0.9663532376289368, | |
| "learning_rate": 2.879223824901722e-08, | |
| "loss": 0.0066, | |
| "step": 11290 | |
| }, | |
| { | |
| "epoch": 2.8833886195458023, | |
| "grad_norm": 0.40540120005607605, | |
| "learning_rate": 2.7574240248558036e-08, | |
| "loss": 0.0076, | |
| "step": 11300 | |
| }, | |
| { | |
| "epoch": 2.885940290890533, | |
| "grad_norm": 0.1316661834716797, | |
| "learning_rate": 2.6382445609932527e-08, | |
| "loss": 0.0042, | |
| "step": 11310 | |
| }, | |
| { | |
| "epoch": 2.888491962235264, | |
| "grad_norm": 0.5307180285453796, | |
| "learning_rate": 2.5216864839382903e-08, | |
| "loss": 0.005, | |
| "step": 11320 | |
| }, | |
| { | |
| "epoch": 2.891043633579995, | |
| "grad_norm": 0.6010832190513611, | |
| "learning_rate": 2.407750821206456e-08, | |
| "loss": 0.0071, | |
| "step": 11330 | |
| }, | |
| { | |
| "epoch": 2.893595304924726, | |
| "grad_norm": 0.35675570368766785, | |
| "learning_rate": 2.296438577195348e-08, | |
| "loss": 0.0057, | |
| "step": 11340 | |
| }, | |
| { | |
| "epoch": 2.8961469762694563, | |
| "grad_norm": 0.5174592733383179, | |
| "learning_rate": 2.1877507331758973e-08, | |
| "loss": 0.0075, | |
| "step": 11350 | |
| }, | |
| { | |
| "epoch": 2.898698647614187, | |
| "grad_norm": 2.2227537631988525, | |
| "learning_rate": 2.0816882472837084e-08, | |
| "loss": 0.0055, | |
| "step": 11360 | |
| }, | |
| { | |
| "epoch": 2.901250318958918, | |
| "grad_norm": 0.5491740107536316, | |
| "learning_rate": 1.9782520545105987e-08, | |
| "loss": 0.0058, | |
| "step": 11370 | |
| }, | |
| { | |
| "epoch": 2.903801990303649, | |
| "grad_norm": 0.6968376040458679, | |
| "learning_rate": 1.8774430666962383e-08, | |
| "loss": 0.0043, | |
| "step": 11380 | |
| }, | |
| { | |
| "epoch": 2.90635366164838, | |
| "grad_norm": 0.35690832138061523, | |
| "learning_rate": 1.779262172520424e-08, | |
| "loss": 0.0066, | |
| "step": 11390 | |
| }, | |
| { | |
| "epoch": 2.9089053329931103, | |
| "grad_norm": 0.6748278141021729, | |
| "learning_rate": 1.683710237494851e-08, | |
| "loss": 0.007, | |
| "step": 11400 | |
| }, | |
| { | |
| "epoch": 2.911457004337841, | |
| "grad_norm": 0.7347134947776794, | |
| "learning_rate": 1.590788103955787e-08, | |
| "loss": 0.0078, | |
| "step": 11410 | |
| }, | |
| { | |
| "epoch": 2.914008675682572, | |
| "grad_norm": 0.6468586325645447, | |
| "learning_rate": 1.5004965910564772e-08, | |
| "loss": 0.0068, | |
| "step": 11420 | |
| }, | |
| { | |
| "epoch": 2.916560347027303, | |
| "grad_norm": 0.6974029541015625, | |
| "learning_rate": 1.4128364947600503e-08, | |
| "loss": 0.0069, | |
| "step": 11430 | |
| }, | |
| { | |
| "epoch": 2.919112018372034, | |
| "grad_norm": 0.43353763222694397, | |
| "learning_rate": 1.3278085878323576e-08, | |
| "loss": 0.0043, | |
| "step": 11440 | |
| }, | |
| { | |
| "epoch": 2.9216636897167643, | |
| "grad_norm": 0.5207705497741699, | |
| "learning_rate": 1.2454136198353117e-08, | |
| "loss": 0.0058, | |
| "step": 11450 | |
| }, | |
| { | |
| "epoch": 2.924215361061495, | |
| "grad_norm": 0.6456037759780884, | |
| "learning_rate": 1.1656523171200916e-08, | |
| "loss": 0.0063, | |
| "step": 11460 | |
| }, | |
| { | |
| "epoch": 2.926767032406226, | |
| "grad_norm": 0.5657256841659546, | |
| "learning_rate": 1.0885253828209818e-08, | |
| "loss": 0.0053, | |
| "step": 11470 | |
| }, | |
| { | |
| "epoch": 2.929318703750957, | |
| "grad_norm": 0.7830215692520142, | |
| "learning_rate": 1.0140334968489762e-08, | |
| "loss": 0.0053, | |
| "step": 11480 | |
| }, | |
| { | |
| "epoch": 2.931870375095688, | |
| "grad_norm": 0.30181020498275757, | |
| "learning_rate": 9.421773158857838e-09, | |
| "loss": 0.008, | |
| "step": 11490 | |
| }, | |
| { | |
| "epoch": 2.9344220464404183, | |
| "grad_norm": 1.1031672954559326, | |
| "learning_rate": 8.729574733782663e-09, | |
| "loss": 0.0062, | |
| "step": 11500 | |
| }, | |
| { | |
| "epoch": 2.936973717785149, | |
| "grad_norm": 0.9583958983421326, | |
| "learning_rate": 8.063745795325761e-09, | |
| "loss": 0.0062, | |
| "step": 11510 | |
| }, | |
| { | |
| "epoch": 2.93952538912988, | |
| "grad_norm": 0.7751434445381165, | |
| "learning_rate": 7.424292213089268e-09, | |
| "loss": 0.0073, | |
| "step": 11520 | |
| }, | |
| { | |
| "epoch": 2.942077060474611, | |
| "grad_norm": 0.9476333260536194, | |
| "learning_rate": 6.811219624163645e-09, | |
| "loss": 0.0046, | |
| "step": 11530 | |
| }, | |
| { | |
| "epoch": 2.944628731819342, | |
| "grad_norm": 0.5230672359466553, | |
| "learning_rate": 6.224533433079049e-09, | |
| "loss": 0.0068, | |
| "step": 11540 | |
| }, | |
| { | |
| "epoch": 2.9471804031640723, | |
| "grad_norm": 1.2709357738494873, | |
| "learning_rate": 5.664238811755373e-09, | |
| "loss": 0.0067, | |
| "step": 11550 | |
| }, | |
| { | |
| "epoch": 2.949732074508803, | |
| "grad_norm": 0.8602284789085388, | |
| "learning_rate": 5.130340699459946e-09, | |
| "loss": 0.0074, | |
| "step": 11560 | |
| }, | |
| { | |
| "epoch": 2.952283745853534, | |
| "grad_norm": 0.2690794765949249, | |
| "learning_rate": 4.622843802760568e-09, | |
| "loss": 0.0041, | |
| "step": 11570 | |
| }, | |
| { | |
| "epoch": 2.954835417198265, | |
| "grad_norm": 0.5806980133056641, | |
| "learning_rate": 4.141752595486547e-09, | |
| "loss": 0.0059, | |
| "step": 11580 | |
| }, | |
| { | |
| "epoch": 2.957387088542996, | |
| "grad_norm": 0.5464335680007935, | |
| "learning_rate": 3.6870713186883954e-09, | |
| "loss": 0.0073, | |
| "step": 11590 | |
| }, | |
| { | |
| "epoch": 2.9599387598877263, | |
| "grad_norm": 0.3500746190547943, | |
| "learning_rate": 3.258803980599856e-09, | |
| "loss": 0.0058, | |
| "step": 11600 | |
| }, | |
| { | |
| "epoch": 2.962490431232457, | |
| "grad_norm": 1.2113854885101318, | |
| "learning_rate": 2.85695435660327e-09, | |
| "loss": 0.0074, | |
| "step": 11610 | |
| }, | |
| { | |
| "epoch": 2.965042102577188, | |
| "grad_norm": 0.7115603685379028, | |
| "learning_rate": 2.481525989196598e-09, | |
| "loss": 0.0069, | |
| "step": 11620 | |
| }, | |
| { | |
| "epoch": 2.967593773921919, | |
| "grad_norm": 0.3272109031677246, | |
| "learning_rate": 2.1325221879607835e-09, | |
| "loss": 0.0084, | |
| "step": 11630 | |
| }, | |
| { | |
| "epoch": 2.97014544526665, | |
| "grad_norm": 0.6895642280578613, | |
| "learning_rate": 1.8099460295324389e-09, | |
| "loss": 0.0052, | |
| "step": 11640 | |
| }, | |
| { | |
| "epoch": 2.9726971166113803, | |
| "grad_norm": 0.5834820866584778, | |
| "learning_rate": 1.5138003575755343e-09, | |
| "loss": 0.0072, | |
| "step": 11650 | |
| }, | |
| { | |
| "epoch": 2.975248787956111, | |
| "grad_norm": 1.0529255867004395, | |
| "learning_rate": 1.2440877827567531e-09, | |
| "loss": 0.006, | |
| "step": 11660 | |
| }, | |
| { | |
| "epoch": 2.977800459300842, | |
| "grad_norm": 0.41195112466812134, | |
| "learning_rate": 1.000810682721176e-09, | |
| "loss": 0.0082, | |
| "step": 11670 | |
| }, | |
| { | |
| "epoch": 2.980352130645573, | |
| "grad_norm": 0.7077150940895081, | |
| "learning_rate": 7.839712020742962e-10, | |
| "loss": 0.0054, | |
| "step": 11680 | |
| }, | |
| { | |
| "epoch": 2.982903801990304, | |
| "grad_norm": 0.3624398112297058, | |
| "learning_rate": 5.935712523600367e-10, | |
| "loss": 0.005, | |
| "step": 11690 | |
| }, | |
| { | |
| "epoch": 2.9854554733350342, | |
| "grad_norm": 0.6512740254402161, | |
| "learning_rate": 4.296125120454297e-10, | |
| "loss": 0.0066, | |
| "step": 11700 | |
| }, | |
| { | |
| "epoch": 2.988007144679765, | |
| "grad_norm": 0.681259036064148, | |
| "learning_rate": 2.9209642650562805e-10, | |
| "loss": 0.0066, | |
| "step": 11710 | |
| }, | |
| { | |
| "epoch": 2.990558816024496, | |
| "grad_norm": 0.49927040934562683, | |
| "learning_rate": 1.810242080109159e-10, | |
| "loss": 0.0078, | |
| "step": 11720 | |
| }, | |
| { | |
| "epoch": 2.993110487369227, | |
| "grad_norm": 0.5806398391723633, | |
| "learning_rate": 9.639683571638358e-11, | |
| "loss": 0.0061, | |
| "step": 11730 | |
| }, | |
| { | |
| "epoch": 2.995662158713958, | |
| "grad_norm": 0.5190763473510742, | |
| "learning_rate": 3.8215055653267886e-11, | |
| "loss": 0.0069, | |
| "step": 11740 | |
| }, | |
| { | |
| "epoch": 2.9982138300586882, | |
| "grad_norm": 0.4141053855419159, | |
| "learning_rate": 6.4793807212915056e-12, | |
| "loss": 0.0058, | |
| "step": 11750 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "step": 11757, | |
| "total_flos": 6.099787643229307e+18, | |
| "train_loss": 0.04448508482245479, | |
| "train_runtime": 11326.9906, | |
| "train_samples_per_second": 132.859, | |
| "train_steps_per_second": 1.038 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 11757, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 6.099787643229307e+18, | |
| "train_batch_size": 32, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
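
For reference, here is a minimal sketch of how one might load and sanity-check a trainer state like the one above. It assumes the JSON has been saved under the conventional Hugging Face Transformers filename `trainer_state.json` (that path is illustrative, not taken from this log), uses only the Python standard library, and touches only keys that actually appear in this file (`log_history`, `global_step`, and the per-record `loss`, `learning_rate`, `grad_norm`, `step`).

```python
import json
import statistics

# Illustrative path: Transformers writes this file as trainer_state.json
# inside the output/checkpoint directory (an assumption, not from this log).
with open("trainer_state.json") as f:
    state = json.load(f)

# Records with a "loss" key are the per-logging-step entries
# (logging_steps = 10 here); the final summary record carries
# "train_loss" instead, so it is excluded by this filter.
logs = [e for e in state["log_history"] if "loss" in e]

losses = [e["loss"] for e in logs]
grad_norms = [e.get("grad_norm", 0.0) for e in logs]

print(f"logged records : {len(logs)} (global_step = {state['global_step']})")
print(f"loss first/last: {losses[0]:.4f} -> {losses[-1]:.4f}")
print(f"mean loss over last 50 records: {statistics.mean(losses[-50:]):.4f}")
print(f"final learning rate           : {logs[-1]['learning_rate']:.3e}")

# Flag grad-norm outliers relative to the run's median; in this log that
# surfaces, e.g., the isolated ~12.0 reading near step 10310 against an
# otherwise sub-2.0 tail.
median_gn = statistics.median(grad_norms)
spikes = [(e["step"], round(e.get("grad_norm", 0.0), 2))
          for e in logs if e.get("grad_norm", 0.0) > 10 * median_gn]
print(f"{len(spikes)} grad_norm outliers (>10x median {median_gn:.2f}); "
      f"first few: {spikes[:5]}")
```

Consistent with the tail of the log above, such a check should show the loss settling well under 0.01, the learning rate annealed to roughly 6.5e-12 by step 11750, and training stopped cleanly (`should_training_stop` is true with `global_step` equal to `max_steps` = 11757).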