dialect-debiasing-gpt2-medium-translated-pnlogmse-e3-r5_eval-n10.0 / checkpoint-9375 / trainer_state.json
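This file is the Hugging Face `Trainer` state saved with the checkpoint: run-level fields (`global_step`, `epoch`, `eval_steps`) followed by `log_history`, where each entry records `epoch`, `grad_norm`, `learning_rate`, `loss`, and `step` at one logging interval. A minimal sketch of reading it back, assuming the file has been downloaded locally (the path below is illustrative, not part of this repo's tooling):

```python
import json

# Path is an assumption: point it at the downloaded checkpoint directory.
with open("checkpoint-9375/trainer_state.json") as f:
    state = json.load(f)

# Keep only entries that carry a training loss (eval entries, if any, differ).
train_log = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in train_log]
losses = [e["loss"] for e in train_log]

print(f"global_step={state['global_step']}, epoch={state['epoch']}")
print(f"loss: {losses[0]} at step {steps[0]} -> {losses[-1]} at step {steps[-1]}")
```

Running this against the log below would show the loss falling from 22.4733 at step 10 to the 0.03-0.04 range past step 3000, with the learning rate warming up until roughly step 470 and decaying linearly afterwards.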
| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 3.0, | |
| "eval_steps": 500, | |
| "global_step": 9375, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0032, | |
| "grad_norm": 10.0, | |
| "learning_rate": 2.132196162046908e-06, | |
| "loss": 22.4733, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.0064, | |
| "grad_norm": 10.0, | |
| "learning_rate": 4.264392324093816e-06, | |
| "loss": 18.8042, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.0096, | |
| "grad_norm": 10.0, | |
| "learning_rate": 6.396588486140726e-06, | |
| "loss": 19.6935, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.0128, | |
| "grad_norm": 9.999999046325684, | |
| "learning_rate": 8.528784648187633e-06, | |
| "loss": 19.4805, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.016, | |
| "grad_norm": 10.0, | |
| "learning_rate": 1.0660980810234541e-05, | |
| "loss": 17.4573, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.0192, | |
| "grad_norm": 9.999999046325684, | |
| "learning_rate": 1.2793176972281452e-05, | |
| "loss": 18.3153, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.0224, | |
| "grad_norm": 9.999999046325684, | |
| "learning_rate": 1.4925373134328357e-05, | |
| "loss": 16.6767, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.0256, | |
| "grad_norm": 10.0, | |
| "learning_rate": 1.7057569296375266e-05, | |
| "loss": 14.7755, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.0288, | |
| "grad_norm": 9.999999046325684, | |
| "learning_rate": 1.9189765458422178e-05, | |
| "loss": 14.8919, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.032, | |
| "grad_norm": 10.0, | |
| "learning_rate": 2.1321961620469083e-05, | |
| "loss": 14.1544, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.0352, | |
| "grad_norm": 9.999999046325684, | |
| "learning_rate": 2.345415778251599e-05, | |
| "loss": 11.5455, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.0384, | |
| "grad_norm": 9.999998092651367, | |
| "learning_rate": 2.5586353944562904e-05, | |
| "loss": 12.9077, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.0416, | |
| "grad_norm": 10.0, | |
| "learning_rate": 2.771855010660981e-05, | |
| "loss": 10.382, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.0448, | |
| "grad_norm": 10.0, | |
| "learning_rate": 2.9850746268656714e-05, | |
| "loss": 9.8282, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.048, | |
| "grad_norm": 10.0, | |
| "learning_rate": 3.1982942430703626e-05, | |
| "loss": 8.4552, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.0512, | |
| "grad_norm": 9.999999046325684, | |
| "learning_rate": 3.411513859275053e-05, | |
| "loss": 7.5207, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.0544, | |
| "grad_norm": 9.999998092651367, | |
| "learning_rate": 3.624733475479744e-05, | |
| "loss": 6.3137, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.0576, | |
| "grad_norm": 10.0, | |
| "learning_rate": 3.8379530916844355e-05, | |
| "loss": 6.1622, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.0608, | |
| "grad_norm": 10.0, | |
| "learning_rate": 4.051172707889126e-05, | |
| "loss": 5.1923, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.064, | |
| "grad_norm": 9.999999046325684, | |
| "learning_rate": 4.2643923240938166e-05, | |
| "loss": 4.6753, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.0672, | |
| "grad_norm": 9.999999046325684, | |
| "learning_rate": 4.477611940298508e-05, | |
| "loss": 4.0622, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.0704, | |
| "grad_norm": 9.999999046325684, | |
| "learning_rate": 4.690831556503198e-05, | |
| "loss": 3.3972, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.0736, | |
| "grad_norm": 9.999999046325684, | |
| "learning_rate": 4.904051172707889e-05, | |
| "loss": 3.801, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.0768, | |
| "grad_norm": 9.999999046325684, | |
| "learning_rate": 5.117270788912581e-05, | |
| "loss": 3.2702, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "grad_norm": 7.396614074707031, | |
| "learning_rate": 5.330490405117271e-05, | |
| "loss": 3.1308, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.0832, | |
| "grad_norm": 9.999999046325684, | |
| "learning_rate": 5.543710021321962e-05, | |
| "loss": 3.002, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.0864, | |
| "grad_norm": 9.55270767211914, | |
| "learning_rate": 5.756929637526652e-05, | |
| "loss": 2.9217, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.0896, | |
| "grad_norm": 7.786552906036377, | |
| "learning_rate": 5.970149253731343e-05, | |
| "loss": 2.6506, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.0928, | |
| "grad_norm": 6.918946743011475, | |
| "learning_rate": 6.183368869936035e-05, | |
| "loss": 2.4098, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.096, | |
| "grad_norm": 7.186624050140381, | |
| "learning_rate": 6.396588486140725e-05, | |
| "loss": 2.5658, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.0992, | |
| "grad_norm": 8.71078109741211, | |
| "learning_rate": 6.609808102345416e-05, | |
| "loss": 2.457, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.1024, | |
| "grad_norm": 8.189196586608887, | |
| "learning_rate": 6.823027718550106e-05, | |
| "loss": 2.3824, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.1056, | |
| "grad_norm": 8.256540298461914, | |
| "learning_rate": 7.036247334754798e-05, | |
| "loss": 2.1404, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.1088, | |
| "grad_norm": 7.275091648101807, | |
| "learning_rate": 7.249466950959489e-05, | |
| "loss": 2.0718, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.112, | |
| "grad_norm": 6.85682487487793, | |
| "learning_rate": 7.46268656716418e-05, | |
| "loss": 1.8829, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.1152, | |
| "grad_norm": 7.219293594360352, | |
| "learning_rate": 7.675906183368871e-05, | |
| "loss": 1.978, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.1184, | |
| "grad_norm": 5.659005641937256, | |
| "learning_rate": 7.889125799573562e-05, | |
| "loss": 1.8331, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.1216, | |
| "grad_norm": 8.212859153747559, | |
| "learning_rate": 8.102345415778252e-05, | |
| "loss": 1.8214, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.1248, | |
| "grad_norm": 6.701785087585449, | |
| "learning_rate": 8.315565031982943e-05, | |
| "loss": 1.7657, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.128, | |
| "grad_norm": 6.549901485443115, | |
| "learning_rate": 8.528784648187633e-05, | |
| "loss": 1.5835, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.1312, | |
| "grad_norm": 6.749208927154541, | |
| "learning_rate": 8.742004264392325e-05, | |
| "loss": 1.6627, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.1344, | |
| "grad_norm": 6.9206342697143555, | |
| "learning_rate": 8.955223880597016e-05, | |
| "loss": 1.4826, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.1376, | |
| "grad_norm": 6.185229778289795, | |
| "learning_rate": 9.168443496801706e-05, | |
| "loss": 1.565, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.1408, | |
| "grad_norm": 5.636767864227295, | |
| "learning_rate": 9.381663113006397e-05, | |
| "loss": 1.5192, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.144, | |
| "grad_norm": 4.5931596755981445, | |
| "learning_rate": 9.594882729211087e-05, | |
| "loss": 1.5136, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.1472, | |
| "grad_norm": 5.590404987335205, | |
| "learning_rate": 9.808102345415778e-05, | |
| "loss": 1.3408, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.1504, | |
| "grad_norm": 5.479445457458496, | |
| "learning_rate": 9.998877161464182e-05, | |
| "loss": 1.363, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.1536, | |
| "grad_norm": 5.011209964752197, | |
| "learning_rate": 9.987648776105997e-05, | |
| "loss": 1.3165, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.1568, | |
| "grad_norm": 5.077134609222412, | |
| "learning_rate": 9.97642039074781e-05, | |
| "loss": 1.1945, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "grad_norm": 5.014697551727295, | |
| "learning_rate": 9.965192005389625e-05, | |
| "loss": 1.3948, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.1632, | |
| "grad_norm": 5.540820598602295, | |
| "learning_rate": 9.95396362003144e-05, | |
| "loss": 1.2603, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.1664, | |
| "grad_norm": 4.636078834533691, | |
| "learning_rate": 9.942735234673256e-05, | |
| "loss": 1.2296, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.1696, | |
| "grad_norm": 3.649627208709717, | |
| "learning_rate": 9.931506849315069e-05, | |
| "loss": 1.1182, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.1728, | |
| "grad_norm": 6.329670429229736, | |
| "learning_rate": 9.920278463956883e-05, | |
| "loss": 1.1338, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.176, | |
| "grad_norm": 4.523379802703857, | |
| "learning_rate": 9.909050078598698e-05, | |
| "loss": 1.1937, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.1792, | |
| "grad_norm": 4.41312837600708, | |
| "learning_rate": 9.897821693240512e-05, | |
| "loss": 1.1864, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.1824, | |
| "grad_norm": 4.43649959564209, | |
| "learning_rate": 9.886593307882327e-05, | |
| "loss": 0.9985, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.1856, | |
| "grad_norm": 5.301065921783447, | |
| "learning_rate": 9.875364922524142e-05, | |
| "loss": 1.0392, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.1888, | |
| "grad_norm": 4.444700717926025, | |
| "learning_rate": 9.864136537165956e-05, | |
| "loss": 1.0461, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.192, | |
| "grad_norm": 5.166758060455322, | |
| "learning_rate": 9.852908151807771e-05, | |
| "loss": 1.091, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.1952, | |
| "grad_norm": 4.702708721160889, | |
| "learning_rate": 9.841679766449586e-05, | |
| "loss": 1.0124, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.1984, | |
| "grad_norm": 4.754012584686279, | |
| "learning_rate": 9.8304513810914e-05, | |
| "loss": 0.9061, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.2016, | |
| "grad_norm": 5.569154262542725, | |
| "learning_rate": 9.819222995733213e-05, | |
| "loss": 0.9309, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.2048, | |
| "grad_norm": 5.9631028175354, | |
| "learning_rate": 9.807994610375028e-05, | |
| "loss": 1.0116, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.208, | |
| "grad_norm": 4.581701755523682, | |
| "learning_rate": 9.796766225016843e-05, | |
| "loss": 1.0095, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.2112, | |
| "grad_norm": 6.390707969665527, | |
| "learning_rate": 9.785537839658657e-05, | |
| "loss": 0.9207, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.2144, | |
| "grad_norm": 6.774291038513184, | |
| "learning_rate": 9.774309454300472e-05, | |
| "loss": 0.9789, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.2176, | |
| "grad_norm": 4.148468494415283, | |
| "learning_rate": 9.763081068942287e-05, | |
| "loss": 0.8843, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.2208, | |
| "grad_norm": 5.775865077972412, | |
| "learning_rate": 9.751852683584101e-05, | |
| "loss": 0.8, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.224, | |
| "grad_norm": 4.456503391265869, | |
| "learning_rate": 9.740624298225916e-05, | |
| "loss": 0.841, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.2272, | |
| "grad_norm": 5.763522148132324, | |
| "learning_rate": 9.729395912867731e-05, | |
| "loss": 0.8595, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.2304, | |
| "grad_norm": 4.295327663421631, | |
| "learning_rate": 9.718167527509545e-05, | |
| "loss": 0.7896, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.2336, | |
| "grad_norm": 5.505441665649414, | |
| "learning_rate": 9.706939142151358e-05, | |
| "loss": 0.7769, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.2368, | |
| "grad_norm": 4.719364166259766, | |
| "learning_rate": 9.695710756793174e-05, | |
| "loss": 0.793, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "grad_norm": 5.5545878410339355, | |
| "learning_rate": 9.684482371434989e-05, | |
| "loss": 0.7089, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.2432, | |
| "grad_norm": 3.39532732963562, | |
| "learning_rate": 9.673253986076802e-05, | |
| "loss": 0.7609, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.2464, | |
| "grad_norm": 5.826759338378906, | |
| "learning_rate": 9.662025600718617e-05, | |
| "loss": 0.7576, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.2496, | |
| "grad_norm": 8.494682312011719, | |
| "learning_rate": 9.650797215360432e-05, | |
| "loss": 0.7458, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.2528, | |
| "grad_norm": 5.331541538238525, | |
| "learning_rate": 9.639568830002246e-05, | |
| "loss": 0.7262, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.256, | |
| "grad_norm": 4.471105098724365, | |
| "learning_rate": 9.628340444644061e-05, | |
| "loss": 0.7239, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.2592, | |
| "grad_norm": 3.5750572681427, | |
| "learning_rate": 9.617112059285875e-05, | |
| "loss": 0.5949, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.2624, | |
| "grad_norm": 3.5053205490112305, | |
| "learning_rate": 9.605883673927689e-05, | |
| "loss": 0.6651, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.2656, | |
| "grad_norm": 4.1753950119018555, | |
| "learning_rate": 9.594655288569504e-05, | |
| "loss": 0.6412, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.2688, | |
| "grad_norm": 3.464433431625366, | |
| "learning_rate": 9.583426903211319e-05, | |
| "loss": 0.6343, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.272, | |
| "grad_norm": 5.177412986755371, | |
| "learning_rate": 9.572198517853134e-05, | |
| "loss": 0.6475, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.2752, | |
| "grad_norm": 8.84880256652832, | |
| "learning_rate": 9.560970132494948e-05, | |
| "loss": 0.7129, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.2784, | |
| "grad_norm": 3.8485910892486572, | |
| "learning_rate": 9.549741747136763e-05, | |
| "loss": 0.6157, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.2816, | |
| "grad_norm": 3.6554110050201416, | |
| "learning_rate": 9.538513361778578e-05, | |
| "loss": 0.5719, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.2848, | |
| "grad_norm": 4.722749710083008, | |
| "learning_rate": 9.527284976420391e-05, | |
| "loss": 0.5468, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.288, | |
| "grad_norm": 4.25799036026001, | |
| "learning_rate": 9.516056591062205e-05, | |
| "loss": 0.5503, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.2912, | |
| "grad_norm": 3.3555567264556885, | |
| "learning_rate": 9.50482820570402e-05, | |
| "loss": 0.5266, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.2944, | |
| "grad_norm": 3.671443223953247, | |
| "learning_rate": 9.493599820345834e-05, | |
| "loss": 0.4832, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.2976, | |
| "grad_norm": 7.475429058074951, | |
| "learning_rate": 9.482371434987649e-05, | |
| "loss": 0.4516, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.3008, | |
| "grad_norm": 3.6318516731262207, | |
| "learning_rate": 9.471143049629464e-05, | |
| "loss": 0.4648, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.304, | |
| "grad_norm": 3.793696403503418, | |
| "learning_rate": 9.459914664271278e-05, | |
| "loss": 0.4942, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.3072, | |
| "grad_norm": 3.818516969680786, | |
| "learning_rate": 9.448686278913093e-05, | |
| "loss": 0.4239, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.3104, | |
| "grad_norm": 3.344341993331909, | |
| "learning_rate": 9.437457893554908e-05, | |
| "loss": 0.4834, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.3136, | |
| "grad_norm": 3.5696985721588135, | |
| "learning_rate": 9.426229508196722e-05, | |
| "loss": 0.457, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 0.3168, | |
| "grad_norm": 3.9831511974334717, | |
| "learning_rate": 9.415001122838537e-05, | |
| "loss": 0.433, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "grad_norm": 3.6147892475128174, | |
| "learning_rate": 9.40377273748035e-05, | |
| "loss": 0.4467, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.3232, | |
| "grad_norm": 2.8203067779541016, | |
| "learning_rate": 9.392544352122165e-05, | |
| "loss": 0.4318, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 0.3264, | |
| "grad_norm": 3.909710645675659, | |
| "learning_rate": 9.381315966763979e-05, | |
| "loss": 0.4278, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 0.3296, | |
| "grad_norm": 3.8672358989715576, | |
| "learning_rate": 9.370087581405794e-05, | |
| "loss": 0.4164, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 0.3328, | |
| "grad_norm": 3.101433277130127, | |
| "learning_rate": 9.358859196047609e-05, | |
| "loss": 0.4106, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 0.336, | |
| "grad_norm": 3.1750829219818115, | |
| "learning_rate": 9.347630810689423e-05, | |
| "loss": 0.3947, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 0.3392, | |
| "grad_norm": 4.588505268096924, | |
| "learning_rate": 9.336402425331238e-05, | |
| "loss": 0.3768, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 0.3424, | |
| "grad_norm": 5.5496439933776855, | |
| "learning_rate": 9.325174039973053e-05, | |
| "loss": 0.3714, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 0.3456, | |
| "grad_norm": 5.943279266357422, | |
| "learning_rate": 9.313945654614867e-05, | |
| "loss": 0.4012, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 0.3488, | |
| "grad_norm": 5.788244247436523, | |
| "learning_rate": 9.30271726925668e-05, | |
| "loss": 0.3973, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 0.352, | |
| "grad_norm": 3.6481878757476807, | |
| "learning_rate": 9.291488883898496e-05, | |
| "loss": 0.3485, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.3552, | |
| "grad_norm": 3.573046922683716, | |
| "learning_rate": 9.280260498540311e-05, | |
| "loss": 0.3547, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 0.3584, | |
| "grad_norm": 3.5380170345306396, | |
| "learning_rate": 9.269032113182124e-05, | |
| "loss": 0.3657, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 0.3616, | |
| "grad_norm": 3.2709784507751465, | |
| "learning_rate": 9.25780372782394e-05, | |
| "loss": 0.3118, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 0.3648, | |
| "grad_norm": 4.098074436187744, | |
| "learning_rate": 9.246575342465755e-05, | |
| "loss": 0.3273, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 0.368, | |
| "grad_norm": 4.1382293701171875, | |
| "learning_rate": 9.235346957107568e-05, | |
| "loss": 0.3354, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 0.3712, | |
| "grad_norm": 3.4519102573394775, | |
| "learning_rate": 9.224118571749383e-05, | |
| "loss": 0.3382, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 0.3744, | |
| "grad_norm": 2.8919425010681152, | |
| "learning_rate": 9.212890186391197e-05, | |
| "loss": 0.291, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 0.3776, | |
| "grad_norm": 2.495048761367798, | |
| "learning_rate": 9.201661801033011e-05, | |
| "loss": 0.2636, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 0.3808, | |
| "grad_norm": 2.9462411403656006, | |
| "learning_rate": 9.190433415674826e-05, | |
| "loss": 0.2725, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 0.384, | |
| "grad_norm": 2.4778778553009033, | |
| "learning_rate": 9.179205030316641e-05, | |
| "loss": 0.2362, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.3872, | |
| "grad_norm": 3.9152615070343018, | |
| "learning_rate": 9.167976644958456e-05, | |
| "loss": 0.2528, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 0.3904, | |
| "grad_norm": 2.1143078804016113, | |
| "learning_rate": 9.15674825960027e-05, | |
| "loss": 0.2317, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 0.3936, | |
| "grad_norm": 3.8258979320526123, | |
| "learning_rate": 9.145519874242085e-05, | |
| "loss": 0.2333, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 0.3968, | |
| "grad_norm": 2.713571786880493, | |
| "learning_rate": 9.1342914888839e-05, | |
| "loss": 0.2363, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.6469792127609253, | |
| "learning_rate": 9.123063103525713e-05, | |
| "loss": 0.2153, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 0.4032, | |
| "grad_norm": 2.474316120147705, | |
| "learning_rate": 9.111834718167527e-05, | |
| "loss": 0.2182, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 0.4064, | |
| "grad_norm": 3.3467183113098145, | |
| "learning_rate": 9.100606332809342e-05, | |
| "loss": 0.2161, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 0.4096, | |
| "grad_norm": 2.516400098800659, | |
| "learning_rate": 9.089377947451156e-05, | |
| "loss": 0.2209, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 0.4128, | |
| "grad_norm": 4.334921360015869, | |
| "learning_rate": 9.078149562092971e-05, | |
| "loss": 0.2379, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 0.416, | |
| "grad_norm": 2.2681803703308105, | |
| "learning_rate": 9.066921176734786e-05, | |
| "loss": 0.187, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 0.4192, | |
| "grad_norm": 2.2555055618286133, | |
| "learning_rate": 9.055692791376601e-05, | |
| "loss": 0.1907, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 0.4224, | |
| "grad_norm": 1.9420446157455444, | |
| "learning_rate": 9.044464406018415e-05, | |
| "loss": 0.1898, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 0.4256, | |
| "grad_norm": 2.260636806488037, | |
| "learning_rate": 9.03323602066023e-05, | |
| "loss": 0.2024, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 0.4288, | |
| "grad_norm": 2.1204946041107178, | |
| "learning_rate": 9.022007635302045e-05, | |
| "loss": 0.2073, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 0.432, | |
| "grad_norm": 1.8246486186981201, | |
| "learning_rate": 9.010779249943859e-05, | |
| "loss": 0.1872, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 0.4352, | |
| "grad_norm": 1.934187412261963, | |
| "learning_rate": 8.999550864585672e-05, | |
| "loss": 0.2956, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 0.4384, | |
| "grad_norm": 2.9299495220184326, | |
| "learning_rate": 8.988322479227488e-05, | |
| "loss": 0.1796, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 0.4416, | |
| "grad_norm": 1.7090952396392822, | |
| "learning_rate": 8.977094093869301e-05, | |
| "loss": 0.1673, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 0.4448, | |
| "grad_norm": 2.5589635372161865, | |
| "learning_rate": 8.965865708511116e-05, | |
| "loss": 0.1633, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 0.448, | |
| "grad_norm": 1.7202268838882446, | |
| "learning_rate": 8.954637323152931e-05, | |
| "loss": 0.1699, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 0.4512, | |
| "grad_norm": 1.5926618576049805, | |
| "learning_rate": 8.943408937794746e-05, | |
| "loss": 0.158, | |
| "step": 1410 | |
| }, | |
| { | |
| "epoch": 0.4544, | |
| "grad_norm": 2.200212240219116, | |
| "learning_rate": 8.93218055243656e-05, | |
| "loss": 0.156, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 0.4576, | |
| "grad_norm": 2.595520257949829, | |
| "learning_rate": 8.920952167078375e-05, | |
| "loss": 0.18, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 0.4608, | |
| "grad_norm": 1.519026517868042, | |
| "learning_rate": 8.909723781720189e-05, | |
| "loss": 0.1581, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 0.464, | |
| "grad_norm": 1.9958924055099487, | |
| "learning_rate": 8.898495396362003e-05, | |
| "loss": 0.1416, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 0.4672, | |
| "grad_norm": 1.5572760105133057, | |
| "learning_rate": 8.887267011003818e-05, | |
| "loss": 0.1333, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 0.4704, | |
| "grad_norm": 2.643303155899048, | |
| "learning_rate": 8.876038625645633e-05, | |
| "loss": 0.1392, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 0.4736, | |
| "grad_norm": 3.6165688037872314, | |
| "learning_rate": 8.864810240287447e-05, | |
| "loss": 0.1538, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 0.4768, | |
| "grad_norm": 1.6774343252182007, | |
| "learning_rate": 8.853581854929262e-05, | |
| "loss": 0.131, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 2.6049036979675293, | |
| "learning_rate": 8.842353469571077e-05, | |
| "loss": 0.1383, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 0.4832, | |
| "grad_norm": 2.1369471549987793, | |
| "learning_rate": 8.83112508421289e-05, | |
| "loss": 0.1415, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 0.4864, | |
| "grad_norm": 1.5447083711624146, | |
| "learning_rate": 8.819896698854705e-05, | |
| "loss": 0.1329, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 0.4896, | |
| "grad_norm": 1.4356424808502197, | |
| "learning_rate": 8.808668313496519e-05, | |
| "loss": 0.1341, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 0.4928, | |
| "grad_norm": 1.2603282928466797, | |
| "learning_rate": 8.797439928138334e-05, | |
| "loss": 0.1302, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 0.496, | |
| "grad_norm": 0.9964632391929626, | |
| "learning_rate": 8.786211542780148e-05, | |
| "loss": 0.1226, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 0.4992, | |
| "grad_norm": 1.3048876523971558, | |
| "learning_rate": 8.774983157421963e-05, | |
| "loss": 0.1266, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 0.5024, | |
| "grad_norm": 1.3873602151870728, | |
| "learning_rate": 8.763754772063778e-05, | |
| "loss": 0.1246, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 0.5056, | |
| "grad_norm": 1.5398775339126587, | |
| "learning_rate": 8.752526386705592e-05, | |
| "loss": 0.1141, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 0.5088, | |
| "grad_norm": 1.4708116054534912, | |
| "learning_rate": 8.741298001347407e-05, | |
| "loss": 0.1144, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 0.512, | |
| "grad_norm": 1.2487561702728271, | |
| "learning_rate": 8.730069615989222e-05, | |
| "loss": 0.1173, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 0.5152, | |
| "grad_norm": 1.6016883850097656, | |
| "learning_rate": 8.718841230631036e-05, | |
| "loss": 0.1204, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 0.5184, | |
| "grad_norm": 1.900942087173462, | |
| "learning_rate": 8.70761284527285e-05, | |
| "loss": 0.1387, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 0.5216, | |
| "grad_norm": 1.212538242340088, | |
| "learning_rate": 8.696384459914664e-05, | |
| "loss": 0.1194, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 0.5248, | |
| "grad_norm": 1.764711856842041, | |
| "learning_rate": 8.68515607455648e-05, | |
| "loss": 0.113, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 0.528, | |
| "grad_norm": 1.7841476202011108, | |
| "learning_rate": 8.673927689198293e-05, | |
| "loss": 0.1222, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 0.5312, | |
| "grad_norm": 1.2709699869155884, | |
| "learning_rate": 8.662699303840108e-05, | |
| "loss": 0.1036, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 0.5344, | |
| "grad_norm": 1.5193073749542236, | |
| "learning_rate": 8.651470918481923e-05, | |
| "loss": 0.1226, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 0.5376, | |
| "grad_norm": 1.2008721828460693, | |
| "learning_rate": 8.640242533123737e-05, | |
| "loss": 0.1002, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 0.5408, | |
| "grad_norm": 1.2574729919433594, | |
| "learning_rate": 8.629014147765552e-05, | |
| "loss": 0.1105, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 0.544, | |
| "grad_norm": 1.0203262567520142, | |
| "learning_rate": 8.617785762407367e-05, | |
| "loss": 0.0945, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.5472, | |
| "grad_norm": 1.9873650074005127, | |
| "learning_rate": 8.606557377049181e-05, | |
| "loss": 0.0974, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 0.5504, | |
| "grad_norm": 1.240451693534851, | |
| "learning_rate": 8.595328991690995e-05, | |
| "loss": 0.0985, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 0.5536, | |
| "grad_norm": 1.0900557041168213, | |
| "learning_rate": 8.58410060633281e-05, | |
| "loss": 0.1049, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 0.5568, | |
| "grad_norm": 1.9164273738861084, | |
| "learning_rate": 8.572872220974623e-05, | |
| "loss": 0.1059, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.5844411849975586, | |
| "learning_rate": 8.561643835616438e-05, | |
| "loss": 0.1002, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.5632, | |
| "grad_norm": 0.956265389919281, | |
| "learning_rate": 8.550415450258253e-05, | |
| "loss": 0.1018, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 0.5664, | |
| "grad_norm": 1.0219817161560059, | |
| "learning_rate": 8.539187064900069e-05, | |
| "loss": 0.0953, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 0.5696, | |
| "grad_norm": 1.30150306224823, | |
| "learning_rate": 8.527958679541882e-05, | |
| "loss": 0.101, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 0.5728, | |
| "grad_norm": 1.5054088830947876, | |
| "learning_rate": 8.516730294183697e-05, | |
| "loss": 0.1103, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 0.576, | |
| "grad_norm": 1.1341224908828735, | |
| "learning_rate": 8.505501908825511e-05, | |
| "loss": 0.1049, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.5792, | |
| "grad_norm": 1.409787654876709, | |
| "learning_rate": 8.494273523467325e-05, | |
| "loss": 0.0854, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 0.5824, | |
| "grad_norm": 1.009181022644043, | |
| "learning_rate": 8.48304513810914e-05, | |
| "loss": 0.0837, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 0.5856, | |
| "grad_norm": 1.3304848670959473, | |
| "learning_rate": 8.471816752750955e-05, | |
| "loss": 0.0871, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 0.5888, | |
| "grad_norm": 1.0802825689315796, | |
| "learning_rate": 8.460588367392769e-05, | |
| "loss": 0.0841, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 0.592, | |
| "grad_norm": 1.7058296203613281, | |
| "learning_rate": 8.449359982034584e-05, | |
| "loss": 0.0909, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 0.5952, | |
| "grad_norm": 1.1850258111953735, | |
| "learning_rate": 8.438131596676399e-05, | |
| "loss": 0.0826, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 0.5984, | |
| "grad_norm": 1.1635477542877197, | |
| "learning_rate": 8.426903211318214e-05, | |
| "loss": 0.0879, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 0.6016, | |
| "grad_norm": 1.798109769821167, | |
| "learning_rate": 8.415674825960028e-05, | |
| "loss": 0.0934, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 0.6048, | |
| "grad_norm": 1.6164915561676025, | |
| "learning_rate": 8.404446440601843e-05, | |
| "loss": 0.083, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 0.608, | |
| "grad_norm": 1.255283236503601, | |
| "learning_rate": 8.393218055243656e-05, | |
| "loss": 0.0927, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 0.6112, | |
| "grad_norm": 2.333589792251587, | |
| "learning_rate": 8.38198966988547e-05, | |
| "loss": 0.0935, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 0.6144, | |
| "grad_norm": 1.5682858228683472, | |
| "learning_rate": 8.370761284527285e-05, | |
| "loss": 0.0893, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 0.6176, | |
| "grad_norm": 1.0004023313522339, | |
| "learning_rate": 8.3595328991691e-05, | |
| "loss": 0.081, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 0.6208, | |
| "grad_norm": 1.555801272392273, | |
| "learning_rate": 8.348304513810914e-05, | |
| "loss": 0.0779, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 0.624, | |
| "grad_norm": 1.1321308612823486, | |
| "learning_rate": 8.337076128452729e-05, | |
| "loss": 0.084, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 0.6272, | |
| "grad_norm": 1.7550809383392334, | |
| "learning_rate": 8.325847743094544e-05, | |
| "loss": 0.0842, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 0.6304, | |
| "grad_norm": 1.1584142446517944, | |
| "learning_rate": 8.314619357736358e-05, | |
| "loss": 0.0795, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 0.6336, | |
| "grad_norm": 1.3285061120986938, | |
| "learning_rate": 8.303390972378173e-05, | |
| "loss": 0.0852, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 0.6368, | |
| "grad_norm": 1.457748532295227, | |
| "learning_rate": 8.292162587019986e-05, | |
| "loss": 0.078, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.4362109899520874, | |
| "learning_rate": 8.280934201661802e-05, | |
| "loss": 0.0806, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.6432, | |
| "grad_norm": 1.0163004398345947, | |
| "learning_rate": 8.269705816303615e-05, | |
| "loss": 0.079, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 0.6464, | |
| "grad_norm": 1.5234326124191284, | |
| "learning_rate": 8.25847743094543e-05, | |
| "loss": 0.0769, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 0.6496, | |
| "grad_norm": 1.0777095556259155, | |
| "learning_rate": 8.247249045587245e-05, | |
| "loss": 0.0764, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 0.6528, | |
| "grad_norm": 0.8769171833992004, | |
| "learning_rate": 8.236020660229059e-05, | |
| "loss": 0.0786, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 0.656, | |
| "grad_norm": 1.2783554792404175, | |
| "learning_rate": 8.224792274870874e-05, | |
| "loss": 0.0735, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 0.6592, | |
| "grad_norm": 1.2532243728637695, | |
| "learning_rate": 8.213563889512689e-05, | |
| "loss": 0.0821, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 0.6624, | |
| "grad_norm": 1.2116466760635376, | |
| "learning_rate": 8.202335504154503e-05, | |
| "loss": 0.071, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 0.6656, | |
| "grad_norm": 1.4647639989852905, | |
| "learning_rate": 8.191107118796317e-05, | |
| "loss": 0.0741, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 0.6688, | |
| "grad_norm": 0.9243631958961487, | |
| "learning_rate": 8.179878733438132e-05, | |
| "loss": 0.064, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 0.672, | |
| "grad_norm": 1.31109619140625, | |
| "learning_rate": 8.168650348079947e-05, | |
| "loss": 0.07, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 0.6752, | |
| "grad_norm": 1.062506079673767, | |
| "learning_rate": 8.15742196272176e-05, | |
| "loss": 0.0769, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 0.6784, | |
| "grad_norm": 1.0503607988357544, | |
| "learning_rate": 8.146193577363576e-05, | |
| "loss": 0.0723, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 0.6816, | |
| "grad_norm": 1.5333271026611328, | |
| "learning_rate": 8.13496519200539e-05, | |
| "loss": 0.0721, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 0.6848, | |
| "grad_norm": 2.273676872253418, | |
| "learning_rate": 8.123736806647204e-05, | |
| "loss": 0.0741, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 0.688, | |
| "grad_norm": 1.1792391538619995, | |
| "learning_rate": 8.11250842128902e-05, | |
| "loss": 0.0736, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 0.6912, | |
| "grad_norm": 1.439655065536499, | |
| "learning_rate": 8.101280035930835e-05, | |
| "loss": 0.0603, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 0.6944, | |
| "grad_norm": 1.2355414628982544, | |
| "learning_rate": 8.090051650572648e-05, | |
| "loss": 0.0701, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 0.6976, | |
| "grad_norm": 1.1253645420074463, | |
| "learning_rate": 8.078823265214462e-05, | |
| "loss": 0.0785, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 0.7008, | |
| "grad_norm": 0.7351701259613037, | |
| "learning_rate": 8.067594879856277e-05, | |
| "loss": 0.0728, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 0.704, | |
| "grad_norm": 0.9344348311424255, | |
| "learning_rate": 8.056366494498092e-05, | |
| "loss": 0.0721, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 0.7072, | |
| "grad_norm": 1.5844933986663818, | |
| "learning_rate": 8.045138109139906e-05, | |
| "loss": 0.0805, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 0.7104, | |
| "grad_norm": 1.2932401895523071, | |
| "learning_rate": 8.033909723781721e-05, | |
| "loss": 0.0774, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 0.7136, | |
| "grad_norm": 0.8861138820648193, | |
| "learning_rate": 8.022681338423536e-05, | |
| "loss": 0.0657, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 0.7168, | |
| "grad_norm": 1.2690577507019043, | |
| "learning_rate": 8.01145295306535e-05, | |
| "loss": 0.0687, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 1.067091941833496, | |
| "learning_rate": 8.000224567707165e-05, | |
| "loss": 0.0679, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 0.7232, | |
| "grad_norm": 0.9184733629226685, | |
| "learning_rate": 7.988996182348978e-05, | |
| "loss": 0.065, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 0.7264, | |
| "grad_norm": 0.9760239720344543, | |
| "learning_rate": 7.977767796990792e-05, | |
| "loss": 0.0649, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 0.7296, | |
| "grad_norm": 0.973116934299469, | |
| "learning_rate": 7.966539411632607e-05, | |
| "loss": 0.0605, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 0.7328, | |
| "grad_norm": 1.7744946479797363, | |
| "learning_rate": 7.955311026274422e-05, | |
| "loss": 0.0656, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 0.736, | |
| "grad_norm": 1.1688623428344727, | |
| "learning_rate": 7.944082640916236e-05, | |
| "loss": 0.0651, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 0.7392, | |
| "grad_norm": 0.7396211624145508, | |
| "learning_rate": 7.932854255558051e-05, | |
| "loss": 0.0625, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 0.7424, | |
| "grad_norm": 0.9630234837532043, | |
| "learning_rate": 7.921625870199866e-05, | |
| "loss": 0.0648, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 0.7456, | |
| "grad_norm": 1.169124960899353, | |
| "learning_rate": 7.910397484841681e-05, | |
| "loss": 0.0618, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 0.7488, | |
| "grad_norm": 1.0389286279678345, | |
| "learning_rate": 7.899169099483495e-05, | |
| "loss": 0.0712, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 0.752, | |
| "grad_norm": 1.3336840867996216, | |
| "learning_rate": 7.887940714125309e-05, | |
| "loss": 0.0688, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 0.7552, | |
| "grad_norm": 0.733096718788147, | |
| "learning_rate": 7.876712328767124e-05, | |
| "loss": 0.067, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 0.7584, | |
| "grad_norm": 1.309152364730835, | |
| "learning_rate": 7.865483943408937e-05, | |
| "loss": 0.061, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 0.7616, | |
| "grad_norm": 1.0855865478515625, | |
| "learning_rate": 7.854255558050752e-05, | |
| "loss": 0.0634, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 0.7648, | |
| "grad_norm": 0.8440483212471008, | |
| "learning_rate": 7.843027172692568e-05, | |
| "loss": 0.0597, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 0.768, | |
| "grad_norm": 1.2189304828643799, | |
| "learning_rate": 7.831798787334381e-05, | |
| "loss": 0.0589, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 0.7712, | |
| "grad_norm": 1.4143438339233398, | |
| "learning_rate": 7.820570401976196e-05, | |
| "loss": 0.0615, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 0.7744, | |
| "grad_norm": 1.084116816520691, | |
| "learning_rate": 7.809342016618011e-05, | |
| "loss": 0.0583, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 0.7776, | |
| "grad_norm": 1.9372988939285278, | |
| "learning_rate": 7.798113631259825e-05, | |
| "loss": 0.0567, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 0.7808, | |
| "grad_norm": 1.1741753816604614, | |
| "learning_rate": 7.78688524590164e-05, | |
| "loss": 0.0574, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 0.784, | |
| "grad_norm": 0.9192144870758057, | |
| "learning_rate": 7.775656860543454e-05, | |
| "loss": 0.058, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 0.7872, | |
| "grad_norm": 1.0589548349380493, | |
| "learning_rate": 7.764428475185269e-05, | |
| "loss": 0.0571, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 0.7904, | |
| "grad_norm": 1.1447162628173828, | |
| "learning_rate": 7.753200089827083e-05, | |
| "loss": 0.0586, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 0.7936, | |
| "grad_norm": 0.9086571335792542, | |
| "learning_rate": 7.741971704468898e-05, | |
| "loss": 0.0535, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 0.7968, | |
| "grad_norm": 1.0444730520248413, | |
| "learning_rate": 7.730743319110713e-05, | |
| "loss": 0.053, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.8324376940727234, | |
| "learning_rate": 7.719514933752526e-05, | |
| "loss": 0.0496, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 0.8032, | |
| "grad_norm": 0.8477050065994263, | |
| "learning_rate": 7.708286548394342e-05, | |
| "loss": 0.0517, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 0.8064, | |
| "grad_norm": 0.9891920685768127, | |
| "learning_rate": 7.697058163036157e-05, | |
| "loss": 0.0571, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 0.8096, | |
| "grad_norm": 0.8290746212005615, | |
| "learning_rate": 7.68582977767797e-05, | |
| "loss": 0.054, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 0.8128, | |
| "grad_norm": 0.9650336503982544, | |
| "learning_rate": 7.674601392319784e-05, | |
| "loss": 0.0513, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 0.816, | |
| "grad_norm": 1.2109074592590332, | |
| "learning_rate": 7.663373006961599e-05, | |
| "loss": 0.0631, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 0.8192, | |
| "grad_norm": 0.8478246331214905, | |
| "learning_rate": 7.652144621603414e-05, | |
| "loss": 0.0529, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 0.8224, | |
| "grad_norm": 0.9728397130966187, | |
| "learning_rate": 7.640916236245228e-05, | |
| "loss": 0.0599, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 0.8256, | |
| "grad_norm": 0.6895031332969666, | |
| "learning_rate": 7.629687850887043e-05, | |
| "loss": 0.0526, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 0.8288, | |
| "grad_norm": 0.6593358516693115, | |
| "learning_rate": 7.618459465528858e-05, | |
| "loss": 0.0579, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 0.832, | |
| "grad_norm": 1.1683558225631714, | |
| "learning_rate": 7.607231080170672e-05, | |
| "loss": 0.0514, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 0.8352, | |
| "grad_norm": 1.0191651582717896, | |
| "learning_rate": 7.596002694812487e-05, | |
| "loss": 0.0496, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 0.8384, | |
| "grad_norm": 0.4969435930252075, | |
| "learning_rate": 7.5847743094543e-05, | |
| "loss": 0.0568, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 0.8416, | |
| "grad_norm": 1.237692952156067, | |
| "learning_rate": 7.573545924096114e-05, | |
| "loss": 0.0527, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 0.8448, | |
| "grad_norm": 0.6777151823043823, | |
| "learning_rate": 7.562317538737929e-05, | |
| "loss": 0.0535, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 0.848, | |
| "grad_norm": 0.9000877141952515, | |
| "learning_rate": 7.551089153379744e-05, | |
| "loss": 0.05, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 0.8512, | |
| "grad_norm": 1.368095874786377, | |
| "learning_rate": 7.53986076802156e-05, | |
| "loss": 0.0569, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 0.8544, | |
| "grad_norm": 0.9844272136688232, | |
| "learning_rate": 7.528632382663373e-05, | |
| "loss": 0.0549, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 0.8576, | |
| "grad_norm": 0.7378091812133789, | |
| "learning_rate": 7.517403997305188e-05, | |
| "loss": 0.0484, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 0.8608, | |
| "grad_norm": 1.3697365522384644, | |
| "learning_rate": 7.506175611947003e-05, | |
| "loss": 0.0517, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 0.864, | |
| "grad_norm": 0.7792779803276062, | |
| "learning_rate": 7.494947226588817e-05, | |
| "loss": 0.0503, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 0.8672, | |
| "grad_norm": 1.0768780708312988, | |
| "learning_rate": 7.483718841230631e-05, | |
| "loss": 0.0581, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 0.8704, | |
| "grad_norm": 0.8705092668533325, | |
| "learning_rate": 7.472490455872446e-05, | |
| "loss": 0.0486, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 0.8736, | |
| "grad_norm": 0.7994651794433594, | |
| "learning_rate": 7.46126207051426e-05, | |
| "loss": 0.0488, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 0.8768, | |
| "grad_norm": 1.412158489227295, | |
| "learning_rate": 7.450033685156075e-05, | |
| "loss": 0.0505, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 0.8864648938179016, | |
| "learning_rate": 7.43880529979789e-05, | |
| "loss": 0.0483, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 0.8832, | |
| "grad_norm": 0.7613410353660583, | |
| "learning_rate": 7.427576914439703e-05, | |
| "loss": 0.0884, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 0.8864, | |
| "grad_norm": 1.5085667371749878, | |
| "learning_rate": 7.416348529081518e-05, | |
| "loss": 0.0554, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 0.8896, | |
| "grad_norm": 0.9009981155395508, | |
| "learning_rate": 7.405120143723333e-05, | |
| "loss": 0.0463, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 0.8928, | |
| "grad_norm": 0.6694269776344299, | |
| "learning_rate": 7.393891758365149e-05, | |
| "loss": 0.0502, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 0.896, | |
| "grad_norm": 0.7809305787086487, | |
| "learning_rate": 7.382663373006962e-05, | |
| "loss": 0.0508, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 0.8992, | |
| "grad_norm": 0.7939388155937195, | |
| "learning_rate": 7.371434987648776e-05, | |
| "loss": 0.0474, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 0.9024, | |
| "grad_norm": 0.6125457286834717, | |
| "learning_rate": 7.360206602290591e-05, | |
| "loss": 0.0493, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 0.9056, | |
| "grad_norm": 0.7115519642829895, | |
| "learning_rate": 7.348978216932405e-05, | |
| "loss": 0.0518, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 0.9088, | |
| "grad_norm": 0.5694475769996643, | |
| "learning_rate": 7.33774983157422e-05, | |
| "loss": 0.0469, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 0.912, | |
| "grad_norm": 0.8270673751831055, | |
| "learning_rate": 7.326521446216035e-05, | |
| "loss": 0.0517, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 0.9152, | |
| "grad_norm": 0.7110369205474854, | |
| "learning_rate": 7.315293060857849e-05, | |
| "loss": 0.0467, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 0.9184, | |
| "grad_norm": 1.30919349193573, | |
| "learning_rate": 7.304064675499664e-05, | |
| "loss": 0.0448, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 0.9216, | |
| "grad_norm": 0.9443379044532776, | |
| "learning_rate": 7.292836290141479e-05, | |
| "loss": 0.0482, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 0.9248, | |
| "grad_norm": 1.0035077333450317, | |
| "learning_rate": 7.281607904783292e-05, | |
| "loss": 0.0469, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 0.928, | |
| "grad_norm": 0.6841601729393005, | |
| "learning_rate": 7.270379519425106e-05, | |
| "loss": 0.0525, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 0.9312, | |
| "grad_norm": 1.0206371545791626, | |
| "learning_rate": 7.259151134066921e-05, | |
| "loss": 0.0472, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 0.9344, | |
| "grad_norm": 1.2588645219802856, | |
| "learning_rate": 7.247922748708736e-05, | |
| "loss": 0.0466, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 0.9376, | |
| "grad_norm": 0.5839787125587463, | |
| "learning_rate": 7.23669436335055e-05, | |
| "loss": 0.0441, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 0.9408, | |
| "grad_norm": 0.6651286482810974, | |
| "learning_rate": 7.225465977992365e-05, | |
| "loss": 0.0447, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 0.944, | |
| "grad_norm": 0.8288625478744507, | |
| "learning_rate": 7.21423759263418e-05, | |
| "loss": 0.0478, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 0.9472, | |
| "grad_norm": 0.6164016723632812, | |
| "learning_rate": 7.203009207275994e-05, | |
| "loss": 0.0423, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 0.9504, | |
| "grad_norm": 0.6110700368881226, | |
| "learning_rate": 7.191780821917809e-05, | |
| "loss": 0.043, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 0.9536, | |
| "grad_norm": 1.1883233785629272, | |
| "learning_rate": 7.180552436559623e-05, | |
| "loss": 0.0482, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 0.9568, | |
| "grad_norm": 1.3239836692810059, | |
| "learning_rate": 7.169324051201438e-05, | |
| "loss": 0.0445, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 1.2216120958328247, | |
| "learning_rate": 7.158095665843251e-05, | |
| "loss": 0.0466, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 0.9632, | |
| "grad_norm": 0.5407708883285522, | |
| "learning_rate": 7.146867280485066e-05, | |
| "loss": 0.0439, | |
| "step": 3010 | |
| }, | |
| { | |
| "epoch": 0.9664, | |
| "grad_norm": 0.8621035218238831, | |
| "learning_rate": 7.135638895126882e-05, | |
| "loss": 0.045, | |
| "step": 3020 | |
| }, | |
| { | |
| "epoch": 0.9696, | |
| "grad_norm": 1.2907600402832031, | |
| "learning_rate": 7.124410509768695e-05, | |
| "loss": 0.0419, | |
| "step": 3030 | |
| }, | |
| { | |
| "epoch": 0.9728, | |
| "grad_norm": 0.7645203471183777, | |
| "learning_rate": 7.11318212441051e-05, | |
| "loss": 0.0432, | |
| "step": 3040 | |
| }, | |
| { | |
| "epoch": 0.976, | |
| "grad_norm": 0.7075303196907043, | |
| "learning_rate": 7.101953739052325e-05, | |
| "loss": 0.0468, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 0.9792, | |
| "grad_norm": 1.7942383289337158, | |
| "learning_rate": 7.090725353694139e-05, | |
| "loss": 0.0473, | |
| "step": 3060 | |
| }, | |
| { | |
| "epoch": 0.9824, | |
| "grad_norm": 0.4584600627422333, | |
| "learning_rate": 7.079496968335954e-05, | |
| "loss": 0.0375, | |
| "step": 3070 | |
| }, | |
| { | |
| "epoch": 0.9856, | |
| "grad_norm": 0.7096564769744873, | |
| "learning_rate": 7.068268582977768e-05, | |
| "loss": 0.0387, | |
| "step": 3080 | |
| }, | |
| { | |
| "epoch": 0.9888, | |
| "grad_norm": 0.7199975848197937, | |
| "learning_rate": 7.057040197619582e-05, | |
| "loss": 0.0445, | |
| "step": 3090 | |
| }, | |
| { | |
| "epoch": 0.992, | |
| "grad_norm": 0.8981589078903198, | |
| "learning_rate": 7.045811812261397e-05, | |
| "loss": 0.0408, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 0.9952, | |
| "grad_norm": 0.8166985511779785, | |
| "learning_rate": 7.034583426903212e-05, | |
| "loss": 0.043, | |
| "step": 3110 | |
| }, | |
| { | |
| "epoch": 0.9984, | |
| "grad_norm": 1.4462010860443115, | |
| "learning_rate": 7.023355041545027e-05, | |
| "loss": 0.0422, | |
| "step": 3120 | |
| }, | |
| { | |
| "epoch": 1.0016, | |
| "grad_norm": 1.0512330532073975, | |
| "learning_rate": 7.01212665618684e-05, | |
| "loss": 0.0443, | |
| "step": 3130 | |
| }, | |
| { | |
| "epoch": 1.0048, | |
| "grad_norm": 0.9778704047203064, | |
| "learning_rate": 7.000898270828656e-05, | |
| "loss": 0.0422, | |
| "step": 3140 | |
| }, | |
| { | |
| "epoch": 1.008, | |
| "grad_norm": 0.5280322432518005, | |
| "learning_rate": 6.98966988547047e-05, | |
| "loss": 0.044, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 1.0112, | |
| "grad_norm": 0.8634210824966431, | |
| "learning_rate": 6.978441500112284e-05, | |
| "loss": 0.0449, | |
| "step": 3160 | |
| }, | |
| { | |
| "epoch": 1.0144, | |
| "grad_norm": 1.042433500289917, | |
| "learning_rate": 6.967213114754098e-05, | |
| "loss": 0.0429, | |
| "step": 3170 | |
| }, | |
| { | |
| "epoch": 1.0176, | |
| "grad_norm": 1.1817718744277954, | |
| "learning_rate": 6.955984729395913e-05, | |
| "loss": 0.0403, | |
| "step": 3180 | |
| }, | |
| { | |
| "epoch": 1.0208, | |
| "grad_norm": 0.9091728925704956, | |
| "learning_rate": 6.944756344037727e-05, | |
| "loss": 0.0378, | |
| "step": 3190 | |
| }, | |
| { | |
| "epoch": 1.024, | |
| "grad_norm": 0.6847913265228271, | |
| "learning_rate": 6.933527958679542e-05, | |
| "loss": 0.0399, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 1.0272, | |
| "grad_norm": 0.43094027042388916, | |
| "learning_rate": 6.922299573321357e-05, | |
| "loss": 0.0441, | |
| "step": 3210 | |
| }, | |
| { | |
| "epoch": 1.0304, | |
| "grad_norm": 0.8904466032981873, | |
| "learning_rate": 6.911071187963172e-05, | |
| "loss": 0.0407, | |
| "step": 3220 | |
| }, | |
| { | |
| "epoch": 1.0336, | |
| "grad_norm": 0.8264906406402588, | |
| "learning_rate": 6.899842802604986e-05, | |
| "loss": 0.0363, | |
| "step": 3230 | |
| }, | |
| { | |
| "epoch": 1.0368, | |
| "grad_norm": 0.9013900756835938, | |
| "learning_rate": 6.888614417246801e-05, | |
| "loss": 0.0359, | |
| "step": 3240 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "grad_norm": 0.898671567440033, | |
| "learning_rate": 6.877386031888615e-05, | |
| "loss": 0.0438, | |
| "step": 3250 | |
| }, | |
| { | |
| "epoch": 1.0432, | |
| "grad_norm": 0.6732982993125916, | |
| "learning_rate": 6.866157646530428e-05, | |
| "loss": 0.0339, | |
| "step": 3260 | |
| }, | |
| { | |
| "epoch": 1.0464, | |
| "grad_norm": 0.7971717715263367, | |
| "learning_rate": 6.854929261172243e-05, | |
| "loss": 0.0395, | |
| "step": 3270 | |
| }, | |
| { | |
| "epoch": 1.0496, | |
| "grad_norm": 1.0535839796066284, | |
| "learning_rate": 6.843700875814058e-05, | |
| "loss": 0.0379, | |
| "step": 3280 | |
| }, | |
| { | |
| "epoch": 1.0528, | |
| "grad_norm": 1.0416865348815918, | |
| "learning_rate": 6.832472490455872e-05, | |
| "loss": 0.0375, | |
| "step": 3290 | |
| }, | |
| { | |
| "epoch": 1.056, | |
| "grad_norm": 0.8148690462112427, | |
| "learning_rate": 6.821244105097687e-05, | |
| "loss": 0.0438, | |
| "step": 3300 | |
| }, | |
| { | |
| "epoch": 1.0592, | |
| "grad_norm": 0.7114180326461792, | |
| "learning_rate": 6.810015719739502e-05, | |
| "loss": 0.0383, | |
| "step": 3310 | |
| }, | |
| { | |
| "epoch": 1.0624, | |
| "grad_norm": 0.5412663817405701, | |
| "learning_rate": 6.798787334381316e-05, | |
| "loss": 0.0428, | |
| "step": 3320 | |
| }, | |
| { | |
| "epoch": 1.0656, | |
| "grad_norm": 1.0805037021636963, | |
| "learning_rate": 6.787558949023131e-05, | |
| "loss": 0.0365, | |
| "step": 3330 | |
| }, | |
| { | |
| "epoch": 1.0688, | |
| "grad_norm": 0.7078894972801208, | |
| "learning_rate": 6.776330563664946e-05, | |
| "loss": 0.0392, | |
| "step": 3340 | |
| }, | |
| { | |
| "epoch": 1.072, | |
| "grad_norm": 0.5871229767799377, | |
| "learning_rate": 6.76510217830676e-05, | |
| "loss": 0.0371, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 1.0752, | |
| "grad_norm": 0.7829983830451965, | |
| "learning_rate": 6.753873792948574e-05, | |
| "loss": 0.0379, | |
| "step": 3360 | |
| }, | |
| { | |
| "epoch": 1.0784, | |
| "grad_norm": 0.8715881705284119, | |
| "learning_rate": 6.742645407590389e-05, | |
| "loss": 0.0416, | |
| "step": 3370 | |
| }, | |
| { | |
| "epoch": 1.0816, | |
| "grad_norm": 0.6862126588821411, | |
| "learning_rate": 6.731417022232204e-05, | |
| "loss": 0.0381, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 1.0848, | |
| "grad_norm": 1.577975869178772, | |
| "learning_rate": 6.720188636874017e-05, | |
| "loss": 0.0369, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 1.088, | |
| "grad_norm": 0.6323923468589783, | |
| "learning_rate": 6.708960251515832e-05, | |
| "loss": 0.0341, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 1.0912, | |
| "grad_norm": 1.6176074743270874, | |
| "learning_rate": 6.697731866157648e-05, | |
| "loss": 0.0383, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 1.0944, | |
| "grad_norm": 0.9251887798309326, | |
| "learning_rate": 6.686503480799461e-05, | |
| "loss": 0.0362, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 1.0976, | |
| "grad_norm": 0.669974684715271, | |
| "learning_rate": 6.675275095441276e-05, | |
| "loss": 0.0401, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 1.1008, | |
| "grad_norm": 0.643669843673706, | |
| "learning_rate": 6.66404671008309e-05, | |
| "loss": 0.0371, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 1.104, | |
| "grad_norm": 0.5590800046920776, | |
| "learning_rate": 6.652818324724905e-05, | |
| "loss": 0.037, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 1.1072, | |
| "grad_norm": 0.4101477563381195, | |
| "learning_rate": 6.641589939366719e-05, | |
| "loss": 0.0385, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 1.1104, | |
| "grad_norm": 0.6495181322097778, | |
| "learning_rate": 6.630361554008534e-05, | |
| "loss": 0.0351, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 1.1136, | |
| "grad_norm": 0.9820901155471802, | |
| "learning_rate": 6.619133168650349e-05, | |
| "loss": 0.0371, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 1.1168, | |
| "grad_norm": 0.5988272428512573, | |
| "learning_rate": 6.607904783292163e-05, | |
| "loss": 0.0339, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "grad_norm": 1.012909173965454, | |
| "learning_rate": 6.596676397933978e-05, | |
| "loss": 0.0395, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 1.1232, | |
| "grad_norm": 0.5585185885429382, | |
| "learning_rate": 6.585448012575793e-05, | |
| "loss": 0.0339, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 1.1264, | |
| "grad_norm": 1.0927432775497437, | |
| "learning_rate": 6.574219627217606e-05, | |
| "loss": 0.0347, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 1.1296, | |
| "grad_norm": 0.48572611808776855, | |
| "learning_rate": 6.56299124185942e-05, | |
| "loss": 0.0346, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 1.1328, | |
| "grad_norm": 0.7249963879585266, | |
| "learning_rate": 6.551762856501235e-05, | |
| "loss": 0.0383, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 1.1360000000000001, | |
| "grad_norm": 0.8811891674995422, | |
| "learning_rate": 6.540534471143049e-05, | |
| "loss": 0.036, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 1.1392, | |
| "grad_norm": 0.34915685653686523, | |
| "learning_rate": 6.529306085784864e-05, | |
| "loss": 0.0364, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 1.1424, | |
| "grad_norm": 0.7498555183410645, | |
| "learning_rate": 6.518077700426679e-05, | |
| "loss": 0.0363, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 1.1456, | |
| "grad_norm": 0.5287899971008301, | |
| "learning_rate": 6.506849315068494e-05, | |
| "loss": 0.036, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 1.1488, | |
| "grad_norm": 0.927849292755127, | |
| "learning_rate": 6.495620929710308e-05, | |
| "loss": 0.0366, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 1.152, | |
| "grad_norm": 0.8190979957580566, | |
| "learning_rate": 6.484392544352123e-05, | |
| "loss": 0.0372, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 1.1552, | |
| "grad_norm": 0.5352613925933838, | |
| "learning_rate": 6.473164158993938e-05, | |
| "loss": 0.0337, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 1.1584, | |
| "grad_norm": 0.5148279070854187, | |
| "learning_rate": 6.461935773635752e-05, | |
| "loss": 0.0347, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 1.1616, | |
| "grad_norm": 0.5114535093307495, | |
| "learning_rate": 6.450707388277565e-05, | |
| "loss": 0.0305, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 1.1648, | |
| "grad_norm": 0.7265550494194031, | |
| "learning_rate": 6.43947900291938e-05, | |
| "loss": 0.0342, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 1.168, | |
| "grad_norm": 1.08705735206604, | |
| "learning_rate": 6.428250617561194e-05, | |
| "loss": 0.0329, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 1.1712, | |
| "grad_norm": 0.4121463894844055, | |
| "learning_rate": 6.417022232203009e-05, | |
| "loss": 0.0295, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 1.1743999999999999, | |
| "grad_norm": 0.8857820630073547, | |
| "learning_rate": 6.405793846844824e-05, | |
| "loss": 0.0358, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 1.1776, | |
| "grad_norm": 0.5461627244949341, | |
| "learning_rate": 6.39456546148664e-05, | |
| "loss": 0.0293, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 1.1808, | |
| "grad_norm": 0.570478618144989, | |
| "learning_rate": 6.383337076128453e-05, | |
| "loss": 0.0306, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 1.184, | |
| "grad_norm": 0.6226683855056763, | |
| "learning_rate": 6.372108690770268e-05, | |
| "loss": 0.034, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 1.1872, | |
| "grad_norm": 0.6360095739364624, | |
| "learning_rate": 6.360880305412082e-05, | |
| "loss": 0.0354, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 1.1904, | |
| "grad_norm": 0.8939719796180725, | |
| "learning_rate": 6.349651920053896e-05, | |
| "loss": 0.0341, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 1.1936, | |
| "grad_norm": 0.7986194491386414, | |
| "learning_rate": 6.338423534695711e-05, | |
| "loss": 0.0326, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 1.1968, | |
| "grad_norm": 1.182988166809082, | |
| "learning_rate": 6.327195149337526e-05, | |
| "loss": 0.0335, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "grad_norm": 2.2282466888427734, | |
| "learning_rate": 6.31596676397934e-05, | |
| "loss": 0.2185, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 1.2032, | |
| "grad_norm": 0.41253259778022766, | |
| "learning_rate": 6.304738378621155e-05, | |
| "loss": 3.791, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 1.2064, | |
| "grad_norm": 8.564937591552734, | |
| "learning_rate": 6.29350999326297e-05, | |
| "loss": 1.1356, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 1.2096, | |
| "grad_norm": 0.7298255562782288, | |
| "learning_rate": 6.282281607904783e-05, | |
| "loss": 0.033, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 1.2128, | |
| "grad_norm": 0.6668412685394287, | |
| "learning_rate": 6.271053222546598e-05, | |
| "loss": 0.0362, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 1.216, | |
| "grad_norm": 0.8170273900032043, | |
| "learning_rate": 6.259824837188412e-05, | |
| "loss": 0.1264, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 1.2192, | |
| "grad_norm": 0.7514665722846985, | |
| "learning_rate": 6.248596451830227e-05, | |
| "loss": 0.0407, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 1.2224, | |
| "grad_norm": 1.0966761112213135, | |
| "learning_rate": 6.237368066472041e-05, | |
| "loss": 0.0362, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 1.2256, | |
| "grad_norm": 0.5392136573791504, | |
| "learning_rate": 6.226139681113856e-05, | |
| "loss": 0.0326, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 1.2288000000000001, | |
| "grad_norm": 0.5539236664772034, | |
| "learning_rate": 6.214911295755671e-05, | |
| "loss": 0.0343, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 1.232, | |
| "grad_norm": 0.7286869287490845, | |
| "learning_rate": 6.203682910397485e-05, | |
| "loss": 0.0299, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 1.2352, | |
| "grad_norm": 0.5950853824615479, | |
| "learning_rate": 6.1924545250393e-05, | |
| "loss": 0.0331, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 1.2384, | |
| "grad_norm": 0.8852615356445312, | |
| "learning_rate": 6.181226139681115e-05, | |
| "loss": 0.0391, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 1.2416, | |
| "grad_norm": 0.7315155863761902, | |
| "learning_rate": 6.169997754322929e-05, | |
| "loss": 0.0318, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 1.2448, | |
| "grad_norm": 0.6118606925010681, | |
| "learning_rate": 6.158769368964744e-05, | |
| "loss": 0.0356, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 1.248, | |
| "grad_norm": 0.7391502857208252, | |
| "learning_rate": 6.147540983606557e-05, | |
| "loss": 0.0337, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 1.2511999999999999, | |
| "grad_norm": 0.8421064615249634, | |
| "learning_rate": 6.136312598248372e-05, | |
| "loss": 0.0311, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 1.2544, | |
| "grad_norm": 0.49847543239593506, | |
| "learning_rate": 6.125084212890186e-05, | |
| "loss": 0.0309, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 1.2576, | |
| "grad_norm": 0.745847761631012, | |
| "learning_rate": 6.113855827532001e-05, | |
| "loss": 0.0315, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 1.2608, | |
| "grad_norm": 1.007932424545288, | |
| "learning_rate": 6.102627442173816e-05, | |
| "loss": 0.0363, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 1.264, | |
| "grad_norm": 0.4820585250854492, | |
| "learning_rate": 6.09139905681563e-05, | |
| "loss": 0.0361, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 1.2671999999999999, | |
| "grad_norm": 0.7210965752601624, | |
| "learning_rate": 6.0801706714574444e-05, | |
| "loss": 0.0412, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 1.2704, | |
| "grad_norm": 0.7958953380584717, | |
| "learning_rate": 6.0689422860992595e-05, | |
| "loss": 0.0398, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 1.2736, | |
| "grad_norm": 0.6415374875068665, | |
| "learning_rate": 6.057713900741073e-05, | |
| "loss": 0.0359, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 1.2768, | |
| "grad_norm": 0.6547184586524963, | |
| "learning_rate": 6.046485515382888e-05, | |
| "loss": 0.0316, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "grad_norm": 0.7874923944473267, | |
| "learning_rate": 6.0352571300247026e-05, | |
| "loss": 0.0316, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 1.2832, | |
| "grad_norm": 0.8263147473335266, | |
| "learning_rate": 6.024028744666518e-05, | |
| "loss": 0.0285, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 1.2864, | |
| "grad_norm": 0.5803816318511963, | |
| "learning_rate": 6.0128003593083314e-05, | |
| "loss": 0.0314, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 1.2896, | |
| "grad_norm": 0.8199337124824524, | |
| "learning_rate": 6.0015719739501465e-05, | |
| "loss": 0.0282, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 1.2928, | |
| "grad_norm": 1.0405923128128052, | |
| "learning_rate": 5.9903435885919615e-05, | |
| "loss": 0.0274, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 1.296, | |
| "grad_norm": 0.7816119194030762, | |
| "learning_rate": 5.979115203233775e-05, | |
| "loss": 0.0327, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 1.2992, | |
| "grad_norm": 0.7621133923530579, | |
| "learning_rate": 5.9678868178755896e-05, | |
| "loss": 0.0345, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 1.3024, | |
| "grad_norm": 1.0627555847167969, | |
| "learning_rate": 5.956658432517405e-05, | |
| "loss": 0.0341, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 1.3056, | |
| "grad_norm": 0.5056561827659607, | |
| "learning_rate": 5.9454300471592184e-05, | |
| "loss": 0.0322, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 1.3088, | |
| "grad_norm": 0.639519214630127, | |
| "learning_rate": 5.9342016618010335e-05, | |
| "loss": 0.0301, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 1.312, | |
| "grad_norm": 0.7004682421684265, | |
| "learning_rate": 5.922973276442848e-05, | |
| "loss": 0.0314, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 1.3152, | |
| "grad_norm": 0.8585867881774902, | |
| "learning_rate": 5.9117448910846616e-05, | |
| "loss": 0.0298, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 1.3184, | |
| "grad_norm": 0.8255482316017151, | |
| "learning_rate": 5.900516505726477e-05, | |
| "loss": 0.0273, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 1.3216, | |
| "grad_norm": 0.9384375214576721, | |
| "learning_rate": 5.889288120368292e-05, | |
| "loss": 0.0244, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 1.3248, | |
| "grad_norm": 0.9519423842430115, | |
| "learning_rate": 5.878059735010106e-05, | |
| "loss": 0.0315, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 1.328, | |
| "grad_norm": 0.40597301721572876, | |
| "learning_rate": 5.86683134965192e-05, | |
| "loss": 0.0292, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 1.3312, | |
| "grad_norm": 0.7606236338615417, | |
| "learning_rate": 5.855602964293735e-05, | |
| "loss": 0.0264, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 1.3344, | |
| "grad_norm": 0.4717922508716583, | |
| "learning_rate": 5.84437457893555e-05, | |
| "loss": 0.0273, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 1.3376000000000001, | |
| "grad_norm": 0.6877211928367615, | |
| "learning_rate": 5.833146193577364e-05, | |
| "loss": 0.0297, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 1.3408, | |
| "grad_norm": 0.5807251930236816, | |
| "learning_rate": 5.821917808219178e-05, | |
| "loss": 0.0292, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 1.3439999999999999, | |
| "grad_norm": 0.7031255960464478, | |
| "learning_rate": 5.810689422860993e-05, | |
| "loss": 0.0281, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 1.3472, | |
| "grad_norm": 0.5905170440673828, | |
| "learning_rate": 5.799461037502807e-05, | |
| "loss": 0.0291, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 1.3504, | |
| "grad_norm": 0.6371684074401855, | |
| "learning_rate": 5.788232652144622e-05, | |
| "loss": 0.0271, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 1.3536000000000001, | |
| "grad_norm": 0.5192740559577942, | |
| "learning_rate": 5.777004266786436e-05, | |
| "loss": 0.0277, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 1.3568, | |
| "grad_norm": 0.4487575590610504, | |
| "learning_rate": 5.7657758814282514e-05, | |
| "loss": 0.0251, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 1.3599999999999999, | |
| "grad_norm": 0.8581016063690186, | |
| "learning_rate": 5.754547496070065e-05, | |
| "loss": 0.0257, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 1.3632, | |
| "grad_norm": 1.2987688779830933, | |
| "learning_rate": 5.74331911071188e-05, | |
| "loss": 0.0259, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 1.3664, | |
| "grad_norm": 0.6300547122955322, | |
| "learning_rate": 5.7320907253536945e-05, | |
| "loss": 0.027, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 1.3696, | |
| "grad_norm": 0.5303404331207275, | |
| "learning_rate": 5.720862339995508e-05, | |
| "loss": 0.0291, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 1.3728, | |
| "grad_norm": 0.6453747749328613, | |
| "learning_rate": 5.709633954637323e-05, | |
| "loss": 0.0288, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 1.376, | |
| "grad_norm": 0.6400272846221924, | |
| "learning_rate": 5.6984055692791384e-05, | |
| "loss": 0.028, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 1.3792, | |
| "grad_norm": 0.5555694699287415, | |
| "learning_rate": 5.687177183920952e-05, | |
| "loss": 0.0299, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 1.3824, | |
| "grad_norm": 1.4886265993118286, | |
| "learning_rate": 5.675948798562767e-05, | |
| "loss": 0.0247, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 1.3856, | |
| "grad_norm": 0.6857733130455017, | |
| "learning_rate": 5.6647204132045816e-05, | |
| "loss": 0.0306, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 1.3888, | |
| "grad_norm": 0.6037392020225525, | |
| "learning_rate": 5.653492027846395e-05, | |
| "loss": 0.0273, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 1.392, | |
| "grad_norm": 0.631255030632019, | |
| "learning_rate": 5.6422636424882103e-05, | |
| "loss": 0.0263, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 1.3952, | |
| "grad_norm": 0.8216801881790161, | |
| "learning_rate": 5.6310352571300254e-05, | |
| "loss": 0.028, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 1.3984, | |
| "grad_norm": 0.5431815385818481, | |
| "learning_rate": 5.61980687177184e-05, | |
| "loss": 0.0236, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 1.4016, | |
| "grad_norm": 0.780263364315033, | |
| "learning_rate": 5.6085784864136535e-05, | |
| "loss": 0.0244, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 1.4048, | |
| "grad_norm": 0.9671525955200195, | |
| "learning_rate": 5.5973501010554686e-05, | |
| "loss": 0.0228, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 1.408, | |
| "grad_norm": 0.9191218018531799, | |
| "learning_rate": 5.5861217156972837e-05, | |
| "loss": 0.0292, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 1.4112, | |
| "grad_norm": 0.6263865232467651, | |
| "learning_rate": 5.5748933303390974e-05, | |
| "loss": 0.0261, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 1.4144, | |
| "grad_norm": 0.50249844789505, | |
| "learning_rate": 5.563664944980912e-05, | |
| "loss": 0.0309, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 1.4176, | |
| "grad_norm": 0.5489816665649414, | |
| "learning_rate": 5.552436559622727e-05, | |
| "loss": 0.0234, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 1.4208, | |
| "grad_norm": 0.969399631023407, | |
| "learning_rate": 5.5412081742645405e-05, | |
| "loss": 0.0265, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 1.424, | |
| "grad_norm": 0.47226765751838684, | |
| "learning_rate": 5.5299797889063556e-05, | |
| "loss": 0.022, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 1.4272, | |
| "grad_norm": 0.4909917712211609, | |
| "learning_rate": 5.51875140354817e-05, | |
| "loss": 0.0244, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 1.4304000000000001, | |
| "grad_norm": 0.587851345539093, | |
| "learning_rate": 5.507523018189985e-05, | |
| "loss": 0.0215, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 1.4336, | |
| "grad_norm": 0.699611246585846, | |
| "learning_rate": 5.496294632831799e-05, | |
| "loss": 0.0281, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 1.4368, | |
| "grad_norm": 0.6853237748146057, | |
| "learning_rate": 5.485066247473614e-05, | |
| "loss": 0.0263, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "grad_norm": 0.5646203756332397, | |
| "learning_rate": 5.473837862115428e-05, | |
| "loss": 0.0199, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 1.4432, | |
| "grad_norm": 0.7595967650413513, | |
| "learning_rate": 5.462609476757242e-05, | |
| "loss": 0.0266, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 1.4464000000000001, | |
| "grad_norm": 0.46534693241119385, | |
| "learning_rate": 5.451381091399057e-05, | |
| "loss": 0.0262, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 1.4496, | |
| "grad_norm": 0.6324544548988342, | |
| "learning_rate": 5.440152706040872e-05, | |
| "loss": 0.0275, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 1.4527999999999999, | |
| "grad_norm": 0.4882917106151581, | |
| "learning_rate": 5.428924320682686e-05, | |
| "loss": 0.025, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 1.456, | |
| "grad_norm": 0.6180522441864014, | |
| "learning_rate": 5.4176959353245e-05, | |
| "loss": 0.0269, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 1.4592, | |
| "grad_norm": 0.8303629159927368, | |
| "learning_rate": 5.406467549966315e-05, | |
| "loss": 0.0248, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 1.4624, | |
| "grad_norm": 0.8613302111625671, | |
| "learning_rate": 5.395239164608129e-05, | |
| "loss": 0.0254, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 1.4656, | |
| "grad_norm": 0.46437305212020874, | |
| "learning_rate": 5.384010779249944e-05, | |
| "loss": 0.0227, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 1.4687999999999999, | |
| "grad_norm": 0.48645541071891785, | |
| "learning_rate": 5.3727823938917584e-05, | |
| "loss": 0.022, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 1.472, | |
| "grad_norm": 0.5474770069122314, | |
| "learning_rate": 5.3615540085335735e-05, | |
| "loss": 0.0242, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 1.4752, | |
| "grad_norm": 0.5821073651313782, | |
| "learning_rate": 5.350325623175387e-05, | |
| "loss": 0.0242, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 1.4784, | |
| "grad_norm": 0.5728013515472412, | |
| "learning_rate": 5.339097237817202e-05, | |
| "loss": 0.0307, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 1.4816, | |
| "grad_norm": 0.71209716796875, | |
| "learning_rate": 5.327868852459017e-05, | |
| "loss": 0.0246, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 1.4848, | |
| "grad_norm": 0.43612465262413025, | |
| "learning_rate": 5.316640467100831e-05, | |
| "loss": 0.0273, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 1.488, | |
| "grad_norm": 0.6440733671188354, | |
| "learning_rate": 5.3054120817426454e-05, | |
| "loss": 0.0203, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 1.4912, | |
| "grad_norm": 0.9266752600669861, | |
| "learning_rate": 5.2941836963844605e-05, | |
| "loss": 0.0229, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 1.4944, | |
| "grad_norm": 0.4240361750125885, | |
| "learning_rate": 5.282955311026274e-05, | |
| "loss": 0.0196, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 1.4976, | |
| "grad_norm": 0.6998528242111206, | |
| "learning_rate": 5.271726925668089e-05, | |
| "loss": 0.0229, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 1.5008, | |
| "grad_norm": 0.8201552033424377, | |
| "learning_rate": 5.260498540309904e-05, | |
| "loss": 0.0258, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 1.504, | |
| "grad_norm": 0.4481537342071533, | |
| "learning_rate": 5.249270154951719e-05, | |
| "loss": 0.0227, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 1.5072, | |
| "grad_norm": 0.4805366098880768, | |
| "learning_rate": 5.2380417695935325e-05, | |
| "loss": 0.0203, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 1.5104, | |
| "grad_norm": 0.7091923356056213, | |
| "learning_rate": 5.2268133842353475e-05, | |
| "loss": 0.0203, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 1.5135999999999998, | |
| "grad_norm": 0.8759817481040955, | |
| "learning_rate": 5.215584998877162e-05, | |
| "loss": 0.0234, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 1.5168, | |
| "grad_norm": 0.5079240798950195, | |
| "learning_rate": 5.2043566135189756e-05, | |
| "loss": 0.0203, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "grad_norm": 0.8426089286804199, | |
| "learning_rate": 5.193128228160791e-05, | |
| "loss": 0.0242, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 1.5232, | |
| "grad_norm": 0.5041608810424805, | |
| "learning_rate": 5.181899842802606e-05, | |
| "loss": 0.0208, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 1.5264, | |
| "grad_norm": 0.567385733127594, | |
| "learning_rate": 5.1706714574444195e-05, | |
| "loss": 0.025, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 1.5295999999999998, | |
| "grad_norm": 0.4750799238681793, | |
| "learning_rate": 5.159443072086234e-05, | |
| "loss": 0.0194, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 1.5328, | |
| "grad_norm": 0.9102836847305298, | |
| "learning_rate": 5.148214686728049e-05, | |
| "loss": 0.0214, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 1.536, | |
| "grad_norm": 0.49438390135765076, | |
| "learning_rate": 5.136986301369864e-05, | |
| "loss": 0.025, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 1.5392000000000001, | |
| "grad_norm": 0.3925231695175171, | |
| "learning_rate": 5.125757916011678e-05, | |
| "loss": 0.0193, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 1.5424, | |
| "grad_norm": 0.9309172034263611, | |
| "learning_rate": 5.114529530653492e-05, | |
| "loss": 0.029, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 1.5455999999999999, | |
| "grad_norm": 0.7060449123382568, | |
| "learning_rate": 5.103301145295307e-05, | |
| "loss": 0.0232, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 1.5488, | |
| "grad_norm": 0.5226718187332153, | |
| "learning_rate": 5.092072759937121e-05, | |
| "loss": 0.0215, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 1.552, | |
| "grad_norm": 0.6417393088340759, | |
| "learning_rate": 5.080844374578936e-05, | |
| "loss": 0.0221, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 1.5552000000000001, | |
| "grad_norm": 0.3402005434036255, | |
| "learning_rate": 5.0696159892207503e-05, | |
| "loss": 0.0203, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 1.5584, | |
| "grad_norm": 0.5490476489067078, | |
| "learning_rate": 5.058387603862564e-05, | |
| "loss": 0.0231, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 1.5615999999999999, | |
| "grad_norm": 0.7371191382408142, | |
| "learning_rate": 5.047159218504379e-05, | |
| "loss": 0.0202, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 1.5648, | |
| "grad_norm": 0.5394011735916138, | |
| "learning_rate": 5.035930833146194e-05, | |
| "loss": 0.0231, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 1.568, | |
| "grad_norm": 0.6556629538536072, | |
| "learning_rate": 5.024702447788008e-05, | |
| "loss": 0.0225, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 1.5712000000000002, | |
| "grad_norm": 0.4400472342967987, | |
| "learning_rate": 5.013474062429823e-05, | |
| "loss": 0.0217, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 1.5744, | |
| "grad_norm": 0.5528716444969177, | |
| "learning_rate": 5.0022456770716374e-05, | |
| "loss": 0.0218, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 1.5776, | |
| "grad_norm": 0.53992760181427, | |
| "learning_rate": 4.991017291713452e-05, | |
| "loss": 0.0218, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 1.5808, | |
| "grad_norm": 0.5271251201629639, | |
| "learning_rate": 4.979788906355267e-05, | |
| "loss": 0.0231, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 1.584, | |
| "grad_norm": 0.5841997861862183, | |
| "learning_rate": 4.968560520997081e-05, | |
| "loss": 0.0232, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 1.5872000000000002, | |
| "grad_norm": 0.6428009867668152, | |
| "learning_rate": 4.957332135638895e-05, | |
| "loss": 0.0221, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 1.5904, | |
| "grad_norm": 0.39705461263656616, | |
| "learning_rate": 4.94610375028071e-05, | |
| "loss": 0.0231, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 1.5936, | |
| "grad_norm": 0.5390853881835938, | |
| "learning_rate": 4.9348753649225244e-05, | |
| "loss": 0.0238, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 1.5968, | |
| "grad_norm": 0.5942838191986084, | |
| "learning_rate": 4.923646979564339e-05, | |
| "loss": 0.0213, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "grad_norm": 0.6425515413284302, | |
| "learning_rate": 4.912418594206153e-05, | |
| "loss": 0.0237, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 1.6032, | |
| "grad_norm": 0.7233427166938782, | |
| "learning_rate": 4.9011902088479676e-05, | |
| "loss": 0.0223, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 1.6064, | |
| "grad_norm": 0.43313702940940857, | |
| "learning_rate": 4.8899618234897826e-05, | |
| "loss": 0.0245, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 1.6096, | |
| "grad_norm": 0.3017479479312897, | |
| "learning_rate": 4.878733438131597e-05, | |
| "loss": 0.0223, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 1.6128, | |
| "grad_norm": 0.3023962080478668, | |
| "learning_rate": 4.8675050527734114e-05, | |
| "loss": 0.0184, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 1.616, | |
| "grad_norm": 1.2988214492797852, | |
| "learning_rate": 4.856276667415226e-05, | |
| "loss": 0.0163, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 1.6192, | |
| "grad_norm": 0.427177369594574, | |
| "learning_rate": 4.84504828205704e-05, | |
| "loss": 0.0255, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 1.6223999999999998, | |
| "grad_norm": 0.5730947852134705, | |
| "learning_rate": 4.833819896698855e-05, | |
| "loss": 0.0227, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 1.6256, | |
| "grad_norm": 0.5415002107620239, | |
| "learning_rate": 4.8225915113406696e-05, | |
| "loss": 0.0223, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 1.6288, | |
| "grad_norm": 0.608927845954895, | |
| "learning_rate": 4.811363125982484e-05, | |
| "loss": 0.0236, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 1.6320000000000001, | |
| "grad_norm": 0.4526962339878082, | |
| "learning_rate": 4.8001347406242984e-05, | |
| "loss": 0.0225, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 1.6352, | |
| "grad_norm": 0.48219379782676697, | |
| "learning_rate": 4.788906355266113e-05, | |
| "loss": 0.0206, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 1.6383999999999999, | |
| "grad_norm": 0.4380073845386505, | |
| "learning_rate": 4.777677969907928e-05, | |
| "loss": 0.0179, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 1.6416, | |
| "grad_norm": 0.4661351144313812, | |
| "learning_rate": 4.766449584549742e-05, | |
| "loss": 0.0211, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 1.6448, | |
| "grad_norm": 0.6973872184753418, | |
| "learning_rate": 4.755221199191556e-05, | |
| "loss": 0.0185, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 1.6480000000000001, | |
| "grad_norm": 1.0947307348251343, | |
| "learning_rate": 4.743992813833371e-05, | |
| "loss": 0.0214, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 1.6512, | |
| "grad_norm": 0.4728698432445526, | |
| "learning_rate": 4.7327644284751854e-05, | |
| "loss": 0.0173, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 1.6543999999999999, | |
| "grad_norm": 0.6490640640258789, | |
| "learning_rate": 4.7215360431170005e-05, | |
| "loss": 0.0209, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 1.6576, | |
| "grad_norm": 0.4097549021244049, | |
| "learning_rate": 4.710307657758814e-05, | |
| "loss": 0.0212, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 1.6608, | |
| "grad_norm": 0.5928251147270203, | |
| "learning_rate": 4.6990792724006286e-05, | |
| "loss": 0.0198, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 1.6640000000000001, | |
| "grad_norm": 0.30284154415130615, | |
| "learning_rate": 4.687850887042444e-05, | |
| "loss": 0.018, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 1.6672, | |
| "grad_norm": 0.4404650330543518, | |
| "learning_rate": 4.676622501684258e-05, | |
| "loss": 0.0187, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 1.6703999999999999, | |
| "grad_norm": 0.9772382378578186, | |
| "learning_rate": 4.6653941163260725e-05, | |
| "loss": 0.0198, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 1.6736, | |
| "grad_norm": 0.29454249143600464, | |
| "learning_rate": 4.654165730967887e-05, | |
| "loss": 0.0233, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 1.6768, | |
| "grad_norm": 0.4531678855419159, | |
| "learning_rate": 4.642937345609701e-05, | |
| "loss": 0.0229, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 1.6800000000000002, | |
| "grad_norm": 0.7738605737686157, | |
| "learning_rate": 4.631708960251516e-05, | |
| "loss": 0.02, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 1.6832, | |
| "grad_norm": 0.38039132952690125, | |
| "learning_rate": 4.620480574893331e-05, | |
| "loss": 0.0174, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 1.6864, | |
| "grad_norm": 0.5301956534385681, | |
| "learning_rate": 4.609252189535145e-05, | |
| "loss": 0.0207, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 1.6896, | |
| "grad_norm": 0.4559171497821808, | |
| "learning_rate": 4.5980238041769595e-05, | |
| "loss": 0.0184, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 1.6928, | |
| "grad_norm": 0.4752689301967621, | |
| "learning_rate": 4.586795418818774e-05, | |
| "loss": 0.0237, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 1.696, | |
| "grad_norm": 0.4270172417163849, | |
| "learning_rate": 4.575567033460589e-05, | |
| "loss": 0.0219, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 1.6992, | |
| "grad_norm": 0.2876232862472534, | |
| "learning_rate": 4.564338648102403e-05, | |
| "loss": 0.0176, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 1.7024, | |
| "grad_norm": 0.5778360366821289, | |
| "learning_rate": 4.553110262744217e-05, | |
| "loss": 0.0204, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 1.7056, | |
| "grad_norm": 0.29663243889808655, | |
| "learning_rate": 4.541881877386032e-05, | |
| "loss": 0.0196, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 1.7088, | |
| "grad_norm": 0.6995474100112915, | |
| "learning_rate": 4.5306534920278465e-05, | |
| "loss": 0.0197, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 1.712, | |
| "grad_norm": 0.375017374753952, | |
| "learning_rate": 4.5194251066696616e-05, | |
| "loss": 0.0187, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 1.7151999999999998, | |
| "grad_norm": 0.41007381677627563, | |
| "learning_rate": 4.508196721311476e-05, | |
| "loss": 0.0189, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 1.7184, | |
| "grad_norm": 0.4567541182041168, | |
| "learning_rate": 4.4969683359532897e-05, | |
| "loss": 0.0164, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 1.7216, | |
| "grad_norm": 0.3248382806777954, | |
| "learning_rate": 4.485739950595105e-05, | |
| "loss": 0.0204, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 1.7248, | |
| "grad_norm": 0.6825451254844666, | |
| "learning_rate": 4.474511565236919e-05, | |
| "loss": 0.0162, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 1.728, | |
| "grad_norm": 1.0336426496505737, | |
| "learning_rate": 4.463283179878734e-05, | |
| "loss": 0.0215, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 1.7311999999999999, | |
| "grad_norm": 0.5652924180030823, | |
| "learning_rate": 4.452054794520548e-05, | |
| "loss": 0.0182, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 1.7344, | |
| "grad_norm": 0.6609498262405396, | |
| "learning_rate": 4.440826409162362e-05, | |
| "loss": 0.0197, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 1.7376, | |
| "grad_norm": 0.37504124641418457, | |
| "learning_rate": 4.4295980238041774e-05, | |
| "loss": 0.0201, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 1.7408000000000001, | |
| "grad_norm": 0.467725932598114, | |
| "learning_rate": 4.418369638445992e-05, | |
| "loss": 0.0174, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 1.744, | |
| "grad_norm": 0.8544847369194031, | |
| "learning_rate": 4.407141253087806e-05, | |
| "loss": 0.017, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 1.7471999999999999, | |
| "grad_norm": 0.508170485496521, | |
| "learning_rate": 4.3959128677296205e-05, | |
| "loss": 0.0194, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 1.7504, | |
| "grad_norm": 0.671903133392334, | |
| "learning_rate": 4.384684482371435e-05, | |
| "loss": 0.0191, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 1.7536, | |
| "grad_norm": 0.32661280035972595, | |
| "learning_rate": 4.37345609701325e-05, | |
| "loss": 0.0205, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 1.7568000000000001, | |
| "grad_norm": 0.3774755895137787, | |
| "learning_rate": 4.3622277116550644e-05, | |
| "loss": 0.0182, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "grad_norm": 0.2705847918987274, | |
| "learning_rate": 4.350999326296879e-05, | |
| "loss": 0.0195, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 1.7631999999999999, | |
| "grad_norm": 0.5216535329818726, | |
| "learning_rate": 4.339770940938693e-05, | |
| "loss": 0.0177, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 1.7664, | |
| "grad_norm": 0.6242872476577759, | |
| "learning_rate": 4.3285425555805075e-05, | |
| "loss": 0.0201, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 1.7696, | |
| "grad_norm": 0.6096400022506714, | |
| "learning_rate": 4.3173141702223226e-05, | |
| "loss": 0.0334, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 1.7728000000000002, | |
| "grad_norm": 0.503533124923706, | |
| "learning_rate": 4.306085784864137e-05, | |
| "loss": 0.0189, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 1.776, | |
| "grad_norm": 1.0124377012252808, | |
| "learning_rate": 4.294857399505951e-05, | |
| "loss": 0.0213, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 1.7792, | |
| "grad_norm": 0.3943813443183899, | |
| "learning_rate": 4.283629014147766e-05, | |
| "loss": 0.0202, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 1.7824, | |
| "grad_norm": 0.7759073972702026, | |
| "learning_rate": 4.27240062878958e-05, | |
| "loss": 0.0222, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 1.7856, | |
| "grad_norm": 0.6223391890525818, | |
| "learning_rate": 4.261172243431395e-05, | |
| "loss": 0.02, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 1.7888, | |
| "grad_norm": 0.3250173330307007, | |
| "learning_rate": 4.249943858073209e-05, | |
| "loss": 0.0182, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 1.792, | |
| "grad_norm": 0.6339522004127502, | |
| "learning_rate": 4.2387154727150233e-05, | |
| "loss": 0.0201, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 1.7952, | |
| "grad_norm": 0.2987574338912964, | |
| "learning_rate": 4.2274870873568384e-05, | |
| "loss": 0.0225, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 1.7984, | |
| "grad_norm": 0.8467134833335876, | |
| "learning_rate": 4.216258701998653e-05, | |
| "loss": 0.0211, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 1.8016, | |
| "grad_norm": 0.26669514179229736, | |
| "learning_rate": 4.205030316640468e-05, | |
| "loss": 0.0198, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 1.8048, | |
| "grad_norm": 0.9606248140335083, | |
| "learning_rate": 4.1938019312822816e-05, | |
| "loss": 0.0214, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 1.808, | |
| "grad_norm": 0.37498486042022705, | |
| "learning_rate": 4.182573545924096e-05, | |
| "loss": 0.0177, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 1.8112, | |
| "grad_norm": 0.37867698073387146, | |
| "learning_rate": 4.171345160565911e-05, | |
| "loss": 0.0165, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 1.8144, | |
| "grad_norm": 0.3366365432739258, | |
| "learning_rate": 4.1601167752077254e-05, | |
| "loss": 0.0181, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 1.8176, | |
| "grad_norm": 0.6919273138046265, | |
| "learning_rate": 4.14888838984954e-05, | |
| "loss": 0.0197, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 1.8208, | |
| "grad_norm": 0.4418683350086212, | |
| "learning_rate": 4.137660004491354e-05, | |
| "loss": 0.0182, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 1.8239999999999998, | |
| "grad_norm": 0.7254313826560974, | |
| "learning_rate": 4.1264316191331686e-05, | |
| "loss": 0.0165, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 1.8272, | |
| "grad_norm": 0.40990132093429565, | |
| "learning_rate": 4.115203233774984e-05, | |
| "loss": 0.0254, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 1.8304, | |
| "grad_norm": 0.5177503228187561, | |
| "learning_rate": 4.103974848416798e-05, | |
| "loss": 0.0194, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 1.8336000000000001, | |
| "grad_norm": 0.47440338134765625, | |
| "learning_rate": 4.092746463058612e-05, | |
| "loss": 0.0191, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 1.8368, | |
| "grad_norm": 0.5071251392364502, | |
| "learning_rate": 4.081518077700427e-05, | |
| "loss": 0.0181, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 1.8399999999999999, | |
| "grad_norm": 0.4265029728412628, | |
| "learning_rate": 4.070289692342241e-05, | |
| "loss": 0.0161, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 1.8432, | |
| "grad_norm": 0.5612635612487793, | |
| "learning_rate": 4.059061306984056e-05, | |
| "loss": 0.0167, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 1.8464, | |
| "grad_norm": 0.571967601776123, | |
| "learning_rate": 4.047832921625871e-05, | |
| "loss": 0.0172, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 1.8496000000000001, | |
| "grad_norm": 0.497354656457901, | |
| "learning_rate": 4.0366045362676844e-05, | |
| "loss": 0.0198, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 1.8528, | |
| "grad_norm": 0.26121917366981506, | |
| "learning_rate": 4.0253761509094995e-05, | |
| "loss": 0.0155, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 1.8559999999999999, | |
| "grad_norm": 0.2875368595123291, | |
| "learning_rate": 4.014147765551314e-05, | |
| "loss": 0.017, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 1.8592, | |
| "grad_norm": 0.34400826692581177, | |
| "learning_rate": 4.002919380193129e-05, | |
| "loss": 0.0178, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 1.8624, | |
| "grad_norm": 0.7186577916145325, | |
| "learning_rate": 3.9916909948349426e-05, | |
| "loss": 0.0174, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 1.8656000000000001, | |
| "grad_norm": 0.35234335064888, | |
| "learning_rate": 3.980462609476757e-05, | |
| "loss": 0.016, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 1.8688, | |
| "grad_norm": 0.555631697177887, | |
| "learning_rate": 3.969234224118572e-05, | |
| "loss": 0.0167, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 1.8719999999999999, | |
| "grad_norm": 0.5011041760444641, | |
| "learning_rate": 3.9580058387603865e-05, | |
| "loss": 0.0157, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 1.8752, | |
| "grad_norm": 0.5591394901275635, | |
| "learning_rate": 3.946777453402201e-05, | |
| "loss": 0.016, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 1.8784, | |
| "grad_norm": 0.4178447723388672, | |
| "learning_rate": 3.935549068044015e-05, | |
| "loss": 0.0172, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 1.8816000000000002, | |
| "grad_norm": 0.7927733659744263, | |
| "learning_rate": 3.9243206826858297e-05, | |
| "loss": 0.0176, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 1.8848, | |
| "grad_norm": 0.5954960584640503, | |
| "learning_rate": 3.913092297327645e-05, | |
| "loss": 0.0171, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 1.888, | |
| "grad_norm": 0.42012980580329895, | |
| "learning_rate": 3.901863911969459e-05, | |
| "loss": 0.0161, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 1.8912, | |
| "grad_norm": 0.3502364158630371, | |
| "learning_rate": 3.8906355266112735e-05, | |
| "loss": 0.0167, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 1.8944, | |
| "grad_norm": 0.9605923295021057, | |
| "learning_rate": 3.879407141253088e-05, | |
| "loss": 0.0177, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 1.8976, | |
| "grad_norm": 0.7831478714942932, | |
| "learning_rate": 3.868178755894902e-05, | |
| "loss": 0.0212, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 1.9008, | |
| "grad_norm": 0.47782811522483826, | |
| "learning_rate": 3.8569503705367174e-05, | |
| "loss": 0.0168, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 1.904, | |
| "grad_norm": 0.3955000638961792, | |
| "learning_rate": 3.845721985178532e-05, | |
| "loss": 0.0179, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 1.9072, | |
| "grad_norm": 0.32757633924484253, | |
| "learning_rate": 3.834493599820346e-05, | |
| "loss": 0.019, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 1.9104, | |
| "grad_norm": 0.3327496647834778, | |
| "learning_rate": 3.8232652144621605e-05, | |
| "loss": 0.0138, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 1.9136, | |
| "grad_norm": 0.30767822265625, | |
| "learning_rate": 3.812036829103975e-05, | |
| "loss": 0.0178, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 1.9167999999999998, | |
| "grad_norm": 0.6151719093322754, | |
| "learning_rate": 3.80080844374579e-05, | |
| "loss": 0.0164, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "grad_norm": 0.3100316524505615, | |
| "learning_rate": 3.789580058387604e-05, | |
| "loss": 0.0164, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 1.9232, | |
| "grad_norm": 0.32930606603622437, | |
| "learning_rate": 3.778351673029418e-05, | |
| "loss": 0.0173, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 1.9264000000000001, | |
| "grad_norm": 0.5008950233459473, | |
| "learning_rate": 3.767123287671233e-05, | |
| "loss": 0.0175, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 1.9296, | |
| "grad_norm": 0.616174578666687, | |
| "learning_rate": 3.7558949023130475e-05, | |
| "loss": 0.0214, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 1.9327999999999999, | |
| "grad_norm": 0.5775612592697144, | |
| "learning_rate": 3.744666516954862e-05, | |
| "loss": 0.0163, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 1.936, | |
| "grad_norm": 0.3990277349948883, | |
| "learning_rate": 3.733438131596676e-05, | |
| "loss": 0.0143, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 1.9392, | |
| "grad_norm": 0.3262655735015869, | |
| "learning_rate": 3.722209746238491e-05, | |
| "loss": 0.0154, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 1.9424000000000001, | |
| "grad_norm": 0.3871186673641205, | |
| "learning_rate": 3.710981360880306e-05, | |
| "loss": 0.023, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 1.9456, | |
| "grad_norm": 0.5081314444541931, | |
| "learning_rate": 3.69975297552212e-05, | |
| "loss": 0.0196, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 1.9487999999999999, | |
| "grad_norm": 0.3831084668636322, | |
| "learning_rate": 3.6885245901639346e-05, | |
| "loss": 0.0203, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 1.952, | |
| "grad_norm": 0.3652055263519287, | |
| "learning_rate": 3.677296204805749e-05, | |
| "loss": 0.0196, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 1.9552, | |
| "grad_norm": 0.35899677872657776, | |
| "learning_rate": 3.666067819447563e-05, | |
| "loss": 0.0168, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 1.9584000000000001, | |
| "grad_norm": 0.6105409264564514, | |
| "learning_rate": 3.6548394340893784e-05, | |
| "loss": 0.0177, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 1.9616, | |
| "grad_norm": 0.4310314357280731, | |
| "learning_rate": 3.643611048731193e-05, | |
| "loss": 0.016, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 1.9647999999999999, | |
| "grad_norm": 0.4240395426750183, | |
| "learning_rate": 3.632382663373007e-05, | |
| "loss": 0.0151, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 1.968, | |
| "grad_norm": 0.3501630127429962, | |
| "learning_rate": 3.6211542780148216e-05, | |
| "loss": 0.0146, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 1.9712, | |
| "grad_norm": 0.9272508025169373, | |
| "learning_rate": 3.609925892656636e-05, | |
| "loss": 0.017, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 1.9744000000000002, | |
| "grad_norm": 0.38346970081329346, | |
| "learning_rate": 3.598697507298451e-05, | |
| "loss": 0.0162, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 1.9776, | |
| "grad_norm": 0.46471330523490906, | |
| "learning_rate": 3.587469121940265e-05, | |
| "loss": 0.0198, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 1.9808, | |
| "grad_norm": 0.4116153120994568, | |
| "learning_rate": 3.57624073658208e-05, | |
| "loss": 0.0145, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 1.984, | |
| "grad_norm": 0.3835083544254303, | |
| "learning_rate": 3.565012351223894e-05, | |
| "loss": 0.0145, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 1.9872, | |
| "grad_norm": 0.4784146249294281, | |
| "learning_rate": 3.5537839658657086e-05, | |
| "loss": 0.016, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 1.9904, | |
| "grad_norm": 0.3538087010383606, | |
| "learning_rate": 3.542555580507524e-05, | |
| "loss": 0.0148, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 1.9936, | |
| "grad_norm": 0.3081023395061493, | |
| "learning_rate": 3.5313271951493374e-05, | |
| "loss": 0.0139, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 1.9968, | |
| "grad_norm": 0.3112189471721649, | |
| "learning_rate": 3.5200988097911524e-05, | |
| "loss": 0.0153, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.5729153156280518, | |
| "learning_rate": 3.508870424432967e-05, | |
| "loss": 0.0157, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 2.0032, | |
| "grad_norm": 0.43416017293930054, | |
| "learning_rate": 3.497642039074781e-05, | |
| "loss": 0.017, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 2.0064, | |
| "grad_norm": 0.21156848967075348, | |
| "learning_rate": 3.4864136537165956e-05, | |
| "loss": 0.017, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 2.0096, | |
| "grad_norm": 0.409333199262619, | |
| "learning_rate": 3.47518526835841e-05, | |
| "loss": 0.0166, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 2.0128, | |
| "grad_norm": 0.5283350944519043, | |
| "learning_rate": 3.4639568830002244e-05, | |
| "loss": 0.0142, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 2.016, | |
| "grad_norm": 0.31264883279800415, | |
| "learning_rate": 3.4527284976420395e-05, | |
| "loss": 0.0158, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 2.0192, | |
| "grad_norm": 0.4449731111526489, | |
| "learning_rate": 3.441500112283854e-05, | |
| "loss": 0.0158, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 2.0224, | |
| "grad_norm": 0.47158142924308777, | |
| "learning_rate": 3.430271726925668e-05, | |
| "loss": 0.0154, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 2.0256, | |
| "grad_norm": 0.45148786902427673, | |
| "learning_rate": 3.4190433415674826e-05, | |
| "loss": 0.0175, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 2.0288, | |
| "grad_norm": 0.36628538370132446, | |
| "learning_rate": 3.407814956209297e-05, | |
| "loss": 0.0158, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 2.032, | |
| "grad_norm": 0.33083608746528625, | |
| "learning_rate": 3.396586570851112e-05, | |
| "loss": 0.0151, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 2.0352, | |
| "grad_norm": 0.2731871008872986, | |
| "learning_rate": 3.3853581854929265e-05, | |
| "loss": 0.0134, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 2.0384, | |
| "grad_norm": 0.36465370655059814, | |
| "learning_rate": 3.374129800134741e-05, | |
| "loss": 0.0155, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 2.0416, | |
| "grad_norm": 0.5704035758972168, | |
| "learning_rate": 3.362901414776555e-05, | |
| "loss": 0.0154, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 2.0448, | |
| "grad_norm": 0.5511239767074585, | |
| "learning_rate": 3.3516730294183697e-05, | |
| "loss": 0.0148, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 2.048, | |
| "grad_norm": 0.4203243851661682, | |
| "learning_rate": 3.340444644060185e-05, | |
| "loss": 0.0139, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 2.0512, | |
| "grad_norm": 0.49790966510772705, | |
| "learning_rate": 3.3292162587019984e-05, | |
| "loss": 0.0147, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 2.0544, | |
| "grad_norm": 0.5547811388969421, | |
| "learning_rate": 3.3179878733438135e-05, | |
| "loss": 0.0146, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 2.0576, | |
| "grad_norm": 0.33840295672416687, | |
| "learning_rate": 3.306759487985628e-05, | |
| "loss": 0.0145, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 2.0608, | |
| "grad_norm": 0.6611810326576233, | |
| "learning_rate": 3.295531102627442e-05, | |
| "loss": 0.0135, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 2.064, | |
| "grad_norm": 0.28565412759780884, | |
| "learning_rate": 3.284302717269257e-05, | |
| "loss": 0.0134, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 2.0672, | |
| "grad_norm": 0.4924852252006531, | |
| "learning_rate": 3.273074331911071e-05, | |
| "loss": 0.0148, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 2.0704, | |
| "grad_norm": 0.25071224570274353, | |
| "learning_rate": 3.261845946552886e-05, | |
| "loss": 0.0135, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 2.0736, | |
| "grad_norm": 0.5742290019989014, | |
| "learning_rate": 3.2506175611947005e-05, | |
| "loss": 0.0167, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 2.0768, | |
| "grad_norm": 0.37173357605934143, | |
| "learning_rate": 3.239389175836515e-05, | |
| "loss": 0.015, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 2.08, | |
| "grad_norm": 0.420987069606781, | |
| "learning_rate": 3.228160790478329e-05, | |
| "loss": 0.0138, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 2.0832, | |
| "grad_norm": 0.32014745473861694, | |
| "learning_rate": 3.216932405120144e-05, | |
| "loss": 0.0128, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 2.0864, | |
| "grad_norm": 0.44675499200820923, | |
| "learning_rate": 3.205704019761958e-05, | |
| "loss": 0.0156, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 2.0896, | |
| "grad_norm": 0.6601077318191528, | |
| "learning_rate": 3.194475634403773e-05, | |
| "loss": 0.0145, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 2.0928, | |
| "grad_norm": 0.5118780732154846, | |
| "learning_rate": 3.1832472490455875e-05, | |
| "loss": 0.0147, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 2.096, | |
| "grad_norm": 0.21373315155506134, | |
| "learning_rate": 3.172018863687402e-05, | |
| "loss": 0.0121, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 2.0992, | |
| "grad_norm": 0.318210244178772, | |
| "learning_rate": 3.160790478329216e-05, | |
| "loss": 0.0163, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 2.1024, | |
| "grad_norm": 0.35482823848724365, | |
| "learning_rate": 3.149562092971031e-05, | |
| "loss": 0.0124, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 2.1056, | |
| "grad_norm": 0.6161574721336365, | |
| "learning_rate": 3.138333707612846e-05, | |
| "loss": 0.0119, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 2.1088, | |
| "grad_norm": 0.49385738372802734, | |
| "learning_rate": 3.1271053222546595e-05, | |
| "loss": 0.0157, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 2.112, | |
| "grad_norm": 0.446953147649765, | |
| "learning_rate": 3.1158769368964746e-05, | |
| "loss": 0.0149, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 2.1152, | |
| "grad_norm": 0.44312936067581177, | |
| "learning_rate": 3.104648551538289e-05, | |
| "loss": 0.0152, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 2.1184, | |
| "grad_norm": 0.5357773900032043, | |
| "learning_rate": 3.093420166180103e-05, | |
| "loss": 0.0161, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 2.1216, | |
| "grad_norm": 0.4334256947040558, | |
| "learning_rate": 3.082191780821918e-05, | |
| "loss": 0.014, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 2.1248, | |
| "grad_norm": 0.5340980291366577, | |
| "learning_rate": 3.070963395463732e-05, | |
| "loss": 0.0126, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 2.128, | |
| "grad_norm": 0.3426651060581207, | |
| "learning_rate": 3.059735010105547e-05, | |
| "loss": 0.0135, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 2.1312, | |
| "grad_norm": 0.7034550905227661, | |
| "learning_rate": 3.0485066247473616e-05, | |
| "loss": 0.0147, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 2.1344, | |
| "grad_norm": 0.4244144558906555, | |
| "learning_rate": 3.0372782393891756e-05, | |
| "loss": 0.0154, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 2.1376, | |
| "grad_norm": 0.3827649652957916, | |
| "learning_rate": 3.0260498540309907e-05, | |
| "loss": 0.0162, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 2.1408, | |
| "grad_norm": 0.35229286551475525, | |
| "learning_rate": 3.0148214686728047e-05, | |
| "loss": 0.0135, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 2.144, | |
| "grad_norm": 0.5800332427024841, | |
| "learning_rate": 3.0035930833146198e-05, | |
| "loss": 0.014, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 2.1471999999999998, | |
| "grad_norm": 0.3881014287471771, | |
| "learning_rate": 2.9923646979564342e-05, | |
| "loss": 0.0144, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 2.1504, | |
| "grad_norm": 0.315604031085968, | |
| "learning_rate": 2.9811363125982483e-05, | |
| "loss": 0.0143, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 2.1536, | |
| "grad_norm": 0.33524957299232483, | |
| "learning_rate": 2.9699079272400633e-05, | |
| "loss": 0.0132, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 2.1568, | |
| "grad_norm": 0.40424782037734985, | |
| "learning_rate": 2.9586795418818774e-05, | |
| "loss": 0.0118, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 2.16, | |
| "grad_norm": 0.3146498203277588, | |
| "learning_rate": 2.9474511565236924e-05, | |
| "loss": 0.013, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 2.1632, | |
| "grad_norm": 0.20534031093120575, | |
| "learning_rate": 2.9362227711655065e-05, | |
| "loss": 0.0145, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 2.1664, | |
| "grad_norm": 0.5735411047935486, | |
| "learning_rate": 2.924994385807321e-05, | |
| "loss": 0.0124, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 2.1696, | |
| "grad_norm": 0.4090406000614166, | |
| "learning_rate": 2.9137660004491356e-05, | |
| "loss": 0.014, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 2.1728, | |
| "grad_norm": 0.35889557003974915, | |
| "learning_rate": 2.90253761509095e-05, | |
| "loss": 0.0132, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 2.176, | |
| "grad_norm": 0.4188757836818695, | |
| "learning_rate": 2.8913092297327644e-05, | |
| "loss": 0.0155, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 2.1792, | |
| "grad_norm": 0.5210480093955994, | |
| "learning_rate": 2.880080844374579e-05, | |
| "loss": 0.0151, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 2.1824, | |
| "grad_norm": 0.46766239404678345, | |
| "learning_rate": 2.8688524590163935e-05, | |
| "loss": 0.011, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 2.1856, | |
| "grad_norm": 0.3399997055530548, | |
| "learning_rate": 2.8576240736582082e-05, | |
| "loss": 0.014, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 2.1888, | |
| "grad_norm": 0.39583390951156616, | |
| "learning_rate": 2.8463956883000226e-05, | |
| "loss": 0.0139, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 2.192, | |
| "grad_norm": 0.43808916211128235, | |
| "learning_rate": 2.835167302941837e-05, | |
| "loss": 0.0152, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 2.1952, | |
| "grad_norm": 0.29650449752807617, | |
| "learning_rate": 2.8239389175836517e-05, | |
| "loss": 0.0129, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 2.1984, | |
| "grad_norm": 0.5171502828598022, | |
| "learning_rate": 2.812710532225466e-05, | |
| "loss": 0.013, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 2.2016, | |
| "grad_norm": 0.45514437556266785, | |
| "learning_rate": 2.801482146867281e-05, | |
| "loss": 0.0109, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 2.2048, | |
| "grad_norm": 0.3013005256652832, | |
| "learning_rate": 2.7902537615090953e-05, | |
| "loss": 0.012, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 2.208, | |
| "grad_norm": 0.2979121804237366, | |
| "learning_rate": 2.7790253761509093e-05, | |
| "loss": 0.0216, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 2.2112, | |
| "grad_norm": 0.5382673740386963, | |
| "learning_rate": 2.7677969907927244e-05, | |
| "loss": 0.0141, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 2.2144, | |
| "grad_norm": 0.27797409892082214, | |
| "learning_rate": 2.7565686054345384e-05, | |
| "loss": 0.0157, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 2.2176, | |
| "grad_norm": 0.5483522415161133, | |
| "learning_rate": 2.7453402200763535e-05, | |
| "loss": 0.0134, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 2.2208, | |
| "grad_norm": 0.4163416922092438, | |
| "learning_rate": 2.7341118347181675e-05, | |
| "loss": 0.0124, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 2.224, | |
| "grad_norm": 0.40335503220558167, | |
| "learning_rate": 2.722883449359982e-05, | |
| "loss": 0.0162, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 2.2272, | |
| "grad_norm": 0.4244849979877472, | |
| "learning_rate": 2.7116550640017967e-05, | |
| "loss": 0.0119, | |
| "step": 6960 | |
| }, | |
| { | |
| "epoch": 2.2304, | |
| "grad_norm": 0.43158531188964844, | |
| "learning_rate": 2.700426678643611e-05, | |
| "loss": 0.0119, | |
| "step": 6970 | |
| }, | |
| { | |
| "epoch": 2.2336, | |
| "grad_norm": 0.29854100942611694, | |
| "learning_rate": 2.6891982932854258e-05, | |
| "loss": 0.014, | |
| "step": 6980 | |
| }, | |
| { | |
| "epoch": 2.2368, | |
| "grad_norm": 1.0485972166061401, | |
| "learning_rate": 2.6779699079272402e-05, | |
| "loss": 0.0119, | |
| "step": 6990 | |
| }, | |
| { | |
| "epoch": 2.24, | |
| "grad_norm": 0.45042625069618225, | |
| "learning_rate": 2.6667415225690546e-05, | |
| "loss": 0.0153, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 2.2432, | |
| "grad_norm": 0.2554261088371277, | |
| "learning_rate": 2.6555131372108693e-05, | |
| "loss": 0.013, | |
| "step": 7010 | |
| }, | |
| { | |
| "epoch": 2.2464, | |
| "grad_norm": 0.26204830408096313, | |
| "learning_rate": 2.6442847518526837e-05, | |
| "loss": 0.0137, | |
| "step": 7020 | |
| }, | |
| { | |
| "epoch": 2.2496, | |
| "grad_norm": 0.3182348906993866, | |
| "learning_rate": 2.6330563664944984e-05, | |
| "loss": 0.0121, | |
| "step": 7030 | |
| }, | |
| { | |
| "epoch": 2.2528, | |
| "grad_norm": 0.32103246450424194, | |
| "learning_rate": 2.6218279811363128e-05, | |
| "loss": 0.0167, | |
| "step": 7040 | |
| }, | |
| { | |
| "epoch": 2.2560000000000002, | |
| "grad_norm": 0.5341901779174805, | |
| "learning_rate": 2.6105995957781272e-05, | |
| "loss": 0.0125, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 2.2592, | |
| "grad_norm": 0.28886404633522034, | |
| "learning_rate": 2.599371210419942e-05, | |
| "loss": 0.0116, | |
| "step": 7060 | |
| }, | |
| { | |
| "epoch": 2.2624, | |
| "grad_norm": 0.8353757262229919, | |
| "learning_rate": 2.5881428250617563e-05, | |
| "loss": 0.0137, | |
| "step": 7070 | |
| }, | |
| { | |
| "epoch": 2.2656, | |
| "grad_norm": 0.21472221612930298, | |
| "learning_rate": 2.5769144397035704e-05, | |
| "loss": 0.0127, | |
| "step": 7080 | |
| }, | |
| { | |
| "epoch": 2.2688, | |
| "grad_norm": 0.34826382994651794, | |
| "learning_rate": 2.5656860543453854e-05, | |
| "loss": 0.012, | |
| "step": 7090 | |
| }, | |
| { | |
| "epoch": 2.2720000000000002, | |
| "grad_norm": 0.47330015897750854, | |
| "learning_rate": 2.5544576689871995e-05, | |
| "loss": 0.0135, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 2.2752, | |
| "grad_norm": 0.3030329644680023, | |
| "learning_rate": 2.5432292836290145e-05, | |
| "loss": 0.0141, | |
| "step": 7110 | |
| }, | |
| { | |
| "epoch": 2.2784, | |
| "grad_norm": 0.33040523529052734, | |
| "learning_rate": 2.5320008982708286e-05, | |
| "loss": 0.0127, | |
| "step": 7120 | |
| }, | |
| { | |
| "epoch": 2.2816, | |
| "grad_norm": 0.389413058757782, | |
| "learning_rate": 2.520772512912643e-05, | |
| "loss": 0.0111, | |
| "step": 7130 | |
| }, | |
| { | |
| "epoch": 2.2848, | |
| "grad_norm": 0.2874760031700134, | |
| "learning_rate": 2.509544127554458e-05, | |
| "loss": 0.0112, | |
| "step": 7140 | |
| }, | |
| { | |
| "epoch": 2.288, | |
| "grad_norm": 0.32801946997642517, | |
| "learning_rate": 2.498315742196272e-05, | |
| "loss": 0.0127, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 2.2912, | |
| "grad_norm": 0.3064332604408264, | |
| "learning_rate": 2.487087356838087e-05, | |
| "loss": 0.0117, | |
| "step": 7160 | |
| }, | |
| { | |
| "epoch": 2.2944, | |
| "grad_norm": 0.5337028503417969, | |
| "learning_rate": 2.4758589714799012e-05, | |
| "loss": 0.0131, | |
| "step": 7170 | |
| }, | |
| { | |
| "epoch": 2.2976, | |
| "grad_norm": 0.4230441153049469, | |
| "learning_rate": 2.464630586121716e-05, | |
| "loss": 0.012, | |
| "step": 7180 | |
| }, | |
| { | |
| "epoch": 2.3008, | |
| "grad_norm": 0.44456297159194946, | |
| "learning_rate": 2.4534022007635303e-05, | |
| "loss": 0.0124, | |
| "step": 7190 | |
| }, | |
| { | |
| "epoch": 2.304, | |
| "grad_norm": 0.3513798117637634, | |
| "learning_rate": 2.4421738154053447e-05, | |
| "loss": 0.0128, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 2.3072, | |
| "grad_norm": 0.3926803767681122, | |
| "learning_rate": 2.4309454300471595e-05, | |
| "loss": 0.0127, | |
| "step": 7210 | |
| }, | |
| { | |
| "epoch": 2.3104, | |
| "grad_norm": 0.4010221064090729, | |
| "learning_rate": 2.419717044688974e-05, | |
| "loss": 0.0113, | |
| "step": 7220 | |
| }, | |
| { | |
| "epoch": 2.3136, | |
| "grad_norm": 0.460251122713089, | |
| "learning_rate": 2.4084886593307886e-05, | |
| "loss": 0.0118, | |
| "step": 7230 | |
| }, | |
| { | |
| "epoch": 2.3168, | |
| "grad_norm": 0.3645784854888916, | |
| "learning_rate": 2.3972602739726026e-05, | |
| "loss": 0.0124, | |
| "step": 7240 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "grad_norm": 0.37697380781173706, | |
| "learning_rate": 2.3860318886144174e-05, | |
| "loss": 0.0151, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 2.3232, | |
| "grad_norm": 0.25935980677604675, | |
| "learning_rate": 2.3748035032562318e-05, | |
| "loss": 0.0119, | |
| "step": 7260 | |
| }, | |
| { | |
| "epoch": 2.3264, | |
| "grad_norm": 0.4397886097431183, | |
| "learning_rate": 2.3635751178980465e-05, | |
| "loss": 0.0123, | |
| "step": 7270 | |
| }, | |
| { | |
| "epoch": 2.3296, | |
| "grad_norm": 0.6620543003082275, | |
| "learning_rate": 2.352346732539861e-05, | |
| "loss": 0.0121, | |
| "step": 7280 | |
| }, | |
| { | |
| "epoch": 2.3327999999999998, | |
| "grad_norm": 0.4514158368110657, | |
| "learning_rate": 2.3411183471816753e-05, | |
| "loss": 0.013, | |
| "step": 7290 | |
| }, | |
| { | |
| "epoch": 2.336, | |
| "grad_norm": 0.2665565311908722, | |
| "learning_rate": 2.32988996182349e-05, | |
| "loss": 0.0133, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 2.3392, | |
| "grad_norm": 0.3328794836997986, | |
| "learning_rate": 2.3186615764653044e-05, | |
| "loss": 0.0135, | |
| "step": 7310 | |
| }, | |
| { | |
| "epoch": 2.3424, | |
| "grad_norm": 0.5438057780265808, | |
| "learning_rate": 2.307433191107119e-05, | |
| "loss": 0.0139, | |
| "step": 7320 | |
| }, | |
| { | |
| "epoch": 2.3456, | |
| "grad_norm": 0.22129549086093903, | |
| "learning_rate": 2.2962048057489335e-05, | |
| "loss": 0.0107, | |
| "step": 7330 | |
| }, | |
| { | |
| "epoch": 2.3487999999999998, | |
| "grad_norm": 0.5026663541793823, | |
| "learning_rate": 2.284976420390748e-05, | |
| "loss": 0.0131, | |
| "step": 7340 | |
| }, | |
| { | |
| "epoch": 2.352, | |
| "grad_norm": 0.40014806389808655, | |
| "learning_rate": 2.2737480350325623e-05, | |
| "loss": 0.0132, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 2.3552, | |
| "grad_norm": 0.2861398458480835, | |
| "learning_rate": 2.262519649674377e-05, | |
| "loss": 0.012, | |
| "step": 7360 | |
| }, | |
| { | |
| "epoch": 2.3584, | |
| "grad_norm": 0.4113241732120514, | |
| "learning_rate": 2.2512912643161914e-05, | |
| "loss": 0.0116, | |
| "step": 7370 | |
| }, | |
| { | |
| "epoch": 2.3616, | |
| "grad_norm": 0.5746475458145142, | |
| "learning_rate": 2.2400628789580058e-05, | |
| "loss": 0.0133, | |
| "step": 7380 | |
| }, | |
| { | |
| "epoch": 2.3648, | |
| "grad_norm": 0.48967674374580383, | |
| "learning_rate": 2.2288344935998205e-05, | |
| "loss": 0.0136, | |
| "step": 7390 | |
| }, | |
| { | |
| "epoch": 2.368, | |
| "grad_norm": 0.32669082283973694, | |
| "learning_rate": 2.217606108241635e-05, | |
| "loss": 0.0108, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 2.3712, | |
| "grad_norm": 0.41775158047676086, | |
| "learning_rate": 2.2063777228834496e-05, | |
| "loss": 0.0128, | |
| "step": 7410 | |
| }, | |
| { | |
| "epoch": 2.3744, | |
| "grad_norm": 0.549651563167572, | |
| "learning_rate": 2.195149337525264e-05, | |
| "loss": 0.0131, | |
| "step": 7420 | |
| }, | |
| { | |
| "epoch": 2.3776, | |
| "grad_norm": 0.40829822421073914, | |
| "learning_rate": 2.1839209521670784e-05, | |
| "loss": 0.0104, | |
| "step": 7430 | |
| }, | |
| { | |
| "epoch": 2.3808, | |
| "grad_norm": 0.41334331035614014, | |
| "learning_rate": 2.1726925668088928e-05, | |
| "loss": 0.0109, | |
| "step": 7440 | |
| }, | |
| { | |
| "epoch": 2.384, | |
| "grad_norm": 0.3358791172504425, | |
| "learning_rate": 2.1614641814507075e-05, | |
| "loss": 0.0112, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 2.3872, | |
| "grad_norm": 0.3842921257019043, | |
| "learning_rate": 2.150235796092522e-05, | |
| "loss": 0.0117, | |
| "step": 7460 | |
| }, | |
| { | |
| "epoch": 2.3904, | |
| "grad_norm": 0.2613106966018677, | |
| "learning_rate": 2.1390074107343367e-05, | |
| "loss": 0.011, | |
| "step": 7470 | |
| }, | |
| { | |
| "epoch": 2.3936, | |
| "grad_norm": 0.23129789531230927, | |
| "learning_rate": 2.127779025376151e-05, | |
| "loss": 0.0118, | |
| "step": 7480 | |
| }, | |
| { | |
| "epoch": 2.3968, | |
| "grad_norm": 0.4522503912448883, | |
| "learning_rate": 2.1165506400179654e-05, | |
| "loss": 0.0131, | |
| "step": 7490 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "grad_norm": 0.28017547726631165, | |
| "learning_rate": 2.10532225465978e-05, | |
| "loss": 0.0135, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 2.4032, | |
| "grad_norm": 0.5742918252944946, | |
| "learning_rate": 2.0940938693015946e-05, | |
| "loss": 0.0125, | |
| "step": 7510 | |
| }, | |
| { | |
| "epoch": 2.4064, | |
| "grad_norm": 0.2145901769399643, | |
| "learning_rate": 2.082865483943409e-05, | |
| "loss": 0.0106, | |
| "step": 7520 | |
| }, | |
| { | |
| "epoch": 2.4096, | |
| "grad_norm": 0.4646471440792084, | |
| "learning_rate": 2.0716370985852233e-05, | |
| "loss": 0.0149, | |
| "step": 7530 | |
| }, | |
| { | |
| "epoch": 2.4128, | |
| "grad_norm": 0.2790127098560333, | |
| "learning_rate": 2.060408713227038e-05, | |
| "loss": 0.0145, | |
| "step": 7540 | |
| }, | |
| { | |
| "epoch": 2.416, | |
| "grad_norm": 0.3615422546863556, | |
| "learning_rate": 2.0491803278688525e-05, | |
| "loss": 0.0143, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 2.4192, | |
| "grad_norm": 0.4171338379383087, | |
| "learning_rate": 2.0379519425106672e-05, | |
| "loss": 0.0135, | |
| "step": 7560 | |
| }, | |
| { | |
| "epoch": 2.4224, | |
| "grad_norm": 0.3592493236064911, | |
| "learning_rate": 2.0267235571524816e-05, | |
| "loss": 0.0136, | |
| "step": 7570 | |
| }, | |
| { | |
| "epoch": 2.4256, | |
| "grad_norm": 0.4570198655128479, | |
| "learning_rate": 2.015495171794296e-05, | |
| "loss": 0.0109, | |
| "step": 7580 | |
| }, | |
| { | |
| "epoch": 2.4288, | |
| "grad_norm": 0.5207013487815857, | |
| "learning_rate": 2.0042667864361107e-05, | |
| "loss": 0.0137, | |
| "step": 7590 | |
| }, | |
| { | |
| "epoch": 2.432, | |
| "grad_norm": 0.39944949746131897, | |
| "learning_rate": 1.993038401077925e-05, | |
| "loss": 0.012, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 2.4352, | |
| "grad_norm": 0.305179625749588, | |
| "learning_rate": 1.9818100157197398e-05, | |
| "loss": 0.012, | |
| "step": 7610 | |
| }, | |
| { | |
| "epoch": 2.4384, | |
| "grad_norm": 0.4360312223434448, | |
| "learning_rate": 1.970581630361554e-05, | |
| "loss": 0.0108, | |
| "step": 7620 | |
| }, | |
| { | |
| "epoch": 2.4416, | |
| "grad_norm": 0.5267120599746704, | |
| "learning_rate": 1.9593532450033686e-05, | |
| "loss": 0.0108, | |
| "step": 7630 | |
| }, | |
| { | |
| "epoch": 2.4448, | |
| "grad_norm": 0.39714694023132324, | |
| "learning_rate": 1.948124859645183e-05, | |
| "loss": 0.0122, | |
| "step": 7640 | |
| }, | |
| { | |
| "epoch": 2.448, | |
| "grad_norm": 0.3602929711341858, | |
| "learning_rate": 1.9368964742869977e-05, | |
| "loss": 0.0117, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 2.4512, | |
| "grad_norm": 0.5214937925338745, | |
| "learning_rate": 1.925668088928812e-05, | |
| "loss": 0.0128, | |
| "step": 7660 | |
| }, | |
| { | |
| "epoch": 2.4544, | |
| "grad_norm": 0.26656728982925415, | |
| "learning_rate": 1.9144397035706265e-05, | |
| "loss": 0.0111, | |
| "step": 7670 | |
| }, | |
| { | |
| "epoch": 2.4576000000000002, | |
| "grad_norm": 0.32076728343963623, | |
| "learning_rate": 1.9032113182124412e-05, | |
| "loss": 0.0125, | |
| "step": 7680 | |
| }, | |
| { | |
| "epoch": 2.4608, | |
| "grad_norm": 0.49612459540367126, | |
| "learning_rate": 1.8919829328542556e-05, | |
| "loss": 0.0132, | |
| "step": 7690 | |
| }, | |
| { | |
| "epoch": 2.464, | |
| "grad_norm": 0.35860371589660645, | |
| "learning_rate": 1.8807545474960703e-05, | |
| "loss": 0.0117, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 2.4672, | |
| "grad_norm": 0.3510790169239044, | |
| "learning_rate": 1.8695261621378844e-05, | |
| "loss": 0.0101, | |
| "step": 7710 | |
| }, | |
| { | |
| "epoch": 2.4704, | |
| "grad_norm": 0.4230336546897888, | |
| "learning_rate": 1.858297776779699e-05, | |
| "loss": 0.0122, | |
| "step": 7720 | |
| }, | |
| { | |
| "epoch": 2.4736000000000002, | |
| "grad_norm": 0.2450677901506424, | |
| "learning_rate": 1.847069391421514e-05, | |
| "loss": 0.01, | |
| "step": 7730 | |
| }, | |
| { | |
| "epoch": 2.4768, | |
| "grad_norm": 0.29541265964508057, | |
| "learning_rate": 1.8358410060633282e-05, | |
| "loss": 0.0112, | |
| "step": 7740 | |
| }, | |
| { | |
| "epoch": 2.48, | |
| "grad_norm": 0.31477123498916626, | |
| "learning_rate": 1.8246126207051426e-05, | |
| "loss": 0.011, | |
| "step": 7750 | |
| }, | |
| { | |
| "epoch": 2.4832, | |
| "grad_norm": 0.4507298469543457, | |
| "learning_rate": 1.813384235346957e-05, | |
| "loss": 0.0099, | |
| "step": 7760 | |
| }, | |
| { | |
| "epoch": 2.4864, | |
| "grad_norm": 0.5776090025901794, | |
| "learning_rate": 1.8021558499887718e-05, | |
| "loss": 0.0103, | |
| "step": 7770 | |
| }, | |
| { | |
| "epoch": 2.4896, | |
| "grad_norm": 0.4945872128009796, | |
| "learning_rate": 1.790927464630586e-05, | |
| "loss": 0.0118, | |
| "step": 7780 | |
| }, | |
| { | |
| "epoch": 2.4928, | |
| "grad_norm": 0.5310337543487549, | |
| "learning_rate": 1.779699079272401e-05, | |
| "loss": 0.0124, | |
| "step": 7790 | |
| }, | |
| { | |
| "epoch": 2.496, | |
| "grad_norm": 0.4604105055332184, | |
| "learning_rate": 1.7684706939142153e-05, | |
| "loss": 0.0109, | |
| "step": 7800 | |
| }, | |
| { | |
| "epoch": 2.4992, | |
| "grad_norm": 0.27232497930526733, | |
| "learning_rate": 1.7572423085560296e-05, | |
| "loss": 0.0106, | |
| "step": 7810 | |
| }, | |
| { | |
| "epoch": 2.5023999999999997, | |
| "grad_norm": 0.28912702202796936, | |
| "learning_rate": 1.7460139231978444e-05, | |
| "loss": 0.0121, | |
| "step": 7820 | |
| }, | |
| { | |
| "epoch": 2.5056000000000003, | |
| "grad_norm": 0.30106881260871887, | |
| "learning_rate": 1.7347855378396588e-05, | |
| "loss": 0.0122, | |
| "step": 7830 | |
| }, | |
| { | |
| "epoch": 2.5088, | |
| "grad_norm": 0.5164341926574707, | |
| "learning_rate": 1.7235571524814735e-05, | |
| "loss": 0.0106, | |
| "step": 7840 | |
| }, | |
| { | |
| "epoch": 2.512, | |
| "grad_norm": 0.24304376542568207, | |
| "learning_rate": 1.7123287671232875e-05, | |
| "loss": 0.0102, | |
| "step": 7850 | |
| }, | |
| { | |
| "epoch": 2.5152, | |
| "grad_norm": 0.41402265429496765, | |
| "learning_rate": 1.7011003817651023e-05, | |
| "loss": 0.0101, | |
| "step": 7860 | |
| }, | |
| { | |
| "epoch": 2.5183999999999997, | |
| "grad_norm": 0.32743728160858154, | |
| "learning_rate": 1.6898719964069167e-05, | |
| "loss": 0.0102, | |
| "step": 7870 | |
| }, | |
| { | |
| "epoch": 2.5216, | |
| "grad_norm": 0.3504003584384918, | |
| "learning_rate": 1.6786436110487314e-05, | |
| "loss": 0.0114, | |
| "step": 7880 | |
| }, | |
| { | |
| "epoch": 2.5248, | |
| "grad_norm": 0.36035361886024475, | |
| "learning_rate": 1.6674152256905458e-05, | |
| "loss": 0.0103, | |
| "step": 7890 | |
| }, | |
| { | |
| "epoch": 2.528, | |
| "grad_norm": 0.26209500432014465, | |
| "learning_rate": 1.6561868403323602e-05, | |
| "loss": 0.011, | |
| "step": 7900 | |
| }, | |
| { | |
| "epoch": 2.5312, | |
| "grad_norm": 0.3647664487361908, | |
| "learning_rate": 1.644958454974175e-05, | |
| "loss": 0.0131, | |
| "step": 7910 | |
| }, | |
| { | |
| "epoch": 2.5343999999999998, | |
| "grad_norm": 0.2963014245033264, | |
| "learning_rate": 1.6337300696159893e-05, | |
| "loss": 0.0121, | |
| "step": 7920 | |
| }, | |
| { | |
| "epoch": 2.5376, | |
| "grad_norm": 0.2879433333873749, | |
| "learning_rate": 1.622501684257804e-05, | |
| "loss": 0.01, | |
| "step": 7930 | |
| }, | |
| { | |
| "epoch": 2.5408, | |
| "grad_norm": 0.23436123132705688, | |
| "learning_rate": 1.611273298899618e-05, | |
| "loss": 0.0102, | |
| "step": 7940 | |
| }, | |
| { | |
| "epoch": 2.544, | |
| "grad_norm": 0.26142388582229614, | |
| "learning_rate": 1.6000449135414328e-05, | |
| "loss": 0.0142, | |
| "step": 7950 | |
| }, | |
| { | |
| "epoch": 2.5472, | |
| "grad_norm": 0.5176597237586975, | |
| "learning_rate": 1.5888165281832472e-05, | |
| "loss": 0.011, | |
| "step": 7960 | |
| }, | |
| { | |
| "epoch": 2.5504, | |
| "grad_norm": 0.31834036111831665, | |
| "learning_rate": 1.577588142825062e-05, | |
| "loss": 0.0096, | |
| "step": 7970 | |
| }, | |
| { | |
| "epoch": 2.5536, | |
| "grad_norm": 0.39758211374282837, | |
| "learning_rate": 1.5663597574668763e-05, | |
| "loss": 0.0142, | |
| "step": 7980 | |
| }, | |
| { | |
| "epoch": 2.5568, | |
| "grad_norm": 0.5864192843437195, | |
| "learning_rate": 1.5551313721086907e-05, | |
| "loss": 0.0109, | |
| "step": 7990 | |
| }, | |
| { | |
| "epoch": 2.56, | |
| "grad_norm": 0.543253481388092, | |
| "learning_rate": 1.5439029867505054e-05, | |
| "loss": 0.0094, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 2.5632, | |
| "grad_norm": 0.5447654128074646, | |
| "learning_rate": 1.5326746013923198e-05, | |
| "loss": 0.0109, | |
| "step": 8010 | |
| }, | |
| { | |
| "epoch": 2.5664, | |
| "grad_norm": 0.4796806573867798, | |
| "learning_rate": 1.5214462160341344e-05, | |
| "loss": 0.0116, | |
| "step": 8020 | |
| }, | |
| { | |
| "epoch": 2.5696, | |
| "grad_norm": 0.2941898703575134, | |
| "learning_rate": 1.5102178306759488e-05, | |
| "loss": 0.0143, | |
| "step": 8030 | |
| }, | |
| { | |
| "epoch": 2.5728, | |
| "grad_norm": 0.25936993956565857, | |
| "learning_rate": 1.4989894453177633e-05, | |
| "loss": 0.0122, | |
| "step": 8040 | |
| }, | |
| { | |
| "epoch": 2.576, | |
| "grad_norm": 0.1301080286502838, | |
| "learning_rate": 1.4877610599595779e-05, | |
| "loss": 0.0104, | |
| "step": 8050 | |
| }, | |
| { | |
| "epoch": 2.5792, | |
| "grad_norm": 0.5463783144950867, | |
| "learning_rate": 1.4765326746013925e-05, | |
| "loss": 0.0155, | |
| "step": 8060 | |
| }, | |
| { | |
| "epoch": 2.5824, | |
| "grad_norm": 0.39639008045196533, | |
| "learning_rate": 1.465304289243207e-05, | |
| "loss": 0.0113, | |
| "step": 8070 | |
| }, | |
| { | |
| "epoch": 2.5856, | |
| "grad_norm": 0.4171694219112396, | |
| "learning_rate": 1.4540759038850212e-05, | |
| "loss": 0.0116, | |
| "step": 8080 | |
| }, | |
| { | |
| "epoch": 2.5888, | |
| "grad_norm": 0.2964138686656952, | |
| "learning_rate": 1.4428475185268358e-05, | |
| "loss": 0.0097, | |
| "step": 8090 | |
| }, | |
| { | |
| "epoch": 2.592, | |
| "grad_norm": 0.37794455885887146, | |
| "learning_rate": 1.4316191331686504e-05, | |
| "loss": 0.01, | |
| "step": 8100 | |
| }, | |
| { | |
| "epoch": 2.5952, | |
| "grad_norm": 0.4565639793872833, | |
| "learning_rate": 1.4203907478104649e-05, | |
| "loss": 0.0111, | |
| "step": 8110 | |
| }, | |
| { | |
| "epoch": 2.5984, | |
| "grad_norm": 0.26303786039352417, | |
| "learning_rate": 1.4091623624522796e-05, | |
| "loss": 0.0099, | |
| "step": 8120 | |
| }, | |
| { | |
| "epoch": 2.6016, | |
| "grad_norm": 0.33677536249160767, | |
| "learning_rate": 1.3979339770940939e-05, | |
| "loss": 0.0101, | |
| "step": 8130 | |
| }, | |
| { | |
| "epoch": 2.6048, | |
| "grad_norm": 0.24560488760471344, | |
| "learning_rate": 1.3867055917359084e-05, | |
| "loss": 0.0112, | |
| "step": 8140 | |
| }, | |
| { | |
| "epoch": 2.608, | |
| "grad_norm": 0.31233885884284973, | |
| "learning_rate": 1.375477206377723e-05, | |
| "loss": 0.0145, | |
| "step": 8150 | |
| }, | |
| { | |
| "epoch": 2.6112, | |
| "grad_norm": 0.40737399458885193, | |
| "learning_rate": 1.3642488210195375e-05, | |
| "loss": 0.0098, | |
| "step": 8160 | |
| }, | |
| { | |
| "epoch": 2.6144, | |
| "grad_norm": 0.3656460642814636, | |
| "learning_rate": 1.3530204356613518e-05, | |
| "loss": 0.0109, | |
| "step": 8170 | |
| }, | |
| { | |
| "epoch": 2.6176, | |
| "grad_norm": 0.23487180471420288, | |
| "learning_rate": 1.3417920503031663e-05, | |
| "loss": 0.0109, | |
| "step": 8180 | |
| }, | |
| { | |
| "epoch": 2.6208, | |
| "grad_norm": 0.27709895372390747, | |
| "learning_rate": 1.330563664944981e-05, | |
| "loss": 0.0104, | |
| "step": 8190 | |
| }, | |
| { | |
| "epoch": 2.624, | |
| "grad_norm": 0.3768002986907959, | |
| "learning_rate": 1.3193352795867956e-05, | |
| "loss": 0.0107, | |
| "step": 8200 | |
| }, | |
| { | |
| "epoch": 2.6272, | |
| "grad_norm": 0.2219514399766922, | |
| "learning_rate": 1.3081068942286102e-05, | |
| "loss": 0.009, | |
| "step": 8210 | |
| }, | |
| { | |
| "epoch": 2.6304, | |
| "grad_norm": 0.3634709119796753, | |
| "learning_rate": 1.2968785088704244e-05, | |
| "loss": 0.0107, | |
| "step": 8220 | |
| }, | |
| { | |
| "epoch": 2.6336, | |
| "grad_norm": 0.4309711456298828, | |
| "learning_rate": 1.285650123512239e-05, | |
| "loss": 0.0118, | |
| "step": 8230 | |
| }, | |
| { | |
| "epoch": 2.6368, | |
| "grad_norm": 0.22426378726959229, | |
| "learning_rate": 1.2744217381540535e-05, | |
| "loss": 0.0092, | |
| "step": 8240 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "grad_norm": 0.4642464816570282, | |
| "learning_rate": 1.263193352795868e-05, | |
| "loss": 0.0101, | |
| "step": 8250 | |
| }, | |
| { | |
| "epoch": 2.6432, | |
| "grad_norm": 0.3424983024597168, | |
| "learning_rate": 1.2519649674376826e-05, | |
| "loss": 0.0122, | |
| "step": 8260 | |
| }, | |
| { | |
| "epoch": 2.6464, | |
| "grad_norm": 0.4165264070034027, | |
| "learning_rate": 1.240736582079497e-05, | |
| "loss": 0.0129, | |
| "step": 8270 | |
| }, | |
| { | |
| "epoch": 2.6496, | |
| "grad_norm": 0.23779606819152832, | |
| "learning_rate": 1.2295081967213116e-05, | |
| "loss": 0.009, | |
| "step": 8280 | |
| }, | |
| { | |
| "epoch": 2.6528, | |
| "grad_norm": 0.3217007517814636, | |
| "learning_rate": 1.2182798113631261e-05, | |
| "loss": 0.014, | |
| "step": 8290 | |
| }, | |
| { | |
| "epoch": 2.656, | |
| "grad_norm": 0.2765938937664032, | |
| "learning_rate": 1.2070514260049405e-05, | |
| "loss": 0.0118, | |
| "step": 8300 | |
| }, | |
| { | |
| "epoch": 2.6592000000000002, | |
| "grad_norm": 0.2680663466453552, | |
| "learning_rate": 1.1958230406467551e-05, | |
| "loss": 0.0114, | |
| "step": 8310 | |
| }, | |
| { | |
| "epoch": 2.6624, | |
| "grad_norm": 0.27196067571640015, | |
| "learning_rate": 1.1845946552885695e-05, | |
| "loss": 0.0119, | |
| "step": 8320 | |
| }, | |
| { | |
| "epoch": 2.6656, | |
| "grad_norm": 0.3205728530883789, | |
| "learning_rate": 1.173366269930384e-05, | |
| "loss": 0.0099, | |
| "step": 8330 | |
| }, | |
| { | |
| "epoch": 2.6688, | |
| "grad_norm": 0.5267428159713745, | |
| "learning_rate": 1.1621378845721986e-05, | |
| "loss": 0.0114, | |
| "step": 8340 | |
| }, | |
| { | |
| "epoch": 2.672, | |
| "grad_norm": 0.3181384801864624, | |
| "learning_rate": 1.150909499214013e-05, | |
| "loss": 0.0089, | |
| "step": 8350 | |
| }, | |
| { | |
| "epoch": 2.6752000000000002, | |
| "grad_norm": 0.23899194598197937, | |
| "learning_rate": 1.1396811138558275e-05, | |
| "loss": 0.0134, | |
| "step": 8360 | |
| }, | |
| { | |
| "epoch": 2.6784, | |
| "grad_norm": 0.3764638602733612, | |
| "learning_rate": 1.1284527284976421e-05, | |
| "loss": 0.0113, | |
| "step": 8370 | |
| }, | |
| { | |
| "epoch": 2.6816, | |
| "grad_norm": 0.46377673745155334, | |
| "learning_rate": 1.1172243431394567e-05, | |
| "loss": 0.0136, | |
| "step": 8380 | |
| }, | |
| { | |
| "epoch": 2.6848, | |
| "grad_norm": 0.5233606696128845, | |
| "learning_rate": 1.105995957781271e-05, | |
| "loss": 0.0105, | |
| "step": 8390 | |
| }, | |
| { | |
| "epoch": 2.6879999999999997, | |
| "grad_norm": 0.3869042694568634, | |
| "learning_rate": 1.0947675724230856e-05, | |
| "loss": 0.0108, | |
| "step": 8400 | |
| }, | |
| { | |
| "epoch": 2.6912000000000003, | |
| "grad_norm": 0.2652919292449951, | |
| "learning_rate": 1.0835391870649002e-05, | |
| "loss": 0.0094, | |
| "step": 8410 | |
| }, | |
| { | |
| "epoch": 2.6944, | |
| "grad_norm": 0.24004903435707092, | |
| "learning_rate": 1.0723108017067146e-05, | |
| "loss": 0.012, | |
| "step": 8420 | |
| }, | |
| { | |
| "epoch": 2.6976, | |
| "grad_norm": 0.3093971610069275, | |
| "learning_rate": 1.0610824163485291e-05, | |
| "loss": 0.0091, | |
| "step": 8430 | |
| }, | |
| { | |
| "epoch": 2.7008, | |
| "grad_norm": 0.3612852394580841, | |
| "learning_rate": 1.0498540309903437e-05, | |
| "loss": 0.0124, | |
| "step": 8440 | |
| }, | |
| { | |
| "epoch": 2.7039999999999997, | |
| "grad_norm": 0.37125304341316223, | |
| "learning_rate": 1.0386256456321582e-05, | |
| "loss": 0.0106, | |
| "step": 8450 | |
| }, | |
| { | |
| "epoch": 2.7072000000000003, | |
| "grad_norm": 0.3602662682533264, | |
| "learning_rate": 1.0273972602739726e-05, | |
| "loss": 0.0092, | |
| "step": 8460 | |
| }, | |
| { | |
| "epoch": 2.7104, | |
| "grad_norm": 0.29950714111328125, | |
| "learning_rate": 1.0161688749157872e-05, | |
| "loss": 0.0094, | |
| "step": 8470 | |
| }, | |
| { | |
| "epoch": 2.7136, | |
| "grad_norm": 0.4257677495479584, | |
| "learning_rate": 1.0049404895576018e-05, | |
| "loss": 0.0097, | |
| "step": 8480 | |
| }, | |
| { | |
| "epoch": 2.7168, | |
| "grad_norm": 0.3441554605960846, | |
| "learning_rate": 9.937121041994161e-06, | |
| "loss": 0.0109, | |
| "step": 8490 | |
| }, | |
| { | |
| "epoch": 2.7199999999999998, | |
| "grad_norm": 0.40362268686294556, | |
| "learning_rate": 9.824837188412307e-06, | |
| "loss": 0.0108, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 2.7232, | |
| "grad_norm": 0.3581705689430237, | |
| "learning_rate": 9.712553334830451e-06, | |
| "loss": 0.0098, | |
| "step": 8510 | |
| }, | |
| { | |
| "epoch": 2.7264, | |
| "grad_norm": 0.4037614166736603, | |
| "learning_rate": 9.600269481248596e-06, | |
| "loss": 0.0107, | |
| "step": 8520 | |
| }, | |
| { | |
| "epoch": 2.7296, | |
| "grad_norm": 0.34148481488227844, | |
| "learning_rate": 9.487985627666742e-06, | |
| "loss": 0.0086, | |
| "step": 8530 | |
| }, | |
| { | |
| "epoch": 2.7328, | |
| "grad_norm": 0.4397309720516205, | |
| "learning_rate": 9.375701774084888e-06, | |
| "loss": 0.0101, | |
| "step": 8540 | |
| }, | |
| { | |
| "epoch": 2.7359999999999998, | |
| "grad_norm": 0.22799618542194366, | |
| "learning_rate": 9.263417920503033e-06, | |
| "loss": 0.0111, | |
| "step": 8550 | |
| }, | |
| { | |
| "epoch": 2.7392, | |
| "grad_norm": 0.26248088479042053, | |
| "learning_rate": 9.151134066921177e-06, | |
| "loss": 0.0085, | |
| "step": 8560 | |
| }, | |
| { | |
| "epoch": 2.7424, | |
| "grad_norm": 0.224031463265419, | |
| "learning_rate": 9.038850213339323e-06, | |
| "loss": 0.0107, | |
| "step": 8570 | |
| }, | |
| { | |
| "epoch": 2.7456, | |
| "grad_norm": 0.39990928769111633, | |
| "learning_rate": 8.926566359757467e-06, | |
| "loss": 0.0098, | |
| "step": 8580 | |
| }, | |
| { | |
| "epoch": 2.7488, | |
| "grad_norm": 0.35376614332199097, | |
| "learning_rate": 8.814282506175612e-06, | |
| "loss": 0.012, | |
| "step": 8590 | |
| }, | |
| { | |
| "epoch": 2.752, | |
| "grad_norm": 0.32684022188186646, | |
| "learning_rate": 8.701998652593756e-06, | |
| "loss": 0.009, | |
| "step": 8600 | |
| }, | |
| { | |
| "epoch": 2.7552, | |
| "grad_norm": 0.47101569175720215, | |
| "learning_rate": 8.589714799011902e-06, | |
| "loss": 0.0175, | |
| "step": 8610 | |
| }, | |
| { | |
| "epoch": 2.7584, | |
| "grad_norm": 0.610358476638794, | |
| "learning_rate": 8.477430945430049e-06, | |
| "loss": 0.0136, | |
| "step": 8620 | |
| }, | |
| { | |
| "epoch": 2.7616, | |
| "grad_norm": 0.32306498289108276, | |
| "learning_rate": 8.365147091848193e-06, | |
| "loss": 0.0102, | |
| "step": 8630 | |
| }, | |
| { | |
| "epoch": 2.7648, | |
| "grad_norm": 0.3570367097854614, | |
| "learning_rate": 8.252863238266339e-06, | |
| "loss": 0.0112, | |
| "step": 8640 | |
| }, | |
| { | |
| "epoch": 2.768, | |
| "grad_norm": 0.28840968012809753, | |
| "learning_rate": 8.140579384684482e-06, | |
| "loss": 0.0123, | |
| "step": 8650 | |
| }, | |
| { | |
| "epoch": 2.7712, | |
| "grad_norm": 0.335059255361557, | |
| "learning_rate": 8.028295531102628e-06, | |
| "loss": 0.0106, | |
| "step": 8660 | |
| }, | |
| { | |
| "epoch": 2.7744, | |
| "grad_norm": 0.6081422567367554, | |
| "learning_rate": 7.916011677520772e-06, | |
| "loss": 0.0094, | |
| "step": 8670 | |
| }, | |
| { | |
| "epoch": 2.7776, | |
| "grad_norm": 0.2275940179824829, | |
| "learning_rate": 7.803727823938918e-06, | |
| "loss": 0.0099, | |
| "step": 8680 | |
| }, | |
| { | |
| "epoch": 2.7808, | |
| "grad_norm": 0.5723327398300171, | |
| "learning_rate": 7.691443970357063e-06, | |
| "loss": 0.0105, | |
| "step": 8690 | |
| }, | |
| { | |
| "epoch": 2.784, | |
| "grad_norm": 0.3414275646209717, | |
| "learning_rate": 7.579160116775208e-06, | |
| "loss": 0.0116, | |
| "step": 8700 | |
| }, | |
| { | |
| "epoch": 2.7872, | |
| "grad_norm": 0.5280483961105347, | |
| "learning_rate": 7.4668762631933535e-06, | |
| "loss": 0.0109, | |
| "step": 8710 | |
| }, | |
| { | |
| "epoch": 2.7904, | |
| "grad_norm": 0.31701862812042236, | |
| "learning_rate": 7.354592409611498e-06, | |
| "loss": 0.0111, | |
| "step": 8720 | |
| }, | |
| { | |
| "epoch": 2.7936, | |
| "grad_norm": 0.3755703270435333, | |
| "learning_rate": 7.242308556029644e-06, | |
| "loss": 0.0099, | |
| "step": 8730 | |
| }, | |
| { | |
| "epoch": 2.7968, | |
| "grad_norm": 0.4873414635658264, | |
| "learning_rate": 7.130024702447788e-06, | |
| "loss": 0.011, | |
| "step": 8740 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "grad_norm": 0.22687934339046478, | |
| "learning_rate": 7.017740848865933e-06, | |
| "loss": 0.0097, | |
| "step": 8750 | |
| }, | |
| { | |
| "epoch": 2.8032, | |
| "grad_norm": 0.24188542366027832, | |
| "learning_rate": 6.905456995284078e-06, | |
| "loss": 0.0098, | |
| "step": 8760 | |
| }, | |
| { | |
| "epoch": 2.8064, | |
| "grad_norm": 0.382225900888443, | |
| "learning_rate": 6.793173141702224e-06, | |
| "loss": 0.0107, | |
| "step": 8770 | |
| }, | |
| { | |
| "epoch": 2.8096, | |
| "grad_norm": 0.40191715955734253, | |
| "learning_rate": 6.680889288120369e-06, | |
| "loss": 0.0121, | |
| "step": 8780 | |
| }, | |
| { | |
| "epoch": 2.8128, | |
| "grad_norm": 0.22877512872219086, | |
| "learning_rate": 6.568605434538513e-06, | |
| "loss": 0.0106, | |
| "step": 8790 | |
| }, | |
| { | |
| "epoch": 2.816, | |
| "grad_norm": 0.24837732315063477, | |
| "learning_rate": 6.456321580956659e-06, | |
| "loss": 0.0115, | |
| "step": 8800 | |
| }, | |
| { | |
| "epoch": 2.8192, | |
| "grad_norm": 0.2799074053764343, | |
| "learning_rate": 6.3440377273748035e-06, | |
| "loss": 0.0112, | |
| "step": 8810 | |
| }, | |
| { | |
| "epoch": 2.8224, | |
| "grad_norm": 0.3034617006778717, | |
| "learning_rate": 6.231753873792949e-06, | |
| "loss": 0.0092, | |
| "step": 8820 | |
| }, | |
| { | |
| "epoch": 2.8256, | |
| "grad_norm": 0.20725731551647186, | |
| "learning_rate": 6.119470020211094e-06, | |
| "loss": 0.01, | |
| "step": 8830 | |
| }, | |
| { | |
| "epoch": 2.8288, | |
| "grad_norm": 0.26357269287109375, | |
| "learning_rate": 6.007186166629239e-06, | |
| "loss": 0.0095, | |
| "step": 8840 | |
| }, | |
| { | |
| "epoch": 2.832, | |
| "grad_norm": 0.28756099939346313, | |
| "learning_rate": 5.894902313047384e-06, | |
| "loss": 0.0107, | |
| "step": 8850 | |
| }, | |
| { | |
| "epoch": 2.8352, | |
| "grad_norm": 0.2845691442489624, | |
| "learning_rate": 5.782618459465529e-06, | |
| "loss": 0.0096, | |
| "step": 8860 | |
| }, | |
| { | |
| "epoch": 2.8384, | |
| "grad_norm": 0.5302965044975281, | |
| "learning_rate": 5.6703346058836745e-06, | |
| "loss": 0.0095, | |
| "step": 8870 | |
| }, | |
| { | |
| "epoch": 2.8416, | |
| "grad_norm": 0.32336851954460144, | |
| "learning_rate": 5.558050752301819e-06, | |
| "loss": 0.0102, | |
| "step": 8880 | |
| }, | |
| { | |
| "epoch": 2.8448, | |
| "grad_norm": 0.2314489632844925, | |
| "learning_rate": 5.445766898719965e-06, | |
| "loss": 0.0111, | |
| "step": 8890 | |
| }, | |
| { | |
| "epoch": 2.848, | |
| "grad_norm": 0.47185102105140686, | |
| "learning_rate": 5.33348304513811e-06, | |
| "loss": 0.0119, | |
| "step": 8900 | |
| }, | |
| { | |
| "epoch": 2.8512, | |
| "grad_norm": 0.2183176875114441, | |
| "learning_rate": 5.221199191556254e-06, | |
| "loss": 0.0106, | |
| "step": 8910 | |
| }, | |
| { | |
| "epoch": 2.8544, | |
| "grad_norm": 0.21474814414978027, | |
| "learning_rate": 5.108915337974399e-06, | |
| "loss": 0.0098, | |
| "step": 8920 | |
| }, | |
| { | |
| "epoch": 2.8576, | |
| "grad_norm": 0.2698668837547302, | |
| "learning_rate": 4.996631484392545e-06, | |
| "loss": 0.0088, | |
| "step": 8930 | |
| }, | |
| { | |
| "epoch": 2.8608000000000002, | |
| "grad_norm": 0.24269291758537292, | |
| "learning_rate": 4.8843476308106895e-06, | |
| "loss": 0.0106, | |
| "step": 8940 | |
| }, | |
| { | |
| "epoch": 2.864, | |
| "grad_norm": 0.4165436029434204, | |
| "learning_rate": 4.772063777228835e-06, | |
| "loss": 0.008, | |
| "step": 8950 | |
| }, | |
| { | |
| "epoch": 2.8672, | |
| "grad_norm": 0.31317052245140076, | |
| "learning_rate": 4.65977992364698e-06, | |
| "loss": 0.0088, | |
| "step": 8960 | |
| }, | |
| { | |
| "epoch": 2.8704, | |
| "grad_norm": 0.38762250542640686, | |
| "learning_rate": 4.5474960700651246e-06, | |
| "loss": 0.0083, | |
| "step": 8970 | |
| }, | |
| { | |
| "epoch": 2.8736, | |
| "grad_norm": 0.2870713174343109, | |
| "learning_rate": 4.43521221648327e-06, | |
| "loss": 0.0099, | |
| "step": 8980 | |
| }, | |
| { | |
| "epoch": 2.8768000000000002, | |
| "grad_norm": 0.27686795592308044, | |
| "learning_rate": 4.322928362901415e-06, | |
| "loss": 0.0101, | |
| "step": 8990 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "grad_norm": 0.3446589708328247, | |
| "learning_rate": 4.21064450931956e-06, | |
| "loss": 0.0106, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 2.8832, | |
| "grad_norm": 0.5736850500106812, | |
| "learning_rate": 4.098360655737704e-06, | |
| "loss": 0.0116, | |
| "step": 9010 | |
| }, | |
| { | |
| "epoch": 2.8864, | |
| "grad_norm": 0.32958129048347473, | |
| "learning_rate": 3.986076802155851e-06, | |
| "loss": 0.0093, | |
| "step": 9020 | |
| }, | |
| { | |
| "epoch": 2.8895999999999997, | |
| "grad_norm": 0.40474453568458557, | |
| "learning_rate": 3.873792948573996e-06, | |
| "loss": 0.008, | |
| "step": 9030 | |
| }, | |
| { | |
| "epoch": 2.8928000000000003, | |
| "grad_norm": 0.2104715257883072, | |
| "learning_rate": 3.7615090949921403e-06, | |
| "loss": 0.0106, | |
| "step": 9040 | |
| }, | |
| { | |
| "epoch": 2.896, | |
| "grad_norm": 0.4145047068595886, | |
| "learning_rate": 3.6492252414102855e-06, | |
| "loss": 0.0094, | |
| "step": 9050 | |
| }, | |
| { | |
| "epoch": 2.8992, | |
| "grad_norm": 0.4830983877182007, | |
| "learning_rate": 3.5369413878284303e-06, | |
| "loss": 0.0084, | |
| "step": 9060 | |
| }, | |
| { | |
| "epoch": 2.9024, | |
| "grad_norm": 0.2935122549533844, | |
| "learning_rate": 3.4246575342465754e-06, | |
| "loss": 0.0086, | |
| "step": 9070 | |
| }, | |
| { | |
| "epoch": 2.9055999999999997, | |
| "grad_norm": 0.6376447081565857, | |
| "learning_rate": 3.31237368066472e-06, | |
| "loss": 0.0103, | |
| "step": 9080 | |
| }, | |
| { | |
| "epoch": 2.9088000000000003, | |
| "grad_norm": 0.2705903947353363, | |
| "learning_rate": 3.2000898270828658e-06, | |
| "loss": 0.0105, | |
| "step": 9090 | |
| }, | |
| { | |
| "epoch": 2.912, | |
| "grad_norm": 0.23977629840373993, | |
| "learning_rate": 3.0878059735010105e-06, | |
| "loss": 0.009, | |
| "step": 9100 | |
| }, | |
| { | |
| "epoch": 2.9152, | |
| "grad_norm": 0.20728209614753723, | |
| "learning_rate": 2.9755221199191557e-06, | |
| "loss": 0.0111, | |
| "step": 9110 | |
| }, | |
| { | |
| "epoch": 2.9184, | |
| "grad_norm": 0.5078235864639282, | |
| "learning_rate": 2.863238266337301e-06, | |
| "loss": 0.0099, | |
| "step": 9120 | |
| }, | |
| { | |
| "epoch": 2.9215999999999998, | |
| "grad_norm": 0.28868502378463745, | |
| "learning_rate": 2.7509544127554456e-06, | |
| "loss": 0.0093, | |
| "step": 9130 | |
| }, | |
| { | |
| "epoch": 2.9248, | |
| "grad_norm": 0.161485955119133, | |
| "learning_rate": 2.638670559173591e-06, | |
| "loss": 0.0081, | |
| "step": 9140 | |
| }, | |
| { | |
| "epoch": 2.928, | |
| "grad_norm": 0.3213595747947693, | |
| "learning_rate": 2.526386705591736e-06, | |
| "loss": 0.0083, | |
| "step": 9150 | |
| }, | |
| { | |
| "epoch": 2.9312, | |
| "grad_norm": 0.3129395842552185, | |
| "learning_rate": 2.414102852009881e-06, | |
| "loss": 0.0095, | |
| "step": 9160 | |
| }, | |
| { | |
| "epoch": 2.9344, | |
| "grad_norm": 0.3195973336696625, | |
| "learning_rate": 2.301818998428026e-06, | |
| "loss": 0.0098, | |
| "step": 9170 | |
| }, | |
| { | |
| "epoch": 2.9375999999999998, | |
| "grad_norm": 0.47745078802108765, | |
| "learning_rate": 2.1895351448461715e-06, | |
| "loss": 0.0107, | |
| "step": 9180 | |
| }, | |
| { | |
| "epoch": 2.9408, | |
| "grad_norm": 0.341581255197525, | |
| "learning_rate": 2.0772512912643162e-06, | |
| "loss": 0.0085, | |
| "step": 9190 | |
| }, | |
| { | |
| "epoch": 2.944, | |
| "grad_norm": 0.2497643530368805, | |
| "learning_rate": 1.9649674376824614e-06, | |
| "loss": 0.0114, | |
| "step": 9200 | |
| }, | |
| { | |
| "epoch": 2.9472, | |
| "grad_norm": 0.2573058009147644, | |
| "learning_rate": 1.8526835841006064e-06, | |
| "loss": 0.0108, | |
| "step": 9210 | |
| }, | |
| { | |
| "epoch": 2.9504, | |
| "grad_norm": 0.25984275341033936, | |
| "learning_rate": 1.7403997305187515e-06, | |
| "loss": 0.0096, | |
| "step": 9220 | |
| }, | |
| { | |
| "epoch": 2.9536, | |
| "grad_norm": 0.24927963316440582, | |
| "learning_rate": 1.6281158769368965e-06, | |
| "loss": 0.0091, | |
| "step": 9230 | |
| }, | |
| { | |
| "epoch": 2.9568, | |
| "grad_norm": 0.3088102340698242, | |
| "learning_rate": 1.5158320233550417e-06, | |
| "loss": 0.0078, | |
| "step": 9240 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "grad_norm": 0.31416431069374084, | |
| "learning_rate": 1.4035481697731866e-06, | |
| "loss": 0.0085, | |
| "step": 9250 | |
| }, | |
| { | |
| "epoch": 2.9632, | |
| "grad_norm": 0.28967756032943726, | |
| "learning_rate": 1.2912643161913318e-06, | |
| "loss": 0.0085, | |
| "step": 9260 | |
| }, | |
| { | |
| "epoch": 2.9664, | |
| "grad_norm": 0.30276378989219666, | |
| "learning_rate": 1.1789804626094768e-06, | |
| "loss": 0.0084, | |
| "step": 9270 | |
| }, | |
| { | |
| "epoch": 2.9696, | |
| "grad_norm": 0.4137653708457947, | |
| "learning_rate": 1.066696609027622e-06, | |
| "loss": 0.0082, | |
| "step": 9280 | |
| }, | |
| { | |
| "epoch": 2.9728, | |
| "grad_norm": 0.4135017991065979, | |
| "learning_rate": 9.544127554457669e-07, | |
| "loss": 0.0102, | |
| "step": 9290 | |
| }, | |
| { | |
| "epoch": 2.976, | |
| "grad_norm": 0.3558090031147003, | |
| "learning_rate": 8.421289018639121e-07, | |
| "loss": 0.0096, | |
| "step": 9300 | |
| }, | |
| { | |
| "epoch": 2.9792, | |
| "grad_norm": 0.32541632652282715, | |
| "learning_rate": 7.298450482820571e-07, | |
| "loss": 0.0084, | |
| "step": 9310 | |
| }, | |
| { | |
| "epoch": 2.9824, | |
| "grad_norm": 0.24068425595760345, | |
| "learning_rate": 6.175611947002022e-07, | |
| "loss": 0.0088, | |
| "step": 9320 | |
| }, | |
| { | |
| "epoch": 2.9856, | |
| "grad_norm": 0.4023025333881378, | |
| "learning_rate": 5.052773411183473e-07, | |
| "loss": 0.0105, | |
| "step": 9330 | |
| }, | |
| { | |
| "epoch": 2.9888, | |
| "grad_norm": 0.29659387469291687, | |
| "learning_rate": 3.9299348753649227e-07, | |
| "loss": 0.0078, | |
| "step": 9340 | |
| }, | |
| { | |
| "epoch": 2.992, | |
| "grad_norm": 0.28904858231544495, | |
| "learning_rate": 2.8070963395463734e-07, | |
| "loss": 0.0083, | |
| "step": 9350 | |
| }, | |
| { | |
| "epoch": 2.9952, | |
| "grad_norm": 0.29798611998558044, | |
| "learning_rate": 1.684257803727824e-07, | |
| "loss": 0.0091, | |
| "step": 9360 | |
| }, | |
| { | |
| "epoch": 2.9984, | |
| "grad_norm": 0.3297630250453949, | |
| "learning_rate": 5.6141926790927474e-08, | |
| "loss": 0.0124, | |
| "step": 9370 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 9375, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 0.0, | |
| "train_batch_size": 32, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
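Below is a minimal sketch of how one might load this `trainer_state.json` and plot the logged training loss. The field names (`log_history`, `step`, `loss`, `epoch`, `global_step`) match the state shown above; the local checkpoint path and the plotting choices are illustrative assumptions, not part of the original file.

```python
import json

import matplotlib.pyplot as plt

# Hypothetical local path; adjust to wherever the checkpoint actually lives.
STATE_PATH = "checkpoint-9375/trainer_state.json"

with open(STATE_PATH) as f:
    state = json.load(f)

# Each log_history entry written during training carries
# "epoch", "grad_norm", "learning_rate", "loss", and "step".
history = [entry for entry in state["log_history"] if "loss" in entry]
steps = [entry["step"] for entry in history]
losses = [entry["loss"] for entry in history]

plt.plot(steps, losses)
plt.xlabel("step")
plt.ylabel("training loss")
plt.title(f"epoch {state['epoch']}, {state['global_step']} steps")
plt.show()
```

For this checkpoint the plot would show the loss settling around 0.01 by the end of epoch 3, consistent with the entries above.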