{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 6250,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0032,
      "grad_norm": 10.000000953674316,
      "learning_rate": 2.132196162046908e-06,
      "loss": 25.0099,
      "step": 10
    },
    {
      "epoch": 0.0064,
      "grad_norm": 10.0,
      "learning_rate": 4.264392324093816e-06,
      "loss": 21.1499,
      "step": 20
    },
    {
      "epoch": 0.0096,
      "grad_norm": 9.999999046325684,
      "learning_rate": 6.396588486140726e-06,
      "loss": 19.6111,
      "step": 30
    },
    {
      "epoch": 0.0128,
      "grad_norm": 10.0,
      "learning_rate": 8.528784648187633e-06,
      "loss": 22.3722,
      "step": 40
    },
    {
      "epoch": 0.016,
      "grad_norm": 9.999999046325684,
      "learning_rate": 1.0660980810234541e-05,
      "loss": 20.1462,
      "step": 50
    },
    {
      "epoch": 0.0192,
      "grad_norm": 10.0,
      "learning_rate": 1.2793176972281452e-05,
      "loss": 20.4235,
      "step": 60
    },
    {
      "epoch": 0.0224,
      "grad_norm": 10.0,
      "learning_rate": 1.4925373134328357e-05,
      "loss": 20.7853,
      "step": 70
    },
    {
      "epoch": 0.0256,
      "grad_norm": 10.0,
      "learning_rate": 1.7057569296375266e-05,
      "loss": 19.5459,
      "step": 80
    },
    {
      "epoch": 0.0288,
      "grad_norm": 9.999999046325684,
      "learning_rate": 1.9189765458422178e-05,
      "loss": 18.4778,
      "step": 90
    },
    {
      "epoch": 0.032,
      "grad_norm": 9.999999046325684,
      "learning_rate": 2.1321961620469083e-05,
      "loss": 16.2191,
      "step": 100
    },
    {
      "epoch": 0.0352,
      "grad_norm": 10.0,
      "learning_rate": 2.345415778251599e-05,
      "loss": 18.5773,
      "step": 110
    },
    {
      "epoch": 0.0384,
      "grad_norm": 9.999999046325684,
      "learning_rate": 2.5586353944562904e-05,
      "loss": 15.9389,
      "step": 120
    },
    {
      "epoch": 0.0416,
      "grad_norm": 10.0,
      "learning_rate": 2.771855010660981e-05,
      "loss": 15.6064,
      "step": 130
    },
    {
      "epoch": 0.0448,
      "grad_norm": 10.000000953674316,
      "learning_rate": 2.9850746268656714e-05,
      "loss": 15.6366,
      "step": 140
    },
    {
      "epoch": 0.048,
      "grad_norm": 10.000000953674316,
      "learning_rate": 3.1982942430703626e-05,
      "loss": 16.7232,
      "step": 150
    },
    {
      "epoch": 0.0512,
      "grad_norm": 10.0,
      "learning_rate": 3.411513859275053e-05,
      "loss": 13.2957,
      "step": 160
    },
    {
      "epoch": 0.0544,
      "grad_norm": 10.0,
      "learning_rate": 3.624733475479744e-05,
      "loss": 13.3429,
      "step": 170
    },
    {
      "epoch": 0.0576,
      "grad_norm": 9.999999046325684,
      "learning_rate": 3.8379530916844355e-05,
      "loss": 13.3301,
      "step": 180
    },
    {
      "epoch": 0.0608,
      "grad_norm": 10.000000953674316,
      "learning_rate": 4.051172707889126e-05,
      "loss": 15.1708,
      "step": 190
    },
    {
      "epoch": 0.064,
      "grad_norm": 10.0,
      "learning_rate": 4.2643923240938166e-05,
      "loss": 14.8533,
      "step": 200
    },
    {
      "epoch": 0.0672,
      "grad_norm": 10.0,
      "learning_rate": 4.477611940298508e-05,
      "loss": 11.6389,
      "step": 210
    },
    {
      "epoch": 0.0704,
      "grad_norm": 9.999999046325684,
      "learning_rate": 4.690831556503198e-05,
      "loss": 12.2515,
      "step": 220
    },
    {
      "epoch": 0.0736,
      "grad_norm": 10.0,
      "learning_rate": 4.904051172707889e-05,
      "loss": 12.6637,
      "step": 230
    },
    {
      "epoch": 0.0768,
      "grad_norm": 9.999999046325684,
      "learning_rate": 5.117270788912581e-05,
      "loss": 10.6091,
      "step": 240
    },
    {
      "epoch": 0.08,
      "grad_norm": 9.999999046325684,
      "learning_rate": 5.330490405117271e-05,
      "loss": 10.9362,
      "step": 250
    },
    {
      "epoch": 0.0832,
      "grad_norm": 10.0,
      "learning_rate": 5.543710021321962e-05,
      "loss": 12.4545,
      "step": 260
    },
    {
      "epoch": 0.0864,
      "grad_norm": 10.000000953674316,
      "learning_rate": 5.756929637526652e-05,
      "loss": 11.5238,
      "step": 270
    },
    {
      "epoch": 0.0896,
      "grad_norm": 10.0,
      "learning_rate": 5.970149253731343e-05,
      "loss": 10.8966,
      "step": 280
    },
    {
      "epoch": 0.0928,
      "grad_norm": 10.0,
      "learning_rate": 6.183368869936035e-05,
      "loss": 10.5903,
      "step": 290
    },
    {
      "epoch": 0.096,
      "grad_norm": 10.0,
      "learning_rate": 6.396588486140725e-05,
      "loss": 10.4872,
      "step": 300
    },
    {
      "epoch": 0.0992,
      "grad_norm": 10.0,
      "learning_rate": 6.609808102345416e-05,
      "loss": 9.9663,
      "step": 310
    },
    {
      "epoch": 0.1024,
      "grad_norm": 9.999999046325684,
      "learning_rate": 6.823027718550106e-05,
      "loss": 9.4655,
      "step": 320
    },
    {
      "epoch": 0.1056,
      "grad_norm": 10.0,
      "learning_rate": 7.036247334754798e-05,
      "loss": 10.487,
      "step": 330
    },
    {
      "epoch": 0.1088,
      "grad_norm": 10.000001907348633,
      "learning_rate": 7.249466950959489e-05,
      "loss": 9.3626,
      "step": 340
    },
    {
      "epoch": 0.112,
      "grad_norm": 10.0,
      "learning_rate": 7.46268656716418e-05,
      "loss": 9.864,
      "step": 350
    },
    {
      "epoch": 0.1152,
      "grad_norm": 9.999999046325684,
      "learning_rate": 7.675906183368871e-05,
      "loss": 8.8008,
      "step": 360
    },
    {
      "epoch": 0.1184,
      "grad_norm": 10.0,
      "learning_rate": 7.889125799573562e-05,
      "loss": 9.7772,
      "step": 370
    },
    {
      "epoch": 0.1216,
      "grad_norm": 10.0,
      "learning_rate": 8.102345415778252e-05,
      "loss": 8.7418,
      "step": 380
    },
    {
      "epoch": 0.1248,
      "grad_norm": 9.999999046325684,
      "learning_rate": 8.315565031982943e-05,
      "loss": 9.1157,
      "step": 390
    },
    {
      "epoch": 0.128,
      "grad_norm": 10.000000953674316,
      "learning_rate": 8.528784648187633e-05,
      "loss": 8.1395,
      "step": 400
    },
    {
      "epoch": 0.1312,
      "grad_norm": 10.0,
      "learning_rate": 8.742004264392325e-05,
      "loss": 8.8305,
      "step": 410
    },
    {
      "epoch": 0.1344,
      "grad_norm": 10.0,
      "learning_rate": 8.955223880597016e-05,
      "loss": 9.2089,
      "step": 420
    },
    {
      "epoch": 0.1376,
      "grad_norm": 10.000000953674316,
      "learning_rate": 9.168443496801706e-05,
      "loss": 8.5206,
      "step": 430
    },
    {
      "epoch": 0.1408,
      "grad_norm": 9.999999046325684,
      "learning_rate": 9.381663113006397e-05,
      "loss": 8.8993,
      "step": 440
    },
    {
      "epoch": 0.144,
      "grad_norm": 10.0,
      "learning_rate": 9.594882729211087e-05,
      "loss": 8.8173,
      "step": 450
    },
    {
      "epoch": 0.1472,
      "grad_norm": 10.000000953674316,
      "learning_rate": 9.808102345415778e-05,
      "loss": 8.3408,
      "step": 460
    },
    {
      "epoch": 0.1504,
      "grad_norm": 10.0,
      "learning_rate": 9.998877161464182e-05,
      "loss": 7.5853,
      "step": 470
    },
    {
      "epoch": 0.1536,
      "grad_norm": 10.0,
      "learning_rate": 9.987648776105997e-05,
      "loss": 7.4544,
      "step": 480
    },
    {
      "epoch": 0.1568,
      "grad_norm": 10.000000953674316,
      "learning_rate": 9.97642039074781e-05,
      "loss": 8.0499,
      "step": 490
    },
    {
      "epoch": 0.16,
      "grad_norm": 10.0,
      "learning_rate": 9.965192005389625e-05,
      "loss": 8.2335,
      "step": 500
    },
    {
      "epoch": 0.1632,
      "grad_norm": 10.0,
      "learning_rate": 9.95396362003144e-05,
      "loss": 7.0574,
      "step": 510
    },
    {
      "epoch": 0.1664,
      "grad_norm": 10.0,
      "learning_rate": 9.942735234673256e-05,
      "loss": 6.8412,
      "step": 520
    },
    {
      "epoch": 0.1696,
      "grad_norm": 10.0,
      "learning_rate": 9.931506849315069e-05,
      "loss": 7.1418,
      "step": 530
    },
    {
      "epoch": 0.1728,
      "grad_norm": 10.0,
      "learning_rate": 9.920278463956883e-05,
      "loss": 7.1722,
      "step": 540
    },
    {
      "epoch": 0.176,
      "grad_norm": 10.0,
      "learning_rate": 9.909050078598698e-05,
      "loss": 6.7641,
      "step": 550
    },
    {
      "epoch": 0.1792,
      "grad_norm": 10.0,
      "learning_rate": 9.897821693240512e-05,
      "loss": 6.3646,
      "step": 560
    },
    {
      "epoch": 0.1824,
      "grad_norm": 10.0,
      "learning_rate": 9.886593307882327e-05,
      "loss": 6.3469,
      "step": 570
    },
    {
      "epoch": 0.1856,
      "grad_norm": 10.0,
      "learning_rate": 9.875364922524142e-05,
      "loss": 6.38,
      "step": 580
    },
    {
      "epoch": 0.1888,
      "grad_norm": 10.000000953674316,
      "learning_rate": 9.864136537165956e-05,
      "loss": 5.8154,
      "step": 590
    },
    {
      "epoch": 0.192,
      "grad_norm": 10.000000953674316,
      "learning_rate": 9.852908151807771e-05,
      "loss": 5.6794,
      "step": 600
    },
    {
      "epoch": 0.1952,
      "grad_norm": 9.999999046325684,
      "learning_rate": 9.841679766449586e-05,
      "loss": 5.4415,
      "step": 610
    },
    {
      "epoch": 0.1984,
      "grad_norm": 10.0,
      "learning_rate": 9.8304513810914e-05,
      "loss": 4.7814,
      "step": 620
    },
    {
      "epoch": 0.2016,
      "grad_norm": 10.000000953674316,
      "learning_rate": 9.819222995733213e-05,
      "loss": 4.8304,
      "step": 630
    },
    {
      "epoch": 0.2048,
      "grad_norm": 10.0,
      "learning_rate": 9.807994610375028e-05,
      "loss": 4.9017,
      "step": 640
    },
    {
      "epoch": 0.208,
      "grad_norm": 10.0,
      "learning_rate": 9.796766225016843e-05,
      "loss": 4.5826,
      "step": 650
    },
    {
      "epoch": 0.2112,
      "grad_norm": 10.0,
      "learning_rate": 9.785537839658657e-05,
      "loss": 4.4032,
      "step": 660
    },
    {
      "epoch": 0.2144,
      "grad_norm": 10.0,
      "learning_rate": 9.774309454300472e-05,
      "loss": 4.4789,
      "step": 670
    },
    {
      "epoch": 0.2176,
      "grad_norm": 10.000000953674316,
      "learning_rate": 9.763081068942287e-05,
      "loss": 3.5318,
      "step": 680
    },
    {
      "epoch": 0.2208,
      "grad_norm": 10.000000953674316,
      "learning_rate": 9.751852683584101e-05,
      "loss": 3.9803,
      "step": 690
    },
    {
      "epoch": 0.224,
      "grad_norm": 10.0,
      "learning_rate": 9.740624298225916e-05,
      "loss": 3.3411,
      "step": 700
    },
    {
      "epoch": 0.2272,
      "grad_norm": 10.0,
      "learning_rate": 9.729395912867731e-05,
      "loss": 3.8421,
      "step": 710
    },
    {
      "epoch": 0.2304,
      "grad_norm": 10.000000953674316,
      "learning_rate": 9.718167527509545e-05,
      "loss": 3.8598,
      "step": 720
    },
    {
      "epoch": 0.2336,
      "grad_norm": 9.999999046325684,
      "learning_rate": 9.706939142151358e-05,
      "loss": 3.6175,
      "step": 730
    },
    {
      "epoch": 0.2368,
      "grad_norm": 9.999999046325684,
      "learning_rate": 9.695710756793174e-05,
      "loss": 3.3833,
      "step": 740
    },
    {
      "epoch": 0.24,
      "grad_norm": 10.0,
      "learning_rate": 9.684482371434989e-05,
      "loss": 3.1501,
      "step": 750
    },
    {
      "epoch": 0.2432,
      "grad_norm": 10.0,
      "learning_rate": 9.673253986076802e-05,
      "loss": 3.2144,
      "step": 760
    },
    {
      "epoch": 0.2464,
      "grad_norm": 10.0,
      "learning_rate": 9.662025600718617e-05,
      "loss": 3.1372,
      "step": 770
    },
    {
      "epoch": 0.2496,
      "grad_norm": 10.0,
      "learning_rate": 9.650797215360432e-05,
      "loss": 2.728,
      "step": 780
    },
    {
      "epoch": 0.2528,
      "grad_norm": 10.0,
      "learning_rate": 9.639568830002246e-05,
      "loss": 2.7485,
      "step": 790
    },
    {
      "epoch": 0.256,
      "grad_norm": 10.000000953674316,
      "learning_rate": 9.628340444644061e-05,
      "loss": 2.7431,
      "step": 800
    },
    {
      "epoch": 0.2592,
      "grad_norm": 10.0,
      "learning_rate": 9.617112059285875e-05,
      "loss": 2.534,
      "step": 810
    },
    {
      "epoch": 0.2624,
      "grad_norm": 10.0,
      "learning_rate": 9.605883673927689e-05,
      "loss": 2.6129,
      "step": 820
    },
    {
      "epoch": 0.2656,
      "grad_norm": 10.0,
      "learning_rate": 9.594655288569504e-05,
      "loss": 2.6607,
      "step": 830
    },
    {
      "epoch": 0.2688,
      "grad_norm": 9.999999046325684,
      "learning_rate": 9.583426903211319e-05,
      "loss": 2.5158,
      "step": 840
    },
    {
      "epoch": 0.272,
      "grad_norm": 9.999999046325684,
      "learning_rate": 9.572198517853134e-05,
      "loss": 2.2633,
      "step": 850
    },
    {
      "epoch": 0.2752,
      "grad_norm": 10.0,
      "learning_rate": 9.560970132494948e-05,
      "loss": 2.4407,
      "step": 860
    },
    {
      "epoch": 0.2784,
      "grad_norm": 9.999999046325684,
      "learning_rate": 9.549741747136763e-05,
      "loss": 2.1584,
      "step": 870
    },
    {
      "epoch": 0.2816,
      "grad_norm": 10.0,
      "learning_rate": 9.538513361778578e-05,
      "loss": 2.1552,
      "step": 880
    },
    {
      "epoch": 0.2848,
      "grad_norm": 10.000000953674316,
      "learning_rate": 9.527284976420391e-05,
      "loss": 1.8968,
      "step": 890
    },
    {
      "epoch": 0.288,
      "grad_norm": 9.999999046325684,
      "learning_rate": 9.516056591062205e-05,
      "loss": 1.9182,
      "step": 900
    },
    {
      "epoch": 0.2912,
      "grad_norm": 10.0,
      "learning_rate": 9.50482820570402e-05,
      "loss": 1.9413,
      "step": 910
    },
    {
      "epoch": 0.2944,
      "grad_norm": 10.0,
      "learning_rate": 9.493599820345834e-05,
      "loss": 1.8959,
      "step": 920
    },
    {
      "epoch": 0.2976,
      "grad_norm": 10.0,
      "learning_rate": 9.482371434987649e-05,
      "loss": 1.9225,
      "step": 930
    },
    {
      "epoch": 0.3008,
      "grad_norm": 10.0,
      "learning_rate": 9.471143049629464e-05,
      "loss": 1.8802,
      "step": 940
    },
    {
      "epoch": 0.304,
      "grad_norm": 9.999999046325684,
      "learning_rate": 9.459914664271278e-05,
      "loss": 1.8896,
      "step": 950
    },
    {
      "epoch": 0.3072,
      "grad_norm": 10.0,
      "learning_rate": 9.448686278913093e-05,
      "loss": 1.7142,
      "step": 960
    },
    {
      "epoch": 0.3104,
      "grad_norm": 10.0,
      "learning_rate": 9.437457893554908e-05,
      "loss": 1.6888,
      "step": 970
    },
    {
      "epoch": 0.3136,
      "grad_norm": 10.0,
      "learning_rate": 9.426229508196722e-05,
      "loss": 1.6177,
      "step": 980
    },
    {
      "epoch": 0.3168,
      "grad_norm": 10.0,
      "learning_rate": 9.415001122838537e-05,
      "loss": 1.645,
      "step": 990
    },
    {
      "epoch": 0.32,
      "grad_norm": 10.0,
      "learning_rate": 9.40377273748035e-05,
      "loss": 1.6792,
      "step": 1000
    },
    {
      "epoch": 0.3232,
      "grad_norm": 10.0,
      "learning_rate": 9.392544352122165e-05,
      "loss": 1.8599,
      "step": 1010
    },
    {
      "epoch": 0.3264,
      "grad_norm": 10.0,
      "learning_rate": 9.381315966763979e-05,
      "loss": 1.6779,
      "step": 1020
    },
    {
      "epoch": 0.3296,
      "grad_norm": 9.999999046325684,
      "learning_rate": 9.370087581405794e-05,
      "loss": 1.4669,
      "step": 1030
    },
    {
      "epoch": 0.3328,
      "grad_norm": 10.0,
      "learning_rate": 9.358859196047609e-05,
      "loss": 1.3649,
      "step": 1040
    },
    {
      "epoch": 0.336,
      "grad_norm": 10.0,
      "learning_rate": 9.347630810689423e-05,
      "loss": 1.3987,
      "step": 1050
    },
    {
      "epoch": 0.3392,
      "grad_norm": 9.999998092651367,
      "learning_rate": 9.336402425331238e-05,
      "loss": 1.2421,
      "step": 1060
    },
    {
      "epoch": 0.3424,
      "grad_norm": 9.999998092651367,
      "learning_rate": 9.325174039973053e-05,
      "loss": 1.3011,
      "step": 1070
    },
    {
      "epoch": 0.3456,
      "grad_norm": 9.999999046325684,
      "learning_rate": 9.313945654614867e-05,
      "loss": 1.2287,
      "step": 1080
    },
    {
      "epoch": 0.3488,
      "grad_norm": 10.0,
      "learning_rate": 9.30271726925668e-05,
      "loss": 1.1587,
      "step": 1090
    },
    {
      "epoch": 0.352,
      "grad_norm": 10.0,
      "learning_rate": 9.291488883898496e-05,
      "loss": 1.2415,
      "step": 1100
    },
    {
      "epoch": 0.3552,
      "grad_norm": 10.0,
      "learning_rate": 9.280260498540311e-05,
      "loss": 1.1415,
      "step": 1110
    },
    {
      "epoch": 0.3584,
      "grad_norm": 9.999998092651367,
      "learning_rate": 9.269032113182124e-05,
      "loss": 1.1941,
      "step": 1120
    },
    {
      "epoch": 0.3616,
      "grad_norm": 10.0,
      "learning_rate": 9.25780372782394e-05,
      "loss": 1.1668,
      "step": 1130
    },
    {
      "epoch": 0.3648,
      "grad_norm": 10.000000953674316,
      "learning_rate": 9.246575342465755e-05,
      "loss": 1.0887,
      "step": 1140
    },
    {
      "epoch": 0.368,
      "grad_norm": 9.999998092651367,
      "learning_rate": 9.235346957107568e-05,
      "loss": 1.0604,
      "step": 1150
    },
    {
      "epoch": 0.3712,
      "grad_norm": 9.999999046325684,
      "learning_rate": 9.224118571749383e-05,
      "loss": 0.9446,
      "step": 1160
    },
    {
      "epoch": 0.3744,
      "grad_norm": 9.999999046325684,
      "learning_rate": 9.212890186391197e-05,
      "loss": 0.9737,
      "step": 1170
    },
    {
      "epoch": 0.3776,
      "grad_norm": 10.0,
      "learning_rate": 9.201661801033011e-05,
      "loss": 0.9654,
      "step": 1180
    },
    {
      "epoch": 0.3808,
      "grad_norm": 10.000000953674316,
      "learning_rate": 9.190433415674826e-05,
      "loss": 1.0253,
      "step": 1190
    },
    {
      "epoch": 0.384,
      "grad_norm": 9.999999046325684,
      "learning_rate": 9.179205030316641e-05,
      "loss": 0.8597,
      "step": 1200
    },
    {
      "epoch": 0.3872,
      "grad_norm": 9.999998092651367,
      "learning_rate": 9.167976644958456e-05,
      "loss": 0.8202,
      "step": 1210
    },
    {
      "epoch": 0.3904,
      "grad_norm": 9.999999046325684,
      "learning_rate": 9.15674825960027e-05,
      "loss": 0.8386,
      "step": 1220
    },
    {
      "epoch": 0.3936,
      "grad_norm": 9.999999046325684,
      "learning_rate": 9.145519874242085e-05,
      "loss": 0.8318,
      "step": 1230
    },
    {
      "epoch": 0.3968,
      "grad_norm": 10.0,
      "learning_rate": 9.1342914888839e-05,
      "loss": 0.8571,
      "step": 1240
    },
    {
      "epoch": 0.4,
      "grad_norm": 9.999999046325684,
      "learning_rate": 9.123063103525713e-05,
      "loss": 0.8326,
      "step": 1250
    },
    {
      "epoch": 0.4032,
      "grad_norm": 9.999999046325684,
      "learning_rate": 9.111834718167527e-05,
      "loss": 0.7655,
      "step": 1260
    },
    {
      "epoch": 0.4064,
      "grad_norm": 10.0,
      "learning_rate": 9.100606332809342e-05,
      "loss": 0.747,
      "step": 1270
    },
    {
      "epoch": 0.4096,
      "grad_norm": 10.0,
      "learning_rate": 9.089377947451156e-05,
      "loss": 0.8671,
      "step": 1280
    },
    {
      "epoch": 0.4128,
      "grad_norm": 9.999999046325684,
      "learning_rate": 9.078149562092971e-05,
      "loss": 0.8758,
      "step": 1290
    },
    {
      "epoch": 0.416,
      "grad_norm": 10.000000953674316,
      "learning_rate": 9.066921176734786e-05,
      "loss": 0.8714,
      "step": 1300
    },
    {
      "epoch": 0.4192,
      "grad_norm": 9.999999046325684,
      "learning_rate": 9.055692791376601e-05,
      "loss": 0.8022,
      "step": 1310
    },
    {
      "epoch": 0.4224,
      "grad_norm": 8.615378379821777,
      "learning_rate": 9.044464406018415e-05,
      "loss": 0.7234,
      "step": 1320
    },
    {
      "epoch": 0.4256,
      "grad_norm": 10.0,
      "learning_rate": 9.03323602066023e-05,
      "loss": 0.7416,
      "step": 1330
    },
    {
      "epoch": 0.4288,
      "grad_norm": 10.0,
      "learning_rate": 9.022007635302045e-05,
      "loss": 0.6898,
      "step": 1340
    },
    {
      "epoch": 0.432,
      "grad_norm": 9.999999046325684,
      "learning_rate": 9.010779249943859e-05,
      "loss": 0.6261,
      "step": 1350
    },
    {
      "epoch": 0.4352,
      "grad_norm": 10.0,
      "learning_rate": 8.999550864585672e-05,
      "loss": 0.6267,
      "step": 1360
    },
    {
      "epoch": 0.4384,
      "grad_norm": 9.999999046325684,
      "learning_rate": 8.988322479227488e-05,
      "loss": 0.6171,
      "step": 1370
    },
    {
      "epoch": 0.4416,
      "grad_norm": 9.999998092651367,
      "learning_rate": 8.977094093869301e-05,
      "loss": 0.5677,
      "step": 1380
    },
    {
      "epoch": 0.4448,
      "grad_norm": 9.999999046325684,
      "learning_rate": 8.965865708511116e-05,
      "loss": 0.5227,
      "step": 1390
    },
    {
      "epoch": 0.448,
      "grad_norm": 9.999999046325684,
      "learning_rate": 8.954637323152931e-05,
      "loss": 0.5775,
      "step": 1400
    },
    {
      "epoch": 0.4512,
      "grad_norm": 9.999998092651367,
      "learning_rate": 8.943408937794746e-05,
      "loss": 0.6166,
      "step": 1410
    },
    {
      "epoch": 0.4544,
      "grad_norm": 10.0,
      "learning_rate": 8.93218055243656e-05,
      "loss": 0.6165,
      "step": 1420
    },
    {
      "epoch": 0.4576,
      "grad_norm": 9.999999046325684,
      "learning_rate": 8.920952167078375e-05,
      "loss": 0.5849,
      "step": 1430
    },
    {
      "epoch": 0.4608,
      "grad_norm": 10.0,
      "learning_rate": 8.909723781720189e-05,
      "loss": 0.6439,
      "step": 1440
    },
    {
      "epoch": 0.464,
      "grad_norm": 9.999998092651367,
      "learning_rate": 8.898495396362003e-05,
      "loss": 0.5886,
      "step": 1450
    },
    {
      "epoch": 0.4672,
      "grad_norm": 9.999999046325684,
      "learning_rate": 8.887267011003818e-05,
      "loss": 0.5935,
      "step": 1460
    },
    {
      "epoch": 0.4704,
      "grad_norm": 10.0,
      "learning_rate": 8.876038625645633e-05,
      "loss": 0.5868,
      "step": 1470
    },
    {
      "epoch": 0.4736,
      "grad_norm": 9.999998092651367,
      "learning_rate": 8.864810240287447e-05,
      "loss": 0.5489,
      "step": 1480
    },
    {
      "epoch": 0.4768,
      "grad_norm": 10.0,
      "learning_rate": 8.853581854929262e-05,
      "loss": 0.5106,
      "step": 1490
    },
    {
      "epoch": 0.48,
      "grad_norm": 9.999999046325684,
      "learning_rate": 8.842353469571077e-05,
      "loss": 0.4885,
      "step": 1500
    },
    {
      "epoch": 0.4832,
      "grad_norm": 9.999999046325684,
      "learning_rate": 8.83112508421289e-05,
      "loss": 0.5323,
      "step": 1510
    },
    {
      "epoch": 0.4864,
      "grad_norm": 9.999999046325684,
      "learning_rate": 8.819896698854705e-05,
      "loss": 0.4359,
      "step": 1520
    },
    {
      "epoch": 0.4896,
      "grad_norm": 7.423900604248047,
      "learning_rate": 8.808668313496519e-05,
      "loss": 0.4789,
      "step": 1530
    },
    {
      "epoch": 0.4928,
      "grad_norm": 9.999998092651367,
      "learning_rate": 8.797439928138334e-05,
      "loss": 0.4367,
      "step": 1540
    },
    {
      "epoch": 0.496,
      "grad_norm": 7.3594489097595215,
      "learning_rate": 8.786211542780148e-05,
      "loss": 0.465,
      "step": 1550
    },
    {
      "epoch": 0.4992,
      "grad_norm": 9.999998092651367,
      "learning_rate": 8.774983157421963e-05,
      "loss": 0.4244,
      "step": 1560
    },
    {
      "epoch": 0.5024,
      "grad_norm": 10.0,
      "learning_rate": 8.763754772063778e-05,
      "loss": 0.4311,
      "step": 1570
    },
    {
      "epoch": 0.5056,
      "grad_norm": 9.999999046325684,
      "learning_rate": 8.752526386705592e-05,
      "loss": 0.4787,
      "step": 1580
    },
    {
      "epoch": 0.5088,
      "grad_norm": 9.999999046325684,
      "learning_rate": 8.741298001347407e-05,
      "loss": 0.4777,
      "step": 1590
    },
    {
      "epoch": 0.512,
      "grad_norm": 10.0,
      "learning_rate": 8.730069615989222e-05,
      "loss": 0.469,
      "step": 1600
    },
    {
      "epoch": 0.5152,
      "grad_norm": 10.0,
      "learning_rate": 8.718841230631036e-05,
      "loss": 0.4519,
      "step": 1610
    },
    {
      "epoch": 0.5184,
      "grad_norm": 9.999998092651367,
      "learning_rate": 8.70761284527285e-05,
      "loss": 0.4084,
      "step": 1620
    },
    {
      "epoch": 0.5216,
      "grad_norm": 9.999998092651367,
      "learning_rate": 8.696384459914664e-05,
      "loss": 0.3627,
      "step": 1630
    },
    {
      "epoch": 0.5248,
      "grad_norm": 8.502776145935059,
      "learning_rate": 8.68515607455648e-05,
      "loss": 0.3742,
      "step": 1640
    },
    {
      "epoch": 0.528,
      "grad_norm": 10.0,
      "learning_rate": 8.673927689198293e-05,
      "loss": 0.3525,
      "step": 1650
    },
    {
      "epoch": 0.5312,
      "grad_norm": 9.999999046325684,
      "learning_rate": 8.662699303840108e-05,
      "loss": 0.3889,
      "step": 1660
    },
    {
      "epoch": 0.5344,
      "grad_norm": 10.0,
      "learning_rate": 8.651470918481923e-05,
      "loss": 0.5584,
      "step": 1670
    },
    {
      "epoch": 0.5376,
      "grad_norm": 9.999999046325684,
      "learning_rate": 8.640242533123737e-05,
      "loss": 0.3988,
      "step": 1680
    },
    {
      "epoch": 0.5408,
      "grad_norm": 8.782861709594727,
      "learning_rate": 8.629014147765552e-05,
      "loss": 0.3439,
      "step": 1690
    },
    {
      "epoch": 0.544,
      "grad_norm": 8.174032211303711,
      "learning_rate": 8.617785762407367e-05,
      "loss": 0.3265,
      "step": 1700
    },
    {
      "epoch": 0.5472,
      "grad_norm": 4.737403392791748,
      "learning_rate": 8.606557377049181e-05,
      "loss": 0.329,
      "step": 1710
    },
    {
      "epoch": 0.5504,
      "grad_norm": 9.999999046325684,
      "learning_rate": 8.595328991690995e-05,
      "loss": 0.3093,
      "step": 1720
    },
    {
      "epoch": 0.5536,
      "grad_norm": 10.0,
      "learning_rate": 8.58410060633281e-05,
      "loss": 0.3119,
      "step": 1730
    },
    {
      "epoch": 0.5568,
      "grad_norm": 10.0,
      "learning_rate": 8.572872220974623e-05,
      "loss": 0.3227,
      "step": 1740
    },
    {
      "epoch": 0.56,
      "grad_norm": 9.882418632507324,
      "learning_rate": 8.561643835616438e-05,
      "loss": 0.2979,
      "step": 1750
    },
    {
      "epoch": 0.5632,
      "grad_norm": 9.999998092651367,
      "learning_rate": 8.550415450258253e-05,
      "loss": 0.2693,
      "step": 1760
    },
    {
      "epoch": 0.5664,
      "grad_norm": 9.999999046325684,
      "learning_rate": 8.539187064900069e-05,
      "loss": 0.3187,
      "step": 1770
    },
    {
      "epoch": 0.5696,
      "grad_norm": 9.999999046325684,
      "learning_rate": 8.527958679541882e-05,
      "loss": 0.291,
      "step": 1780
    },
    {
      "epoch": 0.5728,
      "grad_norm": 7.839264869689941,
      "learning_rate": 8.516730294183697e-05,
      "loss": 0.2833,
      "step": 1790
    },
    {
      "epoch": 0.576,
      "grad_norm": 4.611258029937744,
      "learning_rate": 8.505501908825511e-05,
      "loss": 0.2525,
      "step": 1800
    },
    {
      "epoch": 0.5792,
      "grad_norm": 4.125186443328857,
      "learning_rate": 8.494273523467325e-05,
      "loss": 0.2727,
      "step": 1810
    },
    {
      "epoch": 0.5824,
      "grad_norm": 9.999999046325684,
      "learning_rate": 8.48304513810914e-05,
      "loss": 0.2718,
      "step": 1820
    },
    {
      "epoch": 0.5856,
      "grad_norm": 9.999999046325684,
      "learning_rate": 8.471816752750955e-05,
      "loss": 0.2787,
      "step": 1830
    },
    {
      "epoch": 0.5888,
      "grad_norm": 4.507540225982666,
      "learning_rate": 8.460588367392769e-05,
      "loss": 0.2903,
      "step": 1840
    },
    {
      "epoch": 0.592,
      "grad_norm": 8.763723373413086,
      "learning_rate": 8.449359982034584e-05,
      "loss": 0.2821,
      "step": 1850
    },
    {
      "epoch": 0.5952,
      "grad_norm": 6.04974365234375,
      "learning_rate": 8.438131596676399e-05,
      "loss": 0.2469,
      "step": 1860
    },
    {
      "epoch": 0.5984,
      "grad_norm": 9.999998092651367,
      "learning_rate": 8.426903211318214e-05,
      "loss": 0.2389,
      "step": 1870
    },
    {
      "epoch": 0.6016,
      "grad_norm": 10.0,
      "learning_rate": 8.415674825960028e-05,
      "loss": 0.2749,
      "step": 1880
    },
    {
      "epoch": 0.6048,
      "grad_norm": 10.0,
      "learning_rate": 8.404446440601843e-05,
      "loss": 0.2829,
      "step": 1890
    },
    {
      "epoch": 0.608,
      "grad_norm": 9.99999713897705,
      "learning_rate": 8.393218055243656e-05,
      "loss": 0.2725,
      "step": 1900
    },
    {
      "epoch": 0.6112,
      "grad_norm": 10.000001907348633,
      "learning_rate": 8.38198966988547e-05,
      "loss": 0.2698,
      "step": 1910
    },
    {
      "epoch": 0.6144,
      "grad_norm": 9.999999046325684,
      "learning_rate": 8.370761284527285e-05,
      "loss": 0.2678,
      "step": 1920
    },
    {
      "epoch": 0.6176,
      "grad_norm": 10.0,
      "learning_rate": 8.3595328991691e-05,
      "loss": 0.2741,
      "step": 1930
    },
    {
      "epoch": 0.6208,
      "grad_norm": 10.0,
      "learning_rate": 8.348304513810914e-05,
      "loss": 0.2475,
      "step": 1940
    },
    {
      "epoch": 0.624,
      "grad_norm": 9.999999046325684,
      "learning_rate": 8.337076128452729e-05,
      "loss": 0.2711,
      "step": 1950
    },
    {
      "epoch": 0.6272,
      "grad_norm": 10.0,
      "learning_rate": 8.325847743094544e-05,
      "loss": 0.2592,
      "step": 1960
    },
    {
      "epoch": 0.6304,
      "grad_norm": 9.999999046325684,
      "learning_rate": 8.314619357736358e-05,
      "loss": 0.2348,
      "step": 1970
    },
    {
      "epoch": 0.6336,
      "grad_norm": 7.087158679962158,
      "learning_rate": 8.303390972378173e-05,
      "loss": 0.2185,
      "step": 1980
    },
    {
      "epoch": 0.6368,
      "grad_norm": 6.557620048522949,
      "learning_rate": 8.292162587019986e-05,
      "loss": 0.2443,
      "step": 1990
    },
    {
      "epoch": 0.64,
      "grad_norm": 9.999999046325684,
      "learning_rate": 8.280934201661802e-05,
      "loss": 0.2012,
      "step": 2000
    },
    {
      "epoch": 0.6432,
      "grad_norm": 8.828990936279297,
      "learning_rate": 8.269705816303615e-05,
      "loss": 0.1992,
      "step": 2010
    },
    {
      "epoch": 0.6464,
      "grad_norm": 9.999998092651367,
      "learning_rate": 8.25847743094543e-05,
      "loss": 0.2005,
      "step": 2020
    },
    {
      "epoch": 0.6496,
      "grad_norm": 10.0,
      "learning_rate": 8.247249045587245e-05,
      "loss": 0.2244,
      "step": 2030
    },
    {
      "epoch": 0.6528,
      "grad_norm": 10.0,
      "learning_rate": 8.236020660229059e-05,
      "loss": 0.3351,
      "step": 2040
    },
    {
      "epoch": 0.656,
      "grad_norm": 10.0,
      "learning_rate": 8.224792274870874e-05,
      "loss": 0.3391,
      "step": 2050
    },
    {
      "epoch": 0.6592,
      "grad_norm": 9.999998092651367,
      "learning_rate": 8.213563889512689e-05,
      "loss": 0.2319,
      "step": 2060
    },
    {
      "epoch": 0.6624,
      "grad_norm": 10.0,
      "learning_rate": 8.202335504154503e-05,
      "loss": 0.2222,
      "step": 2070
    },
    {
      "epoch": 0.6656,
      "grad_norm": 8.043660163879395,
      "learning_rate": 8.191107118796317e-05,
      "loss": 0.1829,
      "step": 2080
    },
    {
      "epoch": 0.6688,
      "grad_norm": 5.343007564544678,
      "learning_rate": 8.179878733438132e-05,
      "loss": 0.1756,
      "step": 2090
    },
    {
      "epoch": 0.672,
      "grad_norm": 3.530430316925049,
      "learning_rate": 8.168650348079947e-05,
      "loss": 0.1959,
      "step": 2100
    },
    {
      "epoch": 0.6752,
      "grad_norm": 9.999999046325684,
      "learning_rate": 8.15742196272176e-05,
      "loss": 0.178,
      "step": 2110
    },
    {
      "epoch": 0.6784,
      "grad_norm": 10.0,
      "learning_rate": 8.146193577363576e-05,
      "loss": 0.1867,
      "step": 2120
    },
    {
      "epoch": 0.6816,
      "grad_norm": 8.584357261657715,
      "learning_rate": 8.13496519200539e-05,
      "loss": 0.1832,
      "step": 2130
    },
    {
      "epoch": 0.6848,
      "grad_norm": 4.891357421875,
      "learning_rate": 8.123736806647204e-05,
      "loss": 0.1771,
      "step": 2140
    },
    {
      "epoch": 0.688,
      "grad_norm": 5.522593021392822,
      "learning_rate": 8.11250842128902e-05,
      "loss": 0.1527,
      "step": 2150
    },
    {
      "epoch": 0.6912,
      "grad_norm": 9.386947631835938,
      "learning_rate": 8.101280035930835e-05,
      "loss": 0.1769,
      "step": 2160
    },
    {
      "epoch": 0.6944,
      "grad_norm": 10.0,
      "learning_rate": 8.090051650572648e-05,
      "loss": 0.1889,
      "step": 2170
    },
    {
      "epoch": 0.6976,
      "grad_norm": 9.999999046325684,
      "learning_rate": 8.078823265214462e-05,
      "loss": 0.1827,
      "step": 2180
    },
    {
      "epoch": 0.7008,
      "grad_norm": 9.999999046325684,
      "learning_rate": 8.067594879856277e-05,
      "loss": 0.2055,
      "step": 2190
    },
    {
      "epoch": 0.704,
      "grad_norm": 9.999998092651367,
      "learning_rate": 8.056366494498092e-05,
      "loss": 0.1802,
      "step": 2200
    },
    {
      "epoch": 0.7072,
      "grad_norm": 9.999999046325684,
      "learning_rate": 8.045138109139906e-05,
      "loss": 0.1706,
      "step": 2210
    },
    {
      "epoch": 0.7104,
      "grad_norm": 3.970720052719116,
      "learning_rate": 8.033909723781721e-05,
      "loss": 0.1604,
      "step": 2220
    },
    {
      "epoch": 0.7136,
      "grad_norm": 3.6069769859313965,
      "learning_rate": 8.022681338423536e-05,
      "loss": 0.1453,
      "step": 2230
    },
    {
      "epoch": 0.7168,
      "grad_norm": 3.9357783794403076,
      "learning_rate": 8.01145295306535e-05,
      "loss": 0.1516,
      "step": 2240
    },
    {
      "epoch": 0.72,
      "grad_norm": 5.958951950073242,
      "learning_rate": 8.000224567707165e-05,
      "loss": 0.1512,
      "step": 2250
    },
    {
      "epoch": 0.7232,
      "grad_norm": 9.320231437683105,
      "learning_rate": 7.988996182348978e-05,
      "loss": 0.1642,
      "step": 2260
    },
    {
      "epoch": 0.7264,
      "grad_norm": 4.681884765625,
      "learning_rate": 7.977767796990792e-05,
      "loss": 0.1613,
      "step": 2270
    },
    {
      "epoch": 0.7296,
      "grad_norm": 7.769372940063477,
      "learning_rate": 7.966539411632607e-05,
      "loss": 0.1544,
      "step": 2280
    },
    {
      "epoch": 0.7328,
      "grad_norm": 4.547751426696777,
      "learning_rate": 7.955311026274422e-05,
      "loss": 0.1636,
      "step": 2290
    },
    {
      "epoch": 0.736,
      "grad_norm": 8.214717864990234,
      "learning_rate": 7.944082640916236e-05,
      "loss": 0.1372,
      "step": 2300
    },
    {
      "epoch": 0.7392,
      "grad_norm": 7.887718677520752,
      "learning_rate": 7.932854255558051e-05,
      "loss": 0.158,
      "step": 2310
    },
    {
      "epoch": 0.7424,
      "grad_norm": 9.999999046325684,
      "learning_rate": 7.921625870199866e-05,
      "loss": 0.1611,
      "step": 2320
    },
    {
      "epoch": 0.7456,
      "grad_norm": 3.0139806270599365,
      "learning_rate": 7.910397484841681e-05,
      "loss": 0.1584,
      "step": 2330
    },
    {
      "epoch": 0.7488,
      "grad_norm": 5.308823585510254,
      "learning_rate": 7.899169099483495e-05,
      "loss": 0.1489,
      "step": 2340
    },
    {
      "epoch": 0.752,
      "grad_norm": 1.894464373588562,
      "learning_rate": 7.887940714125309e-05,
      "loss": 0.1335,
      "step": 2350
    },
    {
      "epoch": 0.7552,
      "grad_norm": 8.55656909942627,
      "learning_rate": 7.876712328767124e-05,
      "loss": 0.1363,
      "step": 2360
    },
    {
      "epoch": 0.7584,
      "grad_norm": 7.630896091461182,
      "learning_rate": 7.865483943408937e-05,
      "loss": 0.1398,
      "step": 2370
    },
    {
      "epoch": 0.7616,
      "grad_norm": 9.361071586608887,
      "learning_rate": 7.854255558050752e-05,
      "loss": 0.1307,
      "step": 2380
    },
    {
      "epoch": 0.7648,
      "grad_norm": 3.613555431365967,
      "learning_rate": 7.843027172692568e-05,
      "loss": 0.1461,
      "step": 2390
    },
    {
      "epoch": 0.768,
      "grad_norm": 4.020608901977539,
      "learning_rate": 7.831798787334381e-05,
      "loss": 0.1432,
      "step": 2400
    },
    {
      "epoch": 0.7712,
      "grad_norm": 6.401773452758789,
      "learning_rate": 7.820570401976196e-05,
      "loss": 0.1396,
      "step": 2410
    },
    {
      "epoch": 0.7744,
      "grad_norm": 6.332232475280762,
      "learning_rate": 7.809342016618011e-05,
      "loss": 0.1355,
      "step": 2420
    },
    {
      "epoch": 0.7776,
      "grad_norm": 2.399341344833374,
      "learning_rate": 7.798113631259825e-05,
      "loss": 0.1416,
      "step": 2430
    },
    {
      "epoch": 0.7808,
      "grad_norm": 5.8365654945373535,
      "learning_rate": 7.78688524590164e-05,
      "loss": 0.1265,
      "step": 2440
    },
    {
      "epoch": 0.784,
      "grad_norm": 10.0,
      "learning_rate": 7.775656860543454e-05,
      "loss": 0.6647,
      "step": 2450
    },
    {
      "epoch": 0.7872,
      "grad_norm": 5.435879230499268,
      "learning_rate": 7.764428475185269e-05,
      "loss": 0.1334,
      "step": 2460
    },
    {
      "epoch": 0.7904,
      "grad_norm": 10.0,
      "learning_rate": 7.753200089827083e-05,
      "loss": 0.1284,
      "step": 2470
    },
    {
      "epoch": 0.7936,
      "grad_norm": 7.202799320220947,
      "learning_rate": 7.741971704468898e-05,
      "loss": 0.1369,
      "step": 2480
    },
    {
      "epoch": 0.7968,
      "grad_norm": 2.0989317893981934,
      "learning_rate": 7.730743319110713e-05,
      "loss": 0.1105,
      "step": 2490
    },
    {
      "epoch": 0.8,
      "grad_norm": 4.924466609954834,
      "learning_rate": 7.719514933752526e-05,
      "loss": 0.1226,
      "step": 2500
    },
    {
      "epoch": 0.8032,
      "grad_norm": 7.978616714477539,
      "learning_rate": 7.708286548394342e-05,
      "loss": 0.1089,
      "step": 2510
    },
    {
      "epoch": 0.8064,
      "grad_norm": 4.150684356689453,
      "learning_rate": 7.697058163036157e-05,
      "loss": 0.1234,
      "step": 2520
    },
    {
      "epoch": 0.8096,
      "grad_norm": 4.818097114562988,
      "learning_rate": 7.68582977767797e-05,
      "loss": 0.1188,
      "step": 2530
    },
    {
      "epoch": 0.8128,
      "grad_norm": 2.5419697761535645,
      "learning_rate": 7.674601392319784e-05,
      "loss": 0.113,
      "step": 2540
    },
    {
      "epoch": 0.816,
      "grad_norm": 5.721562385559082,
      "learning_rate": 7.663373006961599e-05,
      "loss": 0.109,
      "step": 2550
    },
    {
      "epoch": 0.8192,
      "grad_norm": 2.7478086948394775,
      "learning_rate": 7.652144621603414e-05,
      "loss": 0.101,
      "step": 2560
    },
    {
      "epoch": 0.8224,
      "grad_norm": 9.692163467407227,
      "learning_rate": 7.640916236245228e-05,
      "loss": 0.1156,
      "step": 2570
    },
    {
      "epoch": 0.8256,
      "grad_norm": 6.791487693786621,
      "learning_rate": 7.629687850887043e-05,
      "loss": 0.12,
      "step": 2580
    },
    {
      "epoch": 0.8288,
      "grad_norm": 4.655740261077881,
      "learning_rate": 7.618459465528858e-05,
      "loss": 0.114,
      "step": 2590
    },
    {
      "epoch": 0.832,
      "grad_norm": 2.558154344558716,
      "learning_rate": 7.607231080170672e-05,
      "loss": 0.102,
      "step": 2600
    },
    {
      "epoch": 0.8352,
      "grad_norm": 5.348006725311279,
      "learning_rate": 7.596002694812487e-05,
      "loss": 0.156,
      "step": 2610
    },
    {
      "epoch": 0.8384,
      "grad_norm": 4.152245044708252,
      "learning_rate": 7.5847743094543e-05,
      "loss": 0.1256,
      "step": 2620
    },
    {
      "epoch": 0.8416,
      "grad_norm": 6.711794853210449,
      "learning_rate": 7.573545924096114e-05,
      "loss": 0.1184,
      "step": 2630
    },
    {
      "epoch": 0.8448,
      "grad_norm": 6.55480432510376,
      "learning_rate": 7.562317538737929e-05,
      "loss": 0.1253,
      "step": 2640
    },
    {
      "epoch": 0.848,
      "grad_norm": 6.035834312438965,
      "learning_rate": 7.551089153379744e-05,
      "loss": 0.125,
      "step": 2650
    },
    {
      "epoch": 0.8512,
      "grad_norm": 7.503954887390137,
      "learning_rate": 7.53986076802156e-05,
      "loss": 0.1068,
      "step": 2660
    },
    {
      "epoch": 0.8544,
      "grad_norm": 4.4410271644592285,
      "learning_rate": 7.528632382663373e-05,
      "loss": 0.1137,
      "step": 2670
    },
    {
      "epoch": 0.8576,
      "grad_norm": 4.127198696136475,
      "learning_rate": 7.517403997305188e-05,
      "loss": 0.1127,
      "step": 2680
    },
    {
      "epoch": 0.8608,
      "grad_norm": 3.0780539512634277,
      "learning_rate": 7.506175611947003e-05,
      "loss": 0.1009,
      "step": 2690
    },
    {
      "epoch": 0.864,
      "grad_norm": 8.409754753112793,
      "learning_rate": 7.494947226588817e-05,
      "loss": 0.1017,
      "step": 2700
    },
    {
      "epoch": 0.8672,
      "grad_norm": 4.118874549865723,
      "learning_rate": 7.483718841230631e-05,
      "loss": 0.1,
      "step": 2710
    },
    {
      "epoch": 0.8704,
      "grad_norm": 2.367293357849121,
      "learning_rate": 7.472490455872446e-05,
      "loss": 0.1019,
      "step": 2720
    },
    {
      "epoch": 0.8736,
      "grad_norm": 5.165258884429932,
      "learning_rate": 7.46126207051426e-05,
      "loss": 0.0891,
      "step": 2730
    },
    {
      "epoch": 0.8768,
      "grad_norm": 1.9673935174942017,
      "learning_rate": 7.450033685156075e-05,
      "loss": 0.0869,
      "step": 2740
    },
    {
      "epoch": 0.88,
      "grad_norm": 4.972908020019531,
      "learning_rate": 7.43880529979789e-05,
      "loss": 0.0894,
      "step": 2750
    },
    {
      "epoch": 0.8832,
      "grad_norm": 4.128866195678711,
      "learning_rate": 7.427576914439703e-05,
      "loss": 0.0888,
      "step": 2760
    },
    {
      "epoch": 0.8864,
      "grad_norm": 1.7406138181686401,
      "learning_rate": 7.416348529081518e-05,
      "loss": 0.0876,
      "step": 2770
    },
    {
      "epoch": 0.8896,
      "grad_norm": 1.7218992710113525,
      "learning_rate": 7.405120143723333e-05,
      "loss": 0.0925,
      "step": 2780
    },
    {
      "epoch": 0.8928,
      "grad_norm": 2.668508768081665,
      "learning_rate": 7.393891758365149e-05,
      "loss": 0.0872,
      "step": 2790
    },
    {
      "epoch": 0.896,
      "grad_norm": 5.266975402832031,
      "learning_rate": 7.382663373006962e-05,
      "loss": 0.083,
      "step": 2800
    },
    {
      "epoch": 0.8992,
      "grad_norm": 2.1134185791015625,
      "learning_rate": 7.371434987648776e-05,
      "loss": 0.0871,
      "step": 2810
    },
    {
      "epoch": 0.9024,
      "grad_norm": 4.195724010467529,
      "learning_rate": 7.360206602290591e-05,
      "loss": 0.0941,
      "step": 2820
    },
    {
      "epoch": 0.9056,
      "grad_norm": 2.4259421825408936,
      "learning_rate": 7.348978216932405e-05,
      "loss": 0.0937,
      "step": 2830
    },
    {
      "epoch": 0.9088,
      "grad_norm": 2.7671356201171875,
      "learning_rate": 7.33774983157422e-05,
      "loss": 0.095,
      "step": 2840
    },
    {
      "epoch": 0.912,
      "grad_norm": 6.370889663696289,
      "learning_rate": 7.326521446216035e-05,
      "loss": 0.0826,
      "step": 2850
    },
    {
      "epoch": 0.9152,
      "grad_norm": 6.082813739776611,
      "learning_rate": 7.315293060857849e-05,
      "loss": 0.0891,
      "step": 2860
    },
    {
      "epoch": 0.9184,
      "grad_norm": 4.761292934417725,
      "learning_rate": 7.304064675499664e-05,
      "loss": 0.0905,
      "step": 2870
    },
    {
      "epoch": 0.9216,
      "grad_norm": 3.1539108753204346,
      "learning_rate": 7.292836290141479e-05,
      "loss": 0.0836,
      "step": 2880
    },
    {
      "epoch": 0.9248,
      "grad_norm": 3.095581531524658,
      "learning_rate": 7.281607904783292e-05,
      "loss": 0.0782,
      "step": 2890
    },
    {
      "epoch": 0.928,
      "grad_norm": 2.6302435398101807,
      "learning_rate": 7.270379519425106e-05,
      "loss": 0.0844,
      "step": 2900
    },
    {
      "epoch": 0.9312,
      "grad_norm": 4.609825134277344,
      "learning_rate": 7.259151134066921e-05,
      "loss": 0.0773,
      "step": 2910
    },
    {
      "epoch": 0.9344,
      "grad_norm": 6.823300361633301,
      "learning_rate": 7.247922748708736e-05,
      "loss": 0.0811,
      "step": 2920
    },
    {
      "epoch": 0.9376,
      "grad_norm": 2.5662624835968018,
      "learning_rate": 7.23669436335055e-05,
      "loss": 0.0751,
      "step": 2930
    },
    {
      "epoch": 0.9408,
      "grad_norm": 1.9695119857788086,
      "learning_rate": 7.225465977992365e-05,
      "loss": 0.087,
      "step": 2940
    },
    {
      "epoch": 0.944,
      "grad_norm": 3.3225550651550293,
      "learning_rate": 7.21423759263418e-05,
      "loss": 0.0825,
      "step": 2950
    },
    {
      "epoch": 0.9472,
      "grad_norm": 3.849437713623047,
      "learning_rate": 7.203009207275994e-05,
      "loss": 0.0767,
      "step": 2960
    },
    {
      "epoch": 0.9504,
      "grad_norm": 1.750457525253296,
      "learning_rate": 7.191780821917809e-05,
      "loss": 0.0882,
      "step": 2970
    },
    {
      "epoch": 0.9536,
      "grad_norm": 6.477813243865967,
      "learning_rate": 7.180552436559623e-05,
      "loss": 0.0919,
      "step": 2980
    },
    {
      "epoch": 0.9568,
      "grad_norm": 1.4271601438522339,
      "learning_rate": 7.169324051201438e-05,
      "loss": 0.0804,
      "step": 2990
    },
    {
      "epoch": 0.96,
      "grad_norm": 7.061021327972412,
      "learning_rate": 7.158095665843251e-05,
      "loss": 0.0832,
      "step": 3000
    },
    {
      "epoch": 0.9632,
      "grad_norm": 2.1753170490264893,
      "learning_rate": 7.146867280485066e-05,
      "loss": 0.0741,
      "step": 3010
    },
    {
      "epoch": 0.9664,
      "grad_norm": 7.894692897796631,
      "learning_rate": 7.135638895126882e-05,
      "loss": 0.0723,
      "step": 3020
    },
    {
      "epoch": 0.9696,
      "grad_norm": 2.4842376708984375,
      "learning_rate": 7.124410509768695e-05,
      "loss": 0.0746,
      "step": 3030
    },
    {
      "epoch": 0.9728,
      "grad_norm": 4.693914413452148,
      "learning_rate": 7.11318212441051e-05,
      "loss": 0.0818,
      "step": 3040
    },
    {
      "epoch": 0.976,
      "grad_norm": 3.4993011951446533,
      "learning_rate": 7.101953739052325e-05,
      "loss": 0.0743,
      "step": 3050
    },
    {
      "epoch": 0.9792,
      "grad_norm": 2.4109108448028564,
      "learning_rate": 7.090725353694139e-05,
      "loss": 0.0698,
      "step": 3060
    },
    {
      "epoch": 0.9824,
      "grad_norm": 3.363265037536621,
      "learning_rate": 7.079496968335954e-05,
      "loss": 0.076,
      "step": 3070
    },
    {
      "epoch": 0.9856,
      "grad_norm": 4.7013959884643555,
      "learning_rate": 7.068268582977768e-05,
      "loss": 0.0801,
      "step": 3080
    },
    {
      "epoch": 0.9888,
      "grad_norm": 1.858520269393921,
      "learning_rate": 7.057040197619582e-05,
      "loss": 0.0691,
      "step": 3090
    },
    {
      "epoch": 0.992,
      "grad_norm": 2.71395206451416,
      "learning_rate": 7.045811812261397e-05,
      "loss": 0.0811,
      "step": 3100
    },
    {
      "epoch": 0.9952,
      "grad_norm": 3.254763603210449,
      "learning_rate": 7.034583426903212e-05,
      "loss": 0.0772,
      "step": 3110
    },
    {
      "epoch": 0.9984,
      "grad_norm": 6.519776821136475,
      "learning_rate": 7.023355041545027e-05,
      "loss": 0.0718,
      "step": 3120
    },
    {
      "epoch": 1.0016,
      "grad_norm": 1.91741144657135,
      "learning_rate": 7.01212665618684e-05,
      "loss": 0.0746,
      "step": 3130
    },
    {
      "epoch": 1.0048,
      "grad_norm": 1.5471233129501343,
      "learning_rate": 7.000898270828656e-05,
      "loss": 0.071,
      "step": 3140
    },
    {
      "epoch": 1.008,
      "grad_norm": 3.8411548137664795,
      "learning_rate": 6.98966988547047e-05,
      "loss": 0.0646,
      "step": 3150
    },
    {
      "epoch": 1.0112,
      "grad_norm": 4.6282267570495605,
      "learning_rate": 6.978441500112284e-05,
      "loss": 0.068,
      "step": 3160
    },
    {
      "epoch": 1.0144,
      "grad_norm": 3.1158607006073,
      "learning_rate": 6.967213114754098e-05,
      "loss": 0.0706,
      "step": 3170
    },
    {
      "epoch": 1.0176,
      "grad_norm": 4.9777913093566895,
      "learning_rate": 6.955984729395913e-05,
      "loss": 0.0711,
      "step": 3180
    },
    {
      "epoch": 1.0208,
      "grad_norm": 2.6763927936553955,
      "learning_rate": 6.944756344037727e-05,
      "loss": 0.0649,
      "step": 3190
    },
    {
      "epoch": 1.024,
      "grad_norm": 4.165836334228516,
      "learning_rate": 6.933527958679542e-05,
      "loss": 0.066,
      "step": 3200
    },
    {
      "epoch": 1.0272,
      "grad_norm": 9.641294479370117,
      "learning_rate": 6.922299573321357e-05,
      "loss": 0.068,
      "step": 3210
    },
    {
      "epoch": 1.0304,
      "grad_norm": 2.3524587154388428,
      "learning_rate": 6.911071187963172e-05,
      "loss": 0.0695,
      "step": 3220
    },
    {
      "epoch": 1.0336,
      "grad_norm": 3.930358648300171,
      "learning_rate": 6.899842802604986e-05,
      "loss": 0.0869,
      "step": 3230
    },
    {
      "epoch": 1.0368,
      "grad_norm": 3.403099298477173,
      "learning_rate": 6.888614417246801e-05,
      "loss": 0.0734,
      "step": 3240
    },
    {
      "epoch": 1.04,
      "grad_norm": 4.471584796905518,
      "learning_rate": 6.877386031888615e-05,
      "loss": 0.0729,
      "step": 3250
    },
    {
      "epoch": 1.0432,
      "grad_norm": 2.041577100753784,
      "learning_rate": 6.866157646530428e-05,
      "loss": 0.0649,
      "step": 3260
    },
    {
      "epoch": 1.0464,
      "grad_norm": 4.053502559661865,
      "learning_rate": 6.854929261172243e-05,
      "loss": 0.068,
      "step": 3270
    },
    {
      "epoch": 1.0496,
      "grad_norm": 1.712865948677063,
      "learning_rate": 6.843700875814058e-05,
      "loss": 0.0693,
      "step": 3280
    },
    {
      "epoch": 1.0528,
      "grad_norm": 8.817410469055176,
      "learning_rate": 6.832472490455872e-05,
      "loss": 0.073,
      "step": 3290
    },
    {
      "epoch": 1.056,
      "grad_norm": 1.9689174890518188,
      "learning_rate": 6.821244105097687e-05,
      "loss": 0.0665,
      "step": 3300
    },
    {
      "epoch": 1.0592,
      "grad_norm": 4.147378921508789,
      "learning_rate": 6.810015719739502e-05,
      "loss": 0.0683,
      "step": 3310
    },
    {
      "epoch": 1.0624,
      "grad_norm": 3.4082727432250977,
      "learning_rate": 6.798787334381316e-05,
      "loss": 0.0635,
      "step": 3320
    },
    {
      "epoch": 1.0656,
      "grad_norm": 1.8768893480300903,
      "learning_rate": 6.787558949023131e-05,
      "loss": 0.0587,
      "step": 3330
    },
    {
      "epoch": 1.0688,
      "grad_norm": 1.7891249656677246,
      "learning_rate": 6.776330563664946e-05,
      "loss": 0.0628,
      "step": 3340
    },
    {
      "epoch": 1.072,
      "grad_norm": 1.5056793689727783,
      "learning_rate": 6.76510217830676e-05,
      "loss": 0.059,
      "step": 3350
    },
    {
      "epoch": 1.0752,
      "grad_norm": 2.3811066150665283,
      "learning_rate": 6.753873792948574e-05,
      "loss": 0.0642,
      "step": 3360
    },
    {
      "epoch": 1.0784,
      "grad_norm": 4.5819902420043945,
      "learning_rate": 6.742645407590389e-05,
      "loss": 0.0573,
      "step": 3370
    },
    {
      "epoch": 1.0816,
      "grad_norm": 3.9442105293273926,
      "learning_rate": 6.731417022232204e-05,
      "loss": 0.0926,
      "step": 3380
    },
    {
      "epoch": 1.0848,
      "grad_norm": 4.184739112854004,
      "learning_rate": 6.720188636874017e-05,
      "loss": 0.0765,
      "step": 3390
    },
    {
      "epoch": 1.088,
      "grad_norm": 2.208756446838379,
      "learning_rate": 6.708960251515832e-05,
      "loss": 0.0657,
      "step": 3400
    },
    {
      "epoch": 1.0912,
      "grad_norm": 1.9726155996322632,
      "learning_rate": 6.697731866157648e-05,
      "loss": 0.0584,
      "step": 3410
    },
    {
      "epoch": 1.0944,
      "grad_norm": 2.3560941219329834,
      "learning_rate": 6.686503480799461e-05,
      "loss": 0.0663,
      "step": 3420
    },
    {
      "epoch": 1.0976,
      "grad_norm": 4.31378173828125,
      "learning_rate": 6.675275095441276e-05,
      "loss": 0.0652,
      "step": 3430
    },
    {
      "epoch": 1.1008,
      "grad_norm": 5.586067199707031,
      "learning_rate": 6.66404671008309e-05,
      "loss": 0.0619,
      "step": 3440
    },
    {
      "epoch": 1.104,
      "grad_norm": 4.057238578796387,
      "learning_rate": 6.652818324724905e-05,
      "loss": 0.0653,
      "step": 3450
    },
    {
      "epoch": 1.1072,
      "grad_norm": 2.5588834285736084,
      "learning_rate": 6.641589939366719e-05,
      "loss": 0.0647,
      "step": 3460
    },
    {
      "epoch": 1.1104,
      "grad_norm": 3.098489761352539,
      "learning_rate": 6.630361554008534e-05,
      "loss": 0.0604,
      "step": 3470
    },
    {
      "epoch": 1.1136,
      "grad_norm": 3.6659464836120605,
      "learning_rate": 6.619133168650349e-05,
      "loss": 0.0665,
      "step": 3480
    },
    {
      "epoch": 1.1168,
      "grad_norm": 2.530979633331299,
      "learning_rate": 6.607904783292163e-05,
      "loss": 0.059,
      "step": 3490
    },
    {
      "epoch": 1.12,
      "grad_norm": 4.6189374923706055,
      "learning_rate": 6.596676397933978e-05,
      "loss": 0.062,
      "step": 3500
    },
    {
      "epoch": 1.1232,
      "grad_norm": 2.125894784927368,
      "learning_rate": 6.585448012575793e-05,
      "loss": 0.0573,
      "step": 3510
    },
    {
      "epoch": 1.1264,
      "grad_norm": 3.884368896484375,
      "learning_rate": 6.574219627217606e-05,
      "loss": 0.0588,
      "step": 3520
    },
    {
      "epoch": 1.1296,
      "grad_norm": 4.213693141937256,
      "learning_rate": 6.56299124185942e-05,
      "loss": 0.0648,
      "step": 3530
    },
    {
      "epoch": 1.1328,
      "grad_norm": 4.863856315612793,
      "learning_rate": 6.551762856501235e-05,
      "loss": 0.0585,
      "step": 3540
    },
    {
      "epoch": 1.1360000000000001,
      "grad_norm": 2.9554522037506104,
      "learning_rate": 6.540534471143049e-05,
      "loss": 0.0584,
      "step": 3550
    },
    {
      "epoch": 1.1392,
      "grad_norm": 1.8812214136123657,
      "learning_rate": 6.529306085784864e-05,
      "loss": 0.0611,
      "step": 3560
    },
    {
      "epoch": 1.1424,
      "grad_norm": 2.52107572555542,
      "learning_rate": 6.518077700426679e-05,
      "loss": 0.0591,
      "step": 3570
    },
    {
      "epoch": 1.1456,
      "grad_norm": 3.270277500152588,
      "learning_rate": 6.506849315068494e-05,
      "loss": 0.0622,
      "step": 3580
    },
    {
      "epoch": 1.1488,
      "grad_norm": 2.0023467540740967,
      "learning_rate": 6.495620929710308e-05,
      "loss": 0.0586,
      "step": 3590
    },
    {
      "epoch": 1.152,
      "grad_norm": 2.0342307090759277,
      "learning_rate": 6.484392544352123e-05,
      "loss": 0.0517,
      "step": 3600
    },
    {
      "epoch": 1.1552,
      "grad_norm": 3.3358206748962402,
      "learning_rate": 6.473164158993938e-05,
      "loss": 0.0568,
      "step": 3610
    },
    {
      "epoch": 1.1584,
      "grad_norm": 1.158366322517395,
      "learning_rate": 6.461935773635752e-05,
      "loss": 0.0509,
      "step": 3620
    },
    {
      "epoch": 1.1616,
      "grad_norm": 1.7512037754058838,
      "learning_rate": 6.450707388277565e-05,
      "loss": 0.0642,
      "step": 3630
    },
    {
      "epoch": 1.1648,
      "grad_norm": 1.3442176580429077,
      "learning_rate": 6.43947900291938e-05,
      "loss": 0.0559,
      "step": 3640
    },
    {
      "epoch": 1.168,
      "grad_norm": 4.689185619354248,
      "learning_rate": 6.428250617561194e-05,
      "loss": 0.0565,
      "step": 3650
    },
    {
      "epoch": 1.1712,
      "grad_norm": 3.3459503650665283,
      "learning_rate": 6.417022232203009e-05,
      "loss": 0.0575,
      "step": 3660
    },
    {
      "epoch": 1.1743999999999999,
      "grad_norm": 2.5210814476013184,
      "learning_rate": 6.405793846844824e-05,
      "loss": 0.0534,
      "step": 3670
    },
    {
      "epoch": 1.1776,
      "grad_norm": 1.4273737668991089,
      "learning_rate": 6.39456546148664e-05,
      "loss": 0.0562,
      "step": 3680
    },
    {
      "epoch": 1.1808,
      "grad_norm": 3.007507562637329,
      "learning_rate": 6.383337076128453e-05,
      "loss": 0.0531,
      "step": 3690
    },
    {
      "epoch": 1.184,
      "grad_norm": 2.328012228012085,
      "learning_rate": 6.372108690770268e-05,
      "loss": 0.0521,
      "step": 3700
    },
    {
      "epoch": 1.1872,
      "grad_norm": 3.815755844116211,
      "learning_rate": 6.360880305412082e-05,
      "loss": 0.0586,
      "step": 3710
    },
    {
      "epoch": 1.1904,
      "grad_norm": 2.570431709289551,
      "learning_rate": 6.349651920053896e-05,
      "loss": 0.0559,
      "step": 3720
    },
    {
      "epoch": 1.1936,
      "grad_norm": 2.9417104721069336,
      "learning_rate": 6.338423534695711e-05,
      "loss": 0.0555,
      "step": 3730
    },
    {
      "epoch": 1.1968,
      "grad_norm": 1.5327764749526978,
      "learning_rate": 6.327195149337526e-05,
| "loss": 0.0515, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "grad_norm": 3.548718214035034, | |
| "learning_rate": 6.31596676397934e-05, | |
| "loss": 0.054, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 1.2032, | |
| "grad_norm": 1.7067848443984985, | |
| "learning_rate": 6.304738378621155e-05, | |
| "loss": 0.0552, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 1.2064, | |
| "grad_norm": 1.4624521732330322, | |
| "learning_rate": 6.29350999326297e-05, | |
| "loss": 0.0459, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 1.2096, | |
| "grad_norm": 4.673206329345703, | |
| "learning_rate": 6.282281607904783e-05, | |
| "loss": 0.0459, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 1.2128, | |
| "grad_norm": 2.1773905754089355, | |
| "learning_rate": 6.271053222546598e-05, | |
| "loss": 0.0515, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 1.216, | |
| "grad_norm": 4.247639179229736, | |
| "learning_rate": 6.259824837188412e-05, | |
| "loss": 0.0623, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 1.2192, | |
| "grad_norm": 2.148364543914795, | |
| "learning_rate": 6.248596451830227e-05, | |
| "loss": 17.18, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 1.2224, | |
| "grad_norm": 3.752838134765625, | |
| "learning_rate": 6.237368066472041e-05, | |
| "loss": 0.0535, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 1.2256, | |
| "grad_norm": 3.2757108211517334, | |
| "learning_rate": 6.226139681113856e-05, | |
| "loss": 0.0552, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 1.2288000000000001, | |
| "grad_norm": 2.085975408554077, | |
| "learning_rate": 6.214911295755671e-05, | |
| "loss": 0.0495, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 1.232, | |
| "grad_norm": 1.4556792974472046, | |
| "learning_rate": 6.203682910397485e-05, | |
| "loss": 0.0468, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 1.2352, | |
| "grad_norm": 1.6175802946090698, | |
| "learning_rate": 6.1924545250393e-05, | |
| "loss": 0.0583, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 1.2384, | |
| "grad_norm": 1.6184957027435303, | |
| "learning_rate": 6.181226139681115e-05, | |
| "loss": 0.0486, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 1.2416, | |
| "grad_norm": 9.999999046325684, | |
| "learning_rate": 6.169997754322929e-05, | |
| "loss": 0.0778, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 1.2448, | |
| "grad_norm": 7.507689952850342, | |
| "learning_rate": 6.158769368964744e-05, | |
| "loss": 0.0771, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 1.248, | |
| "grad_norm": 4.089766502380371, | |
| "learning_rate": 6.147540983606557e-05, | |
| "loss": 0.1067, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 1.2511999999999999, | |
| "grad_norm": 2.4886679649353027, | |
| "learning_rate": 6.136312598248372e-05, | |
| "loss": 0.069, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 1.2544, | |
| "grad_norm": 3.568296432495117, | |
| "learning_rate": 6.125084212890186e-05, | |
| "loss": 0.0643, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 1.2576, | |
| "grad_norm": 2.1102867126464844, | |
| "learning_rate": 6.113855827532001e-05, | |
| "loss": 0.0568, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 1.2608, | |
| "grad_norm": 1.3482815027236938, | |
| "learning_rate": 6.102627442173816e-05, | |
| "loss": 0.0591, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 1.264, | |
| "grad_norm": 2.092593193054199, | |
| "learning_rate": 6.09139905681563e-05, | |
| "loss": 0.0576, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 1.2671999999999999, | |
| "grad_norm": 2.106621503829956, | |
| "learning_rate": 6.0801706714574444e-05, | |
| "loss": 0.0484, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 1.2704, | |
| "grad_norm": 2.036658525466919, | |
| "learning_rate": 6.0689422860992595e-05, | |
| "loss": 0.0433, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 1.2736, | |
| "grad_norm": 1.402249813079834, | |
| "learning_rate": 6.057713900741073e-05, | |
| "loss": 0.0449, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 1.2768, | |
| "grad_norm": 2.1640737056732178, | |
| "learning_rate": 6.046485515382888e-05, | |
| "loss": 0.0482, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "grad_norm": 2.2044899463653564, | |
| "learning_rate": 6.0352571300247026e-05, | |
| "loss": 0.0463, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 1.2832, | |
| "grad_norm": 7.269384384155273, | |
| "learning_rate": 6.024028744666518e-05, | |
| "loss": 0.0549, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 1.2864, | |
| "grad_norm": 3.0673646926879883, | |
| "learning_rate": 6.0128003593083314e-05, | |
| "loss": 0.0522, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 1.2896, | |
| "grad_norm": 5.095019340515137, | |
| "learning_rate": 6.0015719739501465e-05, | |
| "loss": 0.0471, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 1.2928, | |
| "grad_norm": 3.6901588439941406, | |
| "learning_rate": 5.9903435885919615e-05, | |
| "loss": 0.0483, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 1.296, | |
| "grad_norm": 1.2914890050888062, | |
| "learning_rate": 5.979115203233775e-05, | |
| "loss": 0.0456, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 1.2992, | |
| "grad_norm": 2.4384307861328125, | |
| "learning_rate": 5.9678868178755896e-05, | |
| "loss": 0.0475, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 1.3024, | |
| "grad_norm": 1.596550703048706, | |
| "learning_rate": 5.956658432517405e-05, | |
| "loss": 0.0456, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 1.3056, | |
| "grad_norm": 1.6937170028686523, | |
| "learning_rate": 5.9454300471592184e-05, | |
| "loss": 0.0517, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 1.3088, | |
| "grad_norm": 1.587768316268921, | |
| "learning_rate": 5.9342016618010335e-05, | |
| "loss": 0.0437, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 1.312, | |
| "grad_norm": 4.193778991699219, | |
| "learning_rate": 5.922973276442848e-05, | |
| "loss": 0.0457, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 1.3152, | |
| "grad_norm": 2.494598865509033, | |
| "learning_rate": 5.9117448910846616e-05, | |
| "loss": 0.0479, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 1.3184, | |
| "grad_norm": 2.869279623031616, | |
| "learning_rate": 5.900516505726477e-05, | |
| "loss": 0.0433, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 1.3216, | |
| "grad_norm": 4.543278217315674, | |
| "learning_rate": 5.889288120368292e-05, | |
| "loss": 0.0519, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 1.3248, | |
| "grad_norm": 1.5083669424057007, | |
| "learning_rate": 5.878059735010106e-05, | |
| "loss": 0.0492, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 1.328, | |
| "grad_norm": 2.1934690475463867, | |
| "learning_rate": 5.86683134965192e-05, | |
| "loss": 0.0426, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 1.3312, | |
| "grad_norm": 2.10366153717041, | |
| "learning_rate": 5.855602964293735e-05, | |
| "loss": 0.0441, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 1.3344, | |
| "grad_norm": 1.6721271276474, | |
| "learning_rate": 5.84437457893555e-05, | |
| "loss": 0.0437, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 1.3376000000000001, | |
| "grad_norm": 4.587328910827637, | |
| "learning_rate": 5.833146193577364e-05, | |
| "loss": 0.0432, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 1.3408, | |
| "grad_norm": 2.2152934074401855, | |
| "learning_rate": 5.821917808219178e-05, | |
| "loss": 0.053, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 1.3439999999999999, | |
| "grad_norm": 3.132293224334717, | |
| "learning_rate": 5.810689422860993e-05, | |
| "loss": 0.05, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 1.3472, | |
| "grad_norm": 3.02851939201355, | |
| "learning_rate": 5.799461037502807e-05, | |
| "loss": 0.0408, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 1.3504, | |
| "grad_norm": 2.1754748821258545, | |
| "learning_rate": 5.788232652144622e-05, | |
| "loss": 0.046, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 1.3536000000000001, | |
| "grad_norm": 2.0088369846343994, | |
| "learning_rate": 5.777004266786436e-05, | |
| "loss": 0.0428, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 1.3568, | |
| "grad_norm": 2.7489728927612305, | |
| "learning_rate": 5.7657758814282514e-05, | |
| "loss": 0.0424, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 1.3599999999999999, | |
| "grad_norm": 1.6788060665130615, | |
| "learning_rate": 5.754547496070065e-05, | |
| "loss": 0.0448, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 1.3632, | |
| "grad_norm": 2.4949731826782227, | |
| "learning_rate": 5.74331911071188e-05, | |
| "loss": 0.041, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 1.3664, | |
| "grad_norm": 1.9527579545974731, | |
| "learning_rate": 5.7320907253536945e-05, | |
| "loss": 0.0441, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 1.3696, | |
| "grad_norm": 2.471604108810425, | |
| "learning_rate": 5.720862339995508e-05, | |
| "loss": 0.043, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 1.3728, | |
| "grad_norm": 4.944948196411133, | |
| "learning_rate": 5.709633954637323e-05, | |
| "loss": 0.0562, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 1.376, | |
| "grad_norm": 2.4510498046875, | |
| "learning_rate": 5.6984055692791384e-05, | |
| "loss": 0.0469, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 1.3792, | |
| "grad_norm": 1.1143766641616821, | |
| "learning_rate": 5.687177183920952e-05, | |
| "loss": 0.0428, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 1.3824, | |
| "grad_norm": 1.719165325164795, | |
| "learning_rate": 5.675948798562767e-05, | |
| "loss": 0.0448, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 1.3856, | |
| "grad_norm": 2.277461528778076, | |
| "learning_rate": 5.6647204132045816e-05, | |
| "loss": 0.0417, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 1.3888, | |
| "grad_norm": 3.449477195739746, | |
| "learning_rate": 5.653492027846395e-05, | |
| "loss": 0.038, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 1.392, | |
| "grad_norm": 2.189485549926758, | |
| "learning_rate": 5.6422636424882103e-05, | |
| "loss": 0.0396, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 1.3952, | |
| "grad_norm": 1.732558012008667, | |
| "learning_rate": 5.6310352571300254e-05, | |
| "loss": 0.0398, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 1.3984, | |
| "grad_norm": 2.732851266860962, | |
| "learning_rate": 5.61980687177184e-05, | |
| "loss": 0.0433, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 1.4016, | |
| "grad_norm": 1.4708316326141357, | |
| "learning_rate": 5.6085784864136535e-05, | |
| "loss": 0.0361, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 1.4048, | |
| "grad_norm": 2.114934206008911, | |
| "learning_rate": 5.5973501010554686e-05, | |
| "loss": 0.0438, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 1.408, | |
| "grad_norm": 4.473976135253906, | |
| "learning_rate": 5.5861217156972837e-05, | |
| "loss": 0.0421, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 1.4112, | |
| "grad_norm": 1.1676849126815796, | |
| "learning_rate": 5.5748933303390974e-05, | |
| "loss": 0.0437, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 1.4144, | |
| "grad_norm": 2.1100847721099854, | |
| "learning_rate": 5.563664944980912e-05, | |
| "loss": 0.0351, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 1.4176, | |
| "grad_norm": 2.426988124847412, | |
| "learning_rate": 5.552436559622727e-05, | |
| "loss": 0.0456, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 1.4208, | |
| "grad_norm": 3.200514078140259, | |
| "learning_rate": 5.5412081742645405e-05, | |
| "loss": 0.0381, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 1.424, | |
| "grad_norm": 1.525909423828125, | |
| "learning_rate": 5.5299797889063556e-05, | |
| "loss": 0.0414, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 1.4272, | |
| "grad_norm": 1.2169736623764038, | |
| "learning_rate": 5.51875140354817e-05, | |
| "loss": 0.0407, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 1.4304000000000001, | |
| "grad_norm": 4.590202808380127, | |
| "learning_rate": 5.507523018189985e-05, | |
| "loss": 0.0476, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 1.4336, | |
| "grad_norm": 1.4722408056259155, | |
| "learning_rate": 5.496294632831799e-05, | |
| "loss": 0.052, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 1.4368, | |
| "grad_norm": 1.351094126701355, | |
| "learning_rate": 5.485066247473614e-05, | |
| "loss": 0.0459, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "grad_norm": 1.2844514846801758, | |
| "learning_rate": 5.473837862115428e-05, | |
| "loss": 0.0446, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 1.4432, | |
| "grad_norm": 0.783967912197113, | |
| "learning_rate": 5.462609476757242e-05, | |
| "loss": 0.0392, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 1.4464000000000001, | |
| "grad_norm": 2.0353965759277344, | |
| "learning_rate": 5.451381091399057e-05, | |
| "loss": 0.0407, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 1.4496, | |
| "grad_norm": 1.3156319856643677, | |
| "learning_rate": 5.440152706040872e-05, | |
| "loss": 0.0381, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 1.4527999999999999, | |
| "grad_norm": 1.4378070831298828, | |
| "learning_rate": 5.428924320682686e-05, | |
| "loss": 0.0353, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 1.456, | |
| "grad_norm": 3.0744736194610596, | |
| "learning_rate": 5.4176959353245e-05, | |
| "loss": 0.0348, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 1.4592, | |
| "grad_norm": 1.6298600435256958, | |
| "learning_rate": 5.406467549966315e-05, | |
| "loss": 0.0402, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 1.4624, | |
| "grad_norm": 2.6494648456573486, | |
| "learning_rate": 5.395239164608129e-05, | |
| "loss": 0.0365, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 1.4656, | |
| "grad_norm": 1.653573751449585, | |
| "learning_rate": 5.384010779249944e-05, | |
| "loss": 0.0363, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 1.4687999999999999, | |
| "grad_norm": 1.7645295858383179, | |
| "learning_rate": 5.3727823938917584e-05, | |
| "loss": 0.0393, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 1.472, | |
| "grad_norm": 4.306888580322266, | |
| "learning_rate": 5.3615540085335735e-05, | |
| "loss": 0.0406, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 1.4752, | |
| "grad_norm": 3.1965126991271973, | |
| "learning_rate": 5.350325623175387e-05, | |
| "loss": 0.0358, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 1.4784, | |
| "grad_norm": 1.2502505779266357, | |
| "learning_rate": 5.339097237817202e-05, | |
| "loss": 0.0396, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 1.4816, | |
| "grad_norm": 2.091470718383789, | |
| "learning_rate": 5.327868852459017e-05, | |
| "loss": 0.035, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 1.4848, | |
| "grad_norm": 1.1398119926452637, | |
| "learning_rate": 5.316640467100831e-05, | |
| "loss": 0.0428, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 1.488, | |
| "grad_norm": 2.7160258293151855, | |
| "learning_rate": 5.3054120817426454e-05, | |
| "loss": 0.0353, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 1.4912, | |
| "grad_norm": 0.9645196199417114, | |
| "learning_rate": 5.2941836963844605e-05, | |
| "loss": 0.0415, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 1.4944, | |
| "grad_norm": 4.158140182495117, | |
| "learning_rate": 5.282955311026274e-05, | |
| "loss": 0.036, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 1.4976, | |
| "grad_norm": 1.1706950664520264, | |
| "learning_rate": 5.271726925668089e-05, | |
| "loss": 0.0376, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 1.5008, | |
| "grad_norm": 1.1529676914215088, | |
| "learning_rate": 5.260498540309904e-05, | |
| "loss": 0.0374, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 1.504, | |
| "grad_norm": 1.944616675376892, | |
| "learning_rate": 5.249270154951719e-05, | |
| "loss": 0.037, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 1.5072, | |
| "grad_norm": 1.7939577102661133, | |
| "learning_rate": 5.2380417695935325e-05, | |
| "loss": 0.0376, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 1.5104, | |
| "grad_norm": 2.588820695877075, | |
| "learning_rate": 5.2268133842353475e-05, | |
| "loss": 0.0345, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 1.5135999999999998, | |
| "grad_norm": 1.099802017211914, | |
| "learning_rate": 5.215584998877162e-05, | |
| "loss": 0.0355, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 1.5168, | |
| "grad_norm": 1.9600342512130737, | |
| "learning_rate": 5.2043566135189756e-05, | |
| "loss": 0.036, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "grad_norm": 3.2066643238067627, | |
| "learning_rate": 5.193128228160791e-05, | |
| "loss": 0.0413, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 1.5232, | |
| "grad_norm": 4.0401811599731445, | |
| "learning_rate": 5.181899842802606e-05, | |
| "loss": 0.036, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 1.5264, | |
| "grad_norm": 1.673994541168213, | |
| "learning_rate": 5.1706714574444195e-05, | |
| "loss": 0.0366, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 1.5295999999999998, | |
| "grad_norm": 4.127200126647949, | |
| "learning_rate": 5.159443072086234e-05, | |
| "loss": 0.0375, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 1.5328, | |
| "grad_norm": 2.1488308906555176, | |
| "learning_rate": 5.148214686728049e-05, | |
| "loss": 0.0364, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 1.536, | |
| "grad_norm": 3.2764744758605957, | |
| "learning_rate": 5.136986301369864e-05, | |
| "loss": 0.0366, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 1.5392000000000001, | |
| "grad_norm": 4.183980941772461, | |
| "learning_rate": 5.125757916011678e-05, | |
| "loss": 0.0387, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 1.5424, | |
| "grad_norm": 2.350937604904175, | |
| "learning_rate": 5.114529530653492e-05, | |
| "loss": 0.0376, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 1.5455999999999999, | |
| "grad_norm": 1.4431155920028687, | |
| "learning_rate": 5.103301145295307e-05, | |
| "loss": 0.0378, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 1.5488, | |
| "grad_norm": 2.455113172531128, | |
| "learning_rate": 5.092072759937121e-05, | |
| "loss": 0.0352, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 1.552, | |
| "grad_norm": 2.4375059604644775, | |
| "learning_rate": 5.080844374578936e-05, | |
| "loss": 0.0388, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 1.5552000000000001, | |
| "grad_norm": 1.8249945640563965, | |
| "learning_rate": 5.0696159892207503e-05, | |
| "loss": 0.0352, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 1.5584, | |
| "grad_norm": 2.562242031097412, | |
| "learning_rate": 5.058387603862564e-05, | |
| "loss": 0.0366, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 1.5615999999999999, | |
| "grad_norm": 1.7993731498718262, | |
| "learning_rate": 5.047159218504379e-05, | |
| "loss": 0.0381, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 1.5648, | |
| "grad_norm": 1.6024712324142456, | |
| "learning_rate": 5.035930833146194e-05, | |
| "loss": 0.0342, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 1.568, | |
| "grad_norm": 1.0818573236465454, | |
| "learning_rate": 5.024702447788008e-05, | |
| "loss": 0.0355, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 1.5712000000000002, | |
| "grad_norm": 0.9753634929656982, | |
| "learning_rate": 5.013474062429823e-05, | |
| "loss": 0.0381, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 1.5744, | |
| "grad_norm": 1.9120144844055176, | |
| "learning_rate": 5.0022456770716374e-05, | |
| "loss": 0.0341, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 1.5776, | |
| "grad_norm": 2.293750762939453, | |
| "learning_rate": 4.991017291713452e-05, | |
| "loss": 0.0377, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 1.5808, | |
| "grad_norm": 3.1085097789764404, | |
| "learning_rate": 4.979788906355267e-05, | |
| "loss": 0.0343, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 1.584, | |
| "grad_norm": 1.3106160163879395, | |
| "learning_rate": 4.968560520997081e-05, | |
| "loss": 0.036, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 1.5872000000000002, | |
| "grad_norm": 2.5809476375579834, | |
| "learning_rate": 4.957332135638895e-05, | |
| "loss": 0.0341, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 1.5904, | |
| "grad_norm": 1.423450231552124, | |
| "learning_rate": 4.94610375028071e-05, | |
| "loss": 0.0306, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 1.5936, | |
| "grad_norm": 3.155231475830078, | |
| "learning_rate": 4.9348753649225244e-05, | |
| "loss": 0.0339, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 1.5968, | |
| "grad_norm": 1.6214268207550049, | |
| "learning_rate": 4.923646979564339e-05, | |
| "loss": 0.0369, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "grad_norm": 3.8274025917053223, | |
| "learning_rate": 4.912418594206153e-05, | |
| "loss": 0.0398, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 1.6032, | |
| "grad_norm": 3.3754544258117676, | |
| "learning_rate": 4.9011902088479676e-05, | |
| "loss": 0.0347, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 1.6064, | |
| "grad_norm": 2.3379604816436768, | |
| "learning_rate": 4.8899618234897826e-05, | |
| "loss": 0.0335, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 1.6096, | |
| "grad_norm": 1.3889174461364746, | |
| "learning_rate": 4.878733438131597e-05, | |
| "loss": 0.0327, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 1.6128, | |
| "grad_norm": 2.985076427459717, | |
| "learning_rate": 4.8675050527734114e-05, | |
| "loss": 0.0362, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 1.616, | |
| "grad_norm": 1.5555957555770874, | |
| "learning_rate": 4.856276667415226e-05, | |
| "loss": 0.0365, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 1.6192, | |
| "grad_norm": 2.723813533782959, | |
| "learning_rate": 4.84504828205704e-05, | |
| "loss": 0.0384, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 1.6223999999999998, | |
| "grad_norm": 1.3659981489181519, | |
| "learning_rate": 4.833819896698855e-05, | |
| "loss": 0.0336, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 1.6256, | |
| "grad_norm": 3.0281941890716553, | |
| "learning_rate": 4.8225915113406696e-05, | |
| "loss": 0.0307, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 1.6288, | |
| "grad_norm": 1.122520089149475, | |
| "learning_rate": 4.811363125982484e-05, | |
| "loss": 0.0326, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 1.6320000000000001, | |
| "grad_norm": 2.31559681892395, | |
| "learning_rate": 4.8001347406242984e-05, | |
| "loss": 0.0331, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 1.6352, | |
| "grad_norm": 1.262046217918396, | |
| "learning_rate": 4.788906355266113e-05, | |
| "loss": 0.0375, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 1.6383999999999999, | |
| "grad_norm": 2.673083782196045, | |
| "learning_rate": 4.777677969907928e-05, | |
| "loss": 0.0344, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 1.6416, | |
| "grad_norm": 5.857226371765137, | |
| "learning_rate": 4.766449584549742e-05, | |
| "loss": 0.0434, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 1.6448, | |
| "grad_norm": 2.3795230388641357, | |
| "learning_rate": 4.755221199191556e-05, | |
| "loss": 0.0477, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 1.6480000000000001, | |
| "grad_norm": 2.1502673625946045, | |
| "learning_rate": 4.743992813833371e-05, | |
| "loss": 0.0524, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 1.6512, | |
| "grad_norm": 1.4233574867248535, | |
| "learning_rate": 4.7327644284751854e-05, | |
| "loss": 0.0429, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 1.6543999999999999, | |
| "grad_norm": 2.0955042839050293, | |
| "learning_rate": 4.7215360431170005e-05, | |
| "loss": 0.0345, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 1.6576, | |
| "grad_norm": 2.341566562652588, | |
| "learning_rate": 4.710307657758814e-05, | |
| "loss": 0.0351, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 1.6608, | |
| "grad_norm": 3.160850763320923, | |
| "learning_rate": 4.6990792724006286e-05, | |
| "loss": 0.0344, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 1.6640000000000001, | |
| "grad_norm": 2.272794485092163, | |
| "learning_rate": 4.687850887042444e-05, | |
| "loss": 0.0331, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 1.6672, | |
| "grad_norm": 1.2717911005020142, | |
| "learning_rate": 4.676622501684258e-05, | |
| "loss": 0.0335, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 1.6703999999999999, | |
| "grad_norm": 2.4570579528808594, | |
| "learning_rate": 4.6653941163260725e-05, | |
| "loss": 0.0319, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 1.6736, | |
| "grad_norm": 1.8033403158187866, | |
| "learning_rate": 4.654165730967887e-05, | |
| "loss": 0.0343, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 1.6768, | |
| "grad_norm": 1.5098129510879517, | |
| "learning_rate": 4.642937345609701e-05, | |
| "loss": 0.032, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 1.6800000000000002, | |
| "grad_norm": 1.48603355884552, | |
| "learning_rate": 4.631708960251516e-05, | |
| "loss": 0.0327, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 1.6832, | |
| "grad_norm": 1.4422051906585693, | |
| "learning_rate": 4.620480574893331e-05, | |
| "loss": 0.0353, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 1.6864, | |
| "grad_norm": 1.18756103515625, | |
| "learning_rate": 4.609252189535145e-05, | |
| "loss": 0.0321, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 1.6896, | |
| "grad_norm": 1.1758790016174316, | |
| "learning_rate": 4.5980238041769595e-05, | |
| "loss": 0.0352, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 1.6928, | |
| "grad_norm": 1.2466790676116943, | |
| "learning_rate": 4.586795418818774e-05, | |
| "loss": 0.03, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 1.696, | |
| "grad_norm": 1.9264726638793945, | |
| "learning_rate": 4.575567033460589e-05, | |
| "loss": 0.0314, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 1.6992, | |
| "grad_norm": 1.9639238119125366, | |
| "learning_rate": 4.564338648102403e-05, | |
| "loss": 0.0328, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 1.7024, | |
| "grad_norm": 2.070943832397461, | |
| "learning_rate": 4.553110262744217e-05, | |
| "loss": 0.0347, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 1.7056, | |
| "grad_norm": 2.0837361812591553, | |
| "learning_rate": 4.541881877386032e-05, | |
| "loss": 0.0333, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 1.7088, | |
| "grad_norm": 1.9007681608200073, | |
| "learning_rate": 4.5306534920278465e-05, | |
| "loss": 0.0323, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 1.712, | |
| "grad_norm": 4.6805830001831055, | |
| "learning_rate": 4.5194251066696616e-05, | |
| "loss": 0.0341, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 1.7151999999999998, | |
| "grad_norm": 1.4064592123031616, | |
| "learning_rate": 4.508196721311476e-05, | |
| "loss": 0.0288, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 1.7184, | |
| "grad_norm": 3.4108219146728516, | |
| "learning_rate": 4.4969683359532897e-05, | |
| "loss": 0.0303, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 1.7216, | |
| "grad_norm": 0.9329103827476501, | |
| "learning_rate": 4.485739950595105e-05, | |
| "loss": 0.0314, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 1.7248, | |
| "grad_norm": 1.681719183921814, | |
| "learning_rate": 4.474511565236919e-05, | |
| "loss": 0.0284, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 1.728, | |
| "grad_norm": 0.932102382183075, | |
| "learning_rate": 4.463283179878734e-05, | |
| "loss": 0.0299, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 1.7311999999999999, | |
| "grad_norm": 2.7038533687591553, | |
| "learning_rate": 4.452054794520548e-05, | |
| "loss": 0.0329, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 1.7344, | |
| "grad_norm": 1.4268467426300049, | |
| "learning_rate": 4.440826409162362e-05, | |
| "loss": 0.0303, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 1.7376, | |
| "grad_norm": 1.9577819108963013, | |
| "learning_rate": 4.4295980238041774e-05, | |
| "loss": 0.0326, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 1.7408000000000001, | |
| "grad_norm": 2.5221760272979736, | |
| "learning_rate": 4.418369638445992e-05, | |
| "loss": 0.0325, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 1.744, | |
| "grad_norm": 3.2790379524230957, | |
| "learning_rate": 4.407141253087806e-05, | |
| "loss": 0.0295, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 1.7471999999999999, | |
| "grad_norm": 0.8850557208061218, | |
| "learning_rate": 4.3959128677296205e-05, | |
| "loss": 0.0307, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 1.7504, | |
| "grad_norm": 1.8326717615127563, | |
| "learning_rate": 4.384684482371435e-05, | |
| "loss": 0.0289, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 1.7536, | |
| "grad_norm": 1.3088713884353638, | |
| "learning_rate": 4.37345609701325e-05, | |
| "loss": 0.0292, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 1.7568000000000001, | |
| "grad_norm": 1.0117987394332886, | |
| "learning_rate": 4.3622277116550644e-05, | |
| "loss": 0.0311, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "grad_norm": 2.7869484424591064, | |
| "learning_rate": 4.350999326296879e-05, | |
| "loss": 0.0278, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 1.7631999999999999, | |
| "grad_norm": 1.3128660917282104, | |
| "learning_rate": 4.339770940938693e-05, | |
| "loss": 0.0278, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 1.7664, | |
| "grad_norm": 1.1051273345947266, | |
| "learning_rate": 4.3285425555805075e-05, | |
| "loss": 0.03, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 1.7696, | |
| "grad_norm": 1.3793776035308838, | |
| "learning_rate": 4.3173141702223226e-05, | |
| "loss": 0.0306, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 1.7728000000000002, | |
| "grad_norm": 2.992269515991211, | |
| "learning_rate": 4.306085784864137e-05, | |
| "loss": 0.0299, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 1.776, | |
| "grad_norm": 1.6625593900680542, | |
| "learning_rate": 4.294857399505951e-05, | |
| "loss": 0.0356, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 1.7792, | |
| "grad_norm": 1.0821179151535034, | |
| "learning_rate": 4.283629014147766e-05, | |
| "loss": 0.0349, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 1.7824, | |
| "grad_norm": 1.0884099006652832, | |
| "learning_rate": 4.27240062878958e-05, | |
| "loss": 0.0313, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 1.7856, | |
| "grad_norm": 1.5417124032974243, | |
| "learning_rate": 4.261172243431395e-05, | |
| "loss": 0.0306, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 1.7888, | |
| "grad_norm": 2.5081472396850586, | |
| "learning_rate": 4.249943858073209e-05, | |
| "loss": 0.0329, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 1.792, | |
| "grad_norm": 0.9784805178642273, | |
| "learning_rate": 4.2387154727150233e-05, | |
| "loss": 0.0331, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 1.7952, | |
| "grad_norm": 2.7062466144561768, | |
| "learning_rate": 4.2274870873568384e-05, | |
| "loss": 0.0295, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 1.7984, | |
| "grad_norm": 2.028097629547119, | |
| "learning_rate": 4.216258701998653e-05, | |
| "loss": 0.0294, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 1.8016, | |
| "grad_norm": 0.9252501130104065, | |
| "learning_rate": 4.205030316640468e-05, | |
| "loss": 0.0282, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 1.8048, | |
| "grad_norm": 1.6614617109298706, | |
| "learning_rate": 4.1938019312822816e-05, | |
| "loss": 0.029, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 1.808, | |
| "grad_norm": 1.5624871253967285, | |
| "learning_rate": 4.182573545924096e-05, | |
| "loss": 0.0286, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 1.8112, | |
| "grad_norm": 1.7454490661621094, | |
| "learning_rate": 4.171345160565911e-05, | |
| "loss": 0.0299, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 1.8144, | |
| "grad_norm": 1.7463639974594116, | |
| "learning_rate": 4.1601167752077254e-05, | |
| "loss": 0.0296, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 1.8176, | |
| "grad_norm": 1.4736864566802979, | |
| "learning_rate": 4.14888838984954e-05, | |
| "loss": 0.0317, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 1.8208, | |
| "grad_norm": 1.21503484249115, | |
| "learning_rate": 4.137660004491354e-05, | |
| "loss": 0.0293, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 1.8239999999999998, | |
| "grad_norm": 3.489445209503174, | |
| "learning_rate": 4.1264316191331686e-05, | |
| "loss": 0.0305, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 1.8272, | |
| "grad_norm": 1.21157705783844, | |
| "learning_rate": 4.115203233774984e-05, | |
| "loss": 0.03, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 1.8304, | |
| "grad_norm": 2.4874978065490723, | |
| "learning_rate": 4.103974848416798e-05, | |
| "loss": 0.0288, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 1.8336000000000001, | |
| "grad_norm": 1.4586814641952515, | |
| "learning_rate": 4.092746463058612e-05, | |
| "loss": 0.0354, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 1.8368, | |
| "grad_norm": 1.2466408014297485, | |
| "learning_rate": 4.081518077700427e-05, | |
| "loss": 0.0345, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 1.8399999999999999, | |
| "grad_norm": 1.0634334087371826, | |
| "learning_rate": 4.070289692342241e-05, | |
| "loss": 0.0281, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 1.8432, | |
| "grad_norm": 1.6378570795059204, | |
| "learning_rate": 4.059061306984056e-05, | |
| "loss": 0.0311, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 1.8464, | |
| "grad_norm": 1.5089457035064697, | |
| "learning_rate": 4.047832921625871e-05, | |
| "loss": 0.0298, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 1.8496000000000001, | |
| "grad_norm": 2.608259677886963, | |
| "learning_rate": 4.0366045362676844e-05, | |
| "loss": 0.029, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 1.8528, | |
| "grad_norm": 1.7564635276794434, | |
| "learning_rate": 4.0253761509094995e-05, | |
| "loss": 0.0268, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 1.8559999999999999, | |
| "grad_norm": 1.1659057140350342, | |
| "learning_rate": 4.014147765551314e-05, | |
| "loss": 0.0274, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 1.8592, | |
| "grad_norm": 2.8701725006103516, | |
| "learning_rate": 4.002919380193129e-05, | |
| "loss": 0.0272, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 1.8624, | |
| "grad_norm": 1.034717321395874, | |
| "learning_rate": 3.9916909948349426e-05, | |
| "loss": 0.0279, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 1.8656000000000001, | |
| "grad_norm": 1.3043538331985474, | |
| "learning_rate": 3.980462609476757e-05, | |
| "loss": 0.0307, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 1.8688, | |
| "grad_norm": 2.0375423431396484, | |
| "learning_rate": 3.969234224118572e-05, | |
| "loss": 0.0258, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 1.8719999999999999, | |
| "grad_norm": 2.1126816272735596, | |
| "learning_rate": 3.9580058387603865e-05, | |
| "loss": 0.0308, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 1.8752, | |
| "grad_norm": 1.7014706134796143, | |
| "learning_rate": 3.946777453402201e-05, | |
| "loss": 0.0282, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 1.8784, | |
| "grad_norm": 2.7476634979248047, | |
| "learning_rate": 3.935549068044015e-05, | |
| "loss": 0.0284, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 1.8816000000000002, | |
| "grad_norm": 1.6362510919570923, | |
| "learning_rate": 3.9243206826858297e-05, | |
| "loss": 0.0242, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 1.8848, | |
| "grad_norm": 1.3239845037460327, | |
| "learning_rate": 3.913092297327645e-05, | |
| "loss": 0.0361, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 1.888, | |
| "grad_norm": 1.4442349672317505, | |
| "learning_rate": 3.901863911969459e-05, | |
| "loss": 0.0277, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 1.8912, | |
| "grad_norm": 1.9677876234054565, | |
| "learning_rate": 3.8906355266112735e-05, | |
| "loss": 0.0258, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 1.8944, | |
| "grad_norm": 2.842235803604126, | |
| "learning_rate": 3.879407141253088e-05, | |
| "loss": 0.0298, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 1.8976, | |
| "grad_norm": 1.5647774934768677, | |
| "learning_rate": 3.868178755894902e-05, | |
| "loss": 0.0292, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 1.9008, | |
| "grad_norm": 1.7668209075927734, | |
| "learning_rate": 3.8569503705367174e-05, | |
| "loss": 0.0357, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 1.904, | |
| "grad_norm": 2.651283025741577, | |
| "learning_rate": 3.845721985178532e-05, | |
| "loss": 0.0302, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 1.9072, | |
| "grad_norm": 5.3611040115356445, | |
| "learning_rate": 3.834493599820346e-05, | |
| "loss": 0.0304, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 1.9104, | |
| "grad_norm": 5.584418773651123, | |
| "learning_rate": 3.8232652144621605e-05, | |
| "loss": 0.0284, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 1.9136, | |
| "grad_norm": 3.7711610794067383, | |
| "learning_rate": 3.812036829103975e-05, | |
| "loss": 0.0297, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 1.9167999999999998, | |
| "grad_norm": 0.7803456783294678, | |
| "learning_rate": 3.80080844374579e-05, | |
| "loss": 0.0289, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "grad_norm": 1.1393859386444092, | |
| "learning_rate": 3.789580058387604e-05, | |
| "loss": 0.0247, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 1.9232, | |
| "grad_norm": 1.7076109647750854, | |
| "learning_rate": 3.778351673029418e-05, | |
| "loss": 0.0276, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 1.9264000000000001, | |
| "grad_norm": 1.2361043691635132, | |
| "learning_rate": 3.767123287671233e-05, | |
| "loss": 0.0277, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 1.9296, | |
| "grad_norm": 1.1116836071014404, | |
| "learning_rate": 3.7558949023130475e-05, | |
| "loss": 0.0279, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 1.9327999999999999, | |
| "grad_norm": 2.2229862213134766, | |
| "learning_rate": 3.744666516954862e-05, | |
| "loss": 0.0284, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 1.936, | |
| "grad_norm": 1.5508464574813843, | |
| "learning_rate": 3.733438131596676e-05, | |
| "loss": 0.0306, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 1.9392, | |
| "grad_norm": 1.7325248718261719, | |
| "learning_rate": 3.722209746238491e-05, | |
| "loss": 0.0308, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 1.9424000000000001, | |
| "grad_norm": 1.0399812459945679, | |
| "learning_rate": 3.710981360880306e-05, | |
| "loss": 0.0261, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 1.9456, | |
| "grad_norm": 2.994081974029541, | |
| "learning_rate": 3.69975297552212e-05, | |
| "loss": 0.0297, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 1.9487999999999999, | |
| "grad_norm": 2.2742340564727783, | |
| "learning_rate": 3.6885245901639346e-05, | |
| "loss": 0.0276, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 1.952, | |
| "grad_norm": 1.2096333503723145, | |
| "learning_rate": 3.677296204805749e-05, | |
| "loss": 0.0277, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 1.9552, | |
| "grad_norm": 1.3712992668151855, | |
| "learning_rate": 3.666067819447563e-05, | |
| "loss": 0.0289, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 1.9584000000000001, | |
| "grad_norm": 1.6062151193618774, | |
| "learning_rate": 3.6548394340893784e-05, | |
| "loss": 0.0273, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 1.9616, | |
| "grad_norm": 1.5088088512420654, | |
| "learning_rate": 3.643611048731193e-05, | |
| "loss": 0.0297, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 1.9647999999999999, | |
| "grad_norm": 0.9999478459358215, | |
| "learning_rate": 3.632382663373007e-05, | |
| "loss": 0.026, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 1.968, | |
| "grad_norm": 1.0416607856750488, | |
| "learning_rate": 3.6211542780148216e-05, | |
| "loss": 0.0285, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 1.9712, | |
| "grad_norm": 1.9191815853118896, | |
| "learning_rate": 3.609925892656636e-05, | |
| "loss": 0.0259, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 1.9744000000000002, | |
| "grad_norm": 1.457220196723938, | |
| "learning_rate": 3.598697507298451e-05, | |
| "loss": 0.0259, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 1.9776, | |
| "grad_norm": 1.0753467082977295, | |
| "learning_rate": 3.587469121940265e-05, | |
| "loss": 0.0264, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 1.9808, | |
| "grad_norm": 2.804377317428589, | |
| "learning_rate": 3.57624073658208e-05, | |
| "loss": 0.0258, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 1.984, | |
| "grad_norm": 1.9763532876968384, | |
| "learning_rate": 3.565012351223894e-05, | |
| "loss": 0.0282, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 1.9872, | |
| "grad_norm": 2.1442463397979736, | |
| "learning_rate": 3.5537839658657086e-05, | |
| "loss": 0.0244, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 1.9904, | |
| "grad_norm": 1.6756515502929688, | |
| "learning_rate": 3.542555580507524e-05, | |
| "loss": 0.0251, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 1.9936, | |
| "grad_norm": 2.3364181518554688, | |
| "learning_rate": 3.5313271951493374e-05, | |
| "loss": 0.0276, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 1.9968, | |
| "grad_norm": 3.023191452026367, | |
| "learning_rate": 3.5200988097911524e-05, | |
| "loss": 0.0266, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 2.4868950843811035, | |
| "learning_rate": 3.508870424432967e-05, | |
| "loss": 0.0273, | |
| "step": 6250 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 9375, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 0.0, | |
| "train_batch_size": 32, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
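
The object above appears to be the `trainer_state.json` that `transformers.Trainer` writes alongside each checkpoint: `log_history` holds one record per `logging_steps` (10) interval, and the run is bounded by `max_steps` (9375, i.e. `num_train_epochs` = 3 at 3125 steps per epoch with `train_batch_size` = 32). Two features of this log are worth checking programmatically: several `grad_norm` values sit at exactly 10.0 (or one float ulp below it), which suggests gradient clipping with `max_grad_norm` around 10, though that setting is not stored in this file; and there is a single loss outlier of 17.18 at step 3810 amid neighbors near 0.05. Below is a minimal sketch of how one might inspect the file; the path is an assumption, and the 10.0 clipping ceiling is inferred from the logged values rather than read from any field.

```python
import json

# Load the trainer state saved next to a checkpoint.
# The path is an assumption; point it at your own checkpoint directory.
with open("trainer_state.json") as f:
    state = json.load(f)

# Training-loss entries (eval entries, if present, would lack "loss").
losses = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]
print(f"{len(losses)} logged steps, up to step {losses[-1][0]} of {state['max_steps']}")

# Steps where the gradient norm sits at the apparent clipping ceiling of 10.0
# (inferred from the logged values; max_grad_norm itself is not stored here).
clipped = [e["step"] for e in state["log_history"]
           if e.get("grad_norm", 0.0) >= 9.99]
print(f"steps at the grad-norm ceiling: {len(clipped)} (last few: {clipped[-5:]})")

# Flag losses that jump well above the trailing average, e.g. the
# 17.18 outlier at step 3810 in the log above.
window = 10
for i in range(window, len(losses)):
    trailing = sum(l for _, l in losses[i - window:i]) / window
    step, loss = losses[i]
    if loss > 10 * trailing:
        print(f"loss spike at step {step}: {loss} (trailing mean {trailing:.4f})")
```

The 10x-over-trailing-mean threshold is deliberately loose so the noisy warm-up region does not trigger it; feeding the same `(step, loss)` pairs into a plotting library would make both the warm-up decay and the step-3810 spike obvious at a glance.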