{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.7879961277831558,
  "eval_steps": 600,
  "global_step": 1800,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.015488867376573089,
      "grad_norm": 421527552.0,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 61.1175,
      "step": 10
    },
    {
      "epoch": 0.030977734753146177,
      "grad_norm": 337641472.0,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 60.1666,
      "step": 20
    },
    {
      "epoch": 0.046466602129719266,
      "grad_norm": 413138944.0,
      "learning_rate": 3e-06,
      "loss": 60.9036,
      "step": 30
    },
    {
      "epoch": 0.061955469506292354,
      "grad_norm": 1367343104.0,
      "learning_rate": 4.000000000000001e-06,
      "loss": 61.7597,
      "step": 40
    },
    {
      "epoch": 0.07744433688286544,
      "grad_norm": 57933824.0,
      "learning_rate": 5e-06,
      "loss": 61.2844,
      "step": 50
    },
    {
      "epoch": 0.09293320425943853,
      "grad_norm": 94371840.0,
      "learning_rate": 6e-06,
      "loss": 60.3878,
      "step": 60
    },
    {
      "epoch": 0.10842207163601161,
      "grad_norm": 40632320.0,
      "learning_rate": 7.000000000000001e-06,
      "loss": 59.0391,
      "step": 70
    },
    {
      "epoch": 0.12391093901258471,
      "grad_norm": 20971520.0,
      "learning_rate": 8.000000000000001e-06,
      "loss": 61.2704,
      "step": 80
    },
    {
      "epoch": 0.1393998063891578,
      "grad_norm": 110624768.0,
      "learning_rate": 9e-06,
      "loss": 59.6674,
      "step": 90
    },
    {
      "epoch": 0.15488867376573087,
      "grad_norm": 66584576.0,
      "learning_rate": 1e-05,
      "loss": 59.4588,
      "step": 100
    },
    {
      "epoch": 0.17037754114230397,
      "grad_norm": 89653248.0,
      "learning_rate": 1.1000000000000001e-05,
      "loss": 58.9144,
      "step": 110
    },
    {
      "epoch": 0.18586640851887706,
      "grad_norm": 56360960.0,
      "learning_rate": 1.2e-05,
      "loss": 58.3093,
      "step": 120
    },
    {
      "epoch": 0.20135527589545016,
      "grad_norm": 19791872.0,
      "learning_rate": 1.3000000000000001e-05,
      "loss": 58.0143,
      "step": 130
    },
    {
      "epoch": 0.21684414327202323,
      "grad_norm": 5242880.0,
      "learning_rate": 1.4000000000000001e-05,
      "loss": 56.9984,
      "step": 140
    },
    {
      "epoch": 0.23233301064859632,
      "grad_norm": 55050240.0,
      "learning_rate": 1.5e-05,
      "loss": 57.6238,
      "step": 150
    },
    {
      "epoch": 0.24782187802516942,
      "grad_norm": 118489088.0,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 55.2309,
      "step": 160
    },
    {
      "epoch": 0.2633107454017425,
      "grad_norm": 61865984.0,
      "learning_rate": 1.7000000000000003e-05,
      "loss": 54.6207,
      "step": 170
    },
    {
      "epoch": 0.2787996127783156,
      "grad_norm": 13041664.0,
      "learning_rate": 1.8e-05,
      "loss": 52.8616,
      "step": 180
    },
    {
      "epoch": 0.2942884801548887,
      "grad_norm": 87031808.0,
      "learning_rate": 1.9e-05,
      "loss": 52.4976,
      "step": 190
    },
    {
      "epoch": 0.30977734753146174,
      "grad_norm": 3129344.0,
      "learning_rate": 2e-05,
      "loss": 52.6041,
      "step": 200
    },
    {
      "epoch": 0.32526621490803487,
      "grad_norm": 99090432.0,
      "learning_rate": 2.1e-05,
      "loss": 54.6559,
      "step": 210
    },
    {
      "epoch": 0.34075508228460794,
      "grad_norm": 3238002688.0,
      "learning_rate": 2.2000000000000003e-05,
      "loss": 52.2666,
      "step": 220
    },
    {
      "epoch": 0.356243949661181,
      "grad_norm": 8454144.0,
      "learning_rate": 2.3000000000000003e-05,
      "loss": 51.7553,
      "step": 230
    },
    {
      "epoch": 0.3717328170377541,
      "grad_norm": 81264640.0,
      "learning_rate": 2.4e-05,
      "loss": 49.4186,
      "step": 240
    },
    {
      "epoch": 0.3872216844143272,
      "grad_norm": 1531904.0,
      "learning_rate": 2.5e-05,
      "loss": 48.724,
      "step": 250
    },
    {
      "epoch": 0.4027105517909003,
      "grad_norm": 6782976.0,
      "learning_rate": 2.6000000000000002e-05,
      "loss": 47.4897,
      "step": 260
    },
    {
      "epoch": 0.4181994191674734,
      "grad_norm": 462848.0,
      "learning_rate": 2.7000000000000002e-05,
      "loss": 46.1025,
      "step": 270
    },
    {
      "epoch": 0.43368828654404645,
      "grad_norm": 227328.0,
      "learning_rate": 2.8000000000000003e-05,
      "loss": 43.953,
      "step": 280
    },
    {
      "epoch": 0.4491771539206196,
      "grad_norm": 212992.0,
      "learning_rate": 2.9e-05,
      "loss": 42.786,
      "step": 290
    },
    {
      "epoch": 0.46466602129719264,
      "grad_norm": 40704.0,
      "learning_rate": 3e-05,
      "loss": 41.3757,
      "step": 300
    },
    {
      "epoch": 0.4801548886737657,
      "grad_norm": 42991616.0,
      "learning_rate": 3.1e-05,
      "loss": 42.0031,
      "step": 310
    },
    {
      "epoch": 0.49564375605033884,
      "grad_norm": 21102592.0,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 41.9207,
      "step": 320
    },
    {
      "epoch": 0.5111326234269119,
      "grad_norm": 16187392.0,
      "learning_rate": 3.3e-05,
      "loss": 42.2336,
      "step": 330
    },
    {
      "epoch": 0.526621490803485,
      "grad_norm": 164864.0,
      "learning_rate": 3.4000000000000007e-05,
      "loss": 40.6817,
      "step": 340
    },
    {
      "epoch": 0.542110358180058,
      "grad_norm": 517996544.0,
      "learning_rate": 3.5e-05,
      "loss": 44.9866,
      "step": 350
    },
    {
      "epoch": 0.5575992255566312,
      "grad_norm": 87556096.0,
      "learning_rate": 3.6e-05,
      "loss": 47.7577,
      "step": 360
    },
    {
      "epoch": 0.5730880929332043,
      "grad_norm": 29753344.0,
      "learning_rate": 3.7e-05,
      "loss": 50.8352,
      "step": 370
    },
    {
      "epoch": 0.5885769603097774,
      "grad_norm": 38273024.0,
      "learning_rate": 3.8e-05,
      "loss": 50.4585,
      "step": 380
    },
    {
      "epoch": 0.6040658276863504,
      "grad_norm": 24248320.0,
      "learning_rate": 3.9000000000000006e-05,
      "loss": 53.4794,
      "step": 390
    },
    {
      "epoch": 0.6195546950629235,
      "grad_norm": 637534208.0,
      "learning_rate": 4e-05,
      "loss": 53.7074,
      "step": 400
    },
    {
      "epoch": 0.6350435624394967,
      "grad_norm": 679477248.0,
      "learning_rate": 4.1e-05,
      "loss": 57.4144,
      "step": 410
    },
    {
      "epoch": 0.6505324298160697,
      "grad_norm": 583008256.0,
      "learning_rate": 4.2e-05,
      "loss": 53.7095,
      "step": 420
    },
    {
      "epoch": 0.6660212971926428,
      "grad_norm": 57933824.0,
      "learning_rate": 4.3e-05,
      "loss": 54.9929,
      "step": 430
    },
    {
      "epoch": 0.6815101645692159,
      "grad_norm": 61079552.0,
      "learning_rate": 4.4000000000000006e-05,
      "loss": 59.2993,
      "step": 440
    },
    {
      "epoch": 0.6969990319457889,
      "grad_norm": 227540992.0,
      "learning_rate": 4.5e-05,
      "loss": 60.4038,
      "step": 450
    },
    {
      "epoch": 0.712487899322362,
      "grad_norm": 190840832.0,
      "learning_rate": 4.600000000000001e-05,
      "loss": 63.1003,
      "step": 460
    },
    {
      "epoch": 0.7279767666989352,
      "grad_norm": 327155712.0,
      "learning_rate": 4.7e-05,
      "loss": 64.5368,
      "step": 470
    },
    {
      "epoch": 0.7434656340755083,
      "grad_norm": 679477248.0,
      "learning_rate": 4.8e-05,
      "loss": 61.6716,
      "step": 480
    },
    {
      "epoch": 0.7589545014520813,
      "grad_norm": 276824064.0,
      "learning_rate": 4.9e-05,
      "loss": 62.8625,
      "step": 490
    },
    {
      "epoch": 0.7744433688286544,
      "grad_norm": 78643200.0,
      "learning_rate": 5e-05,
      "loss": 63.9848,
      "step": 500
    },
    {
      "epoch": 0.7899322362052275,
      "grad_norm": 224395264.0,
      "learning_rate": 5.1000000000000006e-05,
      "loss": 64.5113,
      "step": 510
    },
    {
      "epoch": 0.8054211035818006,
      "grad_norm": 154140672.0,
      "learning_rate": 5.2000000000000004e-05,
      "loss": 64.9807,
      "step": 520
    },
    {
      "epoch": 0.8209099709583737,
      "grad_norm": 805306368.0,
      "learning_rate": 5.300000000000001e-05,
      "loss": 65.7812,
      "step": 530
    },
    {
      "epoch": 0.8363988383349468,
      "grad_norm": 339738624.0,
      "learning_rate": 5.4000000000000005e-05,
      "loss": 67.1272,
      "step": 540
    },
    {
      "epoch": 0.8518877057115198,
      "grad_norm": 1002438656.0,
      "learning_rate": 5.500000000000001e-05,
      "loss": 66.3905,
      "step": 550
    },
    {
      "epoch": 0.8673765730880929,
      "grad_norm": 2583691264.0,
      "learning_rate": 5.6000000000000006e-05,
      "loss": 66.0419,
      "step": 560
    },
    {
      "epoch": 0.882865440464666,
      "grad_norm": 411041792.0,
      "learning_rate": 5.6999999999999996e-05,
      "loss": 62.0064,
      "step": 570
    },
    {
      "epoch": 0.8983543078412392,
      "grad_norm": 70254592.0,
      "learning_rate": 5.8e-05,
      "loss": 63.6127,
      "step": 580
    },
    {
      "epoch": 0.9138431752178122,
      "grad_norm": 191889408.0,
      "learning_rate": 5.9e-05,
      "loss": 63.481,
      "step": 590
    },
    {
      "epoch": 0.9293320425943853,
      "grad_norm": 436207616.0,
      "learning_rate": 6e-05,
      "loss": 66.0833,
      "step": 600
    },
    {
      "epoch": 0.9293320425943853,
      "eval_loss": 69.23383331298828,
      "eval_runtime": 141.0364,
      "eval_samples_per_second": 10.636,
      "eval_steps_per_second": 2.659,
      "step": 600
    },
    {
      "epoch": 0.9448209099709584,
      "grad_norm": 33161216.0,
      "learning_rate": 6.1e-05,
      "loss": 65.1055,
      "step": 610
    },
    {
      "epoch": 0.9603097773475314,
      "grad_norm": 79167488.0,
      "learning_rate": 6.2e-05,
      "loss": 65.4074,
      "step": 620
    },
    {
      "epoch": 0.9757986447241046,
      "grad_norm": 119013376.0,
      "learning_rate": 6.3e-05,
      "loss": 64.0661,
      "step": 630
    },
    {
      "epoch": 0.9912875121006777,
      "grad_norm": 116391936.0,
      "learning_rate": 6.400000000000001e-05,
      "loss": 63.0841,
      "step": 640
    },
    {
      "epoch": 1.0067763794772506,
      "grad_norm": 191889408.0,
      "learning_rate": 6.500000000000001e-05,
      "loss": 63.6633,
      "step": 650
    },
    {
      "epoch": 1.0222652468538238,
      "grad_norm": 35389440.0,
      "learning_rate": 6.6e-05,
      "loss": 63.879,
      "step": 660
    },
    {
      "epoch": 1.037754114230397,
      "grad_norm": 219152384.0,
      "learning_rate": 6.7e-05,
      "loss": 65.5105,
      "step": 670
    },
    {
      "epoch": 1.05324298160697,
      "grad_norm": 476053504.0,
      "learning_rate": 6.800000000000001e-05,
      "loss": 66.3527,
      "step": 680
    },
    {
      "epoch": 1.0687318489835431,
      "grad_norm": 2348810240.0,
      "learning_rate": 6.9e-05,
      "loss": 66.1827,
      "step": 690
    },
    {
      "epoch": 1.084220716360116,
      "grad_norm": 452984832.0,
      "learning_rate": 7e-05,
      "loss": 67.8703,
      "step": 700
    },
    {
      "epoch": 1.0997095837366893,
      "grad_norm": 46137344.0,
      "learning_rate": 7.1e-05,
      "loss": 67.2648,
      "step": 710
    },
    {
      "epoch": 1.1151984511132624,
      "grad_norm": 70778880.0,
      "learning_rate": 7.2e-05,
      "loss": 62.0604,
      "step": 720
    },
    {
      "epoch": 1.1306873184898354,
      "grad_norm": 283115520.0,
      "learning_rate": 7.3e-05,
      "loss": 60.8914,
      "step": 730
    },
    {
      "epoch": 1.1461761858664086,
      "grad_norm": 78118912.0,
      "learning_rate": 7.4e-05,
      "loss": 60.0754,
      "step": 740
    },
    {
      "epoch": 1.1616650532429815,
      "grad_norm": 120061952.0,
      "learning_rate": 7.500000000000001e-05,
      "loss": 63.6695,
      "step": 750
    },
    {
      "epoch": 1.1771539206195547,
      "grad_norm": 90701824.0,
      "learning_rate": 7.6e-05,
      "loss": 65.6076,
      "step": 760
    },
    {
      "epoch": 1.1926427879961277,
      "grad_norm": 23724032.0,
      "learning_rate": 7.7e-05,
      "loss": 64.6991,
      "step": 770
    },
    {
      "epoch": 1.2081316553727008,
      "grad_norm": 22806528.0,
      "learning_rate": 7.800000000000001e-05,
      "loss": 64.4208,
      "step": 780
    },
    {
      "epoch": 1.223620522749274,
      "grad_norm": 19529728.0,
      "learning_rate": 7.900000000000001e-05,
      "loss": 58.9719,
      "step": 790
    },
    {
      "epoch": 1.239109390125847,
      "grad_norm": 154140672.0,
      "learning_rate": 8e-05,
      "loss": 58.4048,
      "step": 800
    },
    {
      "epoch": 1.2545982575024202,
      "grad_norm": 4882432.0,
      "learning_rate": 8.1e-05,
      "loss": 63.3226,
      "step": 810
    },
    {
      "epoch": 1.2700871248789931,
      "grad_norm": 4718592.0,
      "learning_rate": 8.2e-05,
      "loss": 62.6276,
      "step": 820
    },
    {
      "epoch": 1.2855759922555663,
      "grad_norm": 56098816.0,
      "learning_rate": 8.3e-05,
      "loss": 57.6699,
      "step": 830
    },
    {
      "epoch": 1.3010648596321395,
      "grad_norm": 2281701376.0,
      "learning_rate": 8.4e-05,
      "loss": 55.8831,
      "step": 840
    },
    {
      "epoch": 1.3165537270087124,
      "grad_norm": 63963136.0,
      "learning_rate": 8.5e-05,
      "loss": 58.8188,
      "step": 850
    },
    {
      "epoch": 1.3320425943852856,
      "grad_norm": 66322432.0,
      "learning_rate": 8.6e-05,
      "loss": 57.2062,
      "step": 860
    },
    {
      "epoch": 1.3475314617618586,
      "grad_norm": 17956864.0,
      "learning_rate": 8.7e-05,
      "loss": 59.4092,
      "step": 870
    },
    {
      "epoch": 1.3630203291384317,
      "grad_norm": 246415360.0,
      "learning_rate": 8.800000000000001e-05,
      "loss": 56.8145,
      "step": 880
    },
    {
      "epoch": 1.378509196515005,
      "grad_norm": 205520896.0,
      "learning_rate": 8.900000000000001e-05,
      "loss": 56.4558,
      "step": 890
    },
    {
      "epoch": 1.3939980638915779,
      "grad_norm": 90177536.0,
      "learning_rate": 9e-05,
      "loss": 54.4208,
      "step": 900
    },
    {
      "epoch": 1.409486931268151,
      "grad_norm": 383778816.0,
      "learning_rate": 9.1e-05,
      "loss": 53.8711,
      "step": 910
    },
    {
      "epoch": 1.424975798644724,
      "grad_norm": 23461888.0,
      "learning_rate": 9.200000000000001e-05,
      "loss": 52.0157,
      "step": 920
    },
    {
      "epoch": 1.4404646660212972,
      "grad_norm": 27394048.0,
      "learning_rate": 9.300000000000001e-05,
      "loss": 48.0571,
      "step": 930
    },
    {
      "epoch": 1.4559535333978704,
      "grad_norm": 8716288.0,
      "learning_rate": 9.4e-05,
      "loss": 43.4202,
      "step": 940
    },
    {
      "epoch": 1.4714424007744433,
      "grad_norm": 5570560.0,
      "learning_rate": 9.5e-05,
      "loss": 40.7559,
      "step": 950
    },
    {
      "epoch": 1.4869312681510165,
      "grad_norm": 20316160.0,
      "learning_rate": 9.6e-05,
      "loss": 39.1436,
      "step": 960
    },
    {
      "epoch": 1.5024201355275895,
      "grad_norm": 15204352.0,
      "learning_rate": 9.7e-05,
      "loss": 36.618,
      "step": 970
    },
    {
      "epoch": 1.5179090029041626,
      "grad_norm": 5832704.0,
      "learning_rate": 9.8e-05,
      "loss": 34.3857,
      "step": 980
    },
    {
      "epoch": 1.5333978702807358,
      "grad_norm": 2293760.0,
      "learning_rate": 9.900000000000001e-05,
      "loss": 29.7184,
      "step": 990
    },
    {
      "epoch": 1.5488867376573088,
      "grad_norm": 22282240.0,
      "learning_rate": 0.0001,
      "loss": 24.8724,
      "step": 1000
    },
    {
      "epoch": 1.5643756050338817,
      "grad_norm": 10682368.0,
      "learning_rate": 9.98165137614679e-05,
      "loss": 21.2392,
      "step": 1010
    },
    {
      "epoch": 1.579864472410455,
      "grad_norm": 25690112.0,
      "learning_rate": 9.963302752293578e-05,
      "loss": 18.7594,
      "step": 1020
    },
    {
      "epoch": 1.595353339787028,
      "grad_norm": 15400960.0,
      "learning_rate": 9.944954128440368e-05,
      "loss": 17.9276,
      "step": 1030
    },
    {
      "epoch": 1.6108422071636013,
      "grad_norm": 2359296.0,
      "learning_rate": 9.926605504587157e-05,
      "loss": 16.505,
      "step": 1040
    },
    {
      "epoch": 1.6263310745401742,
      "grad_norm": 19005440.0,
      "learning_rate": 9.908256880733946e-05,
      "loss": 15.258,
      "step": 1050
    },
    {
      "epoch": 1.6418199419167472,
      "grad_norm": 9240576.0,
      "learning_rate": 9.889908256880734e-05,
      "loss": 13.9628,
      "step": 1060
    },
    {
      "epoch": 1.6573088092933204,
      "grad_norm": 7012352.0,
      "learning_rate": 9.871559633027525e-05,
      "loss": 14.8131,
      "step": 1070
    },
    {
      "epoch": 1.6727976766698935,
      "grad_norm": 8388608.0,
      "learning_rate": 9.853211009174312e-05,
      "loss": 13.4851,
      "step": 1080
    },
    {
      "epoch": 1.6882865440464667,
      "grad_norm": 1286144.0,
      "learning_rate": 9.834862385321102e-05,
      "loss": 12.8225,
      "step": 1090
    },
    {
      "epoch": 1.7037754114230397,
      "grad_norm": 2490368.0,
      "learning_rate": 9.816513761467891e-05,
      "loss": 11.7111,
      "step": 1100
    },
    {
      "epoch": 1.7192642787996126,
      "grad_norm": 1064960.0,
      "learning_rate": 9.79816513761468e-05,
      "loss": 11.347,
      "step": 1110
    },
    {
      "epoch": 1.7347531461761858,
      "grad_norm": 352256.0,
      "learning_rate": 9.779816513761468e-05,
      "loss": 10.6953,
      "step": 1120
    },
    {
      "epoch": 1.750242013552759,
      "grad_norm": 4046848.0,
      "learning_rate": 9.761467889908259e-05,
      "loss": 10.3879,
      "step": 1130
    },
    {
      "epoch": 1.7657308809293322,
      "grad_norm": 348160.0,
      "learning_rate": 9.743119266055046e-05,
      "loss": 10.5364,
      "step": 1140
    },
    {
      "epoch": 1.7812197483059051,
      "grad_norm": 6193152.0,
      "learning_rate": 9.724770642201836e-05,
      "loss": 10.7673,
      "step": 1150
    },
    {
      "epoch": 1.796708615682478,
      "grad_norm": 671744.0,
      "learning_rate": 9.706422018348625e-05,
      "loss": 10.0088,
      "step": 1160
    },
    {
      "epoch": 1.8121974830590513,
      "grad_norm": 557056.0,
      "learning_rate": 9.688073394495414e-05,
      "loss": 10.1469,
      "step": 1170
    },
    {
      "epoch": 1.8276863504356244,
      "grad_norm": 835584.0,
      "learning_rate": 9.669724770642202e-05,
      "loss": 10.2772,
      "step": 1180
    },
    {
      "epoch": 1.8431752178121976,
      "grad_norm": 2277376.0,
      "learning_rate": 9.651376146788991e-05,
      "loss": 9.8019,
      "step": 1190
    },
    {
      "epoch": 1.8586640851887706,
      "grad_norm": 307200.0,
      "learning_rate": 9.63302752293578e-05,
      "loss": 9.5204,
      "step": 1200
    },
    {
      "epoch": 1.8586640851887706,
      "eval_loss": 9.286345481872559,
      "eval_runtime": 140.9651,
      "eval_samples_per_second": 10.641,
      "eval_steps_per_second": 2.66,
      "step": 1200
    },
    {
      "epoch": 1.8741529525653435,
      "grad_norm": 315392.0,
      "learning_rate": 9.61467889908257e-05,
      "loss": 9.7153,
      "step": 1210
    },
    {
      "epoch": 1.8896418199419167,
      "grad_norm": 1089536.0,
      "learning_rate": 9.596330275229359e-05,
      "loss": 9.847,
      "step": 1220
    },
    {
      "epoch": 1.90513068731849,
      "grad_norm": 1597440.0,
      "learning_rate": 9.577981651376148e-05,
      "loss": 9.5979,
      "step": 1230
    },
    {
      "epoch": 1.920619554695063,
      "grad_norm": 481280.0,
      "learning_rate": 9.559633027522936e-05,
      "loss": 9.4879,
      "step": 1240
    },
    {
      "epoch": 1.936108422071636,
      "grad_norm": 847872.0,
      "learning_rate": 9.541284403669725e-05,
      "loss": 8.8939,
      "step": 1250
    },
    {
      "epoch": 1.951597289448209,
      "grad_norm": 292864.0,
      "learning_rate": 9.522935779816514e-05,
      "loss": 9.2976,
      "step": 1260
    },
    {
      "epoch": 1.9670861568247822,
      "grad_norm": 1630208.0,
      "learning_rate": 9.504587155963304e-05,
      "loss": 8.9201,
      "step": 1270
    },
    {
      "epoch": 1.9825750242013553,
      "grad_norm": 471040.0,
      "learning_rate": 9.486238532110093e-05,
      "loss": 8.9614,
      "step": 1280
    },
    {
      "epoch": 1.9980638915779285,
      "grad_norm": 761856.0,
      "learning_rate": 9.467889908256882e-05,
      "loss": 9.0607,
      "step": 1290
    },
    {
      "epoch": 2.0135527589545013,
      "grad_norm": 512000.0,
      "learning_rate": 9.44954128440367e-05,
      "loss": 9.3156,
      "step": 1300
    },
    {
      "epoch": 2.0290416263310744,
      "grad_norm": 321536.0,
      "learning_rate": 9.431192660550459e-05,
      "loss": 9.0155,
      "step": 1310
    },
    {
      "epoch": 2.0445304937076476,
      "grad_norm": 2555904.0,
      "learning_rate": 9.412844036697248e-05,
      "loss": 8.7237,
      "step": 1320
    },
    {
      "epoch": 2.060019361084221,
      "grad_norm": 1638400.0,
      "learning_rate": 9.394495412844038e-05,
      "loss": 8.559,
      "step": 1330
    },
    {
      "epoch": 2.075508228460794,
      "grad_norm": 6914048.0,
      "learning_rate": 9.376146788990827e-05,
      "loss": 8.7204,
      "step": 1340
    },
    {
      "epoch": 2.0909970958373667,
      "grad_norm": 270336.0,
      "learning_rate": 9.357798165137616e-05,
      "loss": 8.7914,
      "step": 1350
    },
    {
      "epoch": 2.10648596321394,
      "grad_norm": 3145728.0,
      "learning_rate": 9.339449541284404e-05,
      "loss": 10.1368,
      "step": 1360
    },
    {
      "epoch": 2.121974830590513,
      "grad_norm": 305152.0,
      "learning_rate": 9.321100917431193e-05,
      "loss": 9.0641,
      "step": 1370
    },
    {
      "epoch": 2.1374636979670862,
      "grad_norm": 651264.0,
      "learning_rate": 9.302752293577982e-05,
      "loss": 8.5292,
      "step": 1380
    },
    {
      "epoch": 2.1529525653436594,
      "grad_norm": 323584.0,
      "learning_rate": 9.284403669724772e-05,
      "loss": 8.3136,
      "step": 1390
    },
    {
      "epoch": 2.168441432720232,
      "grad_norm": 692224.0,
      "learning_rate": 9.266055045871561e-05,
      "loss": 8.4924,
      "step": 1400
    },
    {
      "epoch": 2.1839303000968053,
      "grad_norm": 1941504.0,
      "learning_rate": 9.24770642201835e-05,
      "loss": 7.9653,
      "step": 1410
    },
    {
      "epoch": 2.1994191674733785,
      "grad_norm": 3325952.0,
      "learning_rate": 9.229357798165138e-05,
      "loss": 8.2938,
      "step": 1420
    },
    {
      "epoch": 2.2149080348499517,
      "grad_norm": 7897088.0,
      "learning_rate": 9.211009174311927e-05,
      "loss": 8.6513,
      "step": 1430
    },
    {
      "epoch": 2.230396902226525,
      "grad_norm": 229638144.0,
      "learning_rate": 9.192660550458716e-05,
      "loss": 8.9772,
      "step": 1440
    },
    {
      "epoch": 2.2458857696030976,
      "grad_norm": 1990656.0,
      "learning_rate": 9.174311926605506e-05,
      "loss": 9.0591,
      "step": 1450
    },
    {
      "epoch": 2.261374636979671,
      "grad_norm": 2113536.0,
      "learning_rate": 9.155963302752293e-05,
      "loss": 8.5508,
      "step": 1460
    },
    {
      "epoch": 2.276863504356244,
      "grad_norm": 14352384.0,
      "learning_rate": 9.137614678899083e-05,
      "loss": 8.6085,
      "step": 1470
    },
    {
      "epoch": 2.292352371732817,
      "grad_norm": 2277376.0,
      "learning_rate": 9.119266055045872e-05,
      "loss": 8.8426,
      "step": 1480
    },
    {
      "epoch": 2.3078412391093903,
      "grad_norm": 26476544.0,
      "learning_rate": 9.100917431192661e-05,
      "loss": 9.0669,
      "step": 1490
    },
    {
      "epoch": 2.323330106485963,
      "grad_norm": 85458944.0,
      "learning_rate": 9.08256880733945e-05,
      "loss": 9.6445,
      "step": 1500
    },
    {
      "epoch": 2.3388189738625362,
      "grad_norm": 128974848.0,
      "learning_rate": 9.06422018348624e-05,
      "loss": 9.6751,
      "step": 1510
    },
    {
      "epoch": 2.3543078412391094,
      "grad_norm": 32636928.0,
      "learning_rate": 9.045871559633027e-05,
      "loss": 9.2084,
      "step": 1520
    },
    {
      "epoch": 2.3697967086156826,
      "grad_norm": 175112192.0,
      "learning_rate": 9.027522935779817e-05,
      "loss": 9.3795,
      "step": 1530
    },
    {
      "epoch": 2.3852855759922553,
      "grad_norm": 25034752.0,
      "learning_rate": 9.009174311926606e-05,
      "loss": 9.6548,
      "step": 1540
    },
    {
      "epoch": 2.4007744433688285,
      "grad_norm": 8093696.0,
      "learning_rate": 8.990825688073395e-05,
      "loss": 9.0062,
      "step": 1550
    },
    {
      "epoch": 2.4162633107454017,
      "grad_norm": 3866624.0,
      "learning_rate": 8.972477064220184e-05,
      "loss": 8.6427,
      "step": 1560
    },
    {
      "epoch": 2.431752178121975,
      "grad_norm": 1433600.0,
      "learning_rate": 8.954128440366974e-05,
      "loss": 8.9959,
      "step": 1570
    },
    {
      "epoch": 2.447241045498548,
      "grad_norm": 4030464.0,
      "learning_rate": 8.935779816513761e-05,
      "loss": 8.9193,
      "step": 1580
    },
    {
      "epoch": 2.4627299128751208,
      "grad_norm": 37224448.0,
      "learning_rate": 8.917431192660551e-05,
      "loss": 8.9464,
      "step": 1590
    },
    {
      "epoch": 2.478218780251694,
      "grad_norm": 1003520.0,
      "learning_rate": 8.89908256880734e-05,
      "loss": 9.1216,
      "step": 1600
    },
    {
      "epoch": 2.493707647628267,
      "grad_norm": 2801664.0,
      "learning_rate": 8.880733944954129e-05,
      "loss": 8.9064,
      "step": 1610
    },
    {
      "epoch": 2.5091965150048403,
      "grad_norm": 2211840.0,
      "learning_rate": 8.862385321100918e-05,
      "loss": 9.062,
      "step": 1620
    },
    {
      "epoch": 2.5246853823814135,
      "grad_norm": 868352.0,
      "learning_rate": 8.844036697247708e-05,
      "loss": 9.2384,
      "step": 1630
    },
    {
      "epoch": 2.5401742497579862,
      "grad_norm": 3964928.0,
      "learning_rate": 8.825688073394495e-05,
      "loss": 9.0259,
      "step": 1640
    },
    {
      "epoch": 2.5556631171345594,
      "grad_norm": 1802240.0,
      "learning_rate": 8.807339449541285e-05,
      "loss": 9.0643,
      "step": 1650
    },
    {
      "epoch": 2.5711519845111326,
      "grad_norm": 73216.0,
      "learning_rate": 8.788990825688074e-05,
      "loss": 8.8911,
      "step": 1660
    },
    {
      "epoch": 2.5866408518877058,
      "grad_norm": 2719744.0,
      "learning_rate": 8.770642201834863e-05,
      "loss": 8.7628,
      "step": 1670
    },
    {
      "epoch": 2.602129719264279,
      "grad_norm": 1908736.0,
      "learning_rate": 8.752293577981652e-05,
      "loss": 8.3985,
      "step": 1680
    },
    {
      "epoch": 2.6176185866408517,
      "grad_norm": 55574528.0,
      "learning_rate": 8.733944954128442e-05,
      "loss": 8.386,
      "step": 1690
    },
    {
      "epoch": 2.633107454017425,
      "grad_norm": 561152.0,
      "learning_rate": 8.715596330275229e-05,
      "loss": 7.9368,
      "step": 1700
    },
    {
      "epoch": 2.648596321393998,
      "grad_norm": 179200.0,
      "learning_rate": 8.697247706422019e-05,
      "loss": 7.37,
      "step": 1710
    },
    {
      "epoch": 2.664085188770571,
      "grad_norm": 56320.0,
      "learning_rate": 8.678899082568808e-05,
      "loss": 7.0107,
      "step": 1720
    },
    {
      "epoch": 2.6795740561471444,
      "grad_norm": 32256.0,
      "learning_rate": 8.660550458715597e-05,
      "loss": 6.9978,
      "step": 1730
    },
    {
      "epoch": 2.695062923523717,
      "grad_norm": 91136.0,
      "learning_rate": 8.642201834862386e-05,
      "loss": 6.2097,
      "step": 1740
    },
    {
      "epoch": 2.7105517909002903,
      "grad_norm": 35584.0,
      "learning_rate": 8.623853211009176e-05,
      "loss": 5.6404,
      "step": 1750
    },
    {
      "epoch": 2.7260406582768635,
      "grad_norm": 146432.0,
      "learning_rate": 8.605504587155963e-05,
      "loss": 5.6008,
      "step": 1760
    },
    {
      "epoch": 2.7415295256534367,
      "grad_norm": 229376.0,
      "learning_rate": 8.587155963302753e-05,
      "loss": 4.993,
      "step": 1770
    },
    {
      "epoch": 2.75701839303001,
      "grad_norm": 208896.0,
      "learning_rate": 8.568807339449542e-05,
      "loss": 5.0213,
      "step": 1780
    },
    {
      "epoch": 2.7725072604065826,
      "grad_norm": 41156608.0,
      "learning_rate": 8.550458715596331e-05,
      "loss": 8.4373,
      "step": 1790
    },
    {
      "epoch": 2.7879961277831558,
      "grad_norm": 72351744.0,
      "learning_rate": 8.53211009174312e-05,
      "loss": 11.2268,
      "step": 1800
    },
    {
      "epoch": 2.7879961277831558,
      "eval_loss": 3.6953983306884766,
      "eval_runtime": 140.9159,
      "eval_samples_per_second": 10.645,
      "eval_steps_per_second": 2.661,
      "step": 1800
    }
  ],
  "logging_steps": 10,
  "max_steps": 6450,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 10,
  "save_steps": 600,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 9.851925798951322e+17,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}