{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 1131,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.002652519893899204,
      "grad_norm": 968.0919232146924,
      "learning_rate": 8.771929824561404e-08,
      "loss": 12.4211,
      "step": 1
    },
    {
      "epoch": 0.005305039787798408,
      "grad_norm": 1026.2449443021458,
      "learning_rate": 1.7543859649122808e-07,
      "loss": 12.452,
      "step": 2
    },
    {
      "epoch": 0.007957559681697613,
      "grad_norm": 1017.251056225472,
      "learning_rate": 2.6315789473684213e-07,
      "loss": 12.264,
      "step": 3
    },
    {
      "epoch": 0.010610079575596816,
      "grad_norm": 1065.85515059101,
      "learning_rate": 3.5087719298245616e-07,
      "loss": 12.3229,
      "step": 4
    },
    {
      "epoch": 0.013262599469496022,
      "grad_norm": 952.5383153380988,
      "learning_rate": 4.385964912280702e-07,
      "loss": 12.3374,
      "step": 5
    },
    {
      "epoch": 0.015915119363395226,
      "grad_norm": 1013.6977210971099,
      "learning_rate": 5.263157894736843e-07,
      "loss": 12.316,
      "step": 6
    },
    {
      "epoch": 0.01856763925729443,
      "grad_norm": 1060.4701992924765,
      "learning_rate": 6.140350877192982e-07,
      "loss": 12.1177,
      "step": 7
    },
    {
      "epoch": 0.021220159151193633,
      "grad_norm": 874.2817791313958,
      "learning_rate": 7.017543859649123e-07,
      "loss": 12.059,
      "step": 8
    },
    {
      "epoch": 0.023872679045092837,
      "grad_norm": 1034.117575834617,
      "learning_rate": 7.894736842105263e-07,
      "loss": 11.6855,
      "step": 9
    },
    {
      "epoch": 0.026525198938992044,
      "grad_norm": 958.1913728158022,
      "learning_rate": 8.771929824561404e-07,
      "loss": 11.4163,
      "step": 10
    },
    {
      "epoch": 0.029177718832891247,
      "grad_norm": 1220.2117771599096,
      "learning_rate": 9.649122807017545e-07,
      "loss": 10.6914,
      "step": 11
    },
    {
      "epoch": 0.03183023872679045,
      "grad_norm": 1048.1213049240255,
      "learning_rate": 1.0526315789473685e-06,
      "loss": 10.1105,
      "step": 12
    },
    {
      "epoch": 0.034482758620689655,
      "grad_norm": 789.9306134938697,
      "learning_rate": 1.1403508771929824e-06,
      "loss": 10.0797,
      "step": 13
    },
    {
      "epoch": 0.03713527851458886,
      "grad_norm": 949.2047562477611,
      "learning_rate": 1.2280701754385965e-06,
      "loss": 9.7583,
      "step": 14
    },
    {
      "epoch": 0.03978779840848806,
      "grad_norm": 754.369190005223,
      "learning_rate": 1.3157894736842106e-06,
      "loss": 8.4975,
      "step": 15
    },
    {
      "epoch": 0.042440318302387266,
      "grad_norm": 824.0571350755927,
      "learning_rate": 1.4035087719298246e-06,
      "loss": 8.4441,
      "step": 16
    },
    {
      "epoch": 0.04509283819628647,
      "grad_norm": 1037.9890686137453,
      "learning_rate": 1.4912280701754387e-06,
      "loss": 7.9298,
      "step": 17
    },
    {
      "epoch": 0.04774535809018567,
      "grad_norm": 765.6957994118179,
      "learning_rate": 1.5789473684210526e-06,
      "loss": 7.7697,
      "step": 18
    },
    {
      "epoch": 0.050397877984084884,
      "grad_norm": 718.3426984808572,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 7.3831,
      "step": 19
    },
    {
      "epoch": 0.05305039787798409,
      "grad_norm": 553.427473727637,
      "learning_rate": 1.7543859649122807e-06,
      "loss": 6.9709,
      "step": 20
    },
    {
      "epoch": 0.05570291777188329,
      "grad_norm": 483.40199996798475,
      "learning_rate": 1.8421052631578948e-06,
      "loss": 5.811,
      "step": 21
    },
    {
      "epoch": 0.058355437665782495,
      "grad_norm": 707.095094504861,
      "learning_rate": 1.929824561403509e-06,
      "loss": 5.5033,
      "step": 22
    },
    {
      "epoch": 0.0610079575596817,
      "grad_norm": 553.1469775776469,
      "learning_rate": 2.017543859649123e-06,
      "loss": 5.1927,
      "step": 23
    },
    {
      "epoch": 0.0636604774535809,
      "grad_norm": 419.1819226196746,
      "learning_rate": 2.105263157894737e-06,
      "loss": 4.874,
      "step": 24
    },
    {
      "epoch": 0.06631299734748011,
      "grad_norm": 608.8428174720044,
      "learning_rate": 2.192982456140351e-06,
      "loss": 4.7575,
      "step": 25
    },
    {
      "epoch": 0.06896551724137931,
      "grad_norm": 333.80789191200347,
      "learning_rate": 2.280701754385965e-06,
      "loss": 4.5398,
      "step": 26
    },
    {
      "epoch": 0.07161803713527852,
      "grad_norm": 368.1717826168138,
      "learning_rate": 2.368421052631579e-06,
      "loss": 4.4244,
      "step": 27
    },
    {
      "epoch": 0.07427055702917772,
      "grad_norm": 393.3469206961459,
      "learning_rate": 2.456140350877193e-06,
      "loss": 4.2777,
      "step": 28
    },
    {
      "epoch": 0.07692307692307693,
      "grad_norm": 456.3624347928031,
      "learning_rate": 2.5438596491228075e-06,
      "loss": 4.0362,
      "step": 29
    },
    {
      "epoch": 0.07957559681697612,
      "grad_norm": 424.8598900805099,
      "learning_rate": 2.631578947368421e-06,
      "loss": 3.932,
      "step": 30
    },
    {
      "epoch": 0.08222811671087533,
      "grad_norm": 260.5563303150786,
      "learning_rate": 2.7192982456140356e-06,
      "loss": 3.648,
      "step": 31
    },
    {
      "epoch": 0.08488063660477453,
      "grad_norm": 268.7488965609003,
      "learning_rate": 2.8070175438596493e-06,
      "loss": 3.5179,
      "step": 32
    },
    {
      "epoch": 0.08753315649867374,
      "grad_norm": 280.7842985515052,
      "learning_rate": 2.8947368421052634e-06,
      "loss": 3.409,
      "step": 33
    },
    {
      "epoch": 0.09018567639257294,
      "grad_norm": 352.12688319511284,
      "learning_rate": 2.9824561403508774e-06,
      "loss": 3.3254,
      "step": 34
    },
    {
      "epoch": 0.09283819628647215,
      "grad_norm": 288.5902403747857,
      "learning_rate": 3.0701754385964915e-06,
      "loss": 3.1845,
      "step": 35
    },
    {
      "epoch": 0.09549071618037135,
      "grad_norm": 304.1109830674416,
      "learning_rate": 3.157894736842105e-06,
      "loss": 3.0876,
      "step": 36
    },
    {
      "epoch": 0.09814323607427056,
      "grad_norm": 232.21896852625596,
      "learning_rate": 3.2456140350877197e-06,
      "loss": 3.0052,
      "step": 37
    },
    {
      "epoch": 0.10079575596816977,
      "grad_norm": 255.80964498093417,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 2.9565,
      "step": 38
    },
    {
      "epoch": 0.10344827586206896,
      "grad_norm": 222.66022371200634,
      "learning_rate": 3.421052631578948e-06,
      "loss": 2.9059,
      "step": 39
    },
    {
      "epoch": 0.10610079575596817,
      "grad_norm": 237.06732274651333,
      "learning_rate": 3.5087719298245615e-06,
      "loss": 2.8504,
      "step": 40
    },
    {
      "epoch": 0.10875331564986737,
      "grad_norm": 298.7469595360898,
      "learning_rate": 3.596491228070176e-06,
      "loss": 2.8541,
      "step": 41
    },
    {
      "epoch": 0.11140583554376658,
      "grad_norm": 229.14920993336642,
      "learning_rate": 3.6842105263157896e-06,
      "loss": 2.7648,
      "step": 42
    },
    {
      "epoch": 0.11405835543766578,
      "grad_norm": 436.82119269000754,
      "learning_rate": 3.7719298245614037e-06,
      "loss": 2.8093,
      "step": 43
    },
    {
      "epoch": 0.11671087533156499,
      "grad_norm": 256.71171046255853,
      "learning_rate": 3.859649122807018e-06,
      "loss": 2.6412,
      "step": 44
    },
    {
      "epoch": 0.11936339522546419,
      "grad_norm": 371.9802996469374,
      "learning_rate": 3.947368421052632e-06,
      "loss": 2.7262,
      "step": 45
    },
    {
      "epoch": 0.1220159151193634,
      "grad_norm": 261.98684757391896,
      "learning_rate": 4.035087719298246e-06,
      "loss": 2.6309,
      "step": 46
    },
    {
      "epoch": 0.1246684350132626,
      "grad_norm": 217.98738126104192,
      "learning_rate": 4.12280701754386e-06,
      "loss": 2.6587,
      "step": 47
    },
    {
      "epoch": 0.1273209549071618,
      "grad_norm": 262.62076176117773,
      "learning_rate": 4.210526315789474e-06,
      "loss": 2.728,
      "step": 48
    },
    {
      "epoch": 0.129973474801061,
      "grad_norm": 206.72393702517692,
      "learning_rate": 4.298245614035088e-06,
      "loss": 2.5768,
      "step": 49
    },
    {
      "epoch": 0.13262599469496023,
      "grad_norm": 240.11903830674424,
      "learning_rate": 4.385964912280702e-06,
      "loss": 2.6957,
      "step": 50
    },
    {
      "epoch": 0.13527851458885942,
      "grad_norm": 363.69145331696933,
      "learning_rate": 4.473684210526316e-06,
      "loss": 2.9907,
      "step": 51
    },
    {
      "epoch": 0.13793103448275862,
      "grad_norm": 210.8252588976506,
      "learning_rate": 4.56140350877193e-06,
      "loss": 2.4764,
      "step": 52
    },
    {
      "epoch": 0.14058355437665782,
      "grad_norm": 235.74742130809076,
      "learning_rate": 4.649122807017544e-06,
      "loss": 2.5137,
      "step": 53
    },
    {
      "epoch": 0.14323607427055704,
      "grad_norm": 328.4498421369927,
      "learning_rate": 4.736842105263158e-06,
      "loss": 2.6422,
      "step": 54
    },
    {
      "epoch": 0.14588859416445624,
      "grad_norm": 248.58638479316332,
      "learning_rate": 4.824561403508772e-06,
      "loss": 2.5274,
      "step": 55
    },
    {
      "epoch": 0.14854111405835543,
      "grad_norm": 261.9775254925213,
      "learning_rate": 4.912280701754386e-06,
      "loss": 2.5519,
      "step": 56
    },
    {
      "epoch": 0.15119363395225463,
      "grad_norm": 281.2075117591644,
      "learning_rate": 5e-06,
      "loss": 2.4874,
      "step": 57
    },
    {
      "epoch": 0.15384615384615385,
      "grad_norm": 253.54349985340127,
      "learning_rate": 5.087719298245615e-06,
      "loss": 2.5132,
      "step": 58
    },
    {
      "epoch": 0.15649867374005305,
      "grad_norm": 220.72491071710908,
      "learning_rate": 5.175438596491229e-06,
      "loss": 2.4212,
      "step": 59
    },
    {
      "epoch": 0.15915119363395225,
      "grad_norm": 252.66943722144052,
      "learning_rate": 5.263157894736842e-06,
      "loss": 2.5385,
      "step": 60
    },
    {
      "epoch": 0.16180371352785147,
      "grad_norm": 224.39089747364216,
      "learning_rate": 5.350877192982457e-06,
      "loss": 2.4101,
      "step": 61
    },
    {
      "epoch": 0.16445623342175067,
      "grad_norm": 262.76676290957175,
      "learning_rate": 5.438596491228071e-06,
      "loss": 2.409,
      "step": 62
    },
    {
      "epoch": 0.16710875331564987,
      "grad_norm": 323.6668556817878,
      "learning_rate": 5.526315789473685e-06,
      "loss": 2.5888,
      "step": 63
    },
    {
      "epoch": 0.16976127320954906,
      "grad_norm": 241.96152699479308,
      "learning_rate": 5.6140350877192985e-06,
      "loss": 2.4001,
      "step": 64
    },
    {
      "epoch": 0.1724137931034483,
      "grad_norm": 239.23277265884937,
      "learning_rate": 5.701754385964913e-06,
      "loss": 2.4135,
      "step": 65
    },
    {
      "epoch": 0.17506631299734748,
      "grad_norm": 179.6461482025435,
      "learning_rate": 5.789473684210527e-06,
      "loss": 2.3741,
      "step": 66
    },
    {
      "epoch": 0.17771883289124668,
      "grad_norm": 175.576493404712,
      "learning_rate": 5.877192982456141e-06,
      "loss": 2.38,
      "step": 67
    },
    {
      "epoch": 0.18037135278514588,
      "grad_norm": 223.99940759589632,
      "learning_rate": 5.964912280701755e-06,
      "loss": 2.4092,
      "step": 68
    },
    {
      "epoch": 0.1830238726790451,
      "grad_norm": 176.1394420449391,
      "learning_rate": 6.0526315789473685e-06,
      "loss": 2.3259,
      "step": 69
    },
    {
      "epoch": 0.1856763925729443,
      "grad_norm": 196.34227933609677,
      "learning_rate": 6.140350877192983e-06,
      "loss": 2.3665,
      "step": 70
    },
    {
      "epoch": 0.1883289124668435,
      "grad_norm": 186.26080573377942,
      "learning_rate": 6.2280701754385975e-06,
      "loss": 2.3239,
      "step": 71
    },
    {
      "epoch": 0.1909814323607427,
      "grad_norm": 250.63759991206928,
      "learning_rate": 6.31578947368421e-06,
      "loss": 2.4194,
      "step": 72
    },
    {
      "epoch": 0.19363395225464192,
      "grad_norm": 184.67595443945896,
      "learning_rate": 6.403508771929825e-06,
      "loss": 2.2825,
      "step": 73
    },
    {
      "epoch": 0.1962864721485411,
      "grad_norm": 315.4699350961708,
      "learning_rate": 6.491228070175439e-06,
      "loss": 2.6249,
      "step": 74
    },
    {
      "epoch": 0.1989389920424403,
      "grad_norm": 196.94429675165398,
      "learning_rate": 6.578947368421054e-06,
      "loss": 2.2925,
      "step": 75
    },
    {
      "epoch": 0.20159151193633953,
      "grad_norm": 232.42841343000674,
      "learning_rate": 6.666666666666667e-06,
      "loss": 2.5517,
      "step": 76
    },
    {
      "epoch": 0.20424403183023873,
      "grad_norm": 169.87354614948592,
      "learning_rate": 6.754385964912281e-06,
      "loss": 2.2481,
      "step": 77
    },
    {
      "epoch": 0.20689655172413793,
      "grad_norm": 178.528334398205,
      "learning_rate": 6.842105263157896e-06,
      "loss": 2.247,
      "step": 78
    },
    {
      "epoch": 0.20954907161803712,
      "grad_norm": 197.92006440702107,
      "learning_rate": 6.92982456140351e-06,
      "loss": 2.4433,
      "step": 79
    },
    {
      "epoch": 0.21220159151193635,
      "grad_norm": 197.10177112042496,
      "learning_rate": 7.017543859649123e-06,
      "loss": 2.2713,
      "step": 80
    },
    {
      "epoch": 0.21485411140583555,
      "grad_norm": 172.54053286188068,
      "learning_rate": 7.1052631578947375e-06,
      "loss": 2.3019,
      "step": 81
    },
    {
      "epoch": 0.21750663129973474,
      "grad_norm": 228.60506906514024,
      "learning_rate": 7.192982456140352e-06,
      "loss": 2.1584,
      "step": 82
    },
    {
      "epoch": 0.22015915119363394,
      "grad_norm": 137.3028809321962,
      "learning_rate": 7.280701754385966e-06,
      "loss": 2.1627,
      "step": 83
    },
    {
      "epoch": 0.22281167108753316,
      "grad_norm": 138.40608701456674,
      "learning_rate": 7.368421052631579e-06,
      "loss": 2.1745,
      "step": 84
    },
    {
      "epoch": 0.22546419098143236,
      "grad_norm": 162.95787474014585,
      "learning_rate": 7.456140350877194e-06,
      "loss": 2.1807,
      "step": 85
    },
    {
      "epoch": 0.22811671087533156,
      "grad_norm": 137.23258451294802,
      "learning_rate": 7.5438596491228074e-06,
      "loss": 2.2449,
      "step": 86
    },
    {
      "epoch": 0.23076923076923078,
      "grad_norm": 136.69569023050238,
      "learning_rate": 7.631578947368423e-06,
      "loss": 2.0813,
      "step": 87
    },
    {
      "epoch": 0.23342175066312998,
      "grad_norm": 150.51052121600267,
      "learning_rate": 7.719298245614036e-06,
      "loss": 2.2483,
      "step": 88
    },
    {
      "epoch": 0.23607427055702918,
      "grad_norm": 130.82973054024674,
      "learning_rate": 7.80701754385965e-06,
      "loss": 2.0624,
      "step": 89
    },
    {
      "epoch": 0.23872679045092837,
      "grad_norm": 197.9266835754521,
      "learning_rate": 7.894736842105265e-06,
      "loss": 2.5235,
      "step": 90
    },
    {
      "epoch": 0.2413793103448276,
      "grad_norm": 139.32851502743648,
      "learning_rate": 7.982456140350877e-06,
      "loss": 2.4074,
      "step": 91
    },
    {
      "epoch": 0.2440318302387268,
      "grad_norm": 161.62586817939928,
      "learning_rate": 8.070175438596492e-06,
      "loss": 2.6251,
      "step": 92
    },
    {
      "epoch": 0.246684350132626,
      "grad_norm": 130.369540296953,
      "learning_rate": 8.157894736842106e-06,
      "loss": 2.218,
      "step": 93
    },
    {
      "epoch": 0.2493368700265252,
      "grad_norm": 172.2161586170502,
      "learning_rate": 8.24561403508772e-06,
      "loss": 2.4513,
      "step": 94
    },
    {
      "epoch": 0.2519893899204244,
      "grad_norm": 133.83426479043686,
      "learning_rate": 8.333333333333334e-06,
      "loss": 2.1488,
      "step": 95
    },
    {
      "epoch": 0.2546419098143236,
      "grad_norm": 162.54679516690865,
      "learning_rate": 8.421052631578948e-06,
      "loss": 2.1986,
      "step": 96
    },
    {
      "epoch": 0.2572944297082228,
      "grad_norm": 117.31357015065828,
      "learning_rate": 8.508771929824563e-06,
      "loss": 2.1464,
      "step": 97
    },
    {
      "epoch": 0.259946949602122,
      "grad_norm": 95.62016778990143,
      "learning_rate": 8.596491228070176e-06,
      "loss": 1.9437,
      "step": 98
    },
    {
      "epoch": 0.2625994694960212,
      "grad_norm": 112.95580192337754,
      "learning_rate": 8.68421052631579e-06,
      "loss": 2.0702,
      "step": 99
    },
    {
      "epoch": 0.26525198938992045,
      "grad_norm": 133.0945606777901,
      "learning_rate": 8.771929824561405e-06,
      "loss": 1.9378,
      "step": 100
    },
    {
      "epoch": 0.26790450928381965,
      "grad_norm": 146.2661005058808,
      "learning_rate": 8.859649122807017e-06,
      "loss": 1.9705,
      "step": 101
    },
    {
      "epoch": 0.27055702917771884,
      "grad_norm": 140.71789603854634,
      "learning_rate": 8.947368421052632e-06,
      "loss": 2.0788,
      "step": 102
    },
    {
      "epoch": 0.27320954907161804,
      "grad_norm": 127.48171170499167,
      "learning_rate": 9.035087719298246e-06,
      "loss": 1.8274,
      "step": 103
    },
    {
      "epoch": 0.27586206896551724,
      "grad_norm": 131.29016341680753,
      "learning_rate": 9.12280701754386e-06,
      "loss": 2.2052,
      "step": 104
    },
    {
      "epoch": 0.27851458885941643,
      "grad_norm": 83.38173666635986,
      "learning_rate": 9.210526315789474e-06,
      "loss": 1.8772,
      "step": 105
    },
    {
      "epoch": 0.28116710875331563,
      "grad_norm": 95.4005640169517,
      "learning_rate": 9.298245614035088e-06,
      "loss": 1.8585,
      "step": 106
    },
    {
      "epoch": 0.2838196286472148,
      "grad_norm": 81.6868222976919,
      "learning_rate": 9.385964912280703e-06,
      "loss": 1.7842,
      "step": 107
    },
    {
      "epoch": 0.2864721485411141,
      "grad_norm": 101.3668892091329,
      "learning_rate": 9.473684210526315e-06,
      "loss": 1.955,
      "step": 108
    },
    {
      "epoch": 0.2891246684350133,
      "grad_norm": 90.10488002182764,
      "learning_rate": 9.56140350877193e-06,
      "loss": 1.9475,
      "step": 109
    },
    {
      "epoch": 0.2917771883289125,
      "grad_norm": 75.64098223185391,
      "learning_rate": 9.649122807017545e-06,
      "loss": 1.7069,
      "step": 110
    },
    {
      "epoch": 0.29442970822281167,
      "grad_norm": 70.02492521413883,
      "learning_rate": 9.736842105263159e-06,
      "loss": 1.9182,
      "step": 111
    },
    {
      "epoch": 0.29708222811671087,
      "grad_norm": 113.78219869230792,
      "learning_rate": 9.824561403508772e-06,
      "loss": 2.3653,
      "step": 112
    },
    {
      "epoch": 0.29973474801061006,
      "grad_norm": 78.94175240520144,
      "learning_rate": 9.912280701754386e-06,
      "loss": 1.9443,
      "step": 113
    },
    {
      "epoch": 0.30238726790450926,
      "grad_norm": 78.49120181281637,
      "learning_rate": 1e-05,
      "loss": 1.8674,
      "step": 114
    },
    {
      "epoch": 0.3050397877984085,
      "grad_norm": 86.72103695721908,
      "learning_rate": 9.999976144006772e-06,
      "loss": 2.2703,
      "step": 115
    },
    {
      "epoch": 0.3076923076923077,
      "grad_norm": 47.848839063498325,
      "learning_rate": 9.999904576254723e-06,
      "loss": 1.572,
      "step": 116
    },
    {
      "epoch": 0.3103448275862069,
      "grad_norm": 79.61015049499017,
      "learning_rate": 9.999785297426788e-06,
      "loss": 1.7971,
      "step": 117
    },
    {
      "epoch": 0.3129973474801061,
      "grad_norm": 68.38718933568896,
      "learning_rate": 9.99961830866117e-06,
      "loss": 1.9313,
      "step": 118
    },
    {
      "epoch": 0.3156498673740053,
      "grad_norm": 51.179876920034914,
      "learning_rate": 9.999403611551341e-06,
      "loss": 1.5565,
      "step": 119
    },
    {
      "epoch": 0.3183023872679045,
      "grad_norm": 64.29725797705285,
      "learning_rate": 9.999141208146029e-06,
      "loss": 1.5536,
      "step": 120
    },
    {
      "epoch": 0.3209549071618037,
      "grad_norm": 49.96518518478552,
      "learning_rate": 9.998831100949188e-06,
      "loss": 1.4645,
      "step": 121
    },
    {
      "epoch": 0.32360742705570295,
      "grad_norm": 49.25606054644458,
      "learning_rate": 9.998473292919987e-06,
      "loss": 1.5135,
      "step": 122
    },
    {
      "epoch": 0.32625994694960214,
      "grad_norm": 38.39002954508541,
      "learning_rate": 9.998067787472772e-06,
      "loss": 1.3848,
      "step": 123
    },
    {
      "epoch": 0.32891246684350134,
      "grad_norm": 51.46260995748639,
      "learning_rate": 9.997614588477033e-06,
      "loss": 1.5078,
      "step": 124
    },
    {
      "epoch": 0.33156498673740054,
      "grad_norm": 47.707086918124624,
      "learning_rate": 9.997113700257383e-06,
      "loss": 1.3766,
      "step": 125
    },
    {
      "epoch": 0.33421750663129973,
      "grad_norm": 64.04590461927296,
      "learning_rate": 9.99656512759349e-06,
      "loss": 1.4349,
      "step": 126
    },
    {
      "epoch": 0.33687002652519893,
      "grad_norm": 53.447600347293225,
      "learning_rate": 9.995968875720052e-06,
      "loss": 1.482,
      "step": 127
    },
    {
      "epoch": 0.3395225464190981,
      "grad_norm": 48.31587410813423,
      "learning_rate": 9.995324950326746e-06,
      "loss": 1.3262,
      "step": 128
    },
    {
      "epoch": 0.3421750663129973,
      "grad_norm": 69.41825947491212,
      "learning_rate": 9.994633357558158e-06,
      "loss": 1.4316,
      "step": 129
    },
    {
      "epoch": 0.3448275862068966,
      "grad_norm": 57.775756049681775,
      "learning_rate": 9.993894104013748e-06,
      "loss": 1.4574,
      "step": 130
    },
    {
      "epoch": 0.34748010610079577,
      "grad_norm": 41.27089386693434,
      "learning_rate": 9.99310719674776e-06,
      "loss": 1.2503,
      "step": 131
    },
    {
      "epoch": 0.35013262599469497,
      "grad_norm": 34.4370860027977,
      "learning_rate": 9.992272643269181e-06,
      "loss": 1.0917,
      "step": 132
    },
    {
      "epoch": 0.35278514588859416,
      "grad_norm": 47.92471790261903,
      "learning_rate": 9.99139045154165e-06,
      "loss": 1.2382,
      "step": 133
    },
    {
      "epoch": 0.35543766578249336,
      "grad_norm": 49.9081128353915,
      "learning_rate": 9.99046062998339e-06,
      "loss": 1.3388,
      "step": 134
    },
    {
      "epoch": 0.35809018567639256,
      "grad_norm": 45.548101311222744,
      "learning_rate": 9.989483187467128e-06,
      "loss": 1.127,
      "step": 135
    },
    {
      "epoch": 0.36074270557029176,
      "grad_norm": 56.784478228249924,
      "learning_rate": 9.988458133320009e-06,
      "loss": 1.29,
      "step": 136
    },
    {
      "epoch": 0.363395225464191,
      "grad_norm": 45.43238811368123,
      "learning_rate": 9.987385477323507e-06,
      "loss": 1.2792,
      "step": 137
    },
    {
      "epoch": 0.3660477453580902,
      "grad_norm": 51.89382859053669,
      "learning_rate": 9.986265229713332e-06,
      "loss": 1.3534,
      "step": 138
    },
    {
      "epoch": 0.3687002652519894,
      "grad_norm": 46.33340281664256,
      "learning_rate": 9.985097401179333e-06,
      "loss": 1.3419,
      "step": 139
    },
    {
      "epoch": 0.3713527851458886,
      "grad_norm": 35.94198985151234,
      "learning_rate": 9.983882002865392e-06,
      "loss": 0.9718,
      "step": 140
    },
    {
      "epoch": 0.3740053050397878,
      "grad_norm": 35.294849184766264,
      "learning_rate": 9.982619046369321e-06,
      "loss": 1.003,
      "step": 141
    },
    {
      "epoch": 0.376657824933687,
      "grad_norm": 42.16976850569041,
      "learning_rate": 9.981308543742759e-06,
      "loss": 1.2051,
      "step": 142
    },
    {
      "epoch": 0.3793103448275862,
      "grad_norm": 47.66629555583024,
      "learning_rate": 9.979950507491035e-06,
      "loss": 1.499,
      "step": 143
    },
    {
      "epoch": 0.3819628647214854,
      "grad_norm": 31.799357696770404,
      "learning_rate": 9.978544950573075e-06,
      "loss": 0.9198,
      "step": 144
    },
    {
      "epoch": 0.38461538461538464,
      "grad_norm": 31.399975284772456,
      "learning_rate": 9.97709188640126e-06,
      "loss": 0.8313,
      "step": 145
    },
    {
      "epoch": 0.38726790450928383,
      "grad_norm": 39.60233683027201,
      "learning_rate": 9.975591328841306e-06,
      "loss": 0.9907,
      "step": 146
    },
    {
      "epoch": 0.38992042440318303,
      "grad_norm": 50.72888144941861,
      "learning_rate": 9.974043292212129e-06,
      "loss": 0.9659,
      "step": 147
    },
    {
      "epoch": 0.3925729442970822,
      "grad_norm": 34.2382037770928,
      "learning_rate": 9.97244779128571e-06,
      "loss": 0.8095,
      "step": 148
    },
    {
      "epoch": 0.3952254641909814,
      "grad_norm": 30.53623445439956,
      "learning_rate": 9.970804841286954e-06,
      "loss": 0.7986,
      "step": 149
    },
    {
      "epoch": 0.3978779840848806,
      "grad_norm": 21.32608698047807,
      "learning_rate": 9.96911445789354e-06,
      "loss": 0.656,
      "step": 150
    },
    {
      "epoch": 0.4005305039787798,
      "grad_norm": 27.679120212238363,
      "learning_rate": 9.96737665723578e-06,
      "loss": 0.7913,
      "step": 151
    },
    {
      "epoch": 0.40318302387267907,
      "grad_norm": 32.969472057658585,
      "learning_rate": 9.965591455896456e-06,
      "loss": 0.841,
      "step": 152
    },
    {
      "epoch": 0.40583554376657827,
      "grad_norm": 412.28241091884604,
      "learning_rate": 9.963758870910672e-06,
      "loss": 1.2917,
      "step": 153
    },
    {
      "epoch": 0.40848806366047746,
      "grad_norm": 49.04456362529717,
      "learning_rate": 9.961878919765678e-06,
      "loss": 0.8898,
      "step": 154
    },
    {
      "epoch": 0.41114058355437666,
      "grad_norm": 39.36253019431454,
      "learning_rate": 9.95995162040072e-06,
      "loss": 0.7099,
      "step": 155
    },
    {
      "epoch": 0.41379310344827586,
      "grad_norm": 34.63935013627246,
      "learning_rate": 9.957976991206847e-06,
      "loss": 0.7765,
      "step": 156
    },
    {
      "epoch": 0.41644562334217505,
      "grad_norm": 26.840735102660002,
      "learning_rate": 9.95595505102676e-06,
      "loss": 0.6499,
      "step": 157
    },
    {
      "epoch": 0.41909814323607425,
      "grad_norm": 63.18942895204754,
      "learning_rate": 9.953885819154615e-06,
      "loss": 1.0861,
      "step": 158
    },
    {
      "epoch": 0.4217506631299735,
      "grad_norm": 25.445198024275147,
      "learning_rate": 9.951769315335843e-06,
      "loss": 0.7166,
      "step": 159
    },
    {
      "epoch": 0.4244031830238727,
      "grad_norm": 26.58185371356998,
      "learning_rate": 9.949605559766969e-06,
      "loss": 0.5657,
      "step": 160
    },
    {
      "epoch": 0.4270557029177719,
      "grad_norm": 27.958321235093127,
      "learning_rate": 9.947394573095403e-06,
      "loss": 0.5804,
      "step": 161
    },
    {
      "epoch": 0.4297082228116711,
      "grad_norm": 29.962873219114176,
      "learning_rate": 9.94513637641926e-06,
      "loss": 0.5276,
      "step": 162
    },
    {
      "epoch": 0.4323607427055703,
      "grad_norm": 25.88658622108772,
      "learning_rate": 9.942830991287149e-06,
      "loss": 0.5638,
      "step": 163
    },
    {
      "epoch": 0.4350132625994695,
      "grad_norm": 28.58328032342846,
      "learning_rate": 9.940478439697973e-06,
      "loss": 0.5453,
      "step": 164
    },
    {
      "epoch": 0.4376657824933687,
      "grad_norm": 31.677528986906,
      "learning_rate": 9.938078744100713e-06,
      "loss": 0.5716,
      "step": 165
    },
    {
      "epoch": 0.4403183023872679,
      "grad_norm": 26.616435831422383,
      "learning_rate": 9.935631927394216e-06,
      "loss": 0.5028,
      "step": 166
    },
    {
      "epoch": 0.44297082228116713,
      "grad_norm": 34.69859278391251,
      "learning_rate": 9.933138012926982e-06,
      "loss": 0.5672,
      "step": 167
    },
    {
      "epoch": 0.44562334217506633,
      "grad_norm": 24.597680749706683,
      "learning_rate": 9.930597024496933e-06,
      "loss": 0.4989,
      "step": 168
    },
    {
      "epoch": 0.4482758620689655,
      "grad_norm": 47.44734871590014,
      "learning_rate": 9.928008986351187e-06,
      "loss": 0.5661,
      "step": 169
    },
    {
      "epoch": 0.4509283819628647,
      "grad_norm": 27.688296544663764,
      "learning_rate": 9.925373923185835e-06,
      "loss": 0.5102,
      "step": 170
    },
    {
      "epoch": 0.4535809018567639,
      "grad_norm": 28.913477864873013,
      "learning_rate": 9.922691860145696e-06,
      "loss": 0.527,
      "step": 171
    },
    {
      "epoch": 0.4562334217506631,
      "grad_norm": 23.76336979943884,
      "learning_rate": 9.919962822824083e-06,
      "loss": 0.4584,
      "step": 172
    },
    {
      "epoch": 0.4588859416445623,
      "grad_norm": 27.428221662294682,
      "learning_rate": 9.917186837262552e-06,
      "loss": 0.5161,
      "step": 173
    },
    {
      "epoch": 0.46153846153846156,
      "grad_norm": 24.816760913180836,
      "learning_rate": 9.91436392995066e-06,
      "loss": 0.4373,
      "step": 174
    },
    {
      "epoch": 0.46419098143236076,
      "grad_norm": 20.042908299766527,
      "learning_rate": 9.91149412782571e-06,
      "loss": 0.3862,
      "step": 175
    },
    {
      "epoch": 0.46684350132625996,
      "grad_norm": 22.52969310546478,
      "learning_rate": 9.908577458272496e-06,
      "loss": 0.4759,
      "step": 176
    },
    {
      "epoch": 0.46949602122015915,
      "grad_norm": 35.08325684519957,
      "learning_rate": 9.905613949123036e-06,
      "loss": 0.443,
      "step": 177
    },
    {
      "epoch": 0.47214854111405835,
      "grad_norm": 27.25235345890855,
      "learning_rate": 9.902603628656312e-06,
      "loss": 0.3758,
      "step": 178
    },
    {
      "epoch": 0.47480106100795755,
      "grad_norm": 45.22071533665291,
      "learning_rate": 9.899546525597998e-06,
      "loss": 0.5609,
      "step": 179
    },
    {
      "epoch": 0.47745358090185674,
      "grad_norm": 37.26690294810362,
      "learning_rate": 9.896442669120188e-06,
      "loss": 0.4942,
      "step": 180
    },
    {
      "epoch": 0.48010610079575594,
      "grad_norm": 29.928428373271707,
      "learning_rate": 9.893292088841109e-06,
      "loss": 0.4503,
      "step": 181
    },
    {
      "epoch": 0.4827586206896552,
      "grad_norm": 21.722116105223737,
      "learning_rate": 9.890094814824854e-06,
      "loss": 0.3151,
      "step": 182
    },
    {
      "epoch": 0.4854111405835544,
      "grad_norm": 24.607735893749748,
      "learning_rate": 9.886850877581079e-06,
      "loss": 0.3298,
      "step": 183
    },
    {
      "epoch": 0.4880636604774536,
      "grad_norm": 33.43828584668856,
      "learning_rate": 9.883560308064723e-06,
      "loss": 0.3457,
      "step": 184
    },
    {
      "epoch": 0.4907161803713528,
      "grad_norm": 31.065576895170462,
      "learning_rate": 9.880223137675709e-06,
      "loss": 0.3693,
      "step": 185
    },
    {
      "epoch": 0.493368700265252,
      "grad_norm": 37.49242844056875,
      "learning_rate": 9.87683939825864e-06,
      "loss": 0.4888,
      "step": 186
    },
    {
      "epoch": 0.4960212201591512,
      "grad_norm": 38.74998441973847,
      "learning_rate": 9.873409122102505e-06,
      "loss": 0.468,
      "step": 187
    },
    {
      "epoch": 0.4986737400530504,
      "grad_norm": 35.008093098415614,
      "learning_rate": 9.86993234194036e-06,
      "loss": 0.4246,
      "step": 188
    },
    {
      "epoch": 0.5013262599469496,
      "grad_norm": 27.804082583361943,
      "learning_rate": 9.866409090949023e-06,
      "loss": 0.3621,
      "step": 189
    },
    {
      "epoch": 0.5039787798408488,
      "grad_norm": 25.629862496043533,
      "learning_rate": 9.862839402748754e-06,
      "loss": 0.4143,
      "step": 190
    },
    {
      "epoch": 0.506631299734748,
      "grad_norm": 41.827303798404294,
      "learning_rate": 9.859223311402937e-06,
      "loss": 0.4188,
      "step": 191
    },
    {
      "epoch": 0.5092838196286472,
      "grad_norm": 24.462284819105808,
      "learning_rate": 9.855560851417752e-06,
      "loss": 0.3295,
      "step": 192
    },
    {
      "epoch": 0.5119363395225465,
      "grad_norm": 22.79091787353534,
      "learning_rate": 9.851852057741846e-06,
      "loss": 0.2292,
      "step": 193
    },
    {
      "epoch": 0.5145888594164456,
      "grad_norm": 31.885324600524022,
      "learning_rate": 9.848096965766005e-06,
      "loss": 0.401,
      "step": 194
    },
    {
      "epoch": 0.5172413793103449,
      "grad_norm": 30.710948037812585,
      "learning_rate": 9.844295611322804e-06,
      "loss": 0.323,
      "step": 195
    },
    {
      "epoch": 0.519893899204244,
      "grad_norm": 41.79890030360689,
      "learning_rate": 9.84044803068628e-06,
      "loss": 0.3313,
      "step": 196
    },
    {
      "epoch": 0.5225464190981433,
      "grad_norm": 22.535018094427123,
      "learning_rate": 9.836554260571577e-06,
      "loss": 0.3267,
      "step": 197
    },
    {
      "epoch": 0.5251989389920424,
      "grad_norm": 34.92359618277551,
      "learning_rate": 9.832614338134595e-06,
      "loss": 0.4107,
      "step": 198
    },
    {
      "epoch": 0.5278514588859416,
      "grad_norm": 38.47784571433333,
      "learning_rate": 9.828628300971639e-06,
      "loss": 0.3746,
      "step": 199
    },
    {
      "epoch": 0.5305039787798409,
      "grad_norm": 51.09946363547108,
      "learning_rate": 9.82459618711906e-06,
      "loss": 0.425,
      "step": 200
    },
    {
      "epoch": 0.53315649867374,
      "grad_norm": 31.35834407033397,
      "learning_rate": 9.82051803505289e-06,
      "loss": 0.3532,
      "step": 201
    },
    {
      "epoch": 0.5358090185676393,
      "grad_norm": 26.935093809636733,
      "learning_rate": 9.816393883688475e-06,
      "loss": 0.2851,
      "step": 202
    },
    {
      "epoch": 0.5384615384615384,
      "grad_norm": 22.52622655874006,
      "learning_rate": 9.812223772380107e-06,
      "loss": 0.2908,
      "step": 203
    },
    {
      "epoch": 0.5411140583554377,
      "grad_norm": 51.489094847599844,
      "learning_rate": 9.808007740920647e-06,
      "loss": 0.5435,
      "step": 204
    },
    {
      "epoch": 0.5437665782493368,
      "grad_norm": 59.633888635972156,
      "learning_rate": 9.803745829541138e-06,
      "loss": 0.3766,
      "step": 205
    },
    {
      "epoch": 0.5464190981432361,
      "grad_norm": 38.02033428452659,
      "learning_rate": 9.799438078910433e-06,
      "loss": 0.2728,
      "step": 206
    },
    {
      "epoch": 0.5490716180371353,
      "grad_norm": 51.48954310514882,
      "learning_rate": 9.795084530134801e-06,
      "loss": 0.2818,
      "step": 207
    },
    {
      "epoch": 0.5517241379310345,
      "grad_norm": 53.46148414443766,
      "learning_rate": 9.790685224757534e-06,
      "loss": 0.4253,
      "step": 208
    },
    {
      "epoch": 0.5543766578249337,
      "grad_norm": 43.35134143380862,
      "learning_rate": 9.786240204758552e-06,
      "loss": 0.3331,
      "step": 209
    },
    {
      "epoch": 0.5570291777188329,
      "grad_norm": 57.29778955186939,
      "learning_rate": 9.781749512554e-06,
      "loss": 0.3571,
      "step": 210
    },
    {
      "epoch": 0.5596816976127321,
      "grad_norm": 49.20361109100751,
      "learning_rate": 9.777213190995849e-06,
      "loss": 0.4044,
      "step": 211
    },
    {
      "epoch": 0.5623342175066313,
      "grad_norm": 38.82482405915261,
      "learning_rate": 9.772631283371481e-06,
      "loss": 0.3374,
      "step": 212
    },
    {
      "epoch": 0.5649867374005305,
      "grad_norm": 48.021781844446835,
      "learning_rate": 9.768003833403278e-06,
      "loss": 0.3962,
      "step": 213
    },
    {
      "epoch": 0.5676392572944297,
      "grad_norm": 52.28939094494638,
      "learning_rate": 9.763330885248206e-06,
      "loss": 0.2568,
      "step": 214
    },
    {
      "epoch": 0.5702917771883289,
      "grad_norm": 58.18428330420366,
      "learning_rate": 9.758612483497395e-06,
      "loss": 0.4075,
      "step": 215
    },
    {
      "epoch": 0.5729442970822282,
      "grad_norm": 65.91359809789627,
      "learning_rate": 9.753848673175707e-06,
      "loss": 0.3884,
      "step": 216
    },
    {
      "epoch": 0.5755968169761273,
      "grad_norm": 85.87965148754081,
      "learning_rate": 9.749039499741313e-06,
      "loss": 0.3771,
      "step": 217
    },
    {
      "epoch": 0.5782493368700266,
      "grad_norm": 173.13163239297148,
      "learning_rate": 9.744185009085258e-06,
      "loss": 0.4767,
      "step": 218
    },
    {
      "epoch": 0.5809018567639257,
      "grad_norm": 74.19989440496742,
      "learning_rate": 9.739285247531019e-06,
      "loss": 0.5952,
      "step": 219
    },
    {
      "epoch": 0.583554376657825,
      "grad_norm": 67.181613202375,
      "learning_rate": 9.734340261834068e-06,
      "loss": 0.4403,
      "step": 220
    },
    {
      "epoch": 0.5862068965517241,
      "grad_norm": 87.56458995512162,
      "learning_rate": 9.72935009918142e-06,
      "loss": 0.3334,
      "step": 221
    },
    {
      "epoch": 0.5888594164456233,
      "grad_norm": 98.26463421494698,
      "learning_rate": 9.724314807191197e-06,
      "loss": 0.5103,
      "step": 222
    },
    {
      "epoch": 0.5915119363395226,
      "grad_norm": 371.4672878296043,
      "learning_rate": 9.719234433912148e-06,
      "loss": 0.6872,
      "step": 223
    },
    {
      "epoch": 0.5941644562334217,
      "grad_norm": 87.40317227687746,
      "learning_rate": 9.714109027823218e-06,
      "loss": 0.4359,
      "step": 224
    },
    {
      "epoch": 0.596816976127321,
      "grad_norm": 141.95076404109608,
      "learning_rate": 9.708938637833065e-06,
      "loss": 0.4145,
      "step": 225
    },
    {
      "epoch": 0.5994694960212201,
      "grad_norm": 64.57421523606523,
      "learning_rate": 9.703723313279607e-06,
      "loss": 0.3707,
      "step": 226
    },
    {
      "epoch": 0.6021220159151194,
      "grad_norm": 79.25888498842731,
      "learning_rate": 9.698463103929542e-06,
      "loss": 0.3337,
      "step": 227
    },
    {
      "epoch": 0.6047745358090185,
      "grad_norm": 138.38119877155972,
      "learning_rate": 9.693158059977879e-06,
      "loss": 0.3935,
      "step": 228
    },
    {
      "epoch": 0.6074270557029178,
      "grad_norm": 62.52742051706753,
      "learning_rate": 9.687808232047452e-06,
      "loss": 0.4193,
      "step": 229
    },
    {
      "epoch": 0.610079575596817,
      "grad_norm": 73.92852240551677,
      "learning_rate": 9.682413671188444e-06,
      "loss": 0.3691,
      "step": 230
    },
    {
      "epoch": 0.6127320954907162,
      "grad_norm": 103.53554748059803,
      "learning_rate": 9.6769744288779e-06,
      "loss": 0.3535,
      "step": 231
    },
    {
      "epoch": 0.6153846153846154,
      "grad_norm": 42.7125473553319,
      "learning_rate": 9.671490557019234e-06,
      "loss": 0.3155,
      "step": 232
    },
    {
      "epoch": 0.6180371352785146,
      "grad_norm": 54.829192160820845,
      "learning_rate": 9.665962107941725e-06,
      "loss": 0.2673,
      "step": 233
    },
    {
      "epoch": 0.6206896551724138,
      "grad_norm": 305.3150318928213,
      "learning_rate": 9.660389134400034e-06,
      "loss": 0.2952,
      "step": 234
    },
    {
      "epoch": 0.623342175066313,
      "grad_norm": 212.30409368555127,
      "learning_rate": 9.654771689573685e-06,
      "loss": 0.3867,
      "step": 235
    },
    {
      "epoch": 0.6259946949602122,
      "grad_norm": 105.43931388039499,
      "learning_rate": 9.649109827066572e-06,
      "loss": 0.3817,
      "step": 236
    },
    {
      "epoch": 0.6286472148541115,
      "grad_norm": 81.59794116200509,
      "learning_rate": 9.643403600906433e-06,
      "loss": 0.4814,
      "step": 237
    },
    {
      "epoch": 0.6312997347480106,
      "grad_norm": 214.11174329135335,
      "learning_rate": 9.637653065544349e-06,
      "loss": 0.371,
      "step": 238
    },
    {
      "epoch": 0.6339522546419099,
      "grad_norm": 118.89467666014248,
      "learning_rate": 9.63185827585421e-06,
      "loss": 0.3935,
      "step": 239
    },
    {
      "epoch": 0.636604774535809,
      "grad_norm": 74.98460803487606,
      "learning_rate": 9.626019287132202e-06,
      "loss": 0.4457,
      "step": 240
    },
    {
      "epoch": 0.6392572944297082,
      "grad_norm": 41.472870074642465,
      "learning_rate": 9.620136155096276e-06,
      "loss": 0.2736,
      "step": 241
    },
    {
      "epoch": 0.6419098143236074,
      "grad_norm": 74.2956558934981,
      "learning_rate": 9.614208935885615e-06,
      "loss": 0.5428,
      "step": 242
    },
    {
      "epoch": 0.6445623342175066,
      "grad_norm": 127.0632945401361,
      "learning_rate": 9.608237686060099e-06,
      "loss": 0.4162,
      "step": 243
    },
    {
      "epoch": 0.6472148541114059,
      "grad_norm": 103.70620760268216,
      "learning_rate": 9.602222462599768e-06,
      "loss": 0.5392,
      "step": 244
    },
    {
      "epoch": 0.649867374005305,
      "grad_norm": 90.43287095478969,
      "learning_rate": 9.59616332290427e-06,
      "loss": 0.3134,
      "step": 245
    },
    {
      "epoch": 0.6525198938992043,
      "grad_norm": 254.77023599890552,
      "learning_rate": 9.590060324792328e-06,
      "loss": 0.329,
      "step": 246
    },
    {
      "epoch": 0.6551724137931034,
      "grad_norm": 58.742322679359965,
      "learning_rate": 9.58391352650117e-06,
      "loss": 0.3592,
      "step": 247
    },
    {
      "epoch": 0.6578249336870027,
      "grad_norm": 98.73104949830449,
      "learning_rate": 9.577722986685992e-06,
      "loss": 0.331,
      "step": 248
    },
    {
      "epoch": 0.6604774535809018,
      "grad_norm": 109.9795313881565,
      "learning_rate": 9.571488764419381e-06,
      "loss": 0.3457,
      "step": 249
    },
    {
      "epoch": 0.6631299734748011,
      "grad_norm": 60.5343482414627,
      "learning_rate": 9.565210919190764e-06,
      "loss": 0.3269,
      "step": 250
    },
    {
      "epoch": 0.6657824933687002,
      "grad_norm": 35.54885592977575,
      "learning_rate": 9.558889510905836e-06,
      "loss": 0.2986,
      "step": 251
    },
    {
      "epoch": 0.6684350132625995,
      "grad_norm": 36.55925854943744,
      "learning_rate": 9.552524599885982e-06,
      "loss": 0.2985,
      "step": 252
    },
    {
      "epoch": 0.6710875331564987,
      "grad_norm": 54.695899509794316,
      "learning_rate": 9.546116246867716e-06,
      "loss": 0.2343,
      "step": 253
    },
    {
      "epoch": 0.6737400530503979,
      "grad_norm": 60.49903065247721,
      "learning_rate": 9.539664513002085e-06,
      "loss": 0.2674,
      "step": 254
    },
    {
      "epoch": 0.6763925729442971,
      "grad_norm": 62.93317082598356,
      "learning_rate": 9.5331694598541e-06,
      "loss": 0.3461,
      "step": 255
    },
    {
      "epoch": 0.6790450928381963,
      "grad_norm": 116.04475232722392,
      "learning_rate": 9.526631149402135e-06,
      "loss": 0.3104,
      "step": 256
    },
    {
      "epoch": 0.6816976127320955,
      "grad_norm": 67.25880114731598,
      "learning_rate": 9.520049644037349e-06,
      "loss": 0.3375,
      "step": 257
    },
    {
      "epoch": 0.6843501326259946,
      "grad_norm": 71.72910226758626,
      "learning_rate": 9.51342500656308e-06,
      "loss": 0.3797,
      "step": 258
    },
    {
      "epoch": 0.6870026525198939,
      "grad_norm": 46.44975219408069,
      "learning_rate": 9.506757300194249e-06,
      "loss": 0.2771,
      "step": 259
    },
    {
      "epoch": 0.6896551724137931,
      "grad_norm": 106.56600861392955,
      "learning_rate": 9.500046588556762e-06,
      "loss": 0.3469,
      "step": 260
    },
    {
      "epoch": 0.6923076923076923,
      "grad_norm": 60.83080447251247,
      "learning_rate": 9.493292935686896e-06,
      "loss": 0.3156,
      "step": 261
    },
    {
      "epoch": 0.6949602122015915,
      "grad_norm": 33.135336113479035,
      "learning_rate": 9.486496406030687e-06,
      "loss": 0.3143,
      "step": 262
    },
    {
      "epoch": 0.6976127320954907,
      "grad_norm": 75.21806103795748,
      "learning_rate": 9.479657064443321e-06,
      "loss": 0.5503,
      "step": 263
    },
    {
      "epoch": 0.7002652519893899,
      "grad_norm": 53.35765416421378,
      "learning_rate": 9.472774976188515e-06,
      "loss": 0.335,
      "step": 264
    },
    {
      "epoch": 0.7029177718832891,
      "grad_norm": 23.697336562644033,
      "learning_rate": 9.46585020693789e-06,
      "loss": 0.2394,
      "step": 265
    },
    {
      "epoch": 0.7055702917771883,
      "grad_norm": 29.870651851658987,
      "learning_rate": 9.458882822770342e-06,
      "loss": 0.3299,
      "step": 266
    },
    {
      "epoch": 0.7082228116710876,
      "grad_norm": 41.11458931120419,
      "learning_rate": 9.451872890171419e-06,
      "loss": 0.3131,
      "step": 267
    },
    {
      "epoch": 0.7108753315649867,
      "grad_norm": 28.525249893009217,
      "learning_rate": 9.444820476032687e-06,
      "loss": 0.3914,
      "step": 268
    },
    {
      "epoch": 0.713527851458886,
      "grad_norm": 32.78558988940678,
      "learning_rate": 9.43772564765108e-06,
      "loss": 0.3668,
      "step": 269
    },
    {
      "epoch": 0.7161803713527851,
      "grad_norm": 32.87361891444267,
      "learning_rate": 9.430588472728271e-06,
      "loss": 0.4,
      "step": 270
    },
    {
      "epoch": 0.7188328912466844,
      "grad_norm": 34.95801441555627,
      "learning_rate": 9.423409019370015e-06,
      "loss": 0.3718,
      "step": 271
    },
    {
      "epoch": 0.7214854111405835,
      "grad_norm": 33.71518005464951,
      "learning_rate": 9.416187356085513e-06,
      "loss": 0.3528,
      "step": 272
    },
    {
      "epoch": 0.7241379310344828,
      "grad_norm": 24.622857735134367,
      "learning_rate": 9.408923551786742e-06,
      "loss": 0.2989,
      "step": 273
    },
    {
      "epoch": 0.726790450928382,
      "grad_norm": 20.513102908220276,
      "learning_rate": 9.401617675787812e-06,
      "loss": 0.2849,
      "step": 274
    },
    {
      "epoch": 0.7294429708222812,
      "grad_norm": 49.62935372844656,
      "learning_rate": 9.39426979780429e-06,
      "loss": 0.3088,
      "step": 275
    },
    {
      "epoch": 0.7320954907161804,
      "grad_norm": 99.4036166043459,
      "learning_rate": 9.386879987952549e-06,
      "loss": 0.3753,
      "step": 276
    },
    {
      "epoch": 0.7347480106100795,
      "grad_norm": 37.958768445614425,
      "learning_rate": 9.379448316749092e-06,
      "loss": 0.4278,
      "step": 277
    },
    {
      "epoch": 0.7374005305039788,
      "grad_norm": 22.952448667294956,
      "learning_rate": 9.371974855109876e-06,
      "loss": 0.3055,
      "step": 278
    },
    {
      "epoch": 0.7400530503978779,
      "grad_norm": 32.39914140080223,
      "learning_rate": 9.364459674349642e-06,
      "loss": 0.39,
      "step": 279
    },
    {
      "epoch": 0.7427055702917772,
      "grad_norm": 262.64318764193524,
      "learning_rate": 9.356902846181229e-06,
      "loss": 0.3677,
      "step": 280
    },
    {
      "epoch": 0.7453580901856764,
      "grad_norm": 22.215532388982677,
      "learning_rate": 9.349304442714895e-06,
      "loss": 0.2969,
      "step": 281
    },
    {
      "epoch": 0.7480106100795756,
      "grad_norm": 17.778109461565176,
      "learning_rate": 9.341664536457626e-06,
      "loss": 0.271,
      "step": 282
    },
    {
      "epoch": 0.7506631299734748,
      "grad_norm": 29.2037598726396,
      "learning_rate": 9.33398320031244e-06,
      "loss": 0.3859,
      "step": 283
    },
    {
      "epoch": 0.753315649867374,
      "grad_norm": 22.099580190625854,
      "learning_rate": 9.326260507577702e-06,
      "loss": 0.2939,
      "step": 284
    },
    {
      "epoch": 0.7559681697612732,
      "grad_norm": 35.83350243227712,
      "learning_rate": 9.318496531946411e-06,
      "loss": 0.3866,
      "step": 285
    },
    {
      "epoch": 0.7586206896551724,
      "grad_norm": 23.56156299043329,
      "learning_rate": 9.310691347505506e-06,
      "loss": 0.2483,
      "step": 286
    },
    {
      "epoch": 0.7612732095490716,
      "grad_norm": 19.983478638726627,
      "learning_rate": 9.30284502873516e-06,
      "loss": 0.3167,
      "step": 287
    },
    {
      "epoch": 0.7639257294429708,
      "grad_norm": 48.94714254686104,
      "learning_rate": 9.294957650508065e-06,
      "loss": 0.3196,
      "step": 288
    },
    {
      "epoch": 0.76657824933687,
      "grad_norm": 16.30906262917392,
      "learning_rate": 9.287029288088716e-06,
      "loss": 0.2335,
      "step": 289
    },
    {
      "epoch": 0.7692307692307693,
      "grad_norm": 25.475699429435473,
      "learning_rate": 9.279060017132698e-06,
      "loss": 0.2862,
      "step": 290
    },
    {
      "epoch": 0.7718832891246684,
      "grad_norm": 28.384146206526353,
      "learning_rate": 9.27104991368596e-06,
      "loss": 0.2908,
      "step": 291
    },
    {
      "epoch": 0.7745358090185677,
      "grad_norm": 48.12691541658247,
      "learning_rate": 9.262999054184093e-06,
      "loss": 0.2833,
      "step": 292
    },
    {
      "epoch": 0.7771883289124668,
      "grad_norm": 44.07215651045462,
      "learning_rate": 9.254907515451593e-06,
      "loss": 0.2849,
      "step": 293
    },
    {
      "epoch": 0.7798408488063661,
      "grad_norm": 39.591136153451586,
      "learning_rate": 9.246775374701139e-06,
      "loss": 0.2239,
      "step": 294
    },
    {
      "epoch": 0.7824933687002652,
      "grad_norm": 25.79331189444745,
      "learning_rate": 9.238602709532851e-06,
      "loss": 0.1937,
      "step": 295
    },
    {
      "epoch": 0.7851458885941645,
      "grad_norm": 17.94060621935374,
      "learning_rate": 9.230389597933545e-06,
      "loss": 0.2606,
      "step": 296
    },
    {
      "epoch": 0.7877984084880637,
      "grad_norm": 24.95938848211813,
      "learning_rate": 9.222136118275996e-06,
      "loss": 0.3266,
      "step": 297
    },
    {
      "epoch": 0.7904509283819628,
      "grad_norm": 23.220036606199297,
      "learning_rate": 9.213842349318185e-06,
      "loss": 0.2797,
      "step": 298
    },
    {
      "epoch": 0.7931034482758621,
      "grad_norm": 22.330136839566496,
      "learning_rate": 9.205508370202552e-06,
      "loss": 0.3015,
      "step": 299
    },
    {
      "epoch": 0.7957559681697612,
      "grad_norm": 32.818473260764726,
      "learning_rate": 9.197134260455233e-06,
      "loss": 0.3396,
      "step": 300
    },
    {
      "epoch": 0.7984084880636605,
      "grad_norm": 36.20294272608413,
      "learning_rate": 9.188720099985316e-06,
      "loss": 0.3389,
      "step": 301
    },
    {
      "epoch": 0.8010610079575596,
      "grad_norm": 43.50941855978113,
      "learning_rate": 9.180265969084058e-06,
      "loss": 0.3658,
      "step": 302
    },
    {
      "epoch": 0.8037135278514589,
      "grad_norm": 27.768953084611958,
      "learning_rate": 9.171771948424138e-06,
      "loss": 0.2811,
      "step": 303
    },
    {
      "epoch": 0.8063660477453581,
      "grad_norm": 19.52078292934591,
      "learning_rate": 9.163238119058873e-06,
      "loss": 0.2006,
      "step": 304
    },
    {
      "epoch": 0.8090185676392573,
      "grad_norm": 29.981050417557388,
      "learning_rate": 9.154664562421453e-06,
      "loss": 0.4175,
      "step": 305
    },
    {
      "epoch": 0.8116710875331565,
      "grad_norm": 40.14474678185231,
      "learning_rate": 9.146051360324166e-06,
      "loss": 0.355,
      "step": 306
    },
    {
      "epoch": 0.8143236074270557,
      "grad_norm": 33.53880491765791,
      "learning_rate": 9.137398594957605e-06,
      "loss": 0.3023,
      "step": 307
    },
    {
      "epoch": 0.8169761273209549,
      "grad_norm": 27.011343413853503,
      "learning_rate": 9.128706348889895e-06,
      "loss": 0.3198,
      "step": 308
    },
    {
      "epoch": 0.8196286472148541,
      "grad_norm": 31.239861629198902,
      "learning_rate": 9.119974705065902e-06,
      "loss": 0.2698,
      "step": 309
    },
    {
      "epoch": 0.8222811671087533,
      "grad_norm": 160.25189491094991,
      "learning_rate": 9.111203746806439e-06,
      "loss": 0.2455,
      "step": 310
    },
    {
      "epoch": 0.8249336870026526,
      "grad_norm": 19.517004117689925,
      "learning_rate": 9.102393557807476e-06,
      "loss": 0.2269,
      "step": 311
    },
    {
      "epoch": 0.8275862068965517,
      "grad_norm": 31.404118966904665,
      "learning_rate": 9.093544222139338e-06,
      "loss": 0.3547,
      "step": 312
    },
    {
      "epoch": 0.830238726790451,
      "grad_norm": 23.04361811420225,
      "learning_rate": 9.084655824245899e-06,
      "loss": 0.2616,
      "step": 313
    },
    {
      "epoch": 0.8328912466843501,
      "grad_norm": 19.480645791210744,
      "learning_rate": 9.075728448943783e-06,
      "loss": 0.226,
      "step": 314
    },
    {
      "epoch": 0.8355437665782494,
      "grad_norm": 30.320644718264788,
      "learning_rate": 9.066762181421552e-06,
      "loss": 0.2461,
      "step": 315
    },
    {
      "epoch": 0.8381962864721485,
      "grad_norm": 34.8700120321434,
      "learning_rate": 9.057757107238897e-06,
      "loss": 0.3387,
      "step": 316
    },
    {
      "epoch": 0.8408488063660478,
      "grad_norm": 52.98866951359855,
      "learning_rate": 9.048713312325806e-06,
      "loss": 0.372,
      "step": 317
    },
    {
      "epoch": 0.843501326259947,
      "grad_norm": 33.12902942097088,
      "learning_rate": 9.039630882981769e-06,
      "loss": 0.2694,
      "step": 318
    },
    {
      "epoch": 0.8461538461538461,
      "grad_norm": 49.80785762572212,
      "learning_rate": 9.030509905874934e-06,
      "loss": 0.4448,
      "step": 319
    },
    {
      "epoch": 0.8488063660477454,
      "grad_norm": 48.26008386302785,
      "learning_rate": 9.021350468041287e-06,
      "loss": 0.4231,
      "step": 320
    },
    {
      "epoch": 0.8514588859416445,
      "grad_norm": 47.68623752795164,
      "learning_rate": 9.012152656883824e-06,
      "loss": 0.2507,
      "step": 321
    },
    {
      "epoch": 0.8541114058355438,
      "grad_norm": 29.364366147300288,
      "learning_rate": 9.002916560171713e-06,
      "loss": 0.3595,
      "step": 322
    },
    {
      "epoch": 0.8567639257294429,
      "grad_norm": 20.836229758537232,
      "learning_rate": 8.993642266039457e-06,
      "loss": 0.2587,
      "step": 323
    },
    {
      "epoch": 0.8594164456233422,
      "grad_norm": 42.67691753843682,
      "learning_rate": 8.984329862986056e-06,
      "loss": 0.4073,
      "step": 324
    },
    {
      "epoch": 0.8620689655172413,
      "grad_norm": 27.185261724802686,
      "learning_rate": 8.974979439874161e-06,
      "loss": 0.3171,
      "step": 325
    },
    {
      "epoch": 0.8647214854111406,
      "grad_norm": 19.41663669746958,
      "learning_rate": 8.965591085929222e-06,
      "loss": 0.2436,
      "step": 326
    },
    {
      "epoch": 0.8673740053050398,
      "grad_norm": 24.66264338874257,
      "learning_rate": 8.956164890738643e-06,
      "loss": 0.1981,
      "step": 327
    },
    {
      "epoch": 0.870026525198939,
      "grad_norm": 47.7679626658194,
      "learning_rate": 8.946700944250925e-06,
      "loss": 0.4011,
      "step": 328
    },
    {
      "epoch": 0.8726790450928382,
      "grad_norm": 27.493035563524856,
      "learning_rate": 8.937199336774805e-06,
      "loss": 0.378,
      "step": 329
    },
    {
      "epoch": 0.8753315649867374,
      "grad_norm": 33.915658193431625,
      "learning_rate": 8.927660158978392e-06,
      "loss": 0.2557,
      "step": 330
    },
    {
      "epoch": 0.8779840848806366,
      "grad_norm": 34.75387246545008,
      "learning_rate": 8.918083501888318e-06,
      "loss": 0.2806,
      "step": 331
    },
    {
      "epoch": 0.8806366047745358,
      "grad_norm": 47.864807509921775,
      "learning_rate": 8.908469456888845e-06,
      "loss": 0.2115,
      "step": 332
    },
    {
      "epoch": 0.883289124668435,
      "grad_norm": 23.19477545972193,
      "learning_rate": 8.898818115721009e-06,
      "loss": 0.2153,
      "step": 333
    },
    {
      "epoch": 0.8859416445623343,
      "grad_norm": 29.645775504694704,
      "learning_rate": 8.889129570481742e-06,
      "loss": 0.1996,
      "step": 334
    },
    {
      "epoch": 0.8885941644562334,
      "grad_norm": 33.88979624484442,
      "learning_rate": 8.879403913622996e-06,
      "loss": 0.44,
      "step": 335
    },
    {
      "epoch": 0.8912466843501327,
      "grad_norm": 108.03362140730333,
      "learning_rate": 8.86964123795085e-06,
      "loss": 0.315,
      "step": 336
    },
    {
      "epoch": 0.8938992042440318,
      "grad_norm": 26.650695163554616,
      "learning_rate": 8.859841636624632e-06,
      "loss": 0.3892,
      "step": 337
    },
    {
      "epoch": 0.896551724137931,
      "grad_norm": 99.55841306535598,
      "learning_rate": 8.850005203156035e-06,
      "loss": 0.3036,
      "step": 338
    },
    {
      "epoch": 0.8992042440318302,
      "grad_norm": 48.33341336310134,
      "learning_rate": 8.84013203140821e-06,
      "loss": 0.2906,
      "step": 339
    },
    {
      "epoch": 0.9018567639257294,
      "grad_norm": 32.637651201360114,
      "learning_rate": 8.83022221559489e-06,
      "loss": 0.2077,
      "step": 340
    },
    {
      "epoch": 0.9045092838196287,
      "grad_norm": 25.534166507812365,
      "learning_rate": 8.820275850279473e-06,
      "loss": 0.1985,
      "step": 341
    },
    {
      "epoch": 0.9071618037135278,
      "grad_norm": 19.29296435033772,
      "learning_rate": 8.810293030374126e-06,
| "loss": 0.15, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.9098143236074271, | |
| "grad_norm": 27.61534726493821, | |
| "learning_rate": 8.800273851138882e-06, | |
| "loss": 0.2247, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.9124668435013262, | |
| "grad_norm": 25.349121736508582, | |
| "learning_rate": 8.790218408180736e-06, | |
| "loss": 0.1389, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.9151193633952255, | |
| "grad_norm": 19.504694513734783, | |
| "learning_rate": 8.780126797452713e-06, | |
| "loss": 0.1762, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.9177718832891246, | |
| "grad_norm": 30.50787858362382, | |
| "learning_rate": 8.769999115252976e-06, | |
| "loss": 0.1956, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.9204244031830239, | |
| "grad_norm": 20.104267575195813, | |
| "learning_rate": 8.759835458223889e-06, | |
| "loss": 0.1143, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.9230769230769231, | |
| "grad_norm": 13.594728975157095, | |
| "learning_rate": 8.749635923351108e-06, | |
| "loss": 0.0834, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.9257294429708223, | |
| "grad_norm": 29.496882045130082, | |
| "learning_rate": 8.739400607962644e-06, | |
| "loss": 0.2157, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.9283819628647215, | |
| "grad_norm": 35.773188660484834, | |
| "learning_rate": 8.729129609727948e-06, | |
| "loss": 0.3571, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.9310344827586207, | |
| "grad_norm": 13.875590781519865, | |
| "learning_rate": 8.71882302665696e-06, | |
| "loss": 0.0751, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.9336870026525199, | |
| "grad_norm": 44.46404138029754, | |
| "learning_rate": 8.708480957099195e-06, | |
| "loss": 0.1722, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.9363395225464191, | |
| "grad_norm": 45.19632637637808, | |
| "learning_rate": 8.698103499742785e-06, | |
| "loss": 0.3458, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.9389920424403183, | |
| "grad_norm": 26.55265455350115, | |
| "learning_rate": 8.687690753613554e-06, | |
| "loss": 0.294, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.9416445623342176, | |
| "grad_norm": 28.758380748351815, | |
| "learning_rate": 8.677242818074064e-06, | |
| "loss": 0.167, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.9442970822281167, | |
| "grad_norm": 28.435166312141938, | |
| "learning_rate": 8.666759792822662e-06, | |
| "loss": 0.2465, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.946949602122016, | |
| "grad_norm": 43.70197021130316, | |
| "learning_rate": 8.656241777892544e-06, | |
| "loss": 0.2928, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.9496021220159151, | |
| "grad_norm": 32.384482978840225, | |
| "learning_rate": 8.645688873650785e-06, | |
| "loss": 0.23, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.9522546419098143, | |
| "grad_norm": 29.50639836832452, | |
| "learning_rate": 8.635101180797391e-06, | |
| "loss": 0.0831, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.9549071618037135, | |
| "grad_norm": 27.98334313472022, | |
| "learning_rate": 8.624478800364332e-06, | |
| "loss": 0.3275, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.9575596816976127, | |
| "grad_norm": 28.204464260730454, | |
| "learning_rate": 8.613821833714584e-06, | |
| "loss": 0.1508, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.9602122015915119, | |
| "grad_norm": 25.1275557521092, | |
| "learning_rate": 8.603130382541156e-06, | |
| "loss": 0.2585, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.9628647214854111, | |
| "grad_norm": 14.538485052093858, | |
| "learning_rate": 8.592404548866123e-06, | |
| "loss": 0.1698, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.9655172413793104, | |
| "grad_norm": 38.7193182526393, | |
| "learning_rate": 8.581644435039652e-06, | |
| "loss": 0.2151, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.9681697612732095, | |
| "grad_norm": 43.83895037233051, | |
| "learning_rate": 8.570850143739022e-06, | |
| "loss": 0.2332, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.9708222811671088, | |
| "grad_norm": 30.371526624539523, | |
| "learning_rate": 8.56002177796765e-06, | |
| "loss": 0.4292, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.9734748010610079, | |
| "grad_norm": 28.866647333264957, | |
| "learning_rate": 8.549159441054105e-06, | |
| "loss": 0.3052, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.9761273209549072, | |
| "grad_norm": 17.088822690787353, | |
| "learning_rate": 8.538263236651119e-06, | |
| "loss": 0.0924, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.9787798408488063, | |
| "grad_norm": 8.296167808657815, | |
| "learning_rate": 8.527333268734607e-06, | |
| "loss": 0.0425, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.9814323607427056, | |
| "grad_norm": 139.90331496389769, | |
| "learning_rate": 8.516369641602662e-06, | |
| "loss": 0.3744, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.9840848806366048, | |
| "grad_norm": 35.92284504373762, | |
| "learning_rate": 8.505372459874572e-06, | |
| "loss": 0.4011, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.986737400530504, | |
| "grad_norm": 42.55708188098206, | |
| "learning_rate": 8.494341828489812e-06, | |
| "loss": 0.3883, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.9893899204244032, | |
| "grad_norm": 26.194586839898157, | |
| "learning_rate": 8.483277852707053e-06, | |
| "loss": 0.2485, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.9920424403183024, | |
| "grad_norm": 17.83869232418558, | |
| "learning_rate": 8.472180638103143e-06, | |
| "loss": 0.1974, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.9946949602122016, | |
| "grad_norm": 34.735683009320994, | |
| "learning_rate": 8.461050290572114e-06, | |
| "loss": 0.13, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.9973474801061007, | |
| "grad_norm": 26.532678956391074, | |
| "learning_rate": 8.449886916324168e-06, | |
| "loss": 0.3265, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 77.79014811596227, | |
| "learning_rate": 8.43869062188465e-06, | |
| "loss": 0.3241, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "eval_loss": 0.5891069173812866, | |
| "eval_runtime": 176.0707, | |
| "eval_samples_per_second": 12.012, | |
| "eval_steps_per_second": 1.505, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.0026525198938991, | |
| "grad_norm": 36.470611529759154, | |
| "learning_rate": 8.427461514093056e-06, | |
| "loss": 0.1684, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 1.0053050397877985, | |
| "grad_norm": 17.706888163127573, | |
| "learning_rate": 8.41619970010199e-06, | |
| "loss": 0.2765, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 1.0079575596816976, | |
| "grad_norm": 13.031570699168565, | |
| "learning_rate": 8.404905287376158e-06, | |
| "loss": 0.1101, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.0106100795755968, | |
| "grad_norm": 19.552662198077687, | |
| "learning_rate": 8.39357838369133e-06, | |
| "loss": 0.4447, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 1.013262599469496, | |
| "grad_norm": 17.815095847838503, | |
| "learning_rate": 8.382219097133323e-06, | |
| "loss": 0.2052, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 1.0159151193633953, | |
| "grad_norm": 12.505092725567371, | |
| "learning_rate": 8.370827536096966e-06, | |
| "loss": 0.0624, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 1.0185676392572944, | |
| "grad_norm": 11.478473259709562, | |
| "learning_rate": 8.359403809285054e-06, | |
| "loss": 0.1261, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 1.0212201591511936, | |
| "grad_norm": 8.521853145549802, | |
| "learning_rate": 8.347948025707331e-06, | |
| "loss": 0.0629, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 1.023872679045093, | |
| "grad_norm": 18.02070920605987, | |
| "learning_rate": 8.336460294679431e-06, | |
| "loss": 0.1441, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 1.026525198938992, | |
| "grad_norm": 17.00091095428731, | |
| "learning_rate": 8.324940725821853e-06, | |
| "loss": 0.1839, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 1.0291777188328912, | |
| "grad_norm": 7.130845145762619, | |
| "learning_rate": 8.313389429058895e-06, | |
| "loss": 0.0586, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 1.0318302387267904, | |
| "grad_norm": 9.12548545838751, | |
| "learning_rate": 8.301806514617622e-06, | |
| "loss": 0.1166, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 1.0344827586206897, | |
| "grad_norm": 36.64605899376915, | |
| "learning_rate": 8.290192093026805e-06, | |
| "loss": 0.2631, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 1.0371352785145889, | |
| "grad_norm": 30.0558772429006, | |
| "learning_rate": 8.27854627511587e-06, | |
| "loss": 0.1365, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 1.039787798408488, | |
| "grad_norm": 13.606408087389894, | |
| "learning_rate": 8.266869172013835e-06, | |
| "loss": 0.0601, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 1.0424403183023874, | |
| "grad_norm": 36.25256069833946, | |
| "learning_rate": 8.255160895148263e-06, | |
| "loss": 0.2423, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 1.0450928381962865, | |
| "grad_norm": 33.67722940279395, | |
| "learning_rate": 8.243421556244179e-06, | |
| "loss": 0.2654, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 1.0477453580901857, | |
| "grad_norm": 40.52545842308433, | |
| "learning_rate": 8.23165126732302e-06, | |
| "loss": 0.2066, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 1.0503978779840848, | |
| "grad_norm": 25.669838018256662, | |
| "learning_rate": 8.219850140701557e-06, | |
| "loss": 0.2277, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 1.0530503978779842, | |
| "grad_norm": 43.15769165903028, | |
| "learning_rate": 8.208018288990832e-06, | |
| "loss": 0.2268, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 1.0557029177718833, | |
| "grad_norm": 16.94140330545107, | |
| "learning_rate": 8.196155825095073e-06, | |
| "loss": 0.0841, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 1.0583554376657824, | |
| "grad_norm": 9.529747502579328, | |
| "learning_rate": 8.184262862210624e-06, | |
| "loss": 0.0631, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 1.0610079575596818, | |
| "grad_norm": 13.150162864905353, | |
| "learning_rate": 8.172339513824863e-06, | |
| "loss": 0.0842, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.063660477453581, | |
| "grad_norm": 11.45721931237483, | |
| "learning_rate": 8.160385893715113e-06, | |
| "loss": 0.1061, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 1.06631299734748, | |
| "grad_norm": 35.43358074136856, | |
| "learning_rate": 8.14840211594757e-06, | |
| "loss": 0.2425, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 1.0689655172413792, | |
| "grad_norm": 27.428682315233864, | |
| "learning_rate": 8.136388294876204e-06, | |
| "loss": 0.1924, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 1.0716180371352786, | |
| "grad_norm": 8.171222022680151, | |
| "learning_rate": 8.124344545141663e-06, | |
| "loss": 0.1284, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 1.0742705570291777, | |
| "grad_norm": 39.06536575690631, | |
| "learning_rate": 8.112270981670196e-06, | |
| "loss": 0.232, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 1.0769230769230769, | |
| "grad_norm": 21.32963011576174, | |
| "learning_rate": 8.10016771967254e-06, | |
| "loss": 0.1055, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 1.079575596816976, | |
| "grad_norm": 27.22475799075306, | |
| "learning_rate": 8.088034874642834e-06, | |
| "loss": 0.1489, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 1.0822281167108754, | |
| "grad_norm": 14.3882842008674, | |
| "learning_rate": 8.075872562357502e-06, | |
| "loss": 0.0829, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 1.0848806366047745, | |
| "grad_norm": 58.56202943213565, | |
| "learning_rate": 8.063680898874158e-06, | |
| "loss": 0.3128, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 1.0875331564986737, | |
| "grad_norm": 68.2761310554643, | |
| "learning_rate": 8.051460000530501e-06, | |
| "loss": 0.1267, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 1.090185676392573, | |
| "grad_norm": 43.810216329803836, | |
| "learning_rate": 8.039209983943201e-06, | |
| "loss": 0.2736, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 1.0928381962864722, | |
| "grad_norm": 60.97125301465749, | |
| "learning_rate": 8.026930966006778e-06, | |
| "loss": 0.1616, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 1.0954907161803713, | |
| "grad_norm": 45.630973756286146, | |
| "learning_rate": 8.014623063892504e-06, | |
| "loss": 0.2629, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 1.0981432360742707, | |
| "grad_norm": 3.457495328921145, | |
| "learning_rate": 8.002286395047267e-06, | |
| "loss": 0.0236, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 1.1007957559681698, | |
| "grad_norm": 62.65821806193905, | |
| "learning_rate": 7.989921077192464e-06, | |
| "loss": 0.4217, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 1.103448275862069, | |
| "grad_norm": 22.66117796728126, | |
| "learning_rate": 7.97752722832287e-06, | |
| "loss": 0.1689, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 1.106100795755968, | |
| "grad_norm": 11.75462762962119, | |
| "learning_rate": 7.965104966705518e-06, | |
| "loss": 0.0972, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 1.1087533156498675, | |
| "grad_norm": 41.913827864061176, | |
| "learning_rate": 7.95265441087856e-06, | |
| "loss": 0.2382, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 1.1114058355437666, | |
| "grad_norm": 47.459409982780066, | |
| "learning_rate": 7.940175679650145e-06, | |
| "loss": 0.4864, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 1.1140583554376657, | |
| "grad_norm": 19.367649648717457, | |
| "learning_rate": 7.927668892097288e-06, | |
| "loss": 0.0742, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 1.1167108753315649, | |
| "grad_norm": 34.18374747743834, | |
| "learning_rate": 7.915134167564724e-06, | |
| "loss": 0.1962, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 1.1193633952254642, | |
| "grad_norm": 47.150789672019954, | |
| "learning_rate": 7.902571625663773e-06, | |
| "loss": 0.2737, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 1.1220159151193634, | |
| "grad_norm": 51.86857593447088, | |
| "learning_rate": 7.889981386271202e-06, | |
| "loss": 0.0715, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 1.1246684350132625, | |
| "grad_norm": 37.26299672015612, | |
| "learning_rate": 7.877363569528076e-06, | |
| "loss": 0.2618, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 1.1273209549071619, | |
| "grad_norm": 55.28707481699974, | |
| "learning_rate": 7.864718295838615e-06, | |
| "loss": 0.1249, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 1.129973474801061, | |
| "grad_norm": 84.16566896954009, | |
| "learning_rate": 7.852045685869046e-06, | |
| "loss": 0.4398, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 1.1326259946949602, | |
| "grad_norm": 55.8899032446642, | |
| "learning_rate": 7.839345860546448e-06, | |
| "loss": 0.3044, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 1.1352785145888595, | |
| "grad_norm": 10.659192422147509, | |
| "learning_rate": 7.826618941057597e-06, | |
| "loss": 0.0611, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 1.1379310344827587, | |
| "grad_norm": 42.57054449009807, | |
| "learning_rate": 7.81386504884782e-06, | |
| "loss": 0.1664, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 1.1405835543766578, | |
| "grad_norm": 27.750046981174442, | |
| "learning_rate": 7.80108430561982e-06, | |
| "loss": 0.1654, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 1.143236074270557, | |
| "grad_norm": 24.9396201345219, | |
| "learning_rate": 7.788276833332527e-06, | |
| "loss": 0.1293, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 1.1458885941644563, | |
| "grad_norm": 20.34868157498092, | |
| "learning_rate": 7.775442754199929e-06, | |
| "loss": 0.1705, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 1.1485411140583555, | |
| "grad_norm": 58.399543578541476, | |
| "learning_rate": 7.762582190689912e-06, | |
| "loss": 0.156, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 1.1511936339522546, | |
| "grad_norm": 25.459151955023966, | |
| "learning_rate": 7.749695265523076e-06, | |
| "loss": 0.2423, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 1.1538461538461537, | |
| "grad_norm": 23.22794672451964, | |
| "learning_rate": 7.736782101671587e-06, | |
| "loss": 0.1002, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 1.156498673740053, | |
| "grad_norm": 19.872411993502894, | |
| "learning_rate": 7.723842822357982e-06, | |
| "loss": 0.078, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 1.1591511936339522, | |
| "grad_norm": 6.868162120751368, | |
| "learning_rate": 7.710877551054004e-06, | |
| "loss": 0.0298, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 1.1618037135278514, | |
| "grad_norm": 26.292541068653883, | |
| "learning_rate": 7.697886411479422e-06, | |
| "loss": 0.1934, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 1.1644562334217508, | |
| "grad_norm": 21.43672508582472, | |
| "learning_rate": 7.684869527600856e-06, | |
| "loss": 0.1535, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 1.16710875331565, | |
| "grad_norm": 16.62769259399857, | |
| "learning_rate": 7.67182702363058e-06, | |
| "loss": 0.1433, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 1.169761273209549, | |
| "grad_norm": 16.685896603459977, | |
| "learning_rate": 7.658759024025349e-06, | |
| "loss": 0.0708, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 1.1724137931034484, | |
| "grad_norm": 26.005011847487346, | |
| "learning_rate": 7.645665653485205e-06, | |
| "loss": 0.199, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 1.1750663129973475, | |
| "grad_norm": 5.716604464026939, | |
| "learning_rate": 7.632547036952296e-06, | |
| "loss": 0.0217, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 1.1777188328912467, | |
| "grad_norm": 36.897628289818144, | |
| "learning_rate": 7.6194032996096685e-06, | |
| "loss": 0.2061, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 1.1803713527851458, | |
| "grad_norm": 28.876741113162478, | |
| "learning_rate": 7.606234566880089e-06, | |
| "loss": 0.13, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 1.1830238726790452, | |
| "grad_norm": 19.080592510468772, | |
| "learning_rate": 7.593040964424836e-06, | |
| "loss": 0.1685, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 1.1856763925729443, | |
| "grad_norm": 29.96186333981723, | |
| "learning_rate": 7.579822618142505e-06, | |
| "loss": 0.1566, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 1.1883289124668435, | |
| "grad_norm": 32.8034743799736, | |
| "learning_rate": 7.5665796541678106e-06, | |
| "loss": 0.1605, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 1.1909814323607426, | |
| "grad_norm": 27.661953801989608, | |
| "learning_rate": 7.553312198870373e-06, | |
| "loss": 0.0956, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 1.193633952254642, | |
| "grad_norm": 12.306782289979477, | |
| "learning_rate": 7.540020378853523e-06, | |
| "loss": 0.0777, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 1.1962864721485411, | |
| "grad_norm": 16.700054834521364, | |
| "learning_rate": 7.526704320953091e-06, | |
| "loss": 0.2292, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 1.1989389920424403, | |
| "grad_norm": 18.34021244065456, | |
| "learning_rate": 7.513364152236185e-06, | |
| "loss": 0.1332, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 1.2015915119363396, | |
| "grad_norm": 17.67473447987808, | |
| "learning_rate": 7.500000000000001e-06, | |
| "loss": 0.0685, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 1.2042440318302388, | |
| "grad_norm": 18.160624034169683, | |
| "learning_rate": 7.486611991770586e-06, | |
| "loss": 0.0866, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 1.206896551724138, | |
| "grad_norm": 30.183878085653877, | |
| "learning_rate": 7.473200255301635e-06, | |
| "loss": 0.1514, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 1.209549071618037, | |
| "grad_norm": 11.560796164676075, | |
| "learning_rate": 7.459764918573264e-06, | |
| "loss": 0.0702, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 1.2122015915119364, | |
| "grad_norm": 110.73822732726704, | |
| "learning_rate": 7.446306109790798e-06, | |
| "loss": 0.0581, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 1.2148541114058355, | |
| "grad_norm": 87.5160478741724, | |
| "learning_rate": 7.432823957383533e-06, | |
| "loss": 0.2227, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 1.2175066312997347, | |
| "grad_norm": 60.30264082510386, | |
| "learning_rate": 7.419318590003524e-06, | |
| "loss": 0.1127, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 1.2201591511936338, | |
| "grad_norm": 21.619284863772585, | |
| "learning_rate": 7.405790136524353e-06, | |
| "loss": 0.0821, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 1.2228116710875332, | |
| "grad_norm": 56.712783723095264, | |
| "learning_rate": 7.392238726039897e-06, | |
| "loss": 0.1583, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 1.2254641909814323, | |
| "grad_norm": 24.170384444203012, | |
| "learning_rate": 7.3786644878631035e-06, | |
| "loss": 0.1189, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 1.2281167108753315, | |
| "grad_norm": 10.562962951144847, | |
| "learning_rate": 7.365067551524739e-06, | |
| "loss": 0.0449, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 1.2307692307692308, | |
| "grad_norm": 99.528184771969, | |
| "learning_rate": 7.3514480467721786e-06, | |
| "loss": 0.0788, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 1.23342175066313, | |
| "grad_norm": 12.34676892311528, | |
| "learning_rate": 7.3378061035681415e-06, | |
| "loss": 0.0469, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 1.2360742705570291, | |
| "grad_norm": 68.2832497273848, | |
| "learning_rate": 7.324141852089473e-06, | |
| "loss": 0.1851, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 1.2387267904509285, | |
| "grad_norm": 11.108520534758835, | |
| "learning_rate": 7.3104554227258895e-06, | |
| "loss": 0.0882, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 1.2413793103448276, | |
| "grad_norm": 42.060698163435674, | |
| "learning_rate": 7.296746946078737e-06, | |
| "loss": 0.2728, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 1.2440318302387268, | |
| "grad_norm": 17.56387098625625, | |
| "learning_rate": 7.283016552959745e-06, | |
| "loss": 0.0696, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 1.246684350132626, | |
| "grad_norm": 30.23208035695815, | |
| "learning_rate": 7.269264374389781e-06, | |
| "loss": 0.1132, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 1.2493368700265253, | |
| "grad_norm": 26.412011305182677, | |
| "learning_rate": 7.255490541597594e-06, | |
| "loss": 0.1062, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 1.2519893899204244, | |
| "grad_norm": 42.983237948628, | |
| "learning_rate": 7.2416951860185735e-06, | |
| "loss": 0.1112, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 1.2546419098143236, | |
| "grad_norm": 5.851594403240968, | |
| "learning_rate": 7.2278784392934775e-06, | |
| "loss": 0.0579, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 1.2572944297082227, | |
| "grad_norm": 42.07578121579949, | |
| "learning_rate": 7.2140404332671986e-06, | |
| "loss": 0.0693, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 1.259946949602122, | |
| "grad_norm": 46.7174671935105, | |
| "learning_rate": 7.200181299987483e-06, | |
| "loss": 0.1177, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 1.2625994694960212, | |
| "grad_norm": 27.379743656894938, | |
| "learning_rate": 7.186301171703689e-06, | |
| "loss": 0.1629, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 1.2652519893899203, | |
| "grad_norm": 30.216270759458997, | |
| "learning_rate": 7.172400180865514e-06, | |
| "loss": 0.0605, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 1.2679045092838197, | |
| "grad_norm": 20.806360193025363, | |
| "learning_rate": 7.158478460121735e-06, | |
| "loss": 0.0519, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 1.2705570291777188, | |
| "grad_norm": 10.651496274071377, | |
| "learning_rate": 7.144536142318945e-06, | |
| "loss": 0.052, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 1.273209549071618, | |
| "grad_norm": 23.494134794931725, | |
| "learning_rate": 7.130573360500277e-06, | |
| "loss": 0.1685, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 1.2758620689655173, | |
| "grad_norm": 24.791552622223954, | |
| "learning_rate": 7.116590247904144e-06, | |
| "loss": 0.0613, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 1.2785145888594165, | |
| "grad_norm": 6.621282957067961, | |
| "learning_rate": 7.102586937962961e-06, | |
| "loss": 0.0403, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 1.2811671087533156, | |
| "grad_norm": 23.984601170750345, | |
| "learning_rate": 7.088563564301874e-06, | |
| "loss": 0.07, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 1.2838196286472148, | |
| "grad_norm": 9.929247414366557, | |
| "learning_rate": 7.074520260737487e-06, | |
| "loss": 0.0464, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 1.2864721485411141, | |
| "grad_norm": 12.867645506911478, | |
| "learning_rate": 7.060457161276581e-06, | |
| "loss": 0.0985, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 1.2891246684350133, | |
| "grad_norm": 7.624514156491183, | |
| "learning_rate": 7.046374400114842e-06, | |
| "loss": 0.0249, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 1.2917771883289124, | |
| "grad_norm": 30.32207231686849, | |
| "learning_rate": 7.032272111635565e-06, | |
| "loss": 0.1042, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 1.2944297082228116, | |
| "grad_norm": 57.229453179332594, | |
| "learning_rate": 7.018150430408394e-06, | |
| "loss": 0.4131, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 1.297082228116711, | |
| "grad_norm": 17.842806157709017, | |
| "learning_rate": 7.004009491188023e-06, | |
| "loss": 0.0452, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 1.29973474801061, | |
| "grad_norm": 21.44800192709656, | |
| "learning_rate": 6.989849428912908e-06, | |
| "loss": 0.037, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 1.3023872679045092, | |
| "grad_norm": 34.16602355273998, | |
| "learning_rate": 6.975670378703993e-06, | |
| "loss": 0.2023, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 1.3050397877984086, | |
| "grad_norm": 26.85872386445106, | |
| "learning_rate": 6.961472475863406e-06, | |
| "loss": 0.1479, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 1.3076923076923077, | |
| "grad_norm": 27.914655387462602, | |
| "learning_rate": 6.947255855873176e-06, | |
| "loss": 0.1027, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 1.3103448275862069, | |
| "grad_norm": 8.47195867510813, | |
| "learning_rate": 6.933020654393941e-06, | |
| "loss": 0.0626, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 1.3129973474801062, | |
| "grad_norm": 15.786375616446254, | |
| "learning_rate": 6.918767007263646e-06, | |
| "loss": 0.1095, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 1.3156498673740054, | |
| "grad_norm": 14.966235305898794, | |
| "learning_rate": 6.904495050496258e-06, | |
| "loss": 0.086, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 1.3183023872679045, | |
| "grad_norm": 8.971458193639767, | |
| "learning_rate": 6.8902049202804574e-06, | |
| "loss": 0.0561, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 1.3209549071618036, | |
| "grad_norm": 17.554889727273423, | |
| "learning_rate": 6.875896752978345e-06, | |
| "loss": 0.0503, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 1.323607427055703, | |
| "grad_norm": 15.147440419341597, | |
| "learning_rate": 6.861570685124135e-06, | |
| "loss": 0.0494, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 1.3262599469496021, | |
| "grad_norm": 27.20170910120345, | |
| "learning_rate": 6.847226853422863e-06, | |
| "loss": 0.0252, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 1.3289124668435013, | |
| "grad_norm": 22.553693373205064, | |
| "learning_rate": 6.832865394749065e-06, | |
| "loss": 0.0806, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 1.3315649867374004, | |
| "grad_norm": 44.035317078004645, | |
| "learning_rate": 6.8184864461454866e-06, | |
| "loss": 0.0409, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 1.3342175066312998, | |
| "grad_norm": 45.74768341442066, | |
| "learning_rate": 6.804090144821772e-06, | |
| "loss": 0.1278, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 1.336870026525199, | |
| "grad_norm": 63.72486362138629, | |
| "learning_rate": 6.7896766281531435e-06, | |
| "loss": 0.3504, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 1.339522546419098, | |
| "grad_norm": 14.659860419565552, | |
| "learning_rate": 6.775246033679105e-06, | |
| "loss": 0.1378, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 1.3421750663129974, | |
| "grad_norm": 15.094010123566344, | |
| "learning_rate": 6.760798499102121e-06, | |
| "loss": 0.0443, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 1.3448275862068966, | |
| "grad_norm": 12.088460229768618, | |
| "learning_rate": 6.7463341622863074e-06, | |
| "loss": 0.0737, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 1.3474801061007957, | |
| "grad_norm": 7.743245120977951, | |
| "learning_rate": 6.7318531612561145e-06, | |
| "loss": 0.0345, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 1.350132625994695, | |
| "grad_norm": 28.851196219475575, | |
| "learning_rate": 6.717355634195004e-06, | |
| "loss": 0.1229, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 1.3527851458885942, | |
| "grad_norm": 39.8940905052454, | |
| "learning_rate": 6.702841719444141e-06, | |
| "loss": 0.1008, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 1.3554376657824934, | |
| "grad_norm": 45.13925991061865, | |
| "learning_rate": 6.688311555501064e-06, | |
| "loss": 0.269, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 1.3580901856763925, | |
| "grad_norm": 16.570082019867765, | |
| "learning_rate": 6.673765281018373e-06, | |
| "loss": 0.1264, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 1.3607427055702916, | |
| "grad_norm": 9.936276076523223, | |
| "learning_rate": 6.659203034802397e-06, | |
| "loss": 0.0442, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 1.363395225464191, | |
| "grad_norm": 491.03239020588376, | |
| "learning_rate": 6.644624955811873e-06, | |
| "loss": 0.2389, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 1.3660477453580901, | |
| "grad_norm": 15.199004520494972, | |
| "learning_rate": 6.630031183156628e-06, | |
| "loss": 0.0862, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 1.3687002652519893, | |
| "grad_norm": 11.166070002874784, | |
| "learning_rate": 6.615421856096231e-06, | |
| "loss": 0.0523, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 1.3713527851458887, | |
| "grad_norm": 21.807652878549746, | |
| "learning_rate": 6.6007971140386915e-06, | |
| "loss": 0.1845, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 1.3740053050397878, | |
| "grad_norm": 9.085575610594937, | |
| "learning_rate": 6.586157096539105e-06, | |
| "loss": 0.0187, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 1.376657824933687, | |
| "grad_norm": 7.831782583878153, | |
| "learning_rate": 6.571501943298335e-06, | |
| "loss": 0.0404, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 1.3793103448275863, | |
| "grad_norm": 9.70973898788606, | |
| "learning_rate": 6.556831794161678e-06, | |
| "loss": 0.0448, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 1.3819628647214854, | |
| "grad_norm": 68.24554091396725, | |
| "learning_rate": 6.542146789117524e-06, | |
| "loss": 0.1233, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 1.3846153846153846, | |
| "grad_norm": 10.500883042336119, | |
| "learning_rate": 6.527447068296026e-06, | |
| "loss": 0.039, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 1.387267904509284, | |
| "grad_norm": 25.24639428355656, | |
| "learning_rate": 6.512732771967758e-06, | |
| "loss": 0.0529, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 1.389920424403183, | |
| "grad_norm": 3.132482062228408, | |
| "learning_rate": 6.498004040542385e-06, | |
| "loss": 0.0161, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 1.3925729442970822, | |
| "grad_norm": 38.50562742710699, | |
| "learning_rate": 6.483261014567311e-06, | |
| "loss": 0.0607, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 1.3952254641909814, | |
| "grad_norm": 13.901422622952236, | |
| "learning_rate": 6.4685038347263495e-06, | |
| "loss": 0.046, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 1.3978779840848805, | |
| "grad_norm": 38.43373398443968, | |
| "learning_rate": 6.453732641838372e-06, | |
| "loss": 0.1998, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 1.4005305039787799, | |
| "grad_norm": 28.880004159481086, | |
| "learning_rate": 6.4389475768559675e-06, | |
| "loss": 0.0993, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 1.403183023872679, | |
| "grad_norm": 41.90220673902449, | |
| "learning_rate": 6.4241487808641044e-06, | |
| "loss": 0.0416, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 1.4058355437665782, | |
| "grad_norm": 7.0622226239396735, | |
| "learning_rate": 6.409336395078771e-06, | |
| "loss": 0.0223, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 1.4084880636604775, | |
| "grad_norm": 37.18705225070335, | |
| "learning_rate": 6.394510560845637e-06, | |
| "loss": 0.1225, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 1.4111405835543767, | |
| "grad_norm": 12.461676647059019, | |
| "learning_rate": 6.379671419638703e-06, | |
| "loss": 0.0449, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 1.4137931034482758, | |
| "grad_norm": 25.69067972611454, | |
| "learning_rate": 6.3648191130589524e-06, | |
| "loss": 0.1461, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 1.4164456233421752, | |
| "grad_norm": 4.433735117691165, | |
| "learning_rate": 6.349953782832991e-06, | |
| "loss": 0.0163, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 1.4190981432360743, | |
| "grad_norm": 35.5915224640076, | |
| "learning_rate": 6.335075570811708e-06, | |
| "loss": 0.0421, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 1.4217506631299734, | |
| "grad_norm": 12.883444328072557, | |
| "learning_rate": 6.320184618968915e-06, | |
| "loss": 0.0533, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 1.4244031830238728, | |
| "grad_norm": 24.55048357158165, | |
| "learning_rate": 6.305281069399989e-06, | |
| "loss": 0.0583, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 1.427055702917772, | |
| "grad_norm": 33.72346507980671, | |
| "learning_rate": 6.290365064320521e-06, | |
| "loss": 0.2696, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 1.429708222811671, | |
| "grad_norm": 106.39331911829048, | |
| "learning_rate": 6.275436746064957e-06, | |
| "loss": 0.301, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 1.4323607427055702, | |
| "grad_norm": 15.979137251612, | |
| "learning_rate": 6.26049625708524e-06, | |
| "loss": 0.0331, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 1.4350132625994694, | |
| "grad_norm": 28.11729203908229, | |
| "learning_rate": 6.245543739949455e-06, | |
| "loss": 0.2345, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 1.4376657824933687, | |
| "grad_norm": 13.077394458293497, | |
| "learning_rate": 6.2305793373404564e-06, | |
| "loss": 0.0358, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 1.4403183023872679, | |
| "grad_norm": 5.554488097306693, | |
| "learning_rate": 6.215603192054523e-06, | |
| "loss": 0.0144, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 1.442970822281167, | |
| "grad_norm": 3.1244872050868495, | |
| "learning_rate": 6.2006154469999824e-06, | |
| "loss": 0.0097, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 1.4456233421750664, | |
| "grad_norm": 8.489049690971624, | |
| "learning_rate": 6.185616245195849e-06, | |
| "loss": 0.0272, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 1.4482758620689655, | |
| "grad_norm": 39.45660661182102, | |
| "learning_rate": 6.17060572977047e-06, | |
| "loss": 0.0626, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 1.4509283819628647, | |
| "grad_norm": 9.639199657748263, | |
| "learning_rate": 6.155584043960145e-06, | |
| "loss": 0.1411, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 1.453580901856764, | |
| "grad_norm": 27.845194387729055, | |
| "learning_rate": 6.140551331107767e-06, | |
| "loss": 0.0311, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 1.4562334217506632, | |
| "grad_norm": 43.69139166324222, | |
| "learning_rate": 6.125507734661458e-06, | |
| "loss": 0.0922, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 1.4588859416445623, | |
| "grad_norm": 39.59031227302006, | |
| "learning_rate": 6.110453398173188e-06, | |
| "loss": 0.0969, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 1.4615384615384617, | |
| "grad_norm": 46.21688165038216, | |
| "learning_rate": 6.095388465297418e-06, | |
| "loss": 0.0548, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 1.4641909814323608, | |
| "grad_norm": 25.52563185215739, | |
| "learning_rate": 6.080313079789723e-06, | |
| "loss": 0.0626, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 1.46684350132626, | |
| "grad_norm": 15.010437110226805, | |
| "learning_rate": 6.0652273855054225e-06, | |
| "loss": 0.0799, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 1.469496021220159, | |
| "grad_norm": 0.6240187668769939, | |
| "learning_rate": 6.050131526398202e-06, | |
| "loss": 0.0071, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 1.4721485411140582, | |
| "grad_norm": 24.41151697104724, | |
| "learning_rate": 6.035025646518747e-06, | |
| "loss": 0.0566, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 1.4748010610079576, | |
| "grad_norm": 43.88869277127279, | |
| "learning_rate": 6.019909890013367e-06, | |
| "loss": 0.1289, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 1.4774535809018567, | |
| "grad_norm": 1.8126838533443241, | |
| "learning_rate": 6.004784401122613e-06, | |
| "loss": 0.0123, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 1.4801061007957559, | |
| "grad_norm": 50.60535466560564, | |
| "learning_rate": 5.9896493241799115e-06, | |
| "loss": 0.2817, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 1.4827586206896552, | |
| "grad_norm": 31.769450904461493, | |
| "learning_rate": 5.974504803610178e-06, | |
| "loss": 0.038, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 1.4854111405835544, | |
| "grad_norm": 36.41160490344729, | |
| "learning_rate": 5.959350983928446e-06, | |
| "loss": 0.2033, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 1.4880636604774535, | |
| "grad_norm": 1.0570484873390567, | |
| "learning_rate": 5.944188009738483e-06, | |
| "loss": 0.007, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 1.490716180371353, | |
| "grad_norm": 7.600018984649344, | |
| "learning_rate": 5.929016025731413e-06, | |
| "loss": 0.0117, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 1.493368700265252, | |
| "grad_norm": 46.11036080527606, | |
| "learning_rate": 5.913835176684335e-06, | |
| "loss": 0.0896, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 1.4960212201591512, | |
| "grad_norm": 6.801838928839265, | |
| "learning_rate": 5.898645607458941e-06, | |
| "loss": 0.018, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 1.4986737400530503, | |
| "grad_norm": 33.05399188153241, | |
| "learning_rate": 5.883447463000136e-06, | |
| "loss": 0.0144, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 1.5013262599469495, | |
| "grad_norm": 23.00632593695278, | |
| "learning_rate": 5.8682408883346535e-06, | |
| "loss": 0.0361, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 1.5039787798408488, | |
| "grad_norm": 3.5405959675882896, | |
| "learning_rate": 5.8530260285696674e-06, | |
| "loss": 0.0108, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 1.506631299734748, | |
| "grad_norm": 11.97736850552611, | |
| "learning_rate": 5.837803028891418e-06, | |
| "loss": 0.0239, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 1.509283819628647, | |
| "grad_norm": 6.472410570187345, | |
| "learning_rate": 5.822572034563812e-06, | |
| "loss": 0.0167, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 1.5119363395225465, | |
| "grad_norm": 4.641359282270338, | |
| "learning_rate": 5.807333190927054e-06, | |
| "loss": 0.0081, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 1.5145888594164456, | |
| "grad_norm": 119.51677620720801, | |
| "learning_rate": 5.792086643396238e-06, | |
| "loss": 0.0296, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 1.5172413793103448, | |
| "grad_norm": 19.160987194114814, | |
| "learning_rate": 5.776832537459983e-06, | |
| "loss": 0.2339, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 1.5198938992042441, | |
| "grad_norm": 64.96051480014378, | |
| "learning_rate": 5.761571018679025e-06, | |
| "loss": 0.2978, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 1.5225464190981433, | |
| "grad_norm": 22.520538143186805, | |
| "learning_rate": 5.746302232684843e-06, | |
| "loss": 0.0431, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 1.5251989389920424, | |
| "grad_norm": 8.49094605957141, | |
| "learning_rate": 5.731026325178255e-06, | |
| "loss": 0.0211, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 1.5278514588859418, | |
| "grad_norm": 45.70005511753745, | |
| "learning_rate": 5.715743441928041e-06, | |
| "loss": 0.098, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 1.530503978779841, | |
| "grad_norm": 14.782978428535925, | |
| "learning_rate": 5.700453728769545e-06, | |
| "loss": 0.0165, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 1.53315649867374, | |
| "grad_norm": 13.046864605038222, | |
| "learning_rate": 5.6851573316032845e-06, | |
| "loss": 0.0241, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 1.5358090185676394, | |
| "grad_norm": 64.59802552950447, | |
| "learning_rate": 5.669854396393559e-06, | |
| "loss": 0.0738, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 1.5384615384615383, | |
| "grad_norm": 166.49480105447577, | |
| "learning_rate": 5.654545069167056e-06, | |
| "loss": 0.1385, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 1.5411140583554377, | |
| "grad_norm": 17.008871192463243, | |
| "learning_rate": 5.639229496011456e-06, | |
| "loss": 0.0833, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 1.5437665782493368, | |
| "grad_norm": 1.8492882406831308, | |
| "learning_rate": 5.623907823074044e-06, | |
| "loss": 0.0073, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 1.546419098143236, | |
| "grad_norm": 30.430767357234902, | |
| "learning_rate": 5.60858019656031e-06, | |
| "loss": 0.0909, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 1.5490716180371353, | |
| "grad_norm": 13.266355913015387, | |
| "learning_rate": 5.593246762732558e-06, | |
| "loss": 0.0483, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 1.5517241379310345, | |
| "grad_norm": 14.567204624660986, | |
| "learning_rate": 5.577907667908505e-06, | |
| "loss": 0.0256, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 1.5543766578249336, | |
| "grad_norm": 34.19788288920179, | |
| "learning_rate": 5.562563058459884e-06, | |
| "loss": 0.1541, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 1.557029177718833, | |
| "grad_norm": 54.14509214775431, | |
| "learning_rate": 5.5472130808110595e-06, | |
| "loss": 0.1065, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 1.5596816976127321, | |
| "grad_norm": 28.62720507192516, | |
| "learning_rate": 5.531857881437612e-06, | |
| "loss": 0.0893, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 1.5623342175066313, | |
| "grad_norm": 28.054311802068526, | |
| "learning_rate": 5.516497606864959e-06, | |
| "loss": 0.0565, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 1.5649867374005306, | |
| "grad_norm": 31.379538762961293, | |
| "learning_rate": 5.50113240366694e-06, | |
| "loss": 0.1069, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 1.5676392572944295, | |
| "grad_norm": 10.37843513767325, | |
| "learning_rate": 5.48576241846443e-06, | |
| "loss": 0.0262, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 1.570291777188329, | |
| "grad_norm": 15.483387797663624, | |
| "learning_rate": 5.470387797923934e-06, | |
| "loss": 0.0173, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 1.5729442970822283, | |
| "grad_norm": 15.29920880392035, | |
| "learning_rate": 5.4550086887561874e-06, | |
| "loss": 0.0319, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 1.5755968169761272, | |
| "grad_norm": 32.908373060607694, | |
| "learning_rate": 5.4396252377147615e-06, | |
| "loss": 0.0486, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 1.5782493368700266, | |
| "grad_norm": 4.944210731487496, | |
| "learning_rate": 5.424237591594658e-06, | |
| "loss": 0.0122, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 1.5809018567639257, | |
| "grad_norm": 14.2873911899618, | |
| "learning_rate": 5.4088458972309085e-06, | |
| "loss": 0.029, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 1.5835543766578248, | |
| "grad_norm": 19.811484884375027, | |
| "learning_rate": 5.39345030149718e-06, | |
| "loss": 0.0395, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 1.5862068965517242, | |
| "grad_norm": 25.622053063552784, | |
| "learning_rate": 5.378050951304356e-06, | |
| "loss": 0.0648, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 1.5888594164456233, | |
| "grad_norm": 17.43025816114162, | |
| "learning_rate": 5.362647993599159e-06, | |
| "loss": 0.0193, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 1.5915119363395225, | |
| "grad_norm": 5.31676256926054, | |
| "learning_rate": 5.347241575362729e-06, | |
| "loss": 0.0124, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 1.5941644562334218, | |
| "grad_norm": 1.1389079557738029, | |
| "learning_rate": 5.3318318436092335e-06, | |
| "loss": 0.0069, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 1.596816976127321, | |
| "grad_norm": 10.627284322705297, | |
| "learning_rate": 5.31641894538445e-06, | |
| "loss": 0.0151, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 1.5994694960212201, | |
| "grad_norm": 98.99497329825951, | |
| "learning_rate": 5.30100302776438e-06, | |
| "loss": 0.0967, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 1.6021220159151195, | |
| "grad_norm": 24.27158846293466, | |
| "learning_rate": 5.285584237853832e-06, | |
| "loss": 0.0324, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 1.6047745358090184, | |
| "grad_norm": 28.948767908864784, | |
| "learning_rate": 5.270162722785026e-06, | |
| "loss": 0.033, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 1.6074270557029178, | |
| "grad_norm": 26.33013336955424, | |
| "learning_rate": 5.254738629716186e-06, | |
| "loss": 0.1195, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 1.6100795755968171, | |
| "grad_norm": 36.50352169617263, | |
| "learning_rate": 5.239312105830135e-06, | |
| "loss": 0.0208, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 1.612732095490716, | |
| "grad_norm": 19.431136725192903, | |
| "learning_rate": 5.223883298332894e-06, | |
| "loss": 0.0259, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 1.6153846153846154, | |
| "grad_norm": 34.3112525202407, | |
| "learning_rate": 5.208452354452275e-06, | |
| "loss": 0.0651, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 1.6180371352785146, | |
| "grad_norm": 37.8345226955255, | |
| "learning_rate": 5.19301942143647e-06, | |
| "loss": 0.0347, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 1.6206896551724137, | |
| "grad_norm": 68.3527326674225, | |
| "learning_rate": 5.1775846465526625e-06, | |
| "loss": 0.1516, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 1.623342175066313, | |
| "grad_norm": 66.62188237442813, | |
| "learning_rate": 5.162148177085604e-06, | |
| "loss": 0.1933, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 1.6259946949602122, | |
| "grad_norm": 65.94874176325419, | |
| "learning_rate": 5.14671016033622e-06, | |
| "loss": 0.0999, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 1.6286472148541113, | |
| "grad_norm": 4.319664540685107, | |
| "learning_rate": 5.131270743620201e-06, | |
| "loss": 0.0104, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 1.6312997347480107, | |
| "grad_norm": 5.488574522745108, | |
| "learning_rate": 5.115830074266592e-06, | |
| "loss": 0.0123, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 1.6339522546419099, | |
| "grad_norm": 47.41871483146368, | |
| "learning_rate": 5.100388299616395e-06, | |
| "loss": 0.0305, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 1.636604774535809, | |
| "grad_norm": 5.2412860000731225, | |
| "learning_rate": 5.084945567021159e-06, | |
| "loss": 0.0118, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 1.6392572944297084, | |
| "grad_norm": 20.43067020592513, | |
| "learning_rate": 5.069502023841576e-06, | |
| "loss": 0.0136, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 1.6419098143236073, | |
| "grad_norm": 55.13620630926507, | |
| "learning_rate": 5.054057817446067e-06, | |
| "loss": 0.027, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 1.6445623342175066, | |
| "grad_norm": 24.012783309520152, | |
| "learning_rate": 5.038613095209392e-06, | |
| "loss": 0.0297, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 1.647214854111406, | |
| "grad_norm": 246.41322304845076, | |
| "learning_rate": 5.0231680045112174e-06, | |
| "loss": 0.0122, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 1.649867374005305, | |
| "grad_norm": 1.908263943570197, | |
| "learning_rate": 5.00772269273474e-06, | |
| "loss": 0.0055, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 1.6525198938992043, | |
| "grad_norm": 7.9188343410709, | |
| "learning_rate": 4.9922773072652615e-06, | |
| "loss": 0.0118, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 1.6551724137931034, | |
| "grad_norm": 3.9827474382276122, | |
| "learning_rate": 4.976831995488784e-06, | |
| "loss": 0.007, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 1.6578249336870026, | |
| "grad_norm": 35.85065017259431, | |
| "learning_rate": 4.961386904790611e-06, | |
| "loss": 0.0291, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 1.660477453580902, | |
| "grad_norm": 1.8833465914635488, | |
| "learning_rate": 4.945942182553932e-06, | |
| "loss": 0.0061, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 1.663129973474801, | |
| "grad_norm": 2.0782864974444752, | |
| "learning_rate": 4.9304979761584256e-06, | |
| "loss": 0.0072, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 1.6657824933687002, | |
| "grad_norm": 0.9498564062083344, | |
| "learning_rate": 4.915054432978842e-06, | |
| "loss": 0.0054, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 1.6684350132625996, | |
| "grad_norm": 39.36865575587075, | |
| "learning_rate": 4.899611700383608e-06, | |
| "loss": 0.0337, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 1.6710875331564987, | |
| "grad_norm": 44.4769789219546, | |
| "learning_rate": 4.884169925733409e-06, | |
| "loss": 0.1049, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 1.6737400530503979, | |
| "grad_norm": 3.166712761063399, | |
| "learning_rate": 4.868729256379802e-06, | |
| "loss": 0.0059, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 1.6763925729442972, | |
| "grad_norm": 0.5167268015197257, | |
| "learning_rate": 4.8532898396637815e-06, | |
| "loss": 0.0045, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 1.6790450928381961, | |
| "grad_norm": 0.4315587098083175, | |
| "learning_rate": 4.837851822914397e-06, | |
| "loss": 0.0045, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 1.6816976127320955, | |
| "grad_norm": 34.489146105146105, | |
| "learning_rate": 4.822415353447339e-06, | |
| "loss": 0.1604, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 1.6843501326259946, | |
| "grad_norm": 0.347847431409771, | |
| "learning_rate": 4.80698057856353e-06, | |
| "loss": 0.0041, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 1.6870026525198938, | |
| "grad_norm": 0.4165775687885535, | |
| "learning_rate": 4.791547645547727e-06, | |
| "loss": 0.0041, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 1.6896551724137931, | |
| "grad_norm": 117.93594590550879, | |
| "learning_rate": 4.7761167016671064e-06, | |
| "loss": 0.1003, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 1.6923076923076923, | |
| "grad_norm": 66.349687103629, | |
| "learning_rate": 4.760687894169867e-06, | |
| "loss": 0.1888, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 1.6949602122015914, | |
| "grad_norm": 2.5640604180704947, | |
| "learning_rate": 4.7452613702838166e-06, | |
| "loss": 0.0056, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 1.6976127320954908, | |
| "grad_norm": 243.72962362462474, | |
| "learning_rate": 4.729837277214975e-06, | |
| "loss": 0.0926, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 1.70026525198939, | |
| "grad_norm": 7.350167948874671, | |
| "learning_rate": 4.7144157621461694e-06, | |
| "loss": 0.0095, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 1.702917771883289, | |
| "grad_norm": 0.6606315834117484, | |
| "learning_rate": 4.698996972235622e-06, | |
| "loss": 0.0043, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 1.7055702917771884, | |
| "grad_norm": 6.7420487073193005, | |
| "learning_rate": 4.683581054615551e-06, | |
| "loss": 0.0095, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 1.7082228116710876, | |
| "grad_norm": 5.880007837879827, | |
| "learning_rate": 4.668168156390769e-06, | |
| "loss": 0.0061, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 1.7108753315649867, | |
| "grad_norm": 5.405586207286194, | |
| "learning_rate": 4.652758424637271e-06, | |
| "loss": 0.0074, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 1.713527851458886, | |
| "grad_norm": 13.642408752250786, | |
| "learning_rate": 4.637352006400842e-06, | |
| "loss": 0.015, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 1.716180371352785, | |
| "grad_norm": 21.078816092264656, | |
| "learning_rate": 4.621949048695646e-06, | |
| "loss": 0.0198, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 1.7188328912466844, | |
| "grad_norm": 53.552715766878414, | |
| "learning_rate": 4.606549698502824e-06, | |
| "loss": 0.0687, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 1.7214854111405835, | |
| "grad_norm": 65.69163142454755, | |
| "learning_rate": 4.5911541027690914e-06, | |
| "loss": 0.1032, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 1.7241379310344827, | |
| "grad_norm": 6.4577164762694785, | |
| "learning_rate": 4.575762408405343e-06, | |
| "loss": 0.0054, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 1.726790450928382, | |
| "grad_norm": 22.799875878261123, | |
| "learning_rate": 4.56037476228524e-06, | |
| "loss": 0.051, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 1.7294429708222812, | |
| "grad_norm": 9.888082550044638, | |
| "learning_rate": 4.544991311243815e-06, | |
| "loss": 0.0087, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 1.7320954907161803, | |
| "grad_norm": 30.204004753927336, | |
| "learning_rate": 4.529612202076069e-06, | |
| "loss": 0.193, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 1.7347480106100797, | |
| "grad_norm": 0.20191470860720032, | |
| "learning_rate": 4.514237581535571e-06, | |
| "loss": 0.0033, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 1.7374005305039788, | |
| "grad_norm": 5.33579782789315, | |
| "learning_rate": 4.498867596333061e-06, | |
| "loss": 0.0055, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 1.740053050397878, | |
| "grad_norm": 0.4949743432127943, | |
| "learning_rate": 4.4835023931350425e-06, | |
| "loss": 0.0037, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 1.7427055702917773, | |
| "grad_norm": 26.415507427778962, | |
| "learning_rate": 4.468142118562389e-06, | |
| "loss": 0.0243, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 1.7453580901856764, | |
| "grad_norm": 23.92557305475512, | |
| "learning_rate": 4.452786919188943e-06, | |
| "loss": 0.0094, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 1.7480106100795756, | |
| "grad_norm": 1.34416749285188, | |
| "learning_rate": 4.437436941540116e-06, | |
| "loss": 0.0042, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 1.750663129973475, | |
| "grad_norm": 35.23422292881152, | |
| "learning_rate": 4.422092332091497e-06, | |
| "loss": 0.1418, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 1.7533156498673739, | |
| "grad_norm": 21.82973318934849, | |
| "learning_rate": 4.4067532372674434e-06, | |
| "loss": 0.0118, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 1.7559681697612732, | |
| "grad_norm": 10.25652325742471, | |
| "learning_rate": 4.391419803439691e-06, | |
| "loss": 0.0098, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 1.7586206896551724, | |
| "grad_norm": 1.0092521511671273, | |
| "learning_rate": 4.3760921769259585e-06, | |
| "loss": 0.0042, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 1.7612732095490715, | |
| "grad_norm": 0.18022257900606575, | |
| "learning_rate": 4.360770503988545e-06, | |
| "loss": 0.0032, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 1.7639257294429709, | |
| "grad_norm": 0.18362886938020564, | |
| "learning_rate": 4.345454930832946e-06, | |
| "loss": 0.0032, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 1.76657824933687, | |
| "grad_norm": 52.59486973274669, | |
| "learning_rate": 4.3301456036064415e-06, | |
| "loss": 0.0188, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 1.7692307692307692, | |
| "grad_norm": 0.17663417348622706, | |
| "learning_rate": 4.314842668396716e-06, | |
| "loss": 0.003, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 1.7718832891246685, | |
| "grad_norm": 34.55861746298538, | |
| "learning_rate": 4.299546271230457e-06, | |
| "loss": 0.1371, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 1.7745358090185677, | |
| "grad_norm": 3.00901581132572, | |
| "learning_rate": 4.28425655807196e-06, | |
| "loss": 0.0058, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 1.7771883289124668, | |
| "grad_norm": 1.472166462932808, | |
| "learning_rate": 4.268973674821747e-06, | |
| "loss": 0.0036, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 1.7798408488063662, | |
| "grad_norm": 0.30438323731414085, | |
| "learning_rate": 4.2536977673151594e-06, | |
| "loss": 0.0029, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 1.782493368700265, | |
| "grad_norm": 0.8798922827717847, | |
| "learning_rate": 4.2384289813209754e-06, | |
| "loss": 0.0034, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 1.7851458885941645, | |
| "grad_norm": 2.200407200049607, | |
| "learning_rate": 4.223167462540018e-06, | |
| "loss": 0.0038, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 1.7877984084880638, | |
| "grad_norm": 0.8436364418418659, | |
| "learning_rate": 4.207913356603762e-06, | |
| "loss": 0.0031, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 1.7904509283819627, | |
| "grad_norm": 0.34783958925230496, | |
| "learning_rate": 4.192666809072948e-06, | |
| "loss": 0.0029, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 1.793103448275862, | |
| "grad_norm": 8.706721085918481, | |
| "learning_rate": 4.1774279654361895e-06, | |
| "loss": 0.0093, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 1.7957559681697612, | |
| "grad_norm": 46.6268725543766, | |
| "learning_rate": 4.162196971108584e-06, | |
| "loss": 0.0428, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 1.7984084880636604, | |
| "grad_norm": 21.44766513368348, | |
| "learning_rate": 4.146973971430333e-06, | |
| "loss": 0.0105, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 1.8010610079575597, | |
| "grad_norm": 17.24328013019805, | |
| "learning_rate": 4.131759111665349e-06, | |
| "loss": 0.1565, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 1.8037135278514589, | |
| "grad_norm": 0.1531046540956738, | |
| "learning_rate": 4.116552536999865e-06, | |
| "loss": 0.0027, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 1.806366047745358, | |
| "grad_norm": 0.34748754074866783, | |
| "learning_rate": 4.101354392541061e-06, | |
| "loss": 0.0028, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 1.8090185676392574, | |
| "grad_norm": 6.766920577207019, | |
| "learning_rate": 4.086164823315667e-06, | |
| "loss": 0.004, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 1.8116710875331565, | |
| "grad_norm": 44.46819293351116, | |
| "learning_rate": 4.070983974268588e-06, | |
| "loss": 0.031, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 1.8143236074270557, | |
| "grad_norm": 11.603635061420663, | |
| "learning_rate": 4.055811990261518e-06, | |
| "loss": 0.0099, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 1.816976127320955, | |
| "grad_norm": 0.4777433788159791, | |
| "learning_rate": 4.040649016071555e-06, | |
| "loss": 0.0029, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 1.819628647214854, | |
| "grad_norm": 40.475341750138206, | |
| "learning_rate": 4.025495196389824e-06, | |
| "loss": 0.0787, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 1.8222811671087533, | |
| "grad_norm": 0.2714511227984912, | |
| "learning_rate": 4.010350675820091e-06, | |
| "loss": 0.0027, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 1.8249336870026527, | |
| "grad_norm": 0.5834117283731054, | |
| "learning_rate": 3.9952155988773876e-06, | |
| "loss": 0.0031, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 1.8275862068965516, | |
| "grad_norm": 0.6449148052226085, | |
| "learning_rate": 3.980090109986634e-06, | |
| "loss": 0.0029, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 1.830238726790451, | |
| "grad_norm": 17.141551314961788, | |
| "learning_rate": 3.964974353481254e-06, | |
| "loss": 0.0299, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 1.83289124668435, | |
| "grad_norm": 25.433319764821093, | |
| "learning_rate": 3.949868473601801e-06, | |
| "loss": 0.0427, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 1.8355437665782492, | |
| "grad_norm": 0.6583579876602191, | |
| "learning_rate": 3.934772614494581e-06, | |
| "loss": 0.0032, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 1.8381962864721486, | |
| "grad_norm": 5.5504711429288545, | |
| "learning_rate": 3.9196869202102775e-06, | |
| "loss": 0.0052, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 1.8408488063660478, | |
| "grad_norm": 0.2578200579248058, | |
| "learning_rate": 3.904611534702583e-06, | |
| "loss": 0.0025, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 1.843501326259947, | |
| "grad_norm": 4.66905475011612, | |
| "learning_rate": 3.889546601826813e-06, | |
| "loss": 0.005, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 1.8461538461538463, | |
| "grad_norm": 50.6553420715786, | |
| "learning_rate": 3.874492265338544e-06, | |
| "loss": 0.0401, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 1.8488063660477454, | |
| "grad_norm": 0.155879357954373, | |
| "learning_rate": 3.859448668892233e-06, | |
| "loss": 0.0024, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 1.8514588859416445, | |
| "grad_norm": 5.118868015888596, | |
| "learning_rate": 3.844415956039856e-06, | |
| "loss": 0.0031, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 1.854111405835544, | |
| "grad_norm": 0.1458255118431307, | |
| "learning_rate": 3.829394270229531e-06, | |
| "loss": 0.0024, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 1.8567639257294428, | |
| "grad_norm": 0.15503449645311218, | |
| "learning_rate": 3.814383754804152e-06, | |
| "loss": 0.0024, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 1.8594164456233422, | |
| "grad_norm": 13.511687384914936, | |
| "learning_rate": 3.79938455300002e-06, | |
| "loss": 0.0537, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 1.8620689655172413, | |
| "grad_norm": 37.51104666572117, | |
| "learning_rate": 3.7843968079454773e-06, | |
| "loss": 0.0821, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 1.8647214854111405, | |
| "grad_norm": 0.13563308608550503, | |
| "learning_rate": 3.7694206626595444e-06, | |
| "loss": 0.0023, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 1.8673740053050398, | |
| "grad_norm": 0.18848861798425623, | |
| "learning_rate": 3.7544562600505475e-06, | |
| "loss": 0.0023, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 1.870026525198939, | |
| "grad_norm": 0.21091030708084085, | |
| "learning_rate": 3.7395037429147615e-06, | |
| "loss": 0.0023, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 1.8726790450928381, | |
| "grad_norm": 15.378659773256478, | |
| "learning_rate": 3.724563253935045e-06, | |
| "loss": 0.0182, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 1.8753315649867375, | |
| "grad_norm": 75.53306694832403, | |
| "learning_rate": 3.7096349356794803e-06, | |
| "loss": 0.1587, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 1.8779840848806366, | |
| "grad_norm": 125.70443954838983, | |
| "learning_rate": 3.694718930600012e-06, | |
| "loss": 0.1481, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 1.8806366047745358, | |
| "grad_norm": 0.1516937714071399, | |
| "learning_rate": 3.6798153810310854e-06, | |
| "loss": 0.0023, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 1.8832891246684351, | |
| "grad_norm": 106.63520750539608, | |
| "learning_rate": 3.6649244291882923e-06, | |
| "loss": 0.0725, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 1.8859416445623343, | |
| "grad_norm": 16.272773685544387, | |
| "learning_rate": 3.6500462171670104e-06, | |
| "loss": 0.0155, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 1.8885941644562334, | |
| "grad_norm": 2.0319296678156427, | |
| "learning_rate": 3.6351808869410484e-06, | |
| "loss": 0.0028, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 1.8912466843501328, | |
| "grad_norm": 173.5411500891954, | |
| "learning_rate": 3.6203285803612975e-06, | |
| "loss": 0.0279, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 1.8938992042440317, | |
| "grad_norm": 2.234030900991913, | |
| "learning_rate": 3.605489439154365e-06, | |
| "loss": 0.0045, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 1.896551724137931, | |
| "grad_norm": 29.878514877041937, | |
| "learning_rate": 3.5906636049212316e-06, | |
| "loss": 0.0518, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 1.8992042440318302, | |
| "grad_norm": 23.612513450372212, | |
| "learning_rate": 3.575851219135898e-06, | |
| "loss": 0.0225, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 1.9018567639257293, | |
| "grad_norm": 13.40219828568539, | |
| "learning_rate": 3.5610524231440324e-06, | |
| "loss": 0.0051, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 1.9045092838196287, | |
| "grad_norm": 0.7617183910694865, | |
| "learning_rate": 3.5462673581616298e-06, | |
| "loss": 0.0027, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 1.9071618037135278, | |
| "grad_norm": 76.01576611393702, | |
| "learning_rate": 3.5314961652736517e-06, | |
| "loss": 0.054, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 1.909814323607427, | |
| "grad_norm": 4.686784862497009, | |
| "learning_rate": 3.5167389854326907e-06, | |
| "loss": 0.0032, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 1.9124668435013263, | |
| "grad_norm": 3.3635617826020194, | |
| "learning_rate": 3.501995959457616e-06, | |
| "loss": 0.0033, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 1.9151193633952255, | |
| "grad_norm": 0.14187419174072188, | |
| "learning_rate": 3.487267228032242e-06, | |
| "loss": 0.0021, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 1.9177718832891246, | |
| "grad_norm": 22.366611259381504, | |
| "learning_rate": 3.472552931703975e-06, | |
| "loss": 0.0104, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 1.920424403183024, | |
| "grad_norm": 0.14420645288299855, | |
| "learning_rate": 3.457853210882477e-06, | |
| "loss": 0.0021, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 1.9230769230769231, | |
| "grad_norm": 23.81159747484969, | |
| "learning_rate": 3.443168205838323e-06, | |
| "loss": 0.0144, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 1.9257294429708223, | |
| "grad_norm": 3.577888638195551, | |
| "learning_rate": 3.428498056701665e-06, | |
| "loss": 0.0071, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 1.9283819628647216, | |
| "grad_norm": 0.3401435550413983, | |
| "learning_rate": 3.413842903460896e-06, | |
| "loss": 0.0024, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 1.9310344827586206, | |
| "grad_norm": 0.1764668896414946, | |
| "learning_rate": 3.39920288596131e-06, | |
| "loss": 0.0022, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 1.93368700265252, | |
| "grad_norm": 0.11599443588337781, | |
| "learning_rate": 3.3845781439037695e-06, | |
| "loss": 0.002, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 1.936339522546419, | |
| "grad_norm": 0.26998040037052706, | |
| "learning_rate": 3.369968816843375e-06, | |
| "loss": 0.0021, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 1.9389920424403182, | |
| "grad_norm": 0.11246222321274224, | |
| "learning_rate": 3.3553750441881266e-06, | |
| "loss": 0.002, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 1.9416445623342176, | |
| "grad_norm": 14.149523916959522, | |
| "learning_rate": 3.3407969651976045e-06, | |
| "loss": 0.0107, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 1.9442970822281167, | |
| "grad_norm": 0.11957540686765213, | |
| "learning_rate": 3.326234718981628e-06, | |
| "loss": 0.002, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 1.9469496021220158, | |
| "grad_norm": 12.058904025993744, | |
| "learning_rate": 3.311688444498937e-06, | |
| "loss": 0.0124, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 1.9496021220159152, | |
| "grad_norm": 13.396218874482619, | |
| "learning_rate": 3.2971582805558622e-06, | |
| "loss": 0.0081, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 1.9522546419098143, | |
| "grad_norm": 29.29268777424113, | |
| "learning_rate": 3.2826443658049977e-06, | |
| "loss": 0.0158, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 1.9549071618037135, | |
| "grad_norm": 0.46579167700659485, | |
| "learning_rate": 3.2681468387438876e-06, | |
| "loss": 0.0021, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 1.9575596816976129, | |
| "grad_norm": 0.3536695125051511, | |
| "learning_rate": 3.253665837713694e-06, | |
| "loss": 0.0022, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 1.9602122015915118, | |
| "grad_norm": 6.891295718169065, | |
| "learning_rate": 3.239201500897881e-06, | |
| "loss": 0.0033, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 1.9628647214854111, | |
| "grad_norm": 0.7698901035663716, | |
| "learning_rate": 3.224753966320898e-06, | |
| "loss": 0.0024, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 1.9655172413793105, | |
| "grad_norm": 0.11484925883131841, | |
| "learning_rate": 3.2103233718468574e-06, | |
| "loss": 0.0019, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 1.9681697612732094, | |
| "grad_norm": 0.11269632332694055, | |
| "learning_rate": 3.1959098551782285e-06, | |
| "loss": 0.0019, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 1.9708222811671088, | |
| "grad_norm": 0.10750371446519882, | |
| "learning_rate": 3.181513553854514e-06, | |
| "loss": 0.0018, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 1.973474801061008, | |
| "grad_norm": 27.91274099515536, | |
| "learning_rate": 3.167134605250938e-06, | |
| "loss": 0.0236, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 1.976127320954907, | |
| "grad_norm": 4.635156938141786, | |
| "learning_rate": 3.152773146577138e-06, | |
| "loss": 0.0028, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 1.9787798408488064, | |
| "grad_norm": 0.10837590253402313, | |
| "learning_rate": 3.138429314875865e-06, | |
| "loss": 0.0018, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 1.9814323607427056, | |
| "grad_norm": 0.10512317169385756, | |
| "learning_rate": 3.1241032470216564e-06, | |
| "loss": 0.0018, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 1.9840848806366047, | |
| "grad_norm": 0.15713070471420854, | |
| "learning_rate": 3.109795079719544e-06, | |
| "loss": 0.0018, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 1.986737400530504, | |
| "grad_norm": 0.1117481175631464, | |
| "learning_rate": 3.0955049495037435e-06, | |
| "loss": 0.0018, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 1.9893899204244032, | |
| "grad_norm": 0.8320658742184063, | |
| "learning_rate": 3.081232992736355e-06, | |
| "loss": 0.0022, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 1.9920424403183024, | |
| "grad_norm": 1.1226609993016856, | |
| "learning_rate": 3.0669793456060613e-06, | |
| "loss": 0.002, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 1.9946949602122017, | |
| "grad_norm": 9.229479885535163, | |
| "learning_rate": 3.052744144126826e-06, | |
| "loss": 0.0042, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 1.9973474801061006, | |
| "grad_norm": 18.484575713900153, | |
| "learning_rate": 3.0385275241365965e-06, | |
| "loss": 0.0055, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.1049135207644034, | |
| "learning_rate": 3.024329621296008e-06, | |
| "loss": 0.0017, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "eval_loss": 1.2834593057632446, | |
| "eval_runtime": 175.5899, | |
| "eval_samples_per_second": 12.045, | |
| "eval_steps_per_second": 1.509, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 2.0026525198938994, | |
| "grad_norm": 0.10101414623833138, | |
| "learning_rate": 3.0101505710870914e-06, | |
| "loss": 0.0017, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 2.0053050397877983, | |
| "grad_norm": 121.10117336213098, | |
| "learning_rate": 2.9959905088119777e-06, | |
| "loss": 0.017, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 2.0079575596816976, | |
| "grad_norm": 0.09676049678711801, | |
| "learning_rate": 2.981849569591606e-06, | |
| "loss": 0.0017, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 2.010610079575597, | |
| "grad_norm": 0.09764992907138446, | |
| "learning_rate": 2.9677278883644367e-06, | |
| "loss": 0.0017, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 2.013262599469496, | |
| "grad_norm": 0.12161105471130512, | |
| "learning_rate": 2.9536255998851615e-06, | |
| "loss": 0.0017, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 2.0159151193633953, | |
| "grad_norm": 0.3552964778774677, | |
| "learning_rate": 2.9395428387234192e-06, | |
| "loss": 0.0018, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 2.018567639257294, | |
| "grad_norm": 18.783349816477077, | |
| "learning_rate": 2.9254797392625146e-06, | |
| "loss": 0.0059, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 2.0212201591511936, | |
| "grad_norm": 0.09852900541452761, | |
| "learning_rate": 2.9114364356981274e-06, | |
| "loss": 0.0017, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 2.023872679045093, | |
| "grad_norm": 0.1438724087105207, | |
| "learning_rate": 2.8974130620370405e-06, | |
| "loss": 0.0017, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 2.026525198938992, | |
| "grad_norm": 29.762842374389095, | |
| "learning_rate": 2.883409752095857e-06, | |
| "loss": 0.1009, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 2.029177718832891, | |
| "grad_norm": 0.17818985776038995, | |
| "learning_rate": 2.8694266394997238e-06, | |
| "loss": 0.0017, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 2.0318302387267906, | |
| "grad_norm": 3.859943692060382, | |
| "learning_rate": 2.8554638576810565e-06, | |
| "loss": 0.0021, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 2.0344827586206895, | |
| "grad_norm": 0.625503230759066, | |
| "learning_rate": 2.8415215398782657e-06, | |
| "loss": 0.0017, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 2.037135278514589, | |
| "grad_norm": 0.4145565521734179, | |
| "learning_rate": 2.827599819134489e-06, | |
| "loss": 0.002, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 2.0397877984084882, | |
| "grad_norm": 0.24554217445213175, | |
| "learning_rate": 2.813698828296312e-06, | |
| "loss": 0.0017, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 2.042440318302387, | |
| "grad_norm": 11.827792784868253, | |
| "learning_rate": 2.799818700012518e-06, | |
| "loss": 0.0031, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 2.0450928381962865, | |
| "grad_norm": 0.09099465142665557, | |
| "learning_rate": 2.7859595667328027e-06, | |
| "loss": 0.0016, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 2.047745358090186, | |
| "grad_norm": 0.14639851471248677, | |
| "learning_rate": 2.772121560706522e-06, | |
| "loss": 0.0016, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 2.050397877984085, | |
| "grad_norm": 0.08792165154659169, | |
| "learning_rate": 2.758304813981428e-06, | |
| "loss": 0.0015, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 2.053050397877984, | |
| "grad_norm": 1.5842864989485481, | |
| "learning_rate": 2.7445094584024067e-06, | |
| "loss": 0.0019, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 2.055702917771883, | |
| "grad_norm": 0.09802686427150523, | |
| "learning_rate": 2.7307356256102215e-06, | |
| "loss": 0.0015, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 2.0583554376657824, | |
| "grad_norm": 26.05241910155726, | |
| "learning_rate": 2.716983447040257e-06, | |
| "loss": 0.0528, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 2.061007957559682, | |
| "grad_norm": 58.20790352116913, | |
| "learning_rate": 2.703253053921266e-06, | |
| "loss": 0.126, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 2.0636604774535807, | |
| "grad_norm": 1.0111648799278619, | |
| "learning_rate": 2.689544577274113e-06, | |
| "loss": 0.0017, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 2.06631299734748, | |
| "grad_norm": 0.23489459557522469, | |
| "learning_rate": 2.6758581479105274e-06, | |
| "loss": 0.0016, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 2.0689655172413794, | |
| "grad_norm": 53.804694742394936, | |
| "learning_rate": 2.6621938964318593e-06, | |
| "loss": 0.2099, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 2.0716180371352784, | |
| "grad_norm": 0.5405793593391763, | |
| "learning_rate": 2.6485519532278235e-06, | |
| "loss": 0.0017, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 2.0742705570291777, | |
| "grad_norm": 0.14108840494858138, | |
| "learning_rate": 2.6349324484752612e-06, | |
| "loss": 0.0015, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 2.076923076923077, | |
| "grad_norm": 0.08587398205977333, | |
| "learning_rate": 2.621335512136899e-06, | |
| "loss": 0.0015, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 2.079575596816976, | |
| "grad_norm": 0.08756504123370183, | |
| "learning_rate": 2.6077612739601015e-06, | |
| "loss": 0.0015, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 2.0822281167108754, | |
| "grad_norm": 0.39557920489381565, | |
| "learning_rate": 2.5942098634756475e-06, | |
| "loss": 0.0017, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 2.0848806366047747, | |
| "grad_norm": 2.011210517395444, | |
| "learning_rate": 2.580681409996477e-06, | |
| "loss": 0.0026, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 2.0875331564986737, | |
| "grad_norm": 11.55596230506205, | |
| "learning_rate": 2.567176042616471e-06, | |
| "loss": 0.0047, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 2.090185676392573, | |
| "grad_norm": 2.4156310012643103, | |
| "learning_rate": 2.5536938902092056e-06, | |
| "loss": 0.0021, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 2.092838196286472, | |
| "grad_norm": 0.18764992825176918, | |
| "learning_rate": 2.5402350814267364e-06, | |
| "loss": 0.0017, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 2.0954907161803713, | |
| "grad_norm": 2.2919461648589556, | |
| "learning_rate": 2.526799744698366e-06, | |
| "loss": 0.0025, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 2.0981432360742707, | |
| "grad_norm": 7.710549965295524, | |
| "learning_rate": 2.5133880082294155e-06, | |
| "loss": 0.005, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 2.1007957559681696, | |
| "grad_norm": 0.15012400497585224, | |
| "learning_rate": 2.5000000000000015e-06, | |
| "loss": 0.0016, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 2.103448275862069, | |
| "grad_norm": 0.08220096963803952, | |
| "learning_rate": 2.486635847763815e-06, | |
| "loss": 0.0014, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 2.1061007957559683, | |
| "grad_norm": 0.7783894450675292, | |
| "learning_rate": 2.473295679046911e-06, | |
| "loss": 0.0018, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 2.1087533156498672, | |
| "grad_norm": 0.12477741474784955, | |
| "learning_rate": 2.4599796211464772e-06, | |
| "loss": 0.0015, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 2.1114058355437666, | |
| "grad_norm": 0.08144394193328244, | |
| "learning_rate": 2.446687801129628e-06, | |
| "loss": 0.0014, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 2.114058355437666, | |
| "grad_norm": 31.61865013891495, | |
| "learning_rate": 2.433420345832191e-06, | |
| "loss": 0.0235, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 2.116710875331565, | |
| "grad_norm": 0.16630727907217205, | |
| "learning_rate": 2.4201773818574956e-06, | |
| "loss": 0.0015, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 2.1193633952254642, | |
| "grad_norm": 0.08241735796809634, | |
| "learning_rate": 2.406959035575166e-06, | |
| "loss": 0.0014, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 2.1220159151193636, | |
| "grad_norm": 3.062034039881723, | |
| "learning_rate": 2.393765433119913e-06, | |
| "loss": 0.0021, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 2.1246684350132625, | |
| "grad_norm": 0.11937503096401074, | |
| "learning_rate": 2.3805967003903336e-06, | |
| "loss": 0.0014, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 2.127320954907162, | |
| "grad_norm": 20.773342722596187, | |
| "learning_rate": 2.3674529630477074e-06, | |
| "loss": 0.0073, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 2.129973474801061, | |
| "grad_norm": 0.46381492457559487, | |
| "learning_rate": 2.3543343465147956e-06, | |
| "loss": 0.0019, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 2.13262599469496, | |
| "grad_norm": 0.0804988750293357, | |
| "learning_rate": 2.341240975974653e-06, | |
| "loss": 0.0014, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 2.1352785145888595, | |
| "grad_norm": 0.3497581554814395, | |
| "learning_rate": 2.328172976369421e-06, | |
| "loss": 0.0016, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 2.1379310344827585, | |
| "grad_norm": 0.07679137336103294, | |
| "learning_rate": 2.315130472399145e-06, | |
| "loss": 0.0013, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 2.140583554376658, | |
| "grad_norm": 0.07466622329861163, | |
| "learning_rate": 2.302113588520578e-06, | |
| "loss": 0.0013, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 2.143236074270557, | |
| "grad_norm": 0.07758030739160703, | |
| "learning_rate": 2.289122448945997e-06, | |
| "loss": 0.0013, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 2.145888594164456, | |
| "grad_norm": 0.16681181140025278, | |
| "learning_rate": 2.2761571776420187e-06, | |
| "loss": 0.0014, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 2.1485411140583555, | |
| "grad_norm": 7.399139547650065, | |
| "learning_rate": 2.263217898328415e-06, | |
| "loss": 0.0049, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 2.151193633952255, | |
| "grad_norm": 0.5942035002264717, | |
| "learning_rate": 2.2503047344769256e-06, | |
| "loss": 0.0014, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 2.1538461538461537, | |
| "grad_norm": 0.08081617536253838, | |
| "learning_rate": 2.23741780931009e-06, | |
| "loss": 0.0013, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 2.156498673740053, | |
| "grad_norm": 0.7462752578343561, | |
| "learning_rate": 2.2245572458000714e-06, | |
| "loss": 0.0014, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 2.159151193633952, | |
| "grad_norm": 19.74892285398778, | |
| "learning_rate": 2.211723166667475e-06, | |
| "loss": 0.006, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 2.1618037135278514, | |
| "grad_norm": 0.08005713357681245, | |
| "learning_rate": 2.198915694380182e-06, | |
| "loss": 0.0013, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 2.1644562334217508, | |
| "grad_norm": 0.6071057574038246, | |
| "learning_rate": 2.1861349511521817e-06, | |
| "loss": 0.0014, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 2.1671087533156497, | |
| "grad_norm": 0.0752186839187155, | |
| "learning_rate": 2.173381058942402e-06, | |
| "loss": 0.0013, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 2.169761273209549, | |
| "grad_norm": 0.1305584374266705, | |
| "learning_rate": 2.1606541394535528e-06, | |
| "loss": 0.0013, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 2.1724137931034484, | |
| "grad_norm": 0.07375217036909364, | |
| "learning_rate": 2.147954314130955e-06, | |
| "loss": 0.0013, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 2.1750663129973473, | |
| "grad_norm": 0.08838482083846465, | |
| "learning_rate": 2.135281704161386e-06, | |
| "loss": 0.0013, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 2.1777188328912467, | |
| "grad_norm": 0.07360478242627387, | |
| "learning_rate": 2.122636430471926e-06, | |
| "loss": 0.0013, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 2.180371352785146, | |
| "grad_norm": 0.08514009164619807, | |
| "learning_rate": 2.1100186137288005e-06, | |
| "loss": 0.0013, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 2.183023872679045, | |
| "grad_norm": 125.92010206347204, | |
| "learning_rate": 2.0974283743362283e-06, | |
| "loss": 0.0039, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 2.1856763925729443, | |
| "grad_norm": 0.07215971233954403, | |
| "learning_rate": 2.084865832435278e-06, | |
| "loss": 0.0012, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 2.1883289124668437, | |
| "grad_norm": 0.07285977518445678, | |
| "learning_rate": 2.072331107902713e-06, | |
| "loss": 0.0013, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 2.1909814323607426, | |
| "grad_norm": 0.07234080373019404, | |
| "learning_rate": 2.0598243203498562e-06, | |
| "loss": 0.0013, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 2.193633952254642, | |
| "grad_norm": 0.0886750422890646, | |
| "learning_rate": 2.0473455891214416e-06, | |
| "loss": 0.0013, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 2.1962864721485413, | |
| "grad_norm": 0.07148362497663868, | |
| "learning_rate": 2.034895033294483e-06, | |
| "loss": 0.0012, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 2.1989389920424403, | |
| "grad_norm": 0.09041579908666639, | |
| "learning_rate": 2.0224727716771297e-06, | |
| "loss": 0.0013, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 2.2015915119363396, | |
| "grad_norm": 0.26201814358060044, | |
| "learning_rate": 2.0100789228075375e-06, | |
| "loss": 0.0013, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 2.2042440318302385, | |
| "grad_norm": 0.07092692566307471, | |
| "learning_rate": 1.9977136049527348e-06, | |
| "loss": 0.0012, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 2.206896551724138, | |
| "grad_norm": 0.7680618172089979, | |
| "learning_rate": 1.9853769361074964e-06, | |
| "loss": 0.0013, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 2.2095490716180373, | |
| "grad_norm": 0.08149814922086594, | |
| "learning_rate": 1.973069033993223e-06, | |
| "loss": 0.0012, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 2.212201591511936, | |
| "grad_norm": 0.06918207979942406, | |
| "learning_rate": 1.960790016056801e-06, | |
| "loss": 0.0012, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 2.2148541114058355, | |
| "grad_norm": 0.06941092834274308, | |
| "learning_rate": 1.9485399994694998e-06, | |
| "loss": 0.0012, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 2.217506631299735, | |
| "grad_norm": 0.11042988652448069, | |
| "learning_rate": 1.9363191011258426e-06, | |
| "loss": 0.0013, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 2.220159151193634, | |
| "grad_norm": 0.06929264033253076, | |
| "learning_rate": 1.9241274376425e-06, | |
| "loss": 0.0012, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 2.222811671087533, | |
| "grad_norm": 0.07191532035559675, | |
| "learning_rate": 1.9119651253571676e-06, | |
| "loss": 0.0012, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 2.2254641909814326, | |
| "grad_norm": 0.19378968639900365, | |
| "learning_rate": 1.89983228032746e-06, | |
| "loss": 0.0013, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 2.2281167108753315, | |
| "grad_norm": 32.12461736878749, | |
| "learning_rate": 1.8877290183298058e-06, | |
| "loss": 0.0479, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 2.230769230769231, | |
| "grad_norm": 0.43830491728757237, | |
| "learning_rate": 1.8756554548583377e-06, | |
| "loss": 0.0013, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 2.2334217506631298, | |
| "grad_norm": 0.2187235621150115, | |
| "learning_rate": 1.863611705123798e-06, | |
| "loss": 0.0013, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 2.236074270557029, | |
| "grad_norm": 0.6143192857944316, | |
| "learning_rate": 1.8515978840524302e-06, | |
| "loss": 0.0014, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 2.2387267904509285, | |
| "grad_norm": 0.11313871454305385, | |
| "learning_rate": 1.8396141062848877e-06, | |
| "loss": 0.0012, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 2.2413793103448274, | |
| "grad_norm": 45.11241620874413, | |
| "learning_rate": 1.827660486175139e-06, | |
| "loss": 0.0059, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 2.2440318302387268, | |
| "grad_norm": 0.06623676243698338, | |
| "learning_rate": 1.8157371377893769e-06, | |
| "loss": 0.0012, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 2.246684350132626, | |
| "grad_norm": 0.15303018443046673, | |
| "learning_rate": 1.803844174904928e-06, | |
| "loss": 0.0012, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 2.249336870026525, | |
| "grad_norm": 0.0670464354914666, | |
| "learning_rate": 1.7919817110091691e-06, | |
| "loss": 0.0012, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 2.2519893899204244, | |
| "grad_norm": 0.06709139284552051, | |
| "learning_rate": 1.7801498592984445e-06, | |
| "loss": 0.0012, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 2.2546419098143238, | |
| "grad_norm": 1.8527488282182363, | |
| "learning_rate": 1.7683487326769826e-06, | |
| "loss": 0.0017, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 2.2572944297082227, | |
| "grad_norm": 0.0722005269953955, | |
| "learning_rate": 1.756578443755822e-06, | |
| "loss": 0.0011, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 2.259946949602122, | |
| "grad_norm": 0.06645532711286986, | |
| "learning_rate": 1.7448391048517378e-06, | |
| "loss": 0.0012, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 2.2625994694960214, | |
| "grad_norm": 0.06561899843988953, | |
| "learning_rate": 1.7331308279861641e-06, | |
| "loss": 0.0011, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 2.2652519893899203, | |
| "grad_norm": 0.0661860205744516, | |
| "learning_rate": 1.7214537248841317e-06, | |
| "loss": 0.0012, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 2.2679045092838197, | |
| "grad_norm": 0.6079953038884391, | |
| "learning_rate": 1.709807906973196e-06, | |
| "loss": 0.0014, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 2.270557029177719, | |
| "grad_norm": 0.06488881068760848, | |
| "learning_rate": 1.6981934853823796e-06, | |
| "loss": 0.0011, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 2.273209549071618, | |
| "grad_norm": 0.06797124970633543, | |
| "learning_rate": 1.6866105709411069e-06, | |
| "loss": 0.0011, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 2.2758620689655173, | |
| "grad_norm": 76.16571947400874, | |
| "learning_rate": 1.6750592741781496e-06, | |
| "loss": 0.3501, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 2.2785145888594163, | |
| "grad_norm": 0.06558683917842384, | |
| "learning_rate": 1.6635397053205704e-06, | |
| "loss": 0.0011, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 2.2811671087533156, | |
| "grad_norm": 0.09655686844705165, | |
| "learning_rate": 1.6520519742926705e-06, | |
| "loss": 0.0011, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 2.283819628647215, | |
| "grad_norm": 0.0686245128786292, | |
| "learning_rate": 1.640596190714947e-06, | |
| "loss": 0.0012, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 2.286472148541114, | |
| "grad_norm": 0.0650546943127916, | |
| "learning_rate": 1.6291724639030353e-06, | |
| "loss": 0.0011, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 2.2891246684350133, | |
| "grad_norm": 208.7702949244788, | |
| "learning_rate": 1.6177809028666769e-06, | |
| "loss": 0.012, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 2.2917771883289126, | |
| "grad_norm": 29.89517410626624, | |
| "learning_rate": 1.6064216163086716e-06, | |
| "loss": 0.0623, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 2.2944297082228116, | |
| "grad_norm": 0.0653400164041259, | |
| "learning_rate": 1.595094712623843e-06, | |
| "loss": 0.0011, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 2.297082228116711, | |
| "grad_norm": 0.06671777147106228, | |
| "learning_rate": 1.5838002998980107e-06, | |
| "loss": 0.0011, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 2.29973474801061, | |
| "grad_norm": 0.08549683500973079, | |
| "learning_rate": 1.5725384859069454e-06, | |
| "loss": 0.0011, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 2.302387267904509, | |
| "grad_norm": 1.6239494853557073, | |
| "learning_rate": 1.5613093781153503e-06, | |
| "loss": 0.0015, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 2.3050397877984086, | |
| "grad_norm": 0.06268027521119812, | |
| "learning_rate": 1.550113083675836e-06, | |
| "loss": 0.0011, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 2.3076923076923075, | |
| "grad_norm": 21.69711575041162, | |
| "learning_rate": 1.5389497094278861e-06, | |
| "loss": 0.007, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 2.310344827586207, | |
| "grad_norm": 0.06701049997320575, | |
| "learning_rate": 1.5278193618968584e-06, | |
| "loss": 0.0011, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 2.312997347480106, | |
| "grad_norm": 0.07382333923573142, | |
| "learning_rate": 1.5167221472929489e-06, | |
| "loss": 0.0011, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 2.315649867374005, | |
| "grad_norm": 76.7596193056003, | |
| "learning_rate": 1.5056581715101887e-06, | |
| "loss": 0.0647, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 2.3183023872679045, | |
| "grad_norm": 15.64396853251376, | |
| "learning_rate": 1.4946275401254301e-06, | |
| "loss": 0.0063, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 2.320954907161804, | |
| "grad_norm": 0.1214125458833792, | |
| "learning_rate": 1.4836303583973384e-06, | |
| "loss": 0.0011, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 2.323607427055703, | |
| "grad_norm": 122.35872430226972, | |
| "learning_rate": 1.472666731265394e-06, | |
| "loss": 0.0435, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 2.326259946949602, | |
| "grad_norm": 0.06264186932736737, | |
| "learning_rate": 1.4617367633488816e-06, | |
| "loss": 0.0011, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 2.3289124668435015, | |
| "grad_norm": 0.06412223063437182, | |
| "learning_rate": 1.4508405589458968e-06, | |
| "loss": 0.0011, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 2.3315649867374004, | |
| "grad_norm": 0.06418332438027141, | |
| "learning_rate": 1.4399782220323515e-06, | |
| "loss": 0.0011, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 2.3342175066313, | |
| "grad_norm": 0.06185564601567612, | |
| "learning_rate": 1.4291498562609802e-06, | |
| "loss": 0.0011, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 2.336870026525199, | |
| "grad_norm": 0.06336656034657022, | |
| "learning_rate": 1.4183555649603503e-06, | |
| "loss": 0.0011, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 2.339522546419098, | |
| "grad_norm": 0.06146985918004954, | |
| "learning_rate": 1.4075954511338784e-06, | |
| "loss": 0.0011, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 2.3421750663129974, | |
| "grad_norm": 0.08053700437359687, | |
| "learning_rate": 1.396869617458846e-06, | |
| "loss": 0.0011, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 2.344827586206897, | |
| "grad_norm": 0.5218052672812636, | |
| "learning_rate": 1.3861781662854162e-06, | |
| "loss": 0.0012, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 2.3474801061007957, | |
| "grad_norm": 1.5216995484373934, | |
| "learning_rate": 1.3755211996356687e-06, | |
| "loss": 0.0015, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 2.350132625994695, | |
| "grad_norm": 0.0621162072022446, | |
| "learning_rate": 1.3648988192026108e-06, | |
| "loss": 0.0011, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 2.352785145888594, | |
| "grad_norm": 0.061884930132394454, | |
| "learning_rate": 1.3543111263492165e-06, | |
| "loss": 0.0011, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 2.3554376657824934, | |
| "grad_norm": 0.06028055456747343, | |
| "learning_rate": 1.3437582221074574e-06, | |
| "loss": 0.001, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 2.3580901856763927, | |
| "grad_norm": 0.08255518588823123, | |
| "learning_rate": 1.3332402071773376e-06, | |
| "loss": 0.0011, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 2.3607427055702916, | |
| "grad_norm": 0.0660571988137438, | |
| "learning_rate": 1.322757181925937e-06, | |
| "loss": 0.0011, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 2.363395225464191, | |
| "grad_norm": 5.704097339238524, | |
| "learning_rate": 1.3123092463864456e-06, | |
| "loss": 0.0013, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 2.3660477453580904, | |
| "grad_norm": 139.48648832304622, | |
| "learning_rate": 1.301896500257217e-06, | |
| "loss": 0.1495, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 2.3687002652519893, | |
| "grad_norm": 0.06562913191383259, | |
| "learning_rate": 1.2915190429008084e-06, | |
| "loss": 0.0011, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 2.3713527851458887, | |
| "grad_norm": 0.06094523998371494, | |
| "learning_rate": 1.2811769733430406e-06, | |
| "loss": 0.0011, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 2.3740053050397876, | |
| "grad_norm": 0.06227692335984811, | |
| "learning_rate": 1.2708703902720538e-06, | |
| "loss": 0.001, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 2.376657824933687, | |
| "grad_norm": 0.06695392791086113, | |
| "learning_rate": 1.260599392037356e-06, | |
| "loss": 0.0011, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 2.3793103448275863, | |
| "grad_norm": 0.21611121807915867, | |
| "learning_rate": 1.250364076648894e-06, | |
| "loss": 0.0012, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 2.381962864721485, | |
| "grad_norm": 1.8698349335731828, | |
| "learning_rate": 1.2401645417761126e-06, | |
| "loss": 0.0014, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 2.3846153846153846, | |
| "grad_norm": 0.5713724389512606, | |
| "learning_rate": 1.2300008847470252e-06, | |
| "loss": 0.0012, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 2.387267904509284, | |
| "grad_norm": 47.11357878755216, | |
| "learning_rate": 1.2198732025472876e-06, | |
| "loss": 0.1677, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 2.389920424403183, | |
| "grad_norm": 0.06090183448028676, | |
| "learning_rate": 1.2097815918192652e-06, | |
| "loss": 0.001, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 2.3925729442970822, | |
| "grad_norm": 0.060156871686836734, | |
| "learning_rate": 1.1997261488611173e-06, | |
| "loss": 0.001, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 2.3952254641909816, | |
| "grad_norm": 0.06807379303255552, | |
| "learning_rate": 1.1897069696258756e-06, | |
| "loss": 0.001, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 2.3978779840848805, | |
| "grad_norm": 0.07237555713720957, | |
| "learning_rate": 1.1797241497205285e-06, | |
| "loss": 0.0011, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 2.40053050397878, | |
| "grad_norm": 0.06004611220653036, | |
| "learning_rate": 1.1697777844051105e-06, | |
| "loss": 0.001, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 2.4031830238726792, | |
| "grad_norm": 0.05909328032696501, | |
| "learning_rate": 1.1598679685917901e-06, | |
| "loss": 0.001, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 2.405835543766578, | |
| "grad_norm": 0.0611763251688093, | |
| "learning_rate": 1.1499947968439673e-06, | |
| "loss": 0.001, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 2.4084880636604775, | |
| "grad_norm": 0.07785505410015264, | |
| "learning_rate": 1.1401583633753683e-06, | |
| "loss": 0.001, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 2.411140583554377, | |
| "grad_norm": 0.07389775729807248, | |
| "learning_rate": 1.1303587620491513e-06, | |
| "loss": 0.001, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 2.413793103448276, | |
| "grad_norm": 0.07855752742958044, | |
| "learning_rate": 1.120596086377005e-06, | |
| "loss": 0.001, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 2.416445623342175, | |
| "grad_norm": 0.4890171657386728, | |
| "learning_rate": 1.1108704295182582e-06, | |
| "loss": 0.0012, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 2.419098143236074, | |
| "grad_norm": 41.63509191196258, | |
| "learning_rate": 1.1011818842789928e-06, | |
| "loss": 0.0063, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 2.4217506631299734, | |
| "grad_norm": 0.06306195603906277, | |
| "learning_rate": 1.0915305431111561e-06, | |
| "loss": 0.001, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 2.424403183023873, | |
| "grad_norm": 0.9707371949916439, | |
| "learning_rate": 1.0819164981116825e-06, | |
| "loss": 0.0012, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 2.4270557029177717, | |
| "grad_norm": 0.0811953522176445, | |
| "learning_rate": 1.0723398410216085e-06, | |
| "loss": 0.001, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 2.429708222811671, | |
| "grad_norm": 0.06384966998157086, | |
| "learning_rate": 1.0628006632251975e-06, | |
| "loss": 0.001, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 2.4323607427055705, | |
| "grad_norm": 0.05995114120465396, | |
| "learning_rate": 1.0532990557490768e-06, | |
| "loss": 0.001, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 2.4350132625994694, | |
| "grad_norm": 2.4610924632978253, | |
| "learning_rate": 1.043835109261357e-06, | |
| "loss": 0.0016, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 2.4376657824933687, | |
| "grad_norm": 0.05878584207452931, | |
| "learning_rate": 1.034408914070779e-06, | |
| "loss": 0.001, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 2.4403183023872677, | |
| "grad_norm": 0.0578918748475833, | |
| "learning_rate": 1.0250205601258407e-06, | |
| "loss": 0.001, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 2.442970822281167, | |
| "grad_norm": 126.67531388600968, | |
| "learning_rate": 1.0156701370139454e-06, | |
| "loss": 0.0323, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 2.4456233421750664, | |
| "grad_norm": 0.08277846289252522, | |
| "learning_rate": 1.0063577339605452e-06, | |
| "loss": 0.001, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 2.4482758620689653, | |
| "grad_norm": 0.05811273002395958, | |
| "learning_rate": 9.970834398282887e-07, | |
| "loss": 0.001, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 2.4509283819628647, | |
| "grad_norm": 0.06580267260863659, | |
| "learning_rate": 9.878473431161767e-07, | |
| "loss": 0.001, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 2.453580901856764, | |
| "grad_norm": 0.1232830259114401, | |
| "learning_rate": 9.786495319587136e-07, | |
| "loss": 0.001, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 2.456233421750663, | |
| "grad_norm": 0.28685021836803104, | |
| "learning_rate": 9.694900941250674e-07, | |
| "loss": 0.0011, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 2.4588859416445623, | |
| "grad_norm": 0.0628208200867451, | |
| "learning_rate": 9.603691170182316e-07, | |
| "loss": 0.001, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 2.4615384615384617, | |
| "grad_norm": 0.05767179007096357, | |
| "learning_rate": 9.512866876741949e-07, | |
| "loss": 0.001, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 2.4641909814323606, | |
| "grad_norm": 0.05651101689159219, | |
| "learning_rate": 9.42242892761106e-07, | |
| "loss": 0.001, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 2.46684350132626, | |
| "grad_norm": 0.1312083202582577, | |
| "learning_rate": 9.332378185784491e-07, | |
| "loss": 0.001, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 2.4694960212201593, | |
| "grad_norm": 0.0794531209151415, | |
| "learning_rate": 9.242715510562195e-07, | |
| "loss": 0.001, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 2.4721485411140582, | |
| "grad_norm": 0.06758011176979471, | |
| "learning_rate": 9.153441757541026e-07, | |
| "loss": 0.001, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 2.4748010610079576, | |
| "grad_norm": 0.05628258012555615, | |
| "learning_rate": 9.064557778606631e-07, | |
| "loss": 0.001, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 2.477453580901857, | |
| "grad_norm": 0.8798413491531053, | |
| "learning_rate": 8.97606442192524e-07, | |
| "loss": 0.0013, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 2.480106100795756, | |
| "grad_norm": 0.058598663584796984, | |
| "learning_rate": 8.887962531935612e-07, | |
| "loss": 0.001, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 2.4827586206896552, | |
| "grad_norm": 0.09406748583021195, | |
| "learning_rate": 8.800252949340998e-07, | |
| "loss": 0.001, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 2.4854111405835546, | |
| "grad_norm": 0.11316186423995639, | |
| "learning_rate": 8.712936511101056e-07, | |
| "loss": 0.001, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 2.4880636604774535, | |
| "grad_norm": 0.05689090074692826, | |
| "learning_rate": 8.62601405042397e-07, | |
| "loss": 0.001, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 2.490716180371353, | |
| "grad_norm": 0.05648519376070825, | |
| "learning_rate": 8.539486396758357e-07, | |
| "loss": 0.001, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 2.493368700265252, | |
| "grad_norm": 16.679551063134323, | |
| "learning_rate": 8.453354375785477e-07, | |
| "loss": 0.0074, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 2.496021220159151, | |
| "grad_norm": 3.5045054099899935, | |
| "learning_rate": 8.367618809411299e-07, | |
| "loss": 0.0014, | |
| "step": 941 | |
| }, | |
| { | |
| "epoch": 2.4986737400530505, | |
| "grad_norm": 0.0629968232537991, | |
| "learning_rate": 8.282280515758639e-07, | |
| "loss": 0.001, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 2.5013262599469495, | |
| "grad_norm": 0.056459018233367006, | |
| "learning_rate": 8.197340309159429e-07, | |
| "loss": 0.001, | |
| "step": 943 | |
| }, | |
| { | |
| "epoch": 2.503978779840849, | |
| "grad_norm": 0.057128628619581345, | |
| "learning_rate": 8.112799000146853e-07, | |
| "loss": 0.001, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 2.5066312997347477, | |
| "grad_norm": 9.50454174227497, | |
| "learning_rate": 8.02865739544767e-07, | |
| "loss": 0.0016, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 2.509283819628647, | |
| "grad_norm": 5.4634210425161855, | |
| "learning_rate": 7.944916297974498e-07, | |
| "loss": 0.0052, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 2.5119363395225465, | |
| "grad_norm": 0.05572183180594424, | |
| "learning_rate": 7.861576506818147e-07, | |
| "loss": 0.001, | |
| "step": 947 | |
| }, | |
| { | |
| "epoch": 2.5145888594164454, | |
| "grad_norm": 0.05649628277552702, | |
| "learning_rate": 7.778638817240042e-07, | |
| "loss": 0.001, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 2.5172413793103448, | |
| "grad_norm": 0.05583993949613943, | |
| "learning_rate": 7.696104020664552e-07, | |
| "loss": 0.001, | |
| "step": 949 | |
| }, | |
| { | |
| "epoch": 2.519893899204244, | |
| "grad_norm": 0.06926676295906672, | |
| "learning_rate": 7.613972904671496e-07, | |
| "loss": 0.001, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 2.522546419098143, | |
| "grad_norm": 0.33919856089089423, | |
| "learning_rate": 7.532246252988617e-07, | |
| "loss": 0.0011, | |
| "step": 951 | |
| }, | |
| { | |
| "epoch": 2.5251989389920424, | |
| "grad_norm": 0.0568461133850681, | |
| "learning_rate": 7.450924845484092e-07, | |
| "loss": 0.001, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 2.5278514588859418, | |
| "grad_norm": 0.05561015970931866, | |
| "learning_rate": 7.370009458159099e-07, | |
| "loss": 0.001, | |
| "step": 953 | |
| }, | |
| { | |
| "epoch": 2.5305039787798407, | |
| "grad_norm": 193.0480706166829, | |
| "learning_rate": 7.289500863140414e-07, | |
| "loss": 0.0609, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 2.53315649867374, | |
| "grad_norm": 0.05494039154416162, | |
| "learning_rate": 7.20939982867303e-07, | |
| "loss": 0.001, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 2.5358090185676394, | |
| "grad_norm": 5.829073488339818, | |
| "learning_rate": 7.129707119112838e-07, | |
| "loss": 0.0017, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 2.5384615384615383, | |
| "grad_norm": 0.05435253762937014, | |
| "learning_rate": 7.05042349491935e-07, | |
| "loss": 0.0009, | |
| "step": 957 | |
| }, | |
| { | |
| "epoch": 2.5411140583554377, | |
| "grad_norm": 0.055234367394669366, | |
| "learning_rate": 6.971549712648401e-07, | |
| "loss": 0.001, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 2.543766578249337, | |
| "grad_norm": 0.06126906513727519, | |
| "learning_rate": 6.893086524944953e-07, | |
| "loss": 0.001, | |
| "step": 959 | |
| }, | |
| { | |
| "epoch": 2.546419098143236, | |
| "grad_norm": 0.055051354699559824, | |
| "learning_rate": 6.815034680535915e-07, | |
| "loss": 0.001, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 2.5490716180371353, | |
| "grad_norm": 0.054625158391608, | |
| "learning_rate": 6.737394924223e-07, | |
| "loss": 0.001, | |
| "step": 961 | |
| }, | |
| { | |
| "epoch": 2.5517241379310347, | |
| "grad_norm": 0.05462583302647743, | |
| "learning_rate": 6.660167996875605e-07, | |
| "loss": 0.001, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 2.5543766578249336, | |
| "grad_norm": 0.0539700747311138, | |
| "learning_rate": 6.583354635423755e-07, | |
| "loss": 0.0009, | |
| "step": 963 | |
| }, | |
| { | |
| "epoch": 2.557029177718833, | |
| "grad_norm": 0.06025288814861885, | |
| "learning_rate": 6.506955572851059e-07, | |
| "loss": 0.0009, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 2.5596816976127323, | |
| "grad_norm": 2.8245737115251823, | |
| "learning_rate": 6.430971538187725e-07, | |
| "loss": 0.0015, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 2.5623342175066313, | |
| "grad_norm": 0.11963108172214902, | |
| "learning_rate": 6.355403256503595e-07, | |
| "loss": 0.001, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 2.5649867374005306, | |
| "grad_norm": 0.09183752042364275, | |
| "learning_rate": 6.280251448901253e-07, | |
| "loss": 0.001, | |
| "step": 967 | |
| }, | |
| { | |
| "epoch": 2.5676392572944295, | |
| "grad_norm": 0.054134332400518646, | |
| "learning_rate": 6.205516832509089e-07, | |
| "loss": 0.0009, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 2.570291777188329, | |
| "grad_norm": 0.05426296615344519, | |
| "learning_rate": 6.131200120474512e-07, | |
| "loss": 0.0009, | |
| "step": 969 | |
| }, | |
| { | |
| "epoch": 2.5729442970822283, | |
| "grad_norm": 0.05419212137654628, | |
| "learning_rate": 6.057302021957113e-07, | |
| "loss": 0.0009, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 2.575596816976127, | |
| "grad_norm": 0.09188409544827594, | |
| "learning_rate": 5.983823242121888e-07, | |
| "loss": 0.001, | |
| "step": 971 | |
| }, | |
| { | |
| "epoch": 2.5782493368700266, | |
| "grad_norm": 0.05510727973349538, | |
| "learning_rate": 5.910764482132575e-07, | |
| "loss": 0.001, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 2.5809018567639255, | |
| "grad_norm": 0.05667808363822762, | |
| "learning_rate": 5.838126439144875e-07, | |
| "loss": 0.001, | |
| "step": 973 | |
| }, | |
| { | |
| "epoch": 2.583554376657825, | |
| "grad_norm": 0.05447871452441066, | |
| "learning_rate": 5.765909806299863e-07, | |
| "loss": 0.0009, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 2.586206896551724, | |
| "grad_norm": 0.05427308536099464, | |
| "learning_rate": 5.694115272717326e-07, | |
| "loss": 0.0009, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 2.588859416445623, | |
| "grad_norm": 2.240544163367182, | |
| "learning_rate": 5.622743523489216e-07, | |
| "loss": 0.0015, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 2.5915119363395225, | |
| "grad_norm": 0.055255049579976116, | |
| "learning_rate": 5.551795239673146e-07, | |
| "loss": 0.0009, | |
| "step": 977 | |
| }, | |
| { | |
| "epoch": 2.594164456233422, | |
| "grad_norm": 0.055423684292052425, | |
| "learning_rate": 5.481271098285818e-07, | |
| "loss": 0.001, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 2.5968169761273208, | |
| "grad_norm": 0.054617622358565654, | |
| "learning_rate": 5.411171772296609e-07, | |
| "loss": 0.001, | |
| "step": 979 | |
| }, | |
| { | |
| "epoch": 2.59946949602122, | |
| "grad_norm": 0.14999271213831958, | |
| "learning_rate": 5.34149793062112e-07, | |
| "loss": 0.001, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 2.6021220159151195, | |
| "grad_norm": 1.167126402354413, | |
| "learning_rate": 5.272250238114857e-07, | |
| "loss": 0.0011, | |
| "step": 981 | |
| }, | |
| { | |
| "epoch": 2.6047745358090184, | |
| "grad_norm": 0.05627867368251513, | |
| "learning_rate": 5.203429355566797e-07, | |
| "loss": 0.0009, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 2.6074270557029178, | |
| "grad_norm": 84.0806645690484, | |
| "learning_rate": 5.13503593969315e-07, | |
| "loss": 0.1598, | |
| "step": 983 | |
| }, | |
| { | |
| "epoch": 2.610079575596817, | |
| "grad_norm": 1.1328372073341084, | |
| "learning_rate": 5.067070643131056e-07, | |
| "loss": 0.0013, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 2.612732095490716, | |
| "grad_norm": 0.0796073800123807, | |
| "learning_rate": 4.999534114432386e-07, | |
| "loss": 0.001, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 2.6153846153846154, | |
| "grad_norm": 0.05328800469497512, | |
| "learning_rate": 4.932426998057516e-07, | |
| "loss": 0.0009, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 2.618037135278515, | |
| "grad_norm": 0.05451165857633103, | |
| "learning_rate": 4.865749934369224e-07, | |
| "loss": 0.0009, | |
| "step": 987 | |
| }, | |
| { | |
| "epoch": 2.6206896551724137, | |
| "grad_norm": 0.05664990591190814, | |
| "learning_rate": 4.799503559626528e-07, | |
| "loss": 0.0009, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 2.623342175066313, | |
| "grad_norm": 0.3250507408971192, | |
| "learning_rate": 4.733688505978673e-07, | |
| "loss": 0.001, | |
| "step": 989 | |
| }, | |
| { | |
| "epoch": 2.6259946949602124, | |
| "grad_norm": 0.05434258563183708, | |
| "learning_rate": 4.668305401459022e-07, | |
| "loss": 0.0009, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 2.6286472148541113, | |
| "grad_norm": 0.061283545784569245, | |
| "learning_rate": 4.603354869979165e-07, | |
| "loss": 0.0009, | |
| "step": 991 | |
| }, | |
| { | |
| "epoch": 2.6312997347480107, | |
| "grad_norm": 0.053706180235464154, | |
| "learning_rate": 4.5388375313228595e-07, | |
| "loss": 0.0009, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 2.63395225464191, | |
| "grad_norm": 0.05378315775577338, | |
| "learning_rate": 4.4747540011401913e-07, | |
| "loss": 0.0009, | |
| "step": 993 | |
| }, | |
| { | |
| "epoch": 2.636604774535809, | |
| "grad_norm": 0.22382515301532738, | |
| "learning_rate": 4.4111048909416644e-07, | |
| "loss": 0.001, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 2.6392572944297084, | |
| "grad_norm": 0.05566658839487468, | |
| "learning_rate": 4.347890808092359e-07, | |
| "loss": 0.0009, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 2.6419098143236073, | |
| "grad_norm": 0.08617493762139619, | |
| "learning_rate": 4.2851123558061927e-07, | |
| "loss": 0.0009, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 2.6445623342175066, | |
| "grad_norm": 0.14979392407078365, | |
| "learning_rate": 4.2227701331400974e-07, | |
| "loss": 0.0009, | |
| "step": 997 | |
| }, | |
| { | |
| "epoch": 2.647214854111406, | |
| "grad_norm": 0.2293056663517138, | |
| "learning_rate": 4.1608647349883123e-07, | |
| "loss": 0.001, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 2.649867374005305, | |
| "grad_norm": 0.05731999235079868, | |
| "learning_rate": 4.0993967520767455e-07, | |
| "loss": 0.0009, | |
| "step": 999 | |
| }, | |
| { | |
| "epoch": 2.6525198938992043, | |
| "grad_norm": 0.07648364037970812, | |
| "learning_rate": 4.0383667709573083e-07, | |
| "loss": 0.0009, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 2.655172413793103, | |
| "grad_norm": 1.7607923873864444, | |
| "learning_rate": 3.9777753740023404e-07, | |
| "loss": 0.0021, | |
| "step": 1001 | |
| }, | |
| { | |
| "epoch": 2.6578249336870026, | |
| "grad_norm": 0.0775365755949617, | |
| "learning_rate": 3.9176231393990183e-07, | |
| "loss": 0.0009, | |
| "step": 1002 | |
| }, | |
| { | |
| "epoch": 2.660477453580902, | |
| "grad_norm": 0.05311175699372582, | |
| "learning_rate": 3.8579106411438636e-07, | |
| "loss": 0.0009, | |
| "step": 1003 | |
| }, | |
| { | |
| "epoch": 2.663129973474801, | |
| "grad_norm": 0.11581146565084956, | |
| "learning_rate": 3.7986384490372395e-07, | |
| "loss": 0.0009, | |
| "step": 1004 | |
| }, | |
| { | |
| "epoch": 2.6657824933687, | |
| "grad_norm": 0.21597509677221616, | |
| "learning_rate": 3.739807128677986e-07, | |
| "loss": 0.001, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 2.6684350132625996, | |
| "grad_norm": 0.1286558762259199, | |
| "learning_rate": 3.6814172414579075e-07, | |
| "loss": 0.001, | |
| "step": 1006 | |
| }, | |
| { | |
| "epoch": 2.6710875331564985, | |
| "grad_norm": 0.5351619199712117, | |
| "learning_rate": 3.6234693445565185e-07, | |
| "loss": 0.001, | |
| "step": 1007 | |
| }, | |
| { | |
| "epoch": 2.673740053050398, | |
| "grad_norm": 0.05331786914670257, | |
| "learning_rate": 3.5659639909356725e-07, | |
| "loss": 0.0009, | |
| "step": 1008 | |
| }, | |
| { | |
| "epoch": 2.676392572944297, | |
| "grad_norm": 0.06103172326099002, | |
| "learning_rate": 3.5089017293342965e-07, | |
| "loss": 0.0009, | |
| "step": 1009 | |
| }, | |
| { | |
| "epoch": 2.679045092838196, | |
| "grad_norm": 0.06613660996010433, | |
| "learning_rate": 3.45228310426316e-07, | |
| "loss": 0.0009, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 2.6816976127320955, | |
| "grad_norm": 0.0536713030282571, | |
| "learning_rate": 3.39610865599968e-07, | |
| "loss": 0.0009, | |
| "step": 1011 | |
| }, | |
| { | |
| "epoch": 2.684350132625995, | |
| "grad_norm": 1.483789679555465, | |
| "learning_rate": 3.34037892058276e-07, | |
| "loss": 0.0016, | |
| "step": 1012 | |
| }, | |
| { | |
| "epoch": 2.687002652519894, | |
| "grad_norm": 0.12782926147594997, | |
| "learning_rate": 3.285094429807673e-07, | |
| "loss": 0.0009, | |
| "step": 1013 | |
| }, | |
| { | |
| "epoch": 2.689655172413793, | |
| "grad_norm": 0.06823393720694111, | |
| "learning_rate": 3.230255711220992e-07, | |
| "loss": 0.0009, | |
| "step": 1014 | |
| }, | |
| { | |
| "epoch": 2.6923076923076925, | |
| "grad_norm": 0.05299592095410251, | |
| "learning_rate": 3.175863288115566e-07, | |
| "loss": 0.0009, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 2.6949602122015914, | |
| "grad_norm": 0.057694825533013465, | |
| "learning_rate": 3.121917679525505e-07, | |
| "loss": 0.0009, | |
| "step": 1016 | |
| }, | |
| { | |
| "epoch": 2.697612732095491, | |
| "grad_norm": 0.053518700931445305, | |
| "learning_rate": 3.0684194002212287e-07, | |
| "loss": 0.0009, | |
| "step": 1017 | |
| }, | |
| { | |
| "epoch": 2.70026525198939, | |
| "grad_norm": 3.195820329025052, | |
| "learning_rate": 3.015368960704584e-07, | |
| "loss": 0.0017, | |
| "step": 1018 | |
| }, | |
| { | |
| "epoch": 2.702917771883289, | |
| "grad_norm": 0.05446527273973968, | |
| "learning_rate": 2.962766867203926e-07, | |
| "loss": 0.0009, | |
| "step": 1019 | |
| }, | |
| { | |
| "epoch": 2.7055702917771884, | |
| "grad_norm": 1.4468641306749286, | |
| "learning_rate": 2.910613621669356e-07, | |
| "loss": 0.0013, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 2.708222811671088, | |
| "grad_norm": 0.052517541806575904, | |
| "learning_rate": 2.8589097217678383e-07, | |
| "loss": 0.0009, | |
| "step": 1021 | |
| }, | |
| { | |
| "epoch": 2.7108753315649867, | |
| "grad_norm": 0.05317169513735476, | |
| "learning_rate": 2.807655660878533e-07, | |
| "loss": 0.0009, | |
| "step": 1022 | |
| }, | |
| { | |
| "epoch": 2.713527851458886, | |
| "grad_norm": 0.06655482238956349, | |
| "learning_rate": 2.756851928088056e-07, | |
| "loss": 0.0009, | |
| "step": 1023 | |
| }, | |
| { | |
| "epoch": 2.716180371352785, | |
| "grad_norm": 0.053219968452906045, | |
| "learning_rate": 2.706499008185798e-07, | |
| "loss": 0.0009, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 2.7188328912466844, | |
| "grad_norm": 1.2989529905000294, | |
| "learning_rate": 2.6565973816593424e-07, | |
| "loss": 0.0013, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 2.7214854111405833, | |
| "grad_norm": 0.05269752151551146, | |
| "learning_rate": 2.607147524689829e-07, | |
| "loss": 0.0009, | |
| "step": 1026 | |
| }, | |
| { | |
| "epoch": 2.7241379310344827, | |
| "grad_norm": 4.045459700751683, | |
| "learning_rate": 2.558149909147434e-07, | |
| "loss": 0.0032, | |
| "step": 1027 | |
| }, | |
| { | |
| "epoch": 2.726790450928382, | |
| "grad_norm": 0.05492134016720336, | |
| "learning_rate": 2.5096050025868734e-07, | |
| "loss": 0.0009, | |
| "step": 1028 | |
| }, | |
| { | |
| "epoch": 2.729442970822281, | |
| "grad_norm": 0.053646721730707204, | |
| "learning_rate": 2.461513268242938e-07, | |
| "loss": 0.0009, | |
| "step": 1029 | |
| }, | |
| { | |
| "epoch": 2.7320954907161803, | |
| "grad_norm": 0.052914877380319573, | |
| "learning_rate": 2.4138751650260585e-07, | |
| "loss": 0.0009, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 2.7347480106100797, | |
| "grad_norm": 0.07124315478777281, | |
| "learning_rate": 2.366691147517941e-07, | |
| "loss": 0.0009, | |
| "step": 1031 | |
| }, | |
| { | |
| "epoch": 2.7374005305039786, | |
| "grad_norm": 0.05595575110643659, | |
| "learning_rate": 2.3199616659672352e-07, | |
| "loss": 0.0009, | |
| "step": 1032 | |
| }, | |
| { | |
| "epoch": 2.740053050397878, | |
| "grad_norm": 0.05263344966511017, | |
| "learning_rate": 2.2736871662852045e-07, | |
| "loss": 0.0009, | |
| "step": 1033 | |
| }, | |
| { | |
| "epoch": 2.7427055702917773, | |
| "grad_norm": 0.07728000008037111, | |
| "learning_rate": 2.2278680900415183e-07, | |
| "loss": 0.0009, | |
| "step": 1034 | |
| }, | |
| { | |
| "epoch": 2.7453580901856762, | |
| "grad_norm": 12.460780762688533, | |
| "learning_rate": 2.1825048744600062e-07, | |
| "loss": 0.0047, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 2.7480106100795756, | |
| "grad_norm": 0.05353882363770759, | |
| "learning_rate": 2.1375979524144942e-07, | |
| "loss": 0.0009, | |
| "step": 1036 | |
| }, | |
| { | |
| "epoch": 2.750663129973475, | |
| "grad_norm": 0.05303639510725005, | |
| "learning_rate": 2.093147752424668e-07, | |
| "loss": 0.0009, | |
| "step": 1037 | |
| }, | |
| { | |
| "epoch": 2.753315649867374, | |
| "grad_norm": 0.0535875347321289, | |
| "learning_rate": 2.0491546986519896e-07, | |
| "loss": 0.0009, | |
| "step": 1038 | |
| }, | |
| { | |
| "epoch": 2.7559681697612732, | |
| "grad_norm": 0.25982418588180967, | |
| "learning_rate": 2.0056192108956762e-07, | |
| "loss": 0.001, | |
| "step": 1039 | |
| }, | |
| { | |
| "epoch": 2.7586206896551726, | |
| "grad_norm": 3.4179988636523007, | |
| "learning_rate": 1.962541704588633e-07, | |
| "loss": 0.0013, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 2.7612732095490715, | |
| "grad_norm": 0.0531298644994267, | |
| "learning_rate": 1.9199225907935492e-07, | |
| "loss": 0.0009, | |
| "step": 1041 | |
| }, | |
| { | |
| "epoch": 2.763925729442971, | |
| "grad_norm": 0.19402946963760118, | |
| "learning_rate": 1.8777622761989355e-07, | |
| "loss": 0.0009, | |
| "step": 1042 | |
| }, | |
| { | |
| "epoch": 2.7665782493368702, | |
| "grad_norm": 0.05266794784629391, | |
| "learning_rate": 1.8360611631152602e-07, | |
| "loss": 0.0009, | |
| "step": 1043 | |
| }, | |
| { | |
| "epoch": 2.769230769230769, | |
| "grad_norm": 0.11251991386409466, | |
| "learning_rate": 1.794819649471119e-07, | |
| "loss": 0.0009, | |
| "step": 1044 | |
| }, | |
| { | |
| "epoch": 2.7718832891246685, | |
| "grad_norm": 0.07494694405757234, | |
| "learning_rate": 1.7540381288094154e-07, | |
| "loss": 0.0009, | |
| "step": 1045 | |
| }, | |
| { | |
| "epoch": 2.774535809018568, | |
| "grad_norm": 0.05866658843140271, | |
| "learning_rate": 1.7137169902836203e-07, | |
| "loss": 0.0009, | |
| "step": 1046 | |
| }, | |
| { | |
| "epoch": 2.777188328912467, | |
| "grad_norm": 0.6591494820631454, | |
| "learning_rate": 1.6738566186540628e-07, | |
| "loss": 0.001, | |
| "step": 1047 | |
| }, | |
| { | |
| "epoch": 2.779840848806366, | |
| "grad_norm": 0.053120743227235555, | |
| "learning_rate": 1.6344573942842333e-07, | |
| "loss": 0.0009, | |
| "step": 1048 | |
| }, | |
| { | |
| "epoch": 2.782493368700265, | |
| "grad_norm": 0.05209145348520484, | |
| "learning_rate": 1.5955196931371985e-07, | |
| "loss": 0.0009, | |
| "step": 1049 | |
| }, | |
| { | |
| "epoch": 2.7851458885941645, | |
| "grad_norm": 0.05305398481377058, | |
| "learning_rate": 1.5570438867719695e-07, | |
| "loss": 0.0009, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 2.787798408488064, | |
| "grad_norm": 0.05171852149864696, | |
| "learning_rate": 1.5190303423399722e-07, | |
| "loss": 0.0009, | |
| "step": 1051 | |
| }, | |
| { | |
| "epoch": 2.7904509283819627, | |
| "grad_norm": 0.07918897798668659, | |
| "learning_rate": 1.4814794225815443e-07, | |
| "loss": 0.0009, | |
| "step": 1052 | |
| }, | |
| { | |
| "epoch": 2.793103448275862, | |
| "grad_norm": 93.58069225282338, | |
| "learning_rate": 1.4443914858224938e-07, | |
| "loss": 0.2561, | |
| "step": 1053 | |
| }, | |
| { | |
| "epoch": 2.795755968169761, | |
| "grad_norm": 0.053878150878539866, | |
| "learning_rate": 1.4077668859706407e-07, | |
| "loss": 0.0009, | |
| "step": 1054 | |
| }, | |
| { | |
| "epoch": 2.7984084880636604, | |
| "grad_norm": 0.05257860875770833, | |
| "learning_rate": 1.3716059725124687e-07, | |
| "loss": 0.0009, | |
| "step": 1055 | |
| }, | |
| { | |
| "epoch": 2.8010610079575597, | |
| "grad_norm": 0.08796784767244706, | |
| "learning_rate": 1.335909090509785e-07, | |
| "loss": 0.0009, | |
| "step": 1056 | |
| }, | |
| { | |
| "epoch": 2.8037135278514587, | |
| "grad_norm": 0.055315059528931385, | |
| "learning_rate": 1.300676580596405e-07, | |
| "loss": 0.0009, | |
| "step": 1057 | |
| }, | |
| { | |
| "epoch": 2.806366047745358, | |
| "grad_norm": 0.052615718274800716, | |
| "learning_rate": 1.2659087789749557e-07, | |
| "loss": 0.0009, | |
| "step": 1058 | |
| }, | |
| { | |
| "epoch": 2.8090185676392574, | |
| "grad_norm": 0.8818547244938975, | |
| "learning_rate": 1.2316060174136e-07, | |
| "loss": 0.0013, | |
| "step": 1059 | |
| }, | |
| { | |
| "epoch": 2.8116710875331563, | |
| "grad_norm": 0.05258280180456069, | |
| "learning_rate": 1.197768623242923e-07, | |
| "loss": 0.0009, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 2.8143236074270557, | |
| "grad_norm": 0.05269544838709277, | |
| "learning_rate": 1.1643969193527783e-07, | |
| "loss": 0.0009, | |
| "step": 1061 | |
| }, | |
| { | |
| "epoch": 2.816976127320955, | |
| "grad_norm": 0.055977552732450954, | |
| "learning_rate": 1.1314912241892184e-07, | |
| "loss": 0.0009, | |
| "step": 1062 | |
| }, | |
| { | |
| "epoch": 2.819628647214854, | |
| "grad_norm": 0.4988239402449866, | |
| "learning_rate": 1.0990518517514759e-07, | |
| "loss": 0.0011, | |
| "step": 1063 | |
| }, | |
| { | |
| "epoch": 2.8222811671087533, | |
| "grad_norm": 0.05136956191107987, | |
| "learning_rate": 1.0670791115889146e-07, | |
| "loss": 0.0009, | |
| "step": 1064 | |
| }, | |
| { | |
| "epoch": 2.8249336870026527, | |
| "grad_norm": 0.05252309276464186, | |
| "learning_rate": 1.035573308798138e-07, | |
| "loss": 0.0009, | |
| "step": 1065 | |
| }, | |
| { | |
| "epoch": 2.8275862068965516, | |
| "grad_norm": 0.053164137556096884, | |
| "learning_rate": 1.0045347440200192e-07, | |
| "loss": 0.0009, | |
| "step": 1066 | |
| }, | |
| { | |
| "epoch": 2.830238726790451, | |
| "grad_norm": 0.0520539785498693, | |
| "learning_rate": 9.739637134368817e-08, | |
| "loss": 0.0009, | |
| "step": 1067 | |
| }, | |
| { | |
| "epoch": 2.8328912466843503, | |
| "grad_norm": 0.05131132586881724, | |
| "learning_rate": 9.43860508769645e-08, | |
| "loss": 0.0009, | |
| "step": 1068 | |
| }, | |
| { | |
| "epoch": 2.8355437665782492, | |
| "grad_norm": 0.052547889240969016, | |
| "learning_rate": 9.142254172750498e-08, | |
| "loss": 0.0009, | |
| "step": 1069 | |
| }, | |
| { | |
| "epoch": 2.8381962864721486, | |
| "grad_norm": 0.05356818930213062, | |
| "learning_rate": 8.850587217429096e-08, | |
| "loss": 0.0009, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 2.840848806366048, | |
| "grad_norm": 0.05281190171358196, | |
| "learning_rate": 8.563607004934193e-08, | |
| "loss": 0.0009, | |
| "step": 1071 | |
| }, | |
| { | |
| "epoch": 2.843501326259947, | |
| "grad_norm": 0.21186282791791453, | |
| "learning_rate": 8.281316273744955e-08, | |
| "loss": 0.001, | |
| "step": 1072 | |
| }, | |
| { | |
| "epoch": 2.8461538461538463, | |
| "grad_norm": 0.05324233857203547, | |
| "learning_rate": 8.003717717591786e-08, | |
| "loss": 0.0009, | |
| "step": 1073 | |
| }, | |
| { | |
| "epoch": 2.8488063660477456, | |
| "grad_norm": 0.05163807966434934, | |
| "learning_rate": 7.730813985430407e-08, | |
| "loss": 0.0009, | |
| "step": 1074 | |
| }, | |
| { | |
| "epoch": 2.8514588859416445, | |
| "grad_norm": 0.0890978599720558, | |
| "learning_rate": 7.4626076814166e-08, | |
| "loss": 0.001, | |
| "step": 1075 | |
| }, | |
| { | |
| "epoch": 2.854111405835544, | |
| "grad_norm": 0.053259104192480915, | |
| "learning_rate": 7.199101364881389e-08, | |
| "loss": 0.0009, | |
| "step": 1076 | |
| }, | |
| { | |
| "epoch": 2.856763925729443, | |
| "grad_norm": 0.05231435936161711, | |
| "learning_rate": 6.940297550306895e-08, | |
| "loss": 0.0009, | |
| "step": 1077 | |
| }, | |
| { | |
| "epoch": 2.859416445623342, | |
| "grad_norm": 0.19533030749474406, | |
| "learning_rate": 6.686198707301861e-08, | |
| "loss": 0.001, | |
| "step": 1078 | |
| }, | |
| { | |
| "epoch": 2.862068965517241, | |
| "grad_norm": 18.458776333269714, | |
| "learning_rate": 6.436807260578437e-08, | |
| "loss": 0.0047, | |
| "step": 1079 | |
| }, | |
| { | |
| "epoch": 2.8647214854111405, | |
| "grad_norm": 0.05118343115403007, | |
| "learning_rate": 6.192125589928821e-08, | |
| "loss": 0.0009, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 2.86737400530504, | |
| "grad_norm": 0.05541616336363455, | |
| "learning_rate": 5.952156030202716e-08, | |
| "loss": 0.0009, | |
| "step": 1081 | |
| }, | |
| { | |
| "epoch": 2.8700265251989387, | |
| "grad_norm": 0.056049761657979084, | |
| "learning_rate": 5.7169008712851245e-08, | |
| "loss": 0.0009, | |
| "step": 1082 | |
| }, | |
| { | |
| "epoch": 2.872679045092838, | |
| "grad_norm": 0.06719700505364218, | |
| "learning_rate": 5.486362358074093e-08, | |
| "loss": 0.0009, | |
| "step": 1083 | |
| }, | |
| { | |
| "epoch": 2.8753315649867375, | |
| "grad_norm": 0.05192255208597775, | |
| "learning_rate": 5.2605426904598356e-08, | |
| "loss": 0.0009, | |
| "step": 1084 | |
| }, | |
| { | |
| "epoch": 2.8779840848806364, | |
| "grad_norm": 0.0642612322291816, | |
| "learning_rate": 5.0394440233031975e-08, | |
| "loss": 0.0009, | |
| "step": 1085 | |
| }, | |
| { | |
| "epoch": 2.8806366047745358, | |
| "grad_norm": 0.05383635370381051, | |
| "learning_rate": 4.823068466415615e-08, | |
| "loss": 0.0009, | |
| "step": 1086 | |
| }, | |
| { | |
| "epoch": 2.883289124668435, | |
| "grad_norm": 0.06186926525340373, | |
| "learning_rate": 4.611418084538577e-08, | |
| "loss": 0.0009, | |
| "step": 1087 | |
| }, | |
| { | |
| "epoch": 2.885941644562334, | |
| "grad_norm": 0.05261394296745666, | |
| "learning_rate": 4.4044948973240855e-08, | |
| "loss": 0.0009, | |
| "step": 1088 | |
| }, | |
| { | |
| "epoch": 2.8885941644562334, | |
| "grad_norm": 0.051805840583547105, | |
| "learning_rate": 4.202300879315446e-08, | |
| "loss": 0.0009, | |
| "step": 1089 | |
| }, | |
| { | |
| "epoch": 2.8912466843501328, | |
| "grad_norm": 0.05127305205775271, | |
| "learning_rate": 4.004837959928287e-08, | |
| "loss": 0.0009, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 2.8938992042440317, | |
| "grad_norm": 0.17569307264006073, | |
| "learning_rate": 3.8121080234322374e-08, | |
| "loss": 0.001, | |
| "step": 1091 | |
| }, | |
| { | |
| "epoch": 2.896551724137931, | |
| "grad_norm": 2.0806965078994732, | |
| "learning_rate": 3.6241129089329416e-08, | |
| "loss": 0.0012, | |
| "step": 1092 | |
| }, | |
| { | |
| "epoch": 2.8992042440318304, | |
| "grad_norm": 0.06461716501522924, | |
| "learning_rate": 3.4408544103544663e-08, | |
| "loss": 0.0009, | |
| "step": 1093 | |
| }, | |
| { | |
| "epoch": 2.9018567639257293, | |
| "grad_norm": 0.05172591688297936, | |
| "learning_rate": 3.262334276422141e-08, | |
| "loss": 0.0009, | |
| "step": 1094 | |
| }, | |
| { | |
| "epoch": 2.9045092838196287, | |
| "grad_norm": 0.05436450940189997, | |
| "learning_rate": 3.088554210646133e-08, | |
| "loss": 0.0009, | |
| "step": 1095 | |
| }, | |
| { | |
| "epoch": 2.907161803713528, | |
| "grad_norm": 0.056837362432379836, | |
| "learning_rate": 2.9195158713047345e-08, | |
| "loss": 0.0009, | |
| "step": 1096 | |
| }, | |
| { | |
| "epoch": 2.909814323607427, | |
| "grad_norm": 0.05587010599540712, | |
| "learning_rate": 2.7552208714290428e-08, | |
| "loss": 0.0009, | |
| "step": 1097 | |
| }, | |
| { | |
| "epoch": 2.9124668435013263, | |
| "grad_norm": 0.1892861189366528, | |
| "learning_rate": 2.595670778787196e-08, | |
| "loss": 0.0009, | |
| "step": 1098 | |
| }, | |
| { | |
| "epoch": 2.9151193633952257, | |
| "grad_norm": 0.052539471946618024, | |
| "learning_rate": 2.4408671158695495e-08, | |
| "loss": 0.0009, | |
| "step": 1099 | |
| }, | |
| { | |
| "epoch": 2.9177718832891246, | |
| "grad_norm": 0.0538284361460942, | |
| "learning_rate": 2.2908113598741344e-08, | |
| "loss": 0.0009, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 2.920424403183024, | |
| "grad_norm": 0.05244144456334917, | |
| "learning_rate": 2.1455049426926666e-08, | |
| "loss": 0.0009, | |
| "step": 1101 | |
| }, | |
| { | |
| "epoch": 2.9230769230769234, | |
| "grad_norm": 0.05124729513708506, | |
| "learning_rate": 2.004949250896615e-08, | |
| "loss": 0.0009, | |
| "step": 1102 | |
| }, | |
| { | |
| "epoch": 2.9257294429708223, | |
| "grad_norm": 0.05183879368776961, | |
| "learning_rate": 1.8691456257243223e-08, | |
| "loss": 0.0009, | |
| "step": 1103 | |
| }, | |
| { | |
| "epoch": 2.9283819628647216, | |
| "grad_norm": 0.053332712370574915, | |
| "learning_rate": 1.7380953630678488e-08, | |
| "loss": 0.0009, | |
| "step": 1104 | |
| }, | |
| { | |
| "epoch": 2.9310344827586206, | |
| "grad_norm": 0.05274454609932001, | |
| "learning_rate": 1.6117997134609263e-08, | |
| "loss": 0.0009, | |
| "step": 1105 | |
| }, | |
| { | |
| "epoch": 2.93368700265252, | |
| "grad_norm": 0.051714962632025734, | |
| "learning_rate": 1.4902598820668023e-08, | |
| "loss": 0.0009, | |
| "step": 1106 | |
| }, | |
| { | |
| "epoch": 2.936339522546419, | |
| "grad_norm": 0.05202589931146708, | |
| "learning_rate": 1.373477028666803e-08, | |
| "loss": 0.0009, | |
| "step": 1107 | |
| }, | |
| { | |
| "epoch": 2.938992042440318, | |
| "grad_norm": 0.051866404556823766, | |
| "learning_rate": 1.2614522676493435e-08, | |
| "loss": 0.0009, | |
| "step": 1108 | |
| }, | |
| { | |
| "epoch": 2.9416445623342176, | |
| "grad_norm": 0.08369474899970679, | |
| "learning_rate": 1.1541866679992131e-08, | |
| "loss": 0.0009, | |
| "step": 1109 | |
| }, | |
| { | |
| "epoch": 2.9442970822281165, | |
| "grad_norm": 0.4772825657639168, | |
| "learning_rate": 1.0516812532873622e-08, | |
| "loss": 0.0009, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 2.946949602122016, | |
| "grad_norm": 0.2513499011538888, | |
| "learning_rate": 9.53937001661187e-09, | |
| "loss": 0.001, | |
| "step": 1111 | |
| }, | |
| { | |
| "epoch": 2.949602122015915, | |
| "grad_norm": 0.0737504415082238, | |
| "learning_rate": 8.609548458351492e-09, | |
| "loss": 0.0009, | |
| "step": 1112 | |
| }, | |
| { | |
| "epoch": 2.952254641909814, | |
| "grad_norm": 5.681724126406856, | |
| "learning_rate": 7.727356730820035e-09, | |
| "loss": 0.0025, | |
| "step": 1113 | |
| }, | |
| { | |
| "epoch": 2.9549071618037135, | |
| "grad_norm": 0.052377456451629, | |
| "learning_rate": 6.892803252240287e-09, | |
| "loss": 0.0009, | |
| "step": 1114 | |
| }, | |
| { | |
| "epoch": 2.957559681697613, | |
| "grad_norm": 0.05184413140922385, | |
| "learning_rate": 6.105895986253108e-09, | |
| "loss": 0.0009, | |
| "step": 1115 | |
| }, | |
| { | |
| "epoch": 2.9602122015915118, | |
| "grad_norm": 0.05192498288489938, | |
| "learning_rate": 5.366642441841374e-09, | |
| "loss": 0.0009, | |
| "step": 1116 | |
| }, | |
| { | |
| "epoch": 2.962864721485411, | |
| "grad_norm": 0.05167782759169226, | |
| "learning_rate": 4.675049673255605e-09, | |
| "loss": 0.0009, | |
| "step": 1117 | |
| }, | |
| { | |
| "epoch": 2.9655172413793105, | |
| "grad_norm": 0.051963406241927336, | |
| "learning_rate": 4.031124279948451e-09, | |
| "loss": 0.0009, | |
| "step": 1118 | |
| }, | |
| { | |
| "epoch": 2.9681697612732094, | |
| "grad_norm": 0.05196275371314866, | |
| "learning_rate": 3.4348724065119687e-09, | |
| "loss": 0.0009, | |
| "step": 1119 | |
| }, | |
| { | |
| "epoch": 2.970822281167109, | |
| "grad_norm": 0.05320377195057068, | |
| "learning_rate": 2.886299742618226e-09, | |
| "loss": 0.0009, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 2.973474801061008, | |
| "grad_norm": 0.05179371657100488, | |
| "learning_rate": 2.385411522966563e-09, | |
| "loss": 0.0009, | |
| "step": 1121 | |
| }, | |
| { | |
| "epoch": 2.976127320954907, | |
| "grad_norm": 0.052686275251922716, | |
| "learning_rate": 1.9322125272297488e-09, | |
| "loss": 0.0009, | |
| "step": 1122 | |
| }, | |
| { | |
| "epoch": 2.9787798408488064, | |
| "grad_norm": 0.10193440229554045, | |
| "learning_rate": 1.5267070800140116e-09, | |
| "loss": 0.0009, | |
| "step": 1123 | |
| }, | |
| { | |
| "epoch": 2.981432360742706, | |
| "grad_norm": 0.05257306758022402, | |
| "learning_rate": 1.168899050812966e-09, | |
| "loss": 0.0009, | |
| "step": 1124 | |
| }, | |
| { | |
| "epoch": 2.9840848806366047, | |
| "grad_norm": 0.05177475358407255, | |
| "learning_rate": 8.587918539726403e-10, | |
| "loss": 0.0009, | |
| "step": 1125 | |
| }, | |
| { | |
| "epoch": 2.986737400530504, | |
| "grad_norm": 0.055400243939752354, | |
| "learning_rate": 5.963884486598348e-10, | |
| "loss": 0.0009, | |
| "step": 1126 | |
| }, | |
| { | |
| "epoch": 2.9893899204244034, | |
| "grad_norm": 0.05514716417865198, | |
| "learning_rate": 3.816913388315913e-10, | |
| "loss": 0.0009, | |
| "step": 1127 | |
| }, | |
| { | |
| "epoch": 2.9920424403183024, | |
| "grad_norm": 0.1732782167812435, | |
| "learning_rate": 2.1470257321298815e-10, | |
| "loss": 0.001, | |
| "step": 1128 | |
| }, | |
| { | |
| "epoch": 2.9946949602122017, | |
| "grad_norm": 0.05178211393320526, | |
| "learning_rate": 9.54237452771567e-11, | |
| "loss": 0.0009, | |
| "step": 1129 | |
| }, | |
| { | |
| "epoch": 2.9973474801061006, | |
| "grad_norm": 0.05235516499580651, | |
| "learning_rate": 2.3855993230292862e-11, | |
| "loss": 0.0009, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "grad_norm": 0.052436056499771545, | |
| "learning_rate": 0.0, | |
| "loss": 0.0009, | |
| "step": 1131 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "eval_loss": 1.031876802444458, | |
| "eval_runtime": 175.8179, | |
| "eval_samples_per_second": 12.029, | |
| "eval_steps_per_second": 1.507, | |
| "step": 1131 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "step": 1131, | |
| "total_flos": 33123426287616.0, | |
| "train_loss": 0.5493979175771367, | |
| "train_runtime": 4957.8152, | |
| "train_samples_per_second": 3.649, | |
| "train_steps_per_second": 0.228 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 1131, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 33123426287616.0, | |
| "train_batch_size": 2, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |