{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.991097922848665,
  "eval_steps": 500,
  "global_step": 441,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.006782534972445952,
      "grad_norm": 7.137413186349268,
      "learning_rate": 1.777777777777778e-06,
      "loss": 1.1988,
      "step": 1
    },
    {
      "epoch": 0.013565069944891903,
      "grad_norm": 7.105029708492786,
      "learning_rate": 3.555555555555556e-06,
      "loss": 1.1926,
      "step": 2
    },
    {
      "epoch": 0.020347604917337857,
      "grad_norm": 6.975628069653311,
      "learning_rate": 5.333333333333334e-06,
      "loss": 1.1857,
      "step": 3
    },
    {
      "epoch": 0.027130139889783807,
      "grad_norm": 5.073330744823269,
      "learning_rate": 7.111111111111112e-06,
      "loss": 1.1299,
      "step": 4
    },
    {
      "epoch": 0.03391267486222976,
      "grad_norm": 3.0855210826655557,
      "learning_rate": 8.888888888888888e-06,
      "loss": 1.0926,
      "step": 5
    },
    {
      "epoch": 0.040695209834675714,
      "grad_norm": 5.847010298730299,
      "learning_rate": 1.0666666666666667e-05,
      "loss": 1.0767,
      "step": 6
    },
    {
      "epoch": 0.04747774480712166,
      "grad_norm": 5.667170104694723,
      "learning_rate": 1.2444444444444446e-05,
      "loss": 1.0742,
      "step": 7
    },
    {
      "epoch": 0.05426027977956761,
      "grad_norm": 5.164747156326828,
      "learning_rate": 1.4222222222222224e-05,
      "loss": 1.0239,
      "step": 8
    },
    {
      "epoch": 0.06104281475201356,
      "grad_norm": 4.055520122890776,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 1.0123,
      "step": 9
    },
    {
      "epoch": 0.06782534972445951,
      "grad_norm": 2.250978802714125,
      "learning_rate": 1.7777777777777777e-05,
      "loss": 0.9613,
      "step": 10
    },
    {
      "epoch": 0.07460788469690546,
      "grad_norm": 2.553100004726335,
      "learning_rate": 1.9555555555555557e-05,
      "loss": 0.9432,
      "step": 11
    },
    {
      "epoch": 0.08139041966935143,
      "grad_norm": 2.324385101768056,
      "learning_rate": 2.1333333333333335e-05,
      "loss": 0.9219,
      "step": 12
    },
    {
      "epoch": 0.08817295464179738,
      "grad_norm": 2.135936923815521,
      "learning_rate": 2.3111111111111112e-05,
      "loss": 0.8906,
      "step": 13
    },
    {
      "epoch": 0.09495548961424333,
      "grad_norm": 1.9227504072028807,
      "learning_rate": 2.4888888888888893e-05,
      "loss": 0.8878,
      "step": 14
    },
    {
      "epoch": 0.10173802458668928,
      "grad_norm": 1.2751512941755272,
      "learning_rate": 2.6666666666666667e-05,
      "loss": 0.8858,
      "step": 15
    },
    {
      "epoch": 0.10852055955913523,
      "grad_norm": 1.3275331460158257,
      "learning_rate": 2.8444444444444447e-05,
      "loss": 0.845,
      "step": 16
    },
    {
      "epoch": 0.11530309453158118,
      "grad_norm": 1.3969086473134111,
      "learning_rate": 3.0222222222222225e-05,
      "loss": 0.8413,
      "step": 17
    },
    {
      "epoch": 0.12208562950402713,
      "grad_norm": 1.2596435113586295,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 0.8427,
      "step": 18
    },
    {
      "epoch": 0.1288681644764731,
      "grad_norm": 1.0919111628660803,
      "learning_rate": 3.377777777777778e-05,
      "loss": 0.8427,
      "step": 19
    },
    {
      "epoch": 0.13565069944891903,
      "grad_norm": 2.1726063042436725,
      "learning_rate": 3.555555555555555e-05,
      "loss": 0.8361,
      "step": 20
    },
    {
      "epoch": 0.142433234421365,
      "grad_norm": 1.128837783426394,
      "learning_rate": 3.733333333333334e-05,
      "loss": 0.8125,
      "step": 21
    },
    {
      "epoch": 0.14921576939381093,
      "grad_norm": 2.0673361900007405,
      "learning_rate": 3.9111111111111115e-05,
      "loss": 0.8096,
      "step": 22
    },
    {
      "epoch": 0.1559983043662569,
      "grad_norm": 1.6404001379510031,
      "learning_rate": 4.088888888888889e-05,
      "loss": 0.7992,
      "step": 23
    },
    {
      "epoch": 0.16278083933870285,
      "grad_norm": 1.868383646234072,
      "learning_rate": 4.266666666666667e-05,
      "loss": 0.8177,
      "step": 24
    },
    {
      "epoch": 0.1695633743111488,
      "grad_norm": 1.4754626309706587,
      "learning_rate": 4.444444444444445e-05,
      "loss": 0.8192,
      "step": 25
    },
    {
      "epoch": 0.17634590928359475,
      "grad_norm": 1.9030805002187652,
      "learning_rate": 4.6222222222222224e-05,
      "loss": 0.8082,
      "step": 26
    },
    {
      "epoch": 0.1831284442560407,
      "grad_norm": 1.0440198672277559,
      "learning_rate": 4.8e-05,
      "loss": 0.7944,
      "step": 27
    },
    {
      "epoch": 0.18991097922848665,
      "grad_norm": 2.596599541792705,
      "learning_rate": 4.9777777777777785e-05,
      "loss": 0.8017,
      "step": 28
    },
    {
      "epoch": 0.1966935142009326,
      "grad_norm": 1.9546381445372338,
      "learning_rate": 5.155555555555556e-05,
      "loss": 0.7945,
      "step": 29
    },
    {
      "epoch": 0.20347604917337855,
      "grad_norm": 1.751189632085305,
      "learning_rate": 5.333333333333333e-05,
      "loss": 0.7945,
      "step": 30
    },
    {
      "epoch": 0.2102585841458245,
      "grad_norm": 1.7584486355585562,
      "learning_rate": 5.511111111111112e-05,
      "loss": 0.7844,
      "step": 31
    },
    {
      "epoch": 0.21704111911827045,
      "grad_norm": 1.0584793475130616,
      "learning_rate": 5.6888888888888895e-05,
      "loss": 0.7812,
      "step": 32
    },
    {
      "epoch": 0.22382365409071642,
      "grad_norm": 2.196048787381429,
      "learning_rate": 5.8666666666666665e-05,
      "loss": 0.8027,
      "step": 33
    },
    {
      "epoch": 0.23060618906316235,
      "grad_norm": 1.6456458122301723,
      "learning_rate": 6.044444444444445e-05,
      "loss": 0.7876,
      "step": 34
    },
    {
      "epoch": 0.23738872403560832,
      "grad_norm": 1.9008326288925532,
      "learning_rate": 6.222222222222223e-05,
      "loss": 0.7992,
      "step": 35
    },
    {
      "epoch": 0.24417125900805425,
      "grad_norm": 2.7375256976568405,
      "learning_rate": 6.400000000000001e-05,
      "loss": 0.7788,
      "step": 36
    },
    {
      "epoch": 0.2509537939805002,
      "grad_norm": 1.7512537641089396,
      "learning_rate": 6.577777777777777e-05,
      "loss": 0.7907,
      "step": 37
    },
    {
      "epoch": 0.2577363289529462,
      "grad_norm": 2.0707248009544568,
      "learning_rate": 6.755555555555557e-05,
      "loss": 0.7894,
      "step": 38
    },
    {
      "epoch": 0.2645188639253921,
      "grad_norm": 2.5069295004911054,
      "learning_rate": 6.933333333333334e-05,
      "loss": 0.7795,
      "step": 39
    },
    {
      "epoch": 0.27130139889783805,
      "grad_norm": 1.769408811743829,
      "learning_rate": 7.11111111111111e-05,
      "loss": 0.7823,
      "step": 40
    },
    {
      "epoch": 0.278083933870284,
      "grad_norm": 3.264548001005339,
      "learning_rate": 7.28888888888889e-05,
      "loss": 0.7768,
      "step": 41
    },
    {
      "epoch": 0.28486646884273,
      "grad_norm": 2.4056236262328885,
      "learning_rate": 7.466666666666667e-05,
      "loss": 0.7841,
      "step": 42
    },
    {
      "epoch": 0.29164900381517594,
      "grad_norm": 2.5559829162282597,
      "learning_rate": 7.644444444444445e-05,
      "loss": 0.7818,
      "step": 43
    },
    {
      "epoch": 0.29843153878762185,
      "grad_norm": 1.9829026032666919,
      "learning_rate": 7.822222222222223e-05,
      "loss": 0.7641,
      "step": 44
    },
    {
      "epoch": 0.3052140737600678,
      "grad_norm": 1.9013014130141392,
      "learning_rate": 8e-05,
      "loss": 0.7673,
      "step": 45
    },
    {
      "epoch": 0.3119966087325138,
      "grad_norm": 1.3239983020470356,
      "learning_rate": 7.999874125693333e-05,
      "loss": 0.7792,
      "step": 46
    },
    {
      "epoch": 0.31877914370495974,
      "grad_norm": 2.093841653755095,
      "learning_rate": 7.999496510695501e-05,
      "loss": 0.7703,
      "step": 47
    },
    {
      "epoch": 0.3255616786774057,
      "grad_norm": 2.750870577254638,
      "learning_rate": 7.998867178772517e-05,
      "loss": 0.7712,
      "step": 48
    },
    {
      "epoch": 0.3323442136498516,
      "grad_norm": 1.4125603524830153,
      "learning_rate": 7.997986169532741e-05,
      "loss": 0.7637,
      "step": 49
    },
    {
      "epoch": 0.3391267486222976,
      "grad_norm": 2.524537816103899,
      "learning_rate": 7.996853538424387e-05,
      "loss": 0.7827,
      "step": 50
    },
    {
      "epoch": 0.34590928359474354,
      "grad_norm": 1.7058887152668756,
      "learning_rate": 7.995469356732033e-05,
      "loss": 0.7689,
      "step": 51
    },
    {
      "epoch": 0.3526918185671895,
      "grad_norm": 1.9625755711375161,
      "learning_rate": 7.993833711572133e-05,
      "loss": 0.7764,
      "step": 52
    },
    {
      "epoch": 0.3594743535396354,
      "grad_norm": 1.9608607243300042,
      "learning_rate": 7.991946705887539e-05,
      "loss": 0.7583,
      "step": 53
    },
    {
      "epoch": 0.3662568885120814,
      "grad_norm": 1.5775528005817034,
      "learning_rate": 7.989808458441014e-05,
      "loss": 0.755,
      "step": 54
    },
    {
      "epoch": 0.37303942348452734,
      "grad_norm": 1.9419695989547225,
      "learning_rate": 7.98741910380777e-05,
      "loss": 0.7467,
      "step": 55
    },
    {
      "epoch": 0.3798219584569733,
      "grad_norm": 1.4275820221414393,
      "learning_rate": 7.984778792366983e-05,
      "loss": 0.7476,
      "step": 56
    },
    {
      "epoch": 0.38660449342941927,
      "grad_norm": 1.4199172439377408,
      "learning_rate": 7.981887690292339e-05,
      "loss": 0.7477,
      "step": 57
    },
    {
      "epoch": 0.3933870284018652,
      "grad_norm": 1.8664000400986023,
      "learning_rate": 7.978745979541574e-05,
      "loss": 0.7456,
      "step": 58
    },
    {
      "epoch": 0.40016956337431114,
      "grad_norm": 1.0582378742113208,
      "learning_rate": 7.975353857845017e-05,
      "loss": 0.7453,
      "step": 59
    },
    {
      "epoch": 0.4069520983467571,
      "grad_norm": 2.6856740014169875,
      "learning_rate": 7.971711538693153e-05,
      "loss": 0.7652,
      "step": 60
    },
    {
      "epoch": 0.41373463331920307,
      "grad_norm": 2.2330393685649894,
      "learning_rate": 7.967819251323182e-05,
      "loss": 0.7468,
      "step": 61
    },
    {
      "epoch": 0.420517168291649,
      "grad_norm": 1.3266578444651185,
      "learning_rate": 7.963677240704588e-05,
      "loss": 0.7476,
      "step": 62
    },
    {
      "epoch": 0.42729970326409494,
      "grad_norm": 1.337167566676173,
      "learning_rate": 7.959285767523732e-05,
      "loss": 0.7486,
      "step": 63
    },
    {
      "epoch": 0.4340822382365409,
      "grad_norm": 1.7695088306451494,
      "learning_rate": 7.954645108167432e-05,
      "loss": 0.7542,
      "step": 64
    },
    {
      "epoch": 0.44086477320898687,
      "grad_norm": 1.3827391907399533,
      "learning_rate": 7.949755554705577e-05,
      "loss": 0.7517,
      "step": 65
    },
    {
      "epoch": 0.44764730818143283,
      "grad_norm": 1.354435012969295,
      "learning_rate": 7.944617414872747e-05,
      "loss": 0.7589,
      "step": 66
    },
    {
      "epoch": 0.45442984315387874,
      "grad_norm": 0.9204507675004047,
      "learning_rate": 7.939231012048833e-05,
      "loss": 0.7345,
      "step": 67
    },
    {
      "epoch": 0.4612123781263247,
      "grad_norm": 1.4388297723137893,
      "learning_rate": 7.933596685238697e-05,
      "loss": 0.7478,
      "step": 68
    },
    {
      "epoch": 0.46799491309877067,
      "grad_norm": 1.6753627019958215,
      "learning_rate": 7.927714789050826e-05,
      "loss": 0.732,
      "step": 69
    },
    {
      "epoch": 0.47477744807121663,
      "grad_norm": 0.919762144543453,
      "learning_rate": 7.921585693675029e-05,
      "loss": 0.7385,
      "step": 70
    },
    {
      "epoch": 0.48155998304366254,
      "grad_norm": 2.2697133943634427,
      "learning_rate": 7.915209784859116e-05,
      "loss": 0.7334,
      "step": 71
    },
    {
      "epoch": 0.4883425180161085,
      "grad_norm": 1.2442834324078464,
      "learning_rate": 7.908587463884638e-05,
      "loss": 0.7501,
      "step": 72
    },
    {
      "epoch": 0.49512505298855447,
      "grad_norm": 2.0455012466197995,
      "learning_rate": 7.90171914754163e-05,
      "loss": 0.7323,
      "step": 73
    },
    {
      "epoch": 0.5019075879610004,
      "grad_norm": 1.466104092423094,
      "learning_rate": 7.894605268102365e-05,
      "loss": 0.7284,
      "step": 74
    },
    {
      "epoch": 0.5086901229334464,
      "grad_norm": 1.4621681457693403,
      "learning_rate": 7.887246273294167e-05,
      "loss": 0.7345,
      "step": 75
    },
    {
      "epoch": 0.5154726579058924,
      "grad_norm": 1.5491014708068618,
      "learning_rate": 7.87964262627122e-05,
      "loss": 0.7346,
      "step": 76
    },
    {
      "epoch": 0.5222551928783383,
      "grad_norm": 1.1091999421133278,
      "learning_rate": 7.871794805585427e-05,
      "loss": 0.7313,
      "step": 77
    },
    {
      "epoch": 0.5290377278507842,
      "grad_norm": 1.4167418001519376,
      "learning_rate": 7.863703305156273e-05,
      "loss": 0.7217,
      "step": 78
    },
    {
      "epoch": 0.5358202628232301,
      "grad_norm": 1.2449973723473307,
      "learning_rate": 7.855368634239769e-05,
      "loss": 0.7338,
      "step": 79
    },
    {
      "epoch": 0.5426027977956761,
      "grad_norm": 1.2279144181708292,
      "learning_rate": 7.846791317396373e-05,
      "loss": 0.7275,
      "step": 80
    },
    {
      "epoch": 0.5493853327681221,
      "grad_norm": 1.1622952382317697,
      "learning_rate": 7.837971894457991e-05,
      "loss": 0.7459,
      "step": 81
    },
    {
      "epoch": 0.556167867740568,
      "grad_norm": 1.16239709067931,
      "learning_rate": 7.828910920493995e-05,
      "loss": 0.7176,
      "step": 82
    },
    {
      "epoch": 0.562950402713014,
      "grad_norm": 1.4393853572209945,
      "learning_rate": 7.819608965776295e-05,
      "loss": 0.7378,
      "step": 83
    },
    {
      "epoch": 0.56973293768546,
      "grad_norm": 0.8723694343578424,
      "learning_rate": 7.810066615743443e-05,
      "loss": 0.7235,
      "step": 84
    },
    {
      "epoch": 0.5765154726579059,
      "grad_norm": 1.4627194527050393,
      "learning_rate": 7.800284470963783e-05,
      "loss": 0.7245,
      "step": 85
    },
    {
      "epoch": 0.5832980076303519,
      "grad_norm": 1.5007942983664493,
      "learning_rate": 7.790263147097661e-05,
      "loss": 0.7358,
      "step": 86
    },
    {
      "epoch": 0.5900805426027977,
      "grad_norm": 1.0492585064630457,
      "learning_rate": 7.780003274858674e-05,
      "loss": 0.7222,
      "step": 87
    },
    {
      "epoch": 0.5968630775752437,
      "grad_norm": 1.4048579687963796,
      "learning_rate": 7.769505499973977e-05,
      "loss": 0.716,
      "step": 88
    },
    {
      "epoch": 0.6036456125476897,
      "grad_norm": 1.0188682627658208,
      "learning_rate": 7.758770483143634e-05,
      "loss": 0.7268,
      "step": 89
    },
    {
      "epoch": 0.6104281475201356,
      "grad_norm": 1.1982575150619237,
      "learning_rate": 7.747798899999048e-05,
      "loss": 0.7407,
      "step": 90
    },
    {
      "epoch": 0.6172106824925816,
      "grad_norm": 0.9182390822096019,
      "learning_rate": 7.736591441060427e-05,
      "loss": 0.7326,
      "step": 91
    },
    {
      "epoch": 0.6239932174650276,
      "grad_norm": 1.4475324042423592,
      "learning_rate": 7.725148811693336e-05,
      "loss": 0.7336,
      "step": 92
    },
    {
      "epoch": 0.6307757524374735,
      "grad_norm": 1.1065799129734808,
      "learning_rate": 7.71347173206429e-05,
      "loss": 0.7173,
      "step": 93
    },
    {
      "epoch": 0.6375582874099195,
      "grad_norm": 1.237603778756235,
      "learning_rate": 7.701560937095445e-05,
      "loss": 0.7303,
      "step": 94
    },
    {
      "epoch": 0.6443408223823655,
      "grad_norm": 1.1120475384075377,
      "learning_rate": 7.689417176418327e-05,
      "loss": 0.7185,
      "step": 95
    },
    {
      "epoch": 0.6511233573548114,
      "grad_norm": 1.3783747990085908,
      "learning_rate": 7.677041214326663e-05,
      "loss": 0.7357,
      "step": 96
    },
    {
      "epoch": 0.6579058923272573,
      "grad_norm": 1.0644720482330112,
      "learning_rate": 7.664433829728279e-05,
      "loss": 0.7155,
      "step": 97
    },
    {
      "epoch": 0.6646884272997032,
      "grad_norm": 1.4446573818002777,
      "learning_rate": 7.651595816096071e-05,
      "loss": 0.7109,
      "step": 98
    },
    {
      "epoch": 0.6714709622721492,
      "grad_norm": 1.0329689312285972,
      "learning_rate": 7.638527981418075e-05,
      "loss": 0.7333,
      "step": 99
    },
    {
      "epoch": 0.6782534972445952,
      "grad_norm": 1.2999144870004373,
      "learning_rate": 7.625231148146601e-05,
      "loss": 0.7097,
      "step": 100
    },
    {
      "epoch": 0.6850360322170411,
      "grad_norm": 1.1368825847990134,
      "learning_rate": 7.611706153146486e-05,
      "loss": 0.7128,
      "step": 101
    },
    {
      "epoch": 0.6918185671894871,
      "grad_norm": 1.1174260276126868,
      "learning_rate": 7.597953847642413e-05,
      "loss": 0.7241,
      "step": 102
    },
    {
      "epoch": 0.698601102161933,
      "grad_norm": 0.8874864224204202,
      "learning_rate": 7.583975097165344e-05,
      "loss": 0.7189,
      "step": 103
    },
    {
      "epoch": 0.705383637134379,
      "grad_norm": 0.6432301304209681,
      "learning_rate": 7.56977078149804e-05,
      "loss": 0.7145,
      "step": 104
    },
    {
      "epoch": 0.712166172106825,
      "grad_norm": 0.9183282978967895,
      "learning_rate": 7.555341794619695e-05,
      "loss": 0.7107,
      "step": 105
    },
    {
      "epoch": 0.7189487070792708,
      "grad_norm": 1.0095498465315442,
      "learning_rate": 7.540689044649666e-05,
      "loss": 0.703,
      "step": 106
    },
    {
      "epoch": 0.7257312420517168,
      "grad_norm": 1.26183318694208,
      "learning_rate": 7.525813453790328e-05,
      "loss": 0.7127,
      "step": 107
    },
    {
      "epoch": 0.7325137770241628,
      "grad_norm": 0.6972659136766679,
      "learning_rate": 7.510715958269023e-05,
      "loss": 0.7052,
      "step": 108
    },
    {
      "epoch": 0.7392963119966087,
      "grad_norm": 0.7185478814495369,
      "learning_rate": 7.49539750827914e-05,
      "loss": 0.7109,
      "step": 109
    },
    {
      "epoch": 0.7460788469690547,
      "grad_norm": 0.9112632508609093,
      "learning_rate": 7.479859067920317e-05,
      "loss": 0.7037,
      "step": 110
    },
    {
      "epoch": 0.7528613819415007,
      "grad_norm": 0.8720981679213001,
      "learning_rate": 7.464101615137756e-05,
      "loss": 0.7075,
      "step": 111
    },
    {
      "epoch": 0.7596439169139466,
      "grad_norm": 1.0271120509299871,
      "learning_rate": 7.448126141660678e-05,
      "loss": 0.7204,
      "step": 112
    },
    {
      "epoch": 0.7664264518863926,
      "grad_norm": 1.016829719094229,
      "learning_rate": 7.431933652939909e-05,
      "loss": 0.7129,
      "step": 113
    },
    {
      "epoch": 0.7732089868588385,
      "grad_norm": 1.0366544063351542,
      "learning_rate": 7.415525168084593e-05,
      "loss": 0.708,
      "step": 114
    },
    {
      "epoch": 0.7799915218312844,
      "grad_norm": 1.144762721052778,
      "learning_rate": 7.398901719798059e-05,
      "loss": 0.7102,
      "step": 115
    },
    {
      "epoch": 0.7867740568037304,
      "grad_norm": 1.0391721353507355,
      "learning_rate": 7.382064354312818e-05,
      "loss": 0.7098,
      "step": 116
    },
    {
      "epoch": 0.7935565917761763,
      "grad_norm": 0.9632167130265382,
      "learning_rate": 7.365014131324725e-05,
      "loss": 0.7029,
      "step": 117
    },
    {
      "epoch": 0.8003391267486223,
      "grad_norm": 1.227602079185617,
      "learning_rate": 7.34775212392628e-05,
      "loss": 0.6972,
      "step": 118
    },
    {
      "epoch": 0.8071216617210683,
      "grad_norm": 0.8826250475798045,
      "learning_rate": 7.330279418539086e-05,
      "loss": 0.7018,
      "step": 119
    },
    {
      "epoch": 0.8139041966935142,
      "grad_norm": 0.5567261188617991,
      "learning_rate": 7.312597114845483e-05,
      "loss": 0.6981,
      "step": 120
    },
    {
      "epoch": 0.8206867316659602,
      "grad_norm": 0.8765091262196973,
      "learning_rate": 7.294706325719331e-05,
      "loss": 0.7195,
      "step": 121
    },
    {
      "epoch": 0.8274692666384061,
      "grad_norm": 0.9742989954487304,
      "learning_rate": 7.276608177155968e-05,
      "loss": 0.7033,
      "step": 122
    },
    {
      "epoch": 0.8342518016108521,
      "grad_norm": 0.9120816045559339,
      "learning_rate": 7.258303808201343e-05,
      "loss": 0.6984,
      "step": 123
    },
    {
      "epoch": 0.841034336583298,
      "grad_norm": 0.8325780076464987,
      "learning_rate": 7.239794370880335e-05,
      "loss": 0.6921,
      "step": 124
    },
    {
      "epoch": 0.8478168715557439,
      "grad_norm": 0.6155944829472941,
      "learning_rate": 7.221081030124235e-05,
      "loss": 0.6988,
      "step": 125
    },
    {
      "epoch": 0.8545994065281899,
      "grad_norm": 0.7511218044788109,
      "learning_rate": 7.202164963697442e-05,
      "loss": 0.6972,
      "step": 126
    },
    {
      "epoch": 0.8613819415006359,
      "grad_norm": 0.7293772573099593,
      "learning_rate": 7.183047362123329e-05,
      "loss": 0.7004,
      "step": 127
    },
    {
      "epoch": 0.8681644764730818,
      "grad_norm": 0.46098198038922594,
      "learning_rate": 7.163729428609318e-05,
      "loss": 0.6956,
      "step": 128
    },
    {
      "epoch": 0.8749470114455278,
      "grad_norm": 0.7377483747880154,
      "learning_rate": 7.144212378971151e-05,
      "loss": 0.7089,
      "step": 129
    },
    {
      "epoch": 0.8817295464179737,
      "grad_norm": 0.8138881211490862,
      "learning_rate": 7.124497441556374e-05,
      "loss": 0.7025,
      "step": 130
    },
    {
      "epoch": 0.8885120813904197,
      "grad_norm": 0.7428361558045745,
      "learning_rate": 7.104585857167028e-05,
      "loss": 0.7093,
      "step": 131
    },
    {
      "epoch": 0.8952946163628657,
      "grad_norm": 0.8252776283861111,
      "learning_rate": 7.084478878981552e-05,
      "loss": 0.7,
      "step": 132
    },
    {
      "epoch": 0.9020771513353115,
      "grad_norm": 0.8110674797944571,
      "learning_rate": 7.064177772475912e-05,
      "loss": 0.695,
      "step": 133
    },
    {
      "epoch": 0.9088596863077575,
      "grad_norm": 0.6529878802072909,
      "learning_rate": 7.043683815343967e-05,
      "loss": 0.6938,
      "step": 134
    },
    {
      "epoch": 0.9156422212802034,
      "grad_norm": 0.7848292838747539,
      "learning_rate": 7.022998297417034e-05,
      "loss": 0.6924,
      "step": 135
    },
    {
      "epoch": 0.9224247562526494,
      "grad_norm": 1.1207835540553364,
      "learning_rate": 7.00212252058273e-05,
      "loss": 0.6988,
      "step": 136
    },
    {
      "epoch": 0.9292072912250954,
      "grad_norm": 0.9343138621109743,
      "learning_rate": 6.98105779870302e-05,
      "loss": 0.6966,
      "step": 137
    },
    {
      "epoch": 0.9359898261975413,
      "grad_norm": 0.6897906744897139,
      "learning_rate": 6.959805457531536e-05,
      "loss": 0.6995,
      "step": 138
    },
    {
      "epoch": 0.9427723611699873,
      "grad_norm": 0.48700498757406885,
      "learning_rate": 6.938366834630133e-05,
      "loss": 0.689,
      "step": 139
    },
    {
      "epoch": 0.9495548961424333,
      "grad_norm": 0.460000455610998,
      "learning_rate": 6.916743279284709e-05,
      "loss": 0.699,
      "step": 140
    },
    {
      "epoch": 0.9563374311148792,
      "grad_norm": 0.6228357519746542,
      "learning_rate": 6.89493615242028e-05,
      "loss": 0.6987,
      "step": 141
    },
    {
      "epoch": 0.9631199660873251,
      "grad_norm": 0.6004863488992443,
      "learning_rate": 6.872946826515338e-05,
      "loss": 0.7078,
      "step": 142
    },
    {
      "epoch": 0.969902501059771,
      "grad_norm": 0.38322005162459927,
      "learning_rate": 6.850776685515453e-05,
      "loss": 0.6951,
      "step": 143
    },
    {
      "epoch": 0.976685036032217,
      "grad_norm": 0.4778442733404826,
      "learning_rate": 6.828427124746191e-05,
      "loss": 0.7059,
      "step": 144
    },
    {
      "epoch": 0.983467571004663,
      "grad_norm": 0.4078839327460854,
      "learning_rate": 6.805899550825285e-05,
      "loss": 0.7034,
      "step": 145
    },
    {
      "epoch": 0.9902501059771089,
      "grad_norm": 0.3930697959309545,
      "learning_rate": 6.78319538157411e-05,
      "loss": 0.688,
      "step": 146
    },
    {
      "epoch": 0.9970326409495549,
      "grad_norm": 0.471709925781642,
      "learning_rate": 6.760316045928449e-05,
      "loss": 0.6996,
      "step": 147
    },
    {
      "epoch": 1.0038151759220009,
      "grad_norm": 0.5236980599221154,
      "learning_rate": 6.737262983848554e-05,
      "loss": 1.0973,
      "step": 148
    },
    {
      "epoch": 1.0105977108944468,
      "grad_norm": 0.7531817832475667,
      "learning_rate": 6.714037646228529e-05,
      "loss": 0.6724,
      "step": 149
    },
    {
      "epoch": 1.0173802458668928,
      "grad_norm": 0.8924803206824311,
      "learning_rate": 6.690641494805011e-05,
      "loss": 0.6904,
      "step": 150
    },
    {
      "epoch": 1.0241627808393388,
      "grad_norm": 0.9578752031258555,
      "learning_rate": 6.667076002065168e-05,
      "loss": 0.6763,
      "step": 151
    },
    {
      "epoch": 1.0309453158117847,
      "grad_norm": 0.9584132989076937,
      "learning_rate": 6.643342651154028e-05,
      "loss": 0.6928,
      "step": 152
    },
    {
      "epoch": 1.0377278507842307,
      "grad_norm": 0.6631156370651635,
      "learning_rate": 6.619442935781141e-05,
      "loss": 0.6914,
      "step": 153
    },
    {
      "epoch": 1.0445103857566767,
      "grad_norm": 0.5101036583421924,
      "learning_rate": 6.595378360126555e-05,
      "loss": 0.6778,
      "step": 154
    },
    {
      "epoch": 1.0512929207291226,
      "grad_norm": 0.6836210135721171,
      "learning_rate": 6.571150438746157e-05,
      "loss": 0.6776,
      "step": 155
    },
    {
      "epoch": 1.0580754557015684,
      "grad_norm": 0.5297506092844936,
      "learning_rate": 6.546760696476354e-05,
      "loss": 0.6792,
      "step": 156
    },
    {
      "epoch": 1.0648579906740143,
      "grad_norm": 0.6440705198875945,
      "learning_rate": 6.52221066833809e-05,
      "loss": 0.6813,
      "step": 157
    },
    {
      "epoch": 1.0716405256464603,
      "grad_norm": 0.5649902540029019,
      "learning_rate": 6.497501899440255e-05,
      "loss": 0.6758,
      "step": 158
    },
    {
      "epoch": 1.0784230606189062,
      "grad_norm": 0.5451745835283485,
      "learning_rate": 6.472635944882421e-05,
      "loss": 0.6846,
      "step": 159
    },
    {
      "epoch": 1.0852055955913522,
      "grad_norm": 0.6742930881134588,
      "learning_rate": 6.447614369656987e-05,
      "loss": 0.6659,
      "step": 160
    },
    {
      "epoch": 1.0919881305637982,
      "grad_norm": 0.40739982620953924,
      "learning_rate": 6.422438748550667e-05,
      "loss": 0.6688,
      "step": 161
    },
    {
      "epoch": 1.0987706655362441,
      "grad_norm": 0.520705689346025,
      "learning_rate": 6.397110666045388e-05,
      "loss": 0.675,
      "step": 162
    },
    {
      "epoch": 1.10555320050869,
      "grad_norm": 0.4809784234416612,
      "learning_rate": 6.371631716218563e-05,
      "loss": 0.6714,
      "step": 163
    },
    {
      "epoch": 1.112335735481136,
      "grad_norm": 0.6328561377367079,
      "learning_rate": 6.346003502642762e-05,
      "loss": 0.6818,
      "step": 164
    },
    {
      "epoch": 1.119118270453582,
      "grad_norm": 0.6964171119263963,
      "learning_rate": 6.320227638284793e-05,
      "loss": 0.6663,
      "step": 165
    },
    {
      "epoch": 1.125900805426028,
      "grad_norm": 0.5303565775080002,
      "learning_rate": 6.294305745404185e-05,
      "loss": 0.6784,
      "step": 166
    },
    {
      "epoch": 1.132683340398474,
      "grad_norm": 0.44306420532590796,
      "learning_rate": 6.268239455451083e-05,
      "loss": 0.6867,
      "step": 167
    },
    {
      "epoch": 1.13946587537092,
      "grad_norm": 0.3414530565574301,
      "learning_rate": 6.242030408963576e-05,
      "loss": 0.6672,
      "step": 168
    },
    {
      "epoch": 1.1462484103433659,
      "grad_norm": 0.40031871777699174,
      "learning_rate": 6.215680255464442e-05,
      "loss": 0.6651,
      "step": 169
    },
    {
      "epoch": 1.1530309453158119,
      "grad_norm": 0.277522312209641,
      "learning_rate": 6.18919065335733e-05,
      "loss": 0.6669,
      "step": 170
    },
    {
      "epoch": 1.1598134802882578,
      "grad_norm": 0.35702823672803347,
      "learning_rate": 6.162563269822391e-05,
      "loss": 0.6636,
      "step": 171
    },
    {
      "epoch": 1.1665960152607038,
      "grad_norm": 0.29212513135494134,
      "learning_rate": 6.135799780711345e-05,
      "loss": 0.675,
      "step": 172
    },
    {
      "epoch": 1.1733785502331497,
      "grad_norm": 0.32422232694491343,
      "learning_rate": 6.10890187044201e-05,
      "loss": 0.6833,
      "step": 173
    },
    {
      "epoch": 1.1801610852055955,
      "grad_norm": 0.34358927362334013,
      "learning_rate": 6.0818712318922894e-05,
      "loss": 0.6682,
      "step": 174
    },
    {
      "epoch": 1.1869436201780414,
      "grad_norm": 0.24681935714411127,
      "learning_rate": 6.054709566293627e-05,
      "loss": 0.683,
      "step": 175
    },
    {
      "epoch": 1.1937261551504874,
      "grad_norm": 0.3149112104902362,
      "learning_rate": 6.0274185831239325e-05,
      "loss": 0.6638,
      "step": 176
    },
    {
      "epoch": 1.2005086901229334,
      "grad_norm": 0.34401534901495145,
      "learning_rate": 6.000000000000001e-05,
      "loss": 0.6725,
      "step": 177
    },
    {
      "epoch": 1.2072912250953793,
      "grad_norm": 0.323484399780841,
      "learning_rate": 5.972455542569402e-05,
      "loss": 0.6692,
      "step": 178
    },
    {
      "epoch": 1.2140737600678253,
      "grad_norm": 0.30391385199629817,
      "learning_rate": 5.944786944401875e-05,
      "loss": 0.6623,
      "step": 179
    },
    {
      "epoch": 1.2208562950402713,
      "grad_norm": 0.26879513471231475,
      "learning_rate": 5.916995946880228e-05,
      "loss": 0.6842,
      "step": 180
    },
    {
      "epoch": 1.2276388300127172,
      "grad_norm": 0.29492095831429094,
      "learning_rate": 5.889084299090732e-05,
      "loss": 0.6786,
      "step": 181
    },
    {
      "epoch": 1.2344213649851632,
      "grad_norm": 0.3788666762600595,
      "learning_rate": 5.861053757713043e-05,
      "loss": 0.6726,
      "step": 182
    },
    {
      "epoch": 1.2412038999576092,
      "grad_norm": 0.4398437908833291,
      "learning_rate": 5.832906086909642e-05,
      "loss": 0.6742,
      "step": 183
    },
    {
      "epoch": 1.2479864349300551,
      "grad_norm": 0.5383171396673415,
      "learning_rate": 5.8046430582148034e-05,
      "loss": 0.6636,
      "step": 184
    },
    {
      "epoch": 1.254768969902501,
      "grad_norm": 0.6287710260233805,
      "learning_rate": 5.776266450423097e-05,
      "loss": 0.6745,
      "step": 185
    },
    {
      "epoch": 1.261551504874947,
      "grad_norm": 0.692244364864307,
      "learning_rate": 5.747778049477438e-05,
      "loss": 0.692,
      "step": 186
    },
    {
      "epoch": 1.268334039847393,
      "grad_norm": 0.5173002490296247,
      "learning_rate": 5.7191796483566874e-05,
      "loss": 0.6851,
      "step": 187
    },
    {
      "epoch": 1.275116574819839,
      "grad_norm": 0.3170872618606908,
      "learning_rate": 5.6904730469627985e-05,
      "loss": 0.6694,
      "step": 188
    },
    {
      "epoch": 1.281899109792285,
      "grad_norm": 0.3605888836619855,
      "learning_rate": 5.661660052007547e-05,
      "loss": 0.6674,
      "step": 189
    },
    {
      "epoch": 1.288681644764731,
      "grad_norm": 0.5359207812719183,
      "learning_rate": 5.632742476898813e-05,
      "loss": 0.6574,
      "step": 190
    },
    {
      "epoch": 1.2954641797371766,
      "grad_norm": 0.5493258534047062,
      "learning_rate": 5.6037221416264554e-05,
      "loss": 0.6651,
      "step": 191
    },
    {
      "epoch": 1.3022467147096228,
      "grad_norm": 0.3583558907183342,
      "learning_rate": 5.574600872647766e-05,
      "loss": 0.6741,
      "step": 192
    },
    {
      "epoch": 1.3090292496820686,
      "grad_norm": 0.38616308348313483,
      "learning_rate": 5.5453805027725145e-05,
      "loss": 0.6735,
      "step": 193
    },
    {
      "epoch": 1.3158117846545148,
      "grad_norm": 0.5232887516865083,
      "learning_rate": 5.516062871047602e-05,
      "loss": 0.6648,
      "step": 194
    },
    {
      "epoch": 1.3225943196269605,
      "grad_norm": 0.5001634228373406,
      "learning_rate": 5.48664982264131e-05,
      "loss": 0.682,
      "step": 195
    },
    {
      "epoch": 1.3293768545994065,
      "grad_norm": 0.4545741719631783,
      "learning_rate": 5.4571432087271775e-05,
      "loss": 0.6748,
      "step": 196
    },
    {
      "epoch": 1.3361593895718524,
      "grad_norm": 0.42741557026919574,
      "learning_rate": 5.427544886367488e-05,
      "loss": 0.677,
      "step": 197
    },
    {
      "epoch": 1.3429419245442984,
      "grad_norm": 0.31094524488846675,
      "learning_rate": 5.397856718396394e-05,
      "loss": 0.6652,
      "step": 198
    },
    {
      "epoch": 1.3497244595167444,
      "grad_norm": 0.24780800036037465,
      "learning_rate": 5.368080573302676e-05,
      "loss": 0.6664,
      "step": 199
    },
    {
      "epoch": 1.3565069944891903,
      "grad_norm": 0.3181527932049964,
      "learning_rate": 5.3382183251121415e-05,
      "loss": 0.6675,
      "step": 200
    },
    {
      "epoch": 1.3632895294616363,
      "grad_norm": 0.29745491627360243,
      "learning_rate": 5.3082718532696874e-05,
      "loss": 0.6706,
      "step": 201
    },
    {
      "epoch": 1.3700720644340822,
      "grad_norm": 0.2584376200240668,
      "learning_rate": 5.2782430425210004e-05,
      "loss": 0.6631,
      "step": 202
    },
    {
      "epoch": 1.3768545994065282,
      "grad_norm": 0.28291959973268804,
      "learning_rate": 5.2481337827939486e-05,
      "loss": 0.665,
      "step": 203
    },
    {
      "epoch": 1.3836371343789742,
      "grad_norm": 0.222511659896141,
      "learning_rate": 5.217945969079629e-05,
      "loss": 0.6737,
      "step": 204
    },
    {
      "epoch": 1.3904196693514201,
      "grad_norm": 0.1904985337395551,
      "learning_rate": 5.1876815013131e-05,
      "loss": 0.6773,
      "step": 205
    },
    {
      "epoch": 1.397202204323866,
      "grad_norm": 0.2556162510484352,
      "learning_rate": 5.157342284253812e-05,
      "loss": 0.674,
      "step": 206
    },
    {
      "epoch": 1.403984739296312,
      "grad_norm": 0.2384701949692735,
      "learning_rate": 5.1269302273657195e-05,
      "loss": 0.6817,
      "step": 207
    },
    {
      "epoch": 1.410767274268758,
      "grad_norm": 0.20411764282852152,
      "learning_rate": 5.0964472446971114e-05,
      "loss": 0.6536,
      "step": 208
    },
    {
      "epoch": 1.417549809241204,
      "grad_norm": 0.20834830910241572,
      "learning_rate": 5.06589525476014e-05,
      "loss": 0.6694,
      "step": 209
    },
    {
      "epoch": 1.4243323442136497,
      "grad_norm": 0.22097226372451154,
      "learning_rate": 5.0352761804100835e-05,
      "loss": 0.6607,
      "step": 210
    },
    {
      "epoch": 1.431114879186096,
      "grad_norm": 0.23524605314054492,
      "learning_rate": 5.004591948724317e-05,
      "loss": 0.6591,
      "step": 211
    },
    {
      "epoch": 1.4378974141585417,
      "grad_norm": 0.32464444157336164,
      "learning_rate": 4.9738444908810365e-05,
      "loss": 0.6698,
      "step": 212
    },
    {
      "epoch": 1.4446799491309876,
      "grad_norm": 0.32718322363813607,
      "learning_rate": 4.94303574203771e-05,
      "loss": 0.6543,
      "step": 213
    },
    {
      "epoch": 1.4514624841034336,
      "grad_norm": 0.2459546193619208,
      "learning_rate": 4.9121676412092874e-05,
      "loss": 0.6696,
      "step": 214
    },
    {
      "epoch": 1.4582450190758796,
      "grad_norm": 0.1878400255657447,
      "learning_rate": 4.881242131146163e-05,
      "loss": 0.6745,
      "step": 215
    },
    {
      "epoch": 1.4650275540483255,
      "grad_norm": 0.21340996885255564,
      "learning_rate": 4.8502611582119065e-05,
      "loss": 0.6702,
      "step": 216
    },
    {
      "epoch": 1.4718100890207715,
      "grad_norm": 0.1930500360689927,
      "learning_rate": 4.819226672260763e-05,
      "loss": 0.6739,
      "step": 217
    },
    {
      "epoch": 1.4785926239932174,
      "grad_norm": 0.19466196179996254,
      "learning_rate": 4.788140626514933e-05,
      "loss": 0.6678,
      "step": 218
    },
    {
      "epoch": 1.4853751589656634,
      "grad_norm": 0.1802358321365316,
      "learning_rate": 4.7570049774416414e-05,
      "loss": 0.6578,
      "step": 219
    },
    {
      "epoch": 1.4921576939381094,
      "grad_norm": 0.21190165053247084,
      "learning_rate": 4.7258216846300106e-05,
      "loss": 0.6687,
      "step": 220
    },
    {
      "epoch": 1.4989402289105553,
      "grad_norm": 0.2025332247212839,
      "learning_rate": 4.694592710667723e-05,
      "loss": 0.6695,
      "step": 221
    },
    {
      "epoch": 1.5057227638830013,
      "grad_norm": 0.18244954655230586,
      "learning_rate": 4.663320021017497e-05,
      "loss": 0.6655,
      "step": 222
    },
    {
      "epoch": 1.5125052988554473,
      "grad_norm": 0.2269278837073207,
      "learning_rate": 4.6320055838934e-05,
      "loss": 0.6727,
      "step": 223
    },
    {
      "epoch": 1.5192878338278932,
      "grad_norm": 0.2259027791570099,
      "learning_rate": 4.6006513701369616e-05,
      "loss": 0.6606,
      "step": 224
    },
    {
      "epoch": 1.526070368800339,
      "grad_norm": 0.2477760784045823,
      "learning_rate": 4.5692593530931416e-05,
      "loss": 0.6759,
      "step": 225
    },
    {
      "epoch": 1.5328529037727852,
      "grad_norm": 0.2603331861037767,
      "learning_rate": 4.5378315084861276e-05,
      "loss": 0.6624,
      "step": 226
    },
    {
      "epoch": 1.539635438745231,
      "grad_norm": 0.2506086748418563,
      "learning_rate": 4.506369814294998e-05,
      "loss": 0.6599,
      "step": 227
    },
    {
      "epoch": 1.546417973717677,
      "grad_norm": 0.26765221421169105,
      "learning_rate": 4.474876250629221e-05,
      "loss": 0.6764,
      "step": 228
    },
    {
      "epoch": 1.5532005086901228,
      "grad_norm": 0.17936756384362929,
      "learning_rate": 4.4433527996040443e-05,
      "loss": 0.6643,
      "step": 229
    },
    {
      "epoch": 1.559983043662569,
      "grad_norm": 0.1830215522438223,
      "learning_rate": 4.411801445215739e-05,
      "loss": 0.6644,
      "step": 230
    },
    {
      "epoch": 1.5667655786350148,
      "grad_norm": 0.20385050238488933,
      "learning_rate": 4.38022417321673e-05,
      "loss": 0.6559,
      "step": 231
    },
    {
      "epoch": 1.573548113607461,
      "grad_norm": 0.21314261341134186,
      "learning_rate": 4.348622970990634e-05,
      "loss": 0.6733,
      "step": 232
    },
    {
      "epoch": 1.5803306485799067,
      "grad_norm": 0.23351272270245574,
      "learning_rate": 4.316999827427154e-05,
      "loss": 0.6609,
      "step": 233
    },
    {
      "epoch": 1.5871131835523526,
      "grad_norm": 0.24621369232426915,
      "learning_rate": 4.2853567327969296e-05,
      "loss": 0.6681,
      "step": 234
    },
    {
      "epoch": 1.5938957185247986,
      "grad_norm": 0.30216358602341464,
      "learning_rate": 4.2536956786262585e-05,
      "loss": 0.6822,
      "step": 235
    },
    {
      "epoch": 1.6006782534972446,
      "grad_norm": 0.2721373138715926,
      "learning_rate": 4.222018657571761e-05,
      "loss": 0.6636,
      "step": 236
    },
    {
      "epoch": 1.6074607884696905,
      "grad_norm": 0.23208696219893624,
      "learning_rate": 4.19032766329497e-05,
      "loss": 0.6701,
      "step": 237
    },
    {
      "epoch": 1.6142433234421365,
      "grad_norm": 0.32166382447684766,
      "learning_rate": 4.1586246903368496e-05,
      "loss": 0.6786,
      "step": 238
    },
    {
      "epoch": 1.6210258584145825,
      "grad_norm": 0.24677680563722856,
      "learning_rate": 4.126911733992272e-05,
      "loss": 0.6791,
      "step": 239
    },
    {
      "epoch": 1.6278083933870284,
      "grad_norm": 0.18800235990418823,
      "learning_rate": 4.0951907901844296e-05,
      "loss": 0.6591,
      "step": 240
    },
    {
      "epoch": 1.6345909283594744,
      "grad_norm": 0.18086364097944294,
      "learning_rate": 4.063463855339232e-05,
      "loss": 0.6607,
      "step": 241
    },
    {
      "epoch": 1.6413734633319204,
      "grad_norm": 0.1816725478249145,
      "learning_rate": 4.031732926259639e-05,
      "loss": 0.6636,
      "step": 242
    },
    {
      "epoch": 1.6481559983043663,
      "grad_norm": 0.2179460901904272,
      "learning_rate": 4e-05,
      "loss": 0.6582,
      "step": 243
    },
    {
      "epoch": 1.654938533276812,
      "grad_norm": 0.2408536943475412,
      "learning_rate": 3.9682670737403624e-05,
      "loss": 0.6581,
      "step": 244
    },
    {
      "epoch": 1.6617210682492582,
      "grad_norm": 0.19000091863164711,
      "learning_rate": 3.9365361446607684e-05,
      "loss": 0.6597,
      "step": 245
    },
    {
      "epoch": 1.668503603221704,
      "grad_norm": 0.1604164527075533,
      "learning_rate": 3.904809209815571e-05,
      "loss": 0.6609,
      "step": 246
    },
    {
      "epoch": 1.6752861381941502,
      "grad_norm": 0.20780114182132747,
      "learning_rate": 3.87308826600773e-05,
      "loss": 0.6588,
      "step": 247
    },
    {
      "epoch": 1.682068673166596,
      "grad_norm": 0.21684423863552219,
      "learning_rate": 3.841375309663151e-05,
      "loss": 0.6688,
      "step": 248
    },
    {
      "epoch": 1.688851208139042,
      "grad_norm": 0.21044006155176154,
      "learning_rate": 3.809672336705031e-05,
      "loss": 0.6711,
      "step": 249
    },
    {
      "epoch": 1.6956337431114878,
      "grad_norm": 0.1911074700409385,
      "learning_rate": 3.7779813424282404e-05,
      "loss": 0.676,
      "step": 250
    },
    {
      "epoch": 1.702416278083934,
      "grad_norm": 0.20477853050090425,
      "learning_rate": 3.746304321373742e-05,
      "loss": 0.6694,
      "step": 251
    },
    {
      "epoch": 1.7091988130563798,
      "grad_norm": 0.2337501696824405,
      "learning_rate": 3.714643267203071e-05,
      "loss": 0.6545,
      "step": 252
    },
    {
      "epoch": 1.7159813480288257,
      "grad_norm": 0.174865590490607,
      "learning_rate": 3.683000172572846e-05,
      "loss": 0.6766,
      "step": 253
    },
    {
      "epoch": 1.7227638830012717,
      "grad_norm": 0.21609429620617512,
      "learning_rate": 3.6513770290093674e-05,
      "loss": 0.6574,
      "step": 254
    },
    {
      "epoch": 1.7295464179737177,
      "grad_norm": 0.24191345936732944,
      "learning_rate": 3.6197758267832705e-05,
      "loss": 0.6635,
      "step": 255
    },
    {
      "epoch": 1.7363289529461636,
      "grad_norm": 0.21998311775904844,
      "learning_rate": 3.5881985547842627e-05,
      "loss": 0.6676,
      "step": 256
    },
    {
      "epoch": 1.7431114879186096,
      "grad_norm": 0.23724435100092056,
      "learning_rate": 3.556647200395956e-05,
      "loss": 0.6624,
      "step": 257
    },
    {
      "epoch": 1.7498940228910556,
      "grad_norm": 0.240215596344218,
      "learning_rate": 3.5251237493707804e-05,
      "loss": 0.6526,
      "step": 258
    },
    {
      "epoch": 1.7566765578635015,
      "grad_norm": 0.19872673849172526,
      "learning_rate": 3.493630185705003e-05,
      "loss": 0.6752,
      "step": 259
    },
    {
      "epoch": 1.7634590928359475,
      "grad_norm": 0.23055129025202933,
      "learning_rate": 3.462168491513873e-05,
      "loss": 0.6537,
      "step": 260
    },
    {
      "epoch": 1.7702416278083932,
      "grad_norm": 0.18151089866178272,
      "learning_rate": 3.4307406469068604e-05,
      "loss": 0.6699,
      "step": 261
    },
    {
      "epoch": 1.7770241627808394,
      "grad_norm": 0.21466329451747204,
      "learning_rate": 3.3993486298630384e-05,
      "loss": 0.6505,
      "step": 262
    },
    {
      "epoch": 1.7838066977532852,
      "grad_norm": 0.22222597418898904,
      "learning_rate": 3.367994416106601e-05,
      "loss": 0.6646,
      "step": 263
    },
    {
      "epoch": 1.7905892327257313,
      "grad_norm": 0.17502941394521884,
      "learning_rate": 3.3366799789825044e-05,
      "loss": 0.6641,
      "step": 264
    },
    {
      "epoch": 1.797371767698177,
      "grad_norm": 0.24125561495280676,
      "learning_rate": 3.305407289332279e-05,
      "loss": 0.6591,
      "step": 265
    },
    {
      "epoch": 1.8041543026706233,
      "grad_norm": 0.16946358730913183,
      "learning_rate": 3.27417831536999e-05,
      "loss": 0.6607,
      "step": 266
    },
    {
      "epoch": 1.810936837643069,
      "grad_norm": 0.2021667757386069,
      "learning_rate": 3.2429950225583606e-05,
      "loss": 0.6578,
      "step": 267
    },
    {
      "epoch": 1.8177193726155152,
      "grad_norm": 0.21522045832395853,
      "learning_rate": 3.2118593734850686e-05,
      "loss": 0.6666,
      "step": 268
    },
    {
      "epoch": 1.824501907587961,
      "grad_norm": 0.1955912817171962,
      "learning_rate": 3.180773327739238e-05,
      "loss": 0.6666,
      "step": 269
    },
    {
      "epoch": 1.831284442560407,
      "grad_norm": 0.17543010393599448,
      "learning_rate": 3.1497388417880935e-05,
      "loss": 0.6689,
      "step": 270
    },
    {
      "epoch": 1.8380669775328529,
      "grad_norm": 0.16672656731926996,
      "learning_rate": 3.118757868853838e-05,
      "loss": 0.6618,
      "step": 271
    },
    {
      "epoch": 1.8448495125052988,
      "grad_norm": 0.17242532075303685,
      "learning_rate": 3.087832358790715e-05,
      "loss": 0.6516,
      "step": 272
    },
    {
      "epoch": 1.8516320474777448,
      "grad_norm": 0.15372909956958672,
      "learning_rate": 3.0569642579622905e-05,
      "loss": 0.6632,
      "step": 273
    },
    {
      "epoch": 1.8584145824501908,
      "grad_norm": 0.15847716517543572,
      "learning_rate": 3.0261555091189648e-05,
      "loss": 0.6652,
      "step": 274
    },
    {
      "epoch": 1.8651971174226367,
      "grad_norm": 0.16812640824945566,
      "learning_rate": 2.9954080512756836e-05,
      "loss": 0.6544,
      "step": 275
    },
    {
      "epoch": 1.8719796523950827,
      "grad_norm": 0.14101975804977546,
      "learning_rate": 2.9647238195899168e-05,
      "loss": 0.6744,
      "step": 276
    },
    {
      "epoch": 1.8787621873675286,
      "grad_norm": 0.16388464384133353,
      "learning_rate": 2.9341047452398607e-05,
      "loss": 0.655,
      "step": 277
    },
    {
      "epoch": 1.8855447223399746,
      "grad_norm": 0.17548344056861861,
      "learning_rate": 2.9035527553028906e-05,
      "loss": 0.6719,
      "step": 278
    },
    {
      "epoch": 1.8923272573124206,
      "grad_norm": 0.1422728380203633,
      "learning_rate": 2.873069772634281e-05,
      "loss": 0.6614,
      "step": 279
    },
    {
      "epoch": 1.8991097922848663,
      "grad_norm": 0.16674659478989506,
      "learning_rate": 2.8426577157461897e-05,
      "loss": 0.6564,
      "step": 280
    },
    {
      "epoch": 1.9058923272573125,
      "grad_norm": 0.15682390820225428,
      "learning_rate": 2.8123184986869022e-05,
      "loss": 0.6683,
      "step": 281
    },
    {
      "epoch": 1.9126748622297582,
      "grad_norm": 0.1734396291500367,
      "learning_rate": 2.7820540309203728e-05,
      "loss": 0.6655,
      "step": 282
    },
    {
      "epoch": 1.9194573972022044,
      "grad_norm": 0.1467568096698091,
      "learning_rate": 2.751866217206052e-05,
      "loss": 0.6605,
      "step": 283
    },
    {
      "epoch": 1.9262399321746502,
      "grad_norm": 0.17404162787939303,
      "learning_rate": 2.721756957479001e-05,
      "loss": 0.6451,
      "step": 284
    },
    {
      "epoch": 1.9330224671470964,
      "grad_norm": 0.16431154167988282,
      "learning_rate": 2.691728146730314e-05,
      "loss": 0.6717,
      "step": 285
    },
    {
      "epoch": 1.939805002119542,
      "grad_norm": 0.1311637148885735,
      "learning_rate": 2.6617816748878595e-05,
      "loss": 0.6779,
      "step": 286
    },
    {
      "epoch": 1.9465875370919883,
      "grad_norm": 0.15022746394032438,
      "learning_rate": 2.6319194266973256e-05,
      "loss": 0.6642,
      "step": 287
    },
    {
      "epoch": 1.953370072064434,
      "grad_norm": 0.13388633011558382,
      "learning_rate": 2.6021432816036073e-05,
      "loss": 0.6673,
      "step": 288
    },
    {
      "epoch": 1.96015260703688,
      "grad_norm": 0.15469686766213264,
      "learning_rate": 2.5724551136325132e-05,
      "loss": 0.6641,
      "step": 289
    },
    {
      "epoch": 1.966935142009326,
      "grad_norm": 0.13266678943395285,
      "learning_rate": 2.5428567912728225e-05,
      "loss": 0.6524,
      "step": 290
    },
    {
      "epoch": 1.973717676981772,
      "grad_norm": 0.14468475238604925,
      "learning_rate": 2.5133501773586905e-05,
      "loss": 0.6671,
      "step": 291
    },
    {
      "epoch": 1.9805002119542179,
      "grad_norm": 0.14376432351647073,
      "learning_rate": 2.483937128952399e-05,
      "loss": 0.6609,
      "step": 292
    },
    {
      "epoch": 1.9872827469266638,
      "grad_norm": 0.159716122840683,
      "learning_rate": 2.4546194972274852e-05,
      "loss": 0.67,
      "step": 293
    },
    {
      "epoch": 1.9940652818991098,
      "grad_norm": 0.13840709480527438,
      "learning_rate": 2.425399127352235e-05,
      "loss": 0.6613,
      "step": 294
    },
    {
      "epoch": 2.0008478168715556,
      "grad_norm": 0.23463615437325822,
      "learning_rate": 2.3962778583735463e-05,
      "loss": 1.0436,
      "step": 295
    },
    {
      "epoch": 2.0076303518440017,
      "grad_norm": 0.17334268341845893,
      "learning_rate": 2.3672575231011888e-05,
      "loss": 0.6518,
      "step": 296
    },
    {
      "epoch": 2.0144128868164475,
      "grad_norm": 0.16319357693293915,
      "learning_rate": 2.338339947992455e-05,
      "loss": 0.6541,
      "step": 297
    },
    {
      "epoch": 2.0211954217888937,
      "grad_norm": 0.1642296066509337,
      "learning_rate": 2.3095269530372032e-05,
      "loss": 0.642,
      "step": 298
    },
    {
      "epoch": 2.0279779567613394,
      "grad_norm": 0.14714181360085615,
      "learning_rate": 2.2808203516433136e-05,
      "loss": 0.6585,
      "step": 299
    },
    {
      "epoch": 2.0347604917337856,
      "grad_norm": 0.17243791907969416,
      "learning_rate": 2.2522219505225627e-05,
      "loss": 0.6367,
      "step": 300
    },
    {
      "epoch": 2.0415430267062313,
      "grad_norm": 0.15616193111829843,
      "learning_rate": 2.2237335495769035e-05,
      "loss": 0.6435,
      "step": 301
    },
    {
      "epoch": 2.0483255616786775,
      "grad_norm": 0.16853105336890806,
      "learning_rate": 2.1953569417851983e-05,
      "loss": 0.6512,
      "step": 302
    },
    {
      "epoch": 2.0551080966511233,
      "grad_norm": 0.1491939764011879,
      "learning_rate": 2.1670939130903585e-05,
      "loss": 0.6421,
      "step": 303
    },
    {
      "epoch": 2.0618906316235694,
      "grad_norm": 0.15064138598058863,
      "learning_rate": 2.1389462422869576e-05,
      "loss": 0.6449,
      "step": 304
    },
    {
      "epoch": 2.068673166596015,
      "grad_norm": 0.14394176360070499,
      "learning_rate": 2.11091570090927e-05,
      "loss": 0.645,
      "step": 305
    },
    {
      "epoch": 2.0754557015684614,
      "grad_norm": 0.14131247697973692,
      "learning_rate": 2.0830040531197744e-05,
      "loss": 0.6486,
      "step": 306
    },
    {
      "epoch": 2.082238236540907,
      "grad_norm": 0.13739301329908918,
      "learning_rate": 2.055213055598126e-05,
      "loss": 0.6294,
      "step": 307
    },
    {
      "epoch": 2.0890207715133533,
      "grad_norm": 0.14567983415785407,
      "learning_rate": 2.027544457430599e-05,
      "loss": 0.6533,
      "step": 308
    },
    {
      "epoch": 2.095803306485799,
      "grad_norm": 0.12128606904438433,
      "learning_rate": 2.0000000000000012e-05,
      "loss": 0.6439,
      "step": 309
    },
    {
      "epoch": 2.1025858414582452,
      "grad_norm": 0.13694286534266129,
      "learning_rate": 1.9725814168760688e-05,
      "loss": 0.6326,
      "step": 310
    },
    {
      "epoch": 2.109368376430691,
      "grad_norm": 0.11981049040505881,
      "learning_rate": 1.9452904337063757e-05,
      "loss": 0.6294,
      "step": 311
    },
    {
      "epoch": 2.1161509114031367,
      "grad_norm": 0.14362025503542125,
      "learning_rate": 1.9181287681077116e-05,
      "loss": 0.6391,
      "step": 312
    },
    {
      "epoch": 2.122933446375583,
      "grad_norm": 0.13487152713963088,
      "learning_rate": 1.8910981295579903e-05,
      "loss": 0.6363,
      "step": 313
    },
    {
      "epoch": 2.1297159813480286,
      "grad_norm": 0.13488125276633461,
      "learning_rate": 1.864200219288656e-05,
      "loss": 0.6499,
      "step": 314
    },
    {
      "epoch": 2.136498516320475,
      "grad_norm": 0.11819804560452499,
      "learning_rate": 1.8374367301776112e-05,
      "loss": 0.6484,
      "step": 315
    },
    {
      "epoch": 2.1432810512929206,
      "grad_norm": 0.14333365446921437,
      "learning_rate": 1.8108093466426713e-05,
      "loss": 0.6517,
      "step": 316
    },
    {
      "epoch": 2.1500635862653668,
      "grad_norm": 0.11619579623506068,
      "learning_rate": 1.7843197445355593e-05,
      "loss": 0.6424,
      "step": 317
    },
    {
      "epoch": 2.1568461212378125,
      "grad_norm": 0.11214714038576876,
      "learning_rate": 1.7579695910364235e-05,
      "loss": 0.6327,
      "step": 318
    },
    {
      "epoch": 2.1636286562102587,
      "grad_norm": 0.11836436894373582,
      "learning_rate": 1.7317605445489178e-05,
      "loss": 0.6332,
      "step": 319
    },
    {
      "epoch": 2.1704111911827044,
      "grad_norm": 0.12190048890581265,
      "learning_rate": 1.7056942545958167e-05,
      "loss": 0.6664,
      "step": 320
    },
    {
      "epoch": 2.1771937261551506,
      "grad_norm": 0.12475488169306143,
      "learning_rate": 1.679772361715208e-05,
      "loss": 0.6411,
      "step": 321
    },
    {
      "epoch": 2.1839762611275964,
      "grad_norm": 0.13291841960547746,
      "learning_rate": 1.6539964973572388e-05,
      "loss": 0.6454,
      "step": 322
    },
    {
      "epoch": 2.1907587961000425,
      "grad_norm": 0.12175988658418246,
      "learning_rate": 1.6283682837814388e-05,
      "loss": 0.646,
      "step": 323
    },
    {
      "epoch": 2.1975413310724883,
      "grad_norm": 0.11016625987010713,
      "learning_rate": 1.6028893339546122e-05,
      "loss": 0.6326,
      "step": 324
    },
    {
      "epoch": 2.2043238660449345,
      "grad_norm": 0.1108749966998608,
      "learning_rate": 1.5775612514493343e-05,
      "loss": 0.6461,
      "step": 325
    },
    {
      "epoch": 2.21110640101738,
      "grad_norm": 0.1143880178225703,
      "learning_rate": 1.552385630343014e-05,
      "loss": 0.6372,
      "step": 326
    },
    {
      "epoch": 2.2178889359898264,
      "grad_norm": 0.11233236110610757,
      "learning_rate": 1.527364055117579e-05,
      "loss": 0.6286,
      "step": 327
    },
    {
      "epoch": 2.224671470962272,
      "grad_norm": 0.11855577442735764,
      "learning_rate": 1.502498100559747e-05,
      "loss": 0.6439,
      "step": 328
    },
    {
      "epoch": 2.231454005934718,
      "grad_norm": 0.12612061766992566,
      "learning_rate": 1.4777893316619114e-05,
      "loss": 0.6369,
      "step": 329
    },
    {
      "epoch": 2.238236540907164,
      "grad_norm": 0.10643244567354232,
      "learning_rate": 1.4532393035236477e-05,
      "loss": 0.6385,
      "step": 330
    },
    {
      "epoch": 2.24501907587961,
      "grad_norm": 0.1173067781996439,
      "learning_rate": 1.4288495612538427e-05,
      "loss": 0.6339,
      "step": 331
    },
    {
      "epoch": 2.251801610852056,
      "grad_norm": 0.11386078478518308,
      "learning_rate": 1.404621639873447e-05,
      "loss": 0.6397,
      "step": 332
    },
    {
      "epoch": 2.2585841458245017,
      "grad_norm": 0.09826930234307879,
      "learning_rate": 1.3805570642188602e-05,
      "loss": 0.645,
      "step": 333
    },
    {
      "epoch": 2.265366680796948,
      "grad_norm": 0.11305991403115098,
      "learning_rate": 1.3566573488459729e-05,
      "loss": 0.6449,
      "step": 334
    },
    {
      "epoch": 2.2721492157693937,
      "grad_norm": 0.11019252840128578,
      "learning_rate": 1.3329239979348341e-05,
      "loss": 0.64,
      "step": 335
    },
    {
      "epoch": 2.27893175074184,
      "grad_norm": 0.11234819489299995,
      "learning_rate": 1.30935850519499e-05,
      "loss": 0.6514,
      "step": 336
    },
    {
      "epoch": 2.2857142857142856,
      "grad_norm": 0.1112071135535242,
      "learning_rate": 1.2859623537714719e-05,
      "loss": 0.6551,
      "step": 337
    },
    {
      "epoch": 2.2924968206867318,
      "grad_norm": 0.131859623404786,
      "learning_rate": 1.262737016151447e-05,
      "loss": 0.6299,
      "step": 338
    },
    {
      "epoch": 2.2992793556591775,
      "grad_norm": 0.11621464001477737,
      "learning_rate": 1.2396839540715528e-05,
      "loss": 0.6316,
      "step": 339
    },
    {
      "epoch": 2.3060618906316237,
      "grad_norm": 0.10479280516442933,
      "learning_rate": 1.2168046184258896e-05,
      "loss": 0.6284,
      "step": 340
    },
    {
      "epoch": 2.3128444256040694,
      "grad_norm": 0.10786907647373126,
| "learning_rate": 1.1941004491747145e-05, | |
| "loss": 0.6436, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 2.3196269605765156, | |
| "grad_norm": 0.10782220614805556, | |
| "learning_rate": 1.1715728752538103e-05, | |
| "loss": 0.644, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 2.3264094955489614, | |
| "grad_norm": 0.11055552541032848, | |
| "learning_rate": 1.1492233144845497e-05, | |
| "loss": 0.654, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 2.3331920305214076, | |
| "grad_norm": 0.11811567288889502, | |
| "learning_rate": 1.1270531734846642e-05, | |
| "loss": 0.6383, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 2.3399745654938533, | |
| "grad_norm": 0.09614027593231717, | |
| "learning_rate": 1.1050638475797193e-05, | |
| "loss": 0.6486, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 2.3467571004662995, | |
| "grad_norm": 0.11607457015859846, | |
| "learning_rate": 1.083256720715292e-05, | |
| "loss": 0.6404, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 2.3535396354387452, | |
| "grad_norm": 0.10091742280729821, | |
| "learning_rate": 1.0616331653698673e-05, | |
| "loss": 0.6532, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 2.360322170411191, | |
| "grad_norm": 0.09008242164464271, | |
| "learning_rate": 1.0401945424684653e-05, | |
| "loss": 0.6345, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 2.367104705383637, | |
| "grad_norm": 0.11007729141322087, | |
| "learning_rate": 1.0189422012969814e-05, | |
| "loss": 0.6482, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 2.373887240356083, | |
| "grad_norm": 0.08713293928424415, | |
| "learning_rate": 9.978774794172717e-06, | |
| "loss": 0.6243, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 2.380669775328529, | |
| "grad_norm": 0.09736881200508285, | |
| "learning_rate": 9.770017025829675e-06, | |
| "loss": 0.6464, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 2.387452310300975, | |
| "grad_norm": 0.09620789674440036, | |
| "learning_rate": 9.563161846560342e-06, | |
| "loss": 0.6399, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 2.394234845273421, | |
| "grad_norm": 0.1013119106642466, | |
| "learning_rate": 9.358222275240884e-06, | |
| "loss": 0.6369, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 2.4010173802458668, | |
| "grad_norm": 0.0916584684509351, | |
| "learning_rate": 9.155211210184495e-06, | |
| "loss": 0.6389, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 2.407799915218313, | |
| "grad_norm": 0.10238057837102806, | |
| "learning_rate": 8.954141428329723e-06, | |
| "loss": 0.6377, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 2.4145824501907587, | |
| "grad_norm": 0.10324400627592539, | |
| "learning_rate": 8.755025584436266e-06, | |
| "loss": 0.6376, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 2.421364985163205, | |
| "grad_norm": 0.10057048511977558, | |
| "learning_rate": 8.55787621028851e-06, | |
| "loss": 0.6506, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 2.4281475201356506, | |
| "grad_norm": 0.0920558216849332, | |
| "learning_rate": 8.362705713906836e-06, | |
| "loss": 0.6313, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 2.434930055108097, | |
| "grad_norm": 0.10610753513988694, | |
| "learning_rate": 8.169526378766713e-06, | |
| "loss": 0.6429, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 2.4417125900805425, | |
| "grad_norm": 0.10421896294296241, | |
| "learning_rate": 7.978350363025588e-06, | |
| "loss": 0.6407, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 2.4484951250529887, | |
| "grad_norm": 0.09993739787650231, | |
| "learning_rate": 7.789189698757656e-06, | |
| "loss": 0.6348, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 2.4552776600254345, | |
| "grad_norm": 0.09901740077583913, | |
| "learning_rate": 7.602056291196671e-06, | |
| "loss": 0.6459, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 2.4620601949978806, | |
| "grad_norm": 0.10236110912446401, | |
| "learning_rate": 7.416961917986572e-06, | |
| "loss": 0.6509, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 2.4688427299703264, | |
| "grad_norm": 0.08705071344819373, | |
| "learning_rate": 7.233918228440324e-06, | |
| "loss": 0.6348, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 2.4756252649427726, | |
| "grad_norm": 0.11881375541999774, | |
| "learning_rate": 7.052936742806693e-06, | |
| "loss": 0.646, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 2.4824077999152183, | |
| "grad_norm": 0.10091840020224621, | |
| "learning_rate": 6.874028851545174e-06, | |
| "loss": 0.64, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 2.489190334887664, | |
| "grad_norm": 0.0828156389233352, | |
| "learning_rate": 6.697205814609149e-06, | |
| "loss": 0.6324, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 2.4959728698601102, | |
| "grad_norm": 0.09511789444296942, | |
| "learning_rate": 6.522478760737212e-06, | |
| "loss": 0.6398, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 2.5027554048325564, | |
| "grad_norm": 0.10690963856134064, | |
| "learning_rate": 6.349858686752748e-06, | |
| "loss": 0.6441, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 2.509537939805002, | |
| "grad_norm": 0.10170196889030197, | |
| "learning_rate": 6.17935645687183e-06, | |
| "loss": 0.6307, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 2.516320474777448, | |
| "grad_norm": 0.0873576460247102, | |
| "learning_rate": 6.010982802019429e-06, | |
| "loss": 0.6382, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 2.523103009749894, | |
| "grad_norm": 0.10193237211341288, | |
| "learning_rate": 5.8447483191540784e-06, | |
| "loss": 0.6427, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 2.52988554472234, | |
| "grad_norm": 0.09337413798566872, | |
| "learning_rate": 5.680663470600918e-06, | |
| "loss": 0.6479, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 2.536668079694786, | |
| "grad_norm": 0.08453984752561841, | |
| "learning_rate": 5.518738583393228e-06, | |
| "loss": 0.6508, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 2.5434506146672318, | |
| "grad_norm": 0.08158348255933849, | |
| "learning_rate": 5.358983848622452e-06, | |
| "loss": 0.6372, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 2.550233149639678, | |
| "grad_norm": 0.09556063525511022, | |
| "learning_rate": 5.201409320796842e-06, | |
| "loss": 0.6397, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 2.5570156846121237, | |
| "grad_norm": 0.09622696560627544, | |
| "learning_rate": 5.046024917208603e-06, | |
| "loss": 0.6386, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 2.56379821958457, | |
| "grad_norm": 0.09148441726550699, | |
| "learning_rate": 4.892840417309775e-06, | |
| "loss": 0.6489, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 2.5705807545570156, | |
| "grad_norm": 0.08606094930168158, | |
| "learning_rate": 4.7418654620967216e-06, | |
| "loss": 0.6527, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 2.577363289529462, | |
| "grad_norm": 0.08364373433134552, | |
| "learning_rate": 4.593109553503343e-06, | |
| "loss": 0.6428, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 2.5841458245019076, | |
| "grad_norm": 0.0843513293417055, | |
| "learning_rate": 4.446582053803066e-06, | |
| "loss": 0.6408, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 2.5909283594743533, | |
| "grad_norm": 0.09217062871361449, | |
| "learning_rate": 4.302292185019603e-06, | |
| "loss": 0.6496, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 2.5977108944467995, | |
| "grad_norm": 0.08394208095826587, | |
| "learning_rate": 4.16024902834657e-06, | |
| "loss": 0.6407, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 2.6044934294192457, | |
| "grad_norm": 0.07675967715086138, | |
| "learning_rate": 4.020461523575873e-06, | |
| "loss": 0.6497, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 2.6112759643916914, | |
| "grad_norm": 0.08253304493993292, | |
| "learning_rate": 3.882938468535158e-06, | |
| "loss": 0.6473, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 2.618058499364137, | |
| "grad_norm": 0.08837276182393047, | |
| "learning_rate": 3.747688518534003e-06, | |
| "loss": 0.6467, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 2.6248410343365833, | |
| "grad_norm": 0.08553692728277926, | |
| "learning_rate": 3.6147201858192627e-06, | |
| "loss": 0.6271, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 2.6316235693090295, | |
| "grad_norm": 0.08672709388396345, | |
| "learning_rate": 3.4840418390392895e-06, | |
| "loss": 0.6417, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 2.6384061042814753, | |
| "grad_norm": 0.08408576121900961, | |
| "learning_rate": 3.3556617027172168e-06, | |
| "loss": 0.6474, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 2.645188639253921, | |
| "grad_norm": 0.08946340052273392, | |
| "learning_rate": 3.2295878567333784e-06, | |
| "loss": 0.6475, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 2.651971174226367, | |
| "grad_norm": 0.10659211899169968, | |
| "learning_rate": 3.1058282358167503e-06, | |
| "loss": 0.6478, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 2.658753709198813, | |
| "grad_norm": 0.08095162713105776, | |
| "learning_rate": 2.984390629045555e-06, | |
| "loss": 0.6469, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 2.665536244171259, | |
| "grad_norm": 0.08002747021341323, | |
| "learning_rate": 2.8652826793570975e-06, | |
| "loss": 0.625, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 2.672318779143705, | |
| "grad_norm": 0.08060733679400718, | |
| "learning_rate": 2.7485118830666535e-06, | |
| "loss": 0.6419, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 2.679101314116151, | |
| "grad_norm": 0.07939209749073067, | |
| "learning_rate": 2.634085589395734e-06, | |
| "loss": 0.6396, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 2.685883849088597, | |
| "grad_norm": 0.08689462211707911, | |
| "learning_rate": 2.5220110000095366e-06, | |
| "loss": 0.6453, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 2.692666384061043, | |
| "grad_norm": 0.08614923525446444, | |
| "learning_rate": 2.4122951685636674e-06, | |
| "loss": 0.6473, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 2.6994489190334887, | |
| "grad_norm": 0.08063088657355912, | |
| "learning_rate": 2.3049450002602394e-06, | |
| "loss": 0.627, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 2.706231454005935, | |
| "grad_norm": 0.08042306304216008, | |
| "learning_rate": 2.199967251413262e-06, | |
| "loss": 0.6411, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 2.7130139889783806, | |
| "grad_norm": 0.07728604571452127, | |
| "learning_rate": 2.0973685290234024e-06, | |
| "loss": 0.6307, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 2.7197965239508264, | |
| "grad_norm": 0.08086443240008037, | |
| "learning_rate": 1.997155290362187e-06, | |
| "loss": 0.6452, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 2.7265790589232726, | |
| "grad_norm": 0.07595495043741796, | |
| "learning_rate": 1.8993338425655805e-06, | |
| "loss": 0.6472, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 2.7333615938957188, | |
| "grad_norm": 0.08185565508209801, | |
| "learning_rate": 1.8039103422370452e-06, | |
| "loss": 0.6341, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 2.7401441288681645, | |
| "grad_norm": 0.0849805109945081, | |
| "learning_rate": 1.7108907950600516e-06, | |
| "loss": 0.6292, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 2.7469266638406102, | |
| "grad_norm": 0.07973338132333087, | |
| "learning_rate": 1.6202810554201099e-06, | |
| "loss": 0.6437, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 2.7537091988130564, | |
| "grad_norm": 0.07790884660042009, | |
| "learning_rate": 1.532086826036281e-06, | |
| "loss": 0.6456, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 2.760491733785502, | |
| "grad_norm": 0.08873860153969253, | |
| "learning_rate": 1.4463136576023185e-06, | |
| "loss": 0.6502, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 2.7672742687579484, | |
| "grad_norm": 0.08041178968421962, | |
| "learning_rate": 1.3629669484372722e-06, | |
| "loss": 0.6469, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 2.774056803730394, | |
| "grad_norm": 0.08336326759495126, | |
| "learning_rate": 1.2820519441457502e-06, | |
| "loss": 0.6431, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 2.7808393387028403, | |
| "grad_norm": 0.07304922054760385, | |
| "learning_rate": 1.2035737372877931e-06, | |
| "loss": 0.6456, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 2.787621873675286, | |
| "grad_norm": 0.07545648509921901, | |
| "learning_rate": 1.127537267058334e-06, | |
| "loss": 0.6459, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 2.794404408647732, | |
| "grad_norm": 0.08168214003465647, | |
| "learning_rate": 1.0539473189763583e-06, | |
| "loss": 0.6429, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 2.801186943620178, | |
| "grad_norm": 0.07428190943785873, | |
| "learning_rate": 9.828085245837183e-07, | |
| "loss": 0.6341, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 2.807969478592624, | |
| "grad_norm": 0.0753237289047626, | |
| "learning_rate": 9.141253611536238e-07, | |
| "loss": 0.6494, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 2.81475201356507, | |
| "grad_norm": 0.07502561278928772, | |
| "learning_rate": 8.479021514088548e-07, | |
| "loss": 0.6424, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 2.821534548537516, | |
| "grad_norm": 0.07379263477638442, | |
| "learning_rate": 7.841430632497204e-07, | |
| "loss": 0.6426, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 2.828317083509962, | |
| "grad_norm": 0.07233921710316674, | |
| "learning_rate": 7.228521094917318e-07, | |
| "loss": 0.6438, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 2.835099618482408, | |
| "grad_norm": 0.07416362872430317, | |
| "learning_rate": 6.640331476130435e-07, | |
| "loss": 0.6405, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 2.8418821534548537, | |
| "grad_norm": 0.07700076601803095, | |
| "learning_rate": 6.076898795116792e-07, | |
| "loss": 0.6489, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 2.8486646884272995, | |
| "grad_norm": 0.07609683257441903, | |
| "learning_rate": 5.538258512725403e-07, | |
| "loss": 0.6209, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 2.8554472233997457, | |
| "grad_norm": 0.07737901602583444, | |
| "learning_rate": 5.024444529442285e-07, | |
| "loss": 0.6447, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 2.862229758372192, | |
| "grad_norm": 0.0759561526572301, | |
| "learning_rate": 4.5354891832569245e-07, | |
| "loss": 0.6327, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 2.8690122933446376, | |
| "grad_norm": 0.07540799112373622, | |
| "learning_rate": 4.0714232476269265e-07, | |
| "loss": 0.6357, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 2.8757948283170833, | |
| "grad_norm": 0.07261245642997437, | |
| "learning_rate": 3.632275929541207e-07, | |
| "loss": 0.6384, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 2.8825773632895295, | |
| "grad_norm": 0.07730392178003218, | |
| "learning_rate": 3.218074867681864e-07, | |
| "loss": 0.6343, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 2.8893598982619753, | |
| "grad_norm": 0.07481289428504849, | |
| "learning_rate": 2.8288461306846817e-07, | |
| "loss": 0.6418, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 2.8961424332344214, | |
| "grad_norm": 0.07365288746369081, | |
| "learning_rate": 2.464614215498351e-07, | |
| "loss": 0.6399, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 2.902924968206867, | |
| "grad_norm": 0.07293600568995603, | |
| "learning_rate": 2.1254020458427281e-07, | |
| "loss": 0.645, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 2.9097075031793134, | |
| "grad_norm": 0.07299059860739354, | |
| "learning_rate": 1.8112309707661647e-07, | |
| "loss": 0.6366, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 2.916490038151759, | |
| "grad_norm": 0.07762603812234999, | |
| "learning_rate": 1.522120763301782e-07, | |
| "loss": 0.6468, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 2.9232725731242053, | |
| "grad_norm": 0.07121635004426441, | |
| "learning_rate": 1.2580896192230906e-07, | |
| "loss": 0.6457, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 2.930055108096651, | |
| "grad_norm": 0.07189028443702318, | |
| "learning_rate": 1.019154155898594e-07, | |
| "loss": 0.648, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 2.9368376430690972, | |
| "grad_norm": 0.07237541136245566, | |
| "learning_rate": 8.053294112462696e-08, | |
| "loss": 0.6406, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 2.943620178041543, | |
| "grad_norm": 0.07059922854737963, | |
| "learning_rate": 6.166288427867706e-08, | |
| "loss": 0.6349, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 2.9504027130139887, | |
| "grad_norm": 0.07048464732580594, | |
| "learning_rate": 4.530643267968149e-08, | |
| "loss": 0.6532, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 2.957185247986435, | |
| "grad_norm": 0.0714872818335937, | |
| "learning_rate": 3.1464615756133796e-08, | |
| "loss": 0.6323, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 2.963967782958881, | |
| "grad_norm": 0.07124837526302868, | |
| "learning_rate": 2.01383046725967e-08, | |
| "loss": 0.6495, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 2.970750317931327, | |
| "grad_norm": 0.0731063695999088, | |
| "learning_rate": 1.1328212274839267e-08, | |
| "loss": 0.6516, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 2.9775328529037726, | |
| "grad_norm": 0.07121633404132001, | |
| "learning_rate": 5.034893044997269e-09, | |
| "loss": 0.6377, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 2.9843153878762188, | |
| "grad_norm": 0.0754923808775944, | |
| "learning_rate": 1.2587430666766153e-09, | |
| "loss": 0.6443, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 2.991097922848665, | |
| "grad_norm": 0.07154903528443775, | |
| "learning_rate": 0.0, | |
| "loss": 0.6409, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 2.991097922848665, | |
| "step": 441, | |
| "total_flos": 7397230226964480.0, | |
| "train_loss": 0.6955865539120438, | |
| "train_runtime": 47632.2951, | |
| "train_samples_per_second": 4.753, | |
| "train_steps_per_second": 0.009 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 441, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 7397230226964480.0, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
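
For reference, the object above is the `trainer_state.json` format written by the Hugging Face `Trainer` at the end of a run. Below is a minimal sketch for inspecting it, assuming the JSON has been saved to a local file named `trainer_state.json` (the filename and location are assumptions for illustration; the `Trainer` normally writes this file into each checkpoint directory):

```python
# Minimal sketch: load the trainer state above and summarize the loss curve.
# Assumes the JSON is saved locally as "trainer_state.json" (an assumption;
# adjust the path to wherever the checkpoint was written).
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step records live in "log_history"; the final entry carries run totals
# ("train_loss", "train_runtime", ...) instead of a per-step "loss" key,
# so filtering on "loss" keeps only the per-step entries.
steps = [e for e in state["log_history"] if "loss" in e]

print(f"steps logged:    {len(steps)}")
print(f"first step loss: {steps[0]['loss']:.4f} (step {steps[0]['step']})")
print(f"final step loss: {steps[-1]['loss']:.4f} (step {steps[-1]['step']})")
print(f"mean train loss: {state['log_history'][-1]['train_loss']:.4f}")
```

On this particular log, the sketch would report a final per-step loss of 0.6409 at step 441 and the averaged `train_loss` of 0.6956 taken from the closing summary entry.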