{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 606,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0049504950495049506,
      "learning_rate": 0,
      "loss": 2.1216,
      "step": 1
    },
    {
      "epoch": 0.009900990099009901,
      "learning_rate": 0,
      "loss": 2.1427,
      "step": 2
    },
    {
      "epoch": 0.01485148514851485,
      "learning_rate": 0,
      "loss": 2.1346,
      "step": 3
    },
    {
      "epoch": 0.019801980198019802,
      "grad_norm": 2.259030342102051,
      "learning_rate": 0.0,
      "loss": 2.12,
      "step": 4
    },
    {
      "epoch": 0.024752475247524754,
      "grad_norm": 2.259030342102051,
      "learning_rate": 0.0,
      "loss": 2.1802,
      "step": 5
    },
    {
      "epoch": 0.0297029702970297,
      "grad_norm": 2.259030342102051,
      "learning_rate": 0.0,
      "loss": 2.1452,
      "step": 6
    },
    {
      "epoch": 0.034653465346534656,
      "grad_norm": 2.259030342102051,
      "learning_rate": 0.0,
      "loss": 2.1545,
      "step": 7
    },
    {
      "epoch": 0.039603960396039604,
      "grad_norm": 2.1996872425079346,
      "learning_rate": 3.3722619737900224e-06,
      "loss": 2.1611,
      "step": 8
    },
    {
      "epoch": 0.04455445544554455,
      "grad_norm": 2.1996872425079346,
      "learning_rate": 3.3722619737900224e-06,
      "loss": 2.2054,
      "step": 9
    },
    {
      "epoch": 0.04950495049504951,
      "grad_norm": 2.1996872425079346,
      "learning_rate": 3.3722619737900224e-06,
      "loss": 2.1423,
      "step": 10
    },
    {
      "epoch": 0.054455445544554455,
      "grad_norm": 2.1996872425079346,
      "learning_rate": 3.3722619737900224e-06,
      "loss": 2.1626,
      "step": 11
    },
    {
      "epoch": 0.0594059405940594,
      "grad_norm": 2.2271275520324707,
      "learning_rate": 5.344908771065097e-06,
      "loss": 2.1728,
      "step": 12
    },
    {
      "epoch": 0.06435643564356436,
      "grad_norm": 2.2271275520324707,
      "learning_rate": 5.344908771065097e-06,
      "loss": 2.1859,
      "step": 13
    },
    {
      "epoch": 0.06930693069306931,
      "grad_norm": 2.2271275520324707,
      "learning_rate": 5.344908771065097e-06,
      "loss": 2.1523,
      "step": 14
    },
    {
      "epoch": 0.07425742574257425,
      "grad_norm": 2.2271275520324707,
      "learning_rate": 5.344908771065097e-06,
      "loss": 2.2623,
      "step": 15
    },
    {
      "epoch": 0.07920792079207921,
      "grad_norm": 2.2643508911132812,
      "learning_rate": 6.744523947580045e-06,
      "loss": 2.2004,
      "step": 16
    },
    {
      "epoch": 0.08415841584158416,
      "grad_norm": 2.2643508911132812,
      "learning_rate": 6.744523947580045e-06,
      "loss": 2.079,
      "step": 17
    },
    {
      "epoch": 0.0891089108910891,
      "grad_norm": 2.2643508911132812,
      "learning_rate": 6.744523947580045e-06,
      "loss": 2.1093,
      "step": 18
    },
    {
      "epoch": 0.09405940594059406,
      "grad_norm": 2.2643508911132812,
      "learning_rate": 6.744523947580045e-06,
      "loss": 2.172,
      "step": 19
    },
    {
      "epoch": 0.09900990099009901,
      "grad_norm": 2.128260850906372,
      "learning_rate": 7.830149820263363e-06,
      "loss": 2.171,
      "step": 20
    },
    {
      "epoch": 0.10396039603960396,
      "grad_norm": 2.128260850906372,
      "learning_rate": 7.830149820263363e-06,
      "loss": 2.2302,
      "step": 21
    },
    {
      "epoch": 0.10891089108910891,
      "grad_norm": 2.128260850906372,
      "learning_rate": 7.830149820263363e-06,
      "loss": 2.1686,
      "step": 22
    },
    {
      "epoch": 0.11386138613861387,
      "grad_norm": 2.128260850906372,
      "learning_rate": 7.830149820263363e-06,
      "loss": 2.1244,
      "step": 23
    },
    {
      "epoch": 0.1188118811881188,
      "grad_norm": 2.14156174659729,
      "learning_rate": 8.717170744855119e-06,
      "loss": 2.2406,
      "step": 24
    },
    {
      "epoch": 0.12376237623762376,
      "grad_norm": 2.14156174659729,
      "learning_rate": 8.717170744855119e-06,
      "loss": 2.1582,
      "step": 25
    },
    {
      "epoch": 0.12871287128712872,
      "grad_norm": 2.14156174659729,
      "learning_rate": 8.717170744855119e-06,
      "loss": 2.1773,
      "step": 26
    },
    {
      "epoch": 0.13366336633663367,
      "grad_norm": 2.14156174659729,
      "learning_rate": 8.717170744855119e-06,
      "loss": 2.0421,
      "step": 27
    },
    {
      "epoch": 0.13861386138613863,
      "grad_norm": 2.0065624713897705,
      "learning_rate": 9.46713625058711e-06,
      "loss": 2.1027,
      "step": 28
    },
    {
      "epoch": 0.14356435643564355,
      "grad_norm": 2.0065624713897705,
      "learning_rate": 9.46713625058711e-06,
      "loss": 1.9643,
      "step": 29
    },
    {
      "epoch": 0.1485148514851485,
      "grad_norm": 2.0065624713897705,
      "learning_rate": 9.46713625058711e-06,
      "loss": 2.0852,
      "step": 30
    },
    {
      "epoch": 0.15346534653465346,
      "grad_norm": 2.0065624713897705,
      "learning_rate": 9.46713625058711e-06,
      "loss": 2.0588,
      "step": 31
    },
    {
      "epoch": 0.15841584158415842,
      "grad_norm": 1.8585381507873535,
      "learning_rate": 1.0116785921370066e-05,
      "loss": 2.1563,
      "step": 32
    },
    {
      "epoch": 0.16336633663366337,
      "grad_norm": 1.8585381507873535,
      "learning_rate": 1.0116785921370066e-05,
      "loss": 1.9961,
      "step": 33
    },
    {
      "epoch": 0.16831683168316833,
      "grad_norm": 1.8585381507873535,
      "learning_rate": 1.0116785921370066e-05,
      "loss": 2.1021,
      "step": 34
    },
    {
      "epoch": 0.17326732673267325,
      "grad_norm": 1.8585381507873535,
      "learning_rate": 1.0116785921370066e-05,
      "loss": 2.0583,
      "step": 35
    },
    {
      "epoch": 0.1782178217821782,
      "grad_norm": 1.813769817352295,
      "learning_rate": 1.0689817542130194e-05,
      "loss": 1.9911,
      "step": 36
    },
    {
      "epoch": 0.18316831683168316,
      "grad_norm": 1.813769817352295,
      "learning_rate": 1.0689817542130194e-05,
      "loss": 2.024,
      "step": 37
    },
    {
      "epoch": 0.18811881188118812,
      "grad_norm": 1.813769817352295,
      "learning_rate": 1.0689817542130194e-05,
      "loss": 2.0567,
      "step": 38
    },
    {
      "epoch": 0.19306930693069307,
      "grad_norm": 1.813769817352295,
      "learning_rate": 1.0689817542130194e-05,
      "loss": 2.0862,
      "step": 39
    },
    {
      "epoch": 0.19801980198019803,
      "grad_norm": 1.8762847185134888,
      "learning_rate": 1.1202411794053388e-05,
      "loss": 2.0415,
      "step": 40
    },
    {
      "epoch": 0.20297029702970298,
      "grad_norm": 1.8762847185134888,
      "learning_rate": 1.1202411794053388e-05,
      "loss": 1.8461,
      "step": 41
    },
    {
      "epoch": 0.2079207920792079,
      "grad_norm": 1.8762847185134888,
      "learning_rate": 1.1202411794053388e-05,
      "loss": 1.9262,
      "step": 42
    },
    {
      "epoch": 0.21287128712871287,
      "grad_norm": 1.8762847185134888,
      "learning_rate": 1.1202411794053388e-05,
      "loss": 1.9178,
      "step": 43
    },
    {
      "epoch": 0.21782178217821782,
      "grad_norm": 2.218778610229492,
      "learning_rate": 1.1666109698457423e-05,
      "loss": 1.8544,
      "step": 44
    },
    {
      "epoch": 0.22277227722772278,
      "grad_norm": 2.218778610229492,
      "learning_rate": 1.1666109698457423e-05,
      "loss": 1.968,
      "step": 45
    },
    {
      "epoch": 0.22772277227722773,
      "grad_norm": 2.218778610229492,
      "learning_rate": 1.1666109698457423e-05,
      "loss": 1.9128,
      "step": 46
    },
    {
      "epoch": 0.23267326732673269,
      "grad_norm": 2.218778610229492,
      "learning_rate": 1.1666109698457423e-05,
      "loss": 1.8448,
      "step": 47
    },
    {
      "epoch": 0.2376237623762376,
      "grad_norm": 1.9425584077835083,
      "learning_rate": 1.2089432718645142e-05,
      "loss": 1.8355,
      "step": 48
    },
    {
      "epoch": 0.24257425742574257,
      "grad_norm": 1.9425584077835083,
      "learning_rate": 1.2089432718645142e-05,
      "loss": 1.8293,
      "step": 49
    },
    {
      "epoch": 0.24752475247524752,
      "grad_norm": 1.9425584077835083,
      "learning_rate": 1.2089432718645142e-05,
      "loss": 1.8464,
      "step": 50
    },
    {
      "epoch": 0.2524752475247525,
      "grad_norm": 1.9425584077835083,
      "learning_rate": 1.2089432718645142e-05,
      "loss": 2.0073,
      "step": 51
    },
    {
      "epoch": 0.25742574257425743,
      "grad_norm": 1.4947453737258911,
      "learning_rate": 1.2478852147789474e-05,
      "loss": 1.8608,
      "step": 52
    },
    {
      "epoch": 0.2623762376237624,
      "grad_norm": 1.4947453737258911,
      "learning_rate": 1.2478852147789474e-05,
      "loss": 1.858,
      "step": 53
    },
    {
      "epoch": 0.26732673267326734,
      "grad_norm": 1.4947453737258911,
      "learning_rate": 1.2478852147789474e-05,
      "loss": 1.9059,
      "step": 54
    },
    {
      "epoch": 0.2722772277227723,
      "grad_norm": 1.4947453737258911,
      "learning_rate": 1.2478852147789474e-05,
      "loss": 1.8371,
      "step": 55
    },
    {
      "epoch": 0.27722772277227725,
      "grad_norm": 1.1246448755264282,
      "learning_rate": 1.2839398224377133e-05,
      "loss": 1.8752,
      "step": 56
    },
    {
      "epoch": 0.28217821782178215,
      "grad_norm": 1.1246448755264282,
      "learning_rate": 1.2839398224377133e-05,
      "loss": 1.8094,
      "step": 57
    },
    {
      "epoch": 0.2871287128712871,
      "grad_norm": 1.1246448755264282,
      "learning_rate": 1.2839398224377133e-05,
      "loss": 1.7313,
      "step": 58
    },
    {
      "epoch": 0.29207920792079206,
      "grad_norm": 1.1246448755264282,
      "learning_rate": 1.2839398224377133e-05,
      "loss": 1.8594,
      "step": 59
    },
    {
      "epoch": 0.297029702970297,
      "grad_norm": 1.0055837631225586,
      "learning_rate": 1.317505859132846e-05,
      "loss": 1.7862,
      "step": 60
    },
    {
      "epoch": 0.30198019801980197,
      "grad_norm": 1.0055837631225586,
      "learning_rate": 1.317505859132846e-05,
      "loss": 1.7247,
      "step": 61
    },
    {
      "epoch": 0.3069306930693069,
      "grad_norm": 1.0055837631225586,
      "learning_rate": 1.317505859132846e-05,
      "loss": 1.7997,
      "step": 62
    },
    {
      "epoch": 0.3118811881188119,
      "grad_norm": 1.0055837631225586,
      "learning_rate": 1.317505859132846e-05,
      "loss": 1.7965,
      "step": 63
    },
    {
      "epoch": 0.31683168316831684,
      "grad_norm": 0.931380033493042,
      "learning_rate": 1.348904789516009e-05,
      "loss": 1.7517,
      "step": 64
    },
    {
      "epoch": 0.3217821782178218,
      "grad_norm": 0.931380033493042,
      "learning_rate": 1.348904789516009e-05,
      "loss": 1.8095,
      "step": 65
    },
    {
      "epoch": 0.32673267326732675,
      "grad_norm": 0.931380033493042,
      "learning_rate": 1.348904789516009e-05,
      "loss": 1.8155,
      "step": 66
    },
    {
      "epoch": 0.3316831683168317,
      "grad_norm": 0.931380033493042,
      "learning_rate": 1.348904789516009e-05,
      "loss": 1.7587,
      "step": 67
    },
    {
      "epoch": 0.33663366336633666,
      "grad_norm": 1.029356837272644,
      "learning_rate": 1.3783995508828243e-05,
      "loss": 1.8325,
      "step": 68
    },
    {
      "epoch": 0.3415841584158416,
      "grad_norm": 1.029356837272644,
      "learning_rate": 1.3783995508828243e-05,
      "loss": 1.7398,
      "step": 69
    },
    {
      "epoch": 0.3465346534653465,
      "grad_norm": 1.029356837272644,
      "learning_rate": 1.3783995508828243e-05,
      "loss": 1.8004,
      "step": 70
    },
    {
      "epoch": 0.35148514851485146,
      "grad_norm": 1.029356837272644,
      "learning_rate": 1.3783995508828243e-05,
      "loss": 1.7193,
      "step": 71
    },
    {
      "epoch": 0.3564356435643564,
      "grad_norm": 0.8115963935852051,
      "learning_rate": 1.4062079515920212e-05,
      "loss": 1.7698,
      "step": 72
    },
    {
      "epoch": 0.3613861386138614,
      "grad_norm": 0.8115963935852051,
      "learning_rate": 1.4062079515920212e-05,
      "loss": 1.7462,
      "step": 73
    },
    {
      "epoch": 0.36633663366336633,
      "grad_norm": 0.8115963935852051,
      "learning_rate": 1.4062079515920212e-05,
      "loss": 1.7259,
      "step": 74
    },
    {
      "epoch": 0.3712871287128713,
      "grad_norm": 0.8115963935852051,
      "learning_rate": 1.4062079515920212e-05,
      "loss": 1.7222,
      "step": 75
    },
    {
      "epoch": 0.37623762376237624,
      "grad_norm": 0.6555297374725342,
      "learning_rate": 1.4325124421002207e-05,
      "loss": 1.7193,
      "step": 76
    },
    {
      "epoch": 0.3811881188118812,
      "grad_norm": 0.6555297374725342,
      "learning_rate": 1.4325124421002207e-05,
      "loss": 1.692,
      "step": 77
    },
    {
      "epoch": 0.38613861386138615,
      "grad_norm": 0.6555297374725342,
      "learning_rate": 1.4325124421002207e-05,
      "loss": 1.7357,
      "step": 78
    },
    {
      "epoch": 0.3910891089108911,
      "grad_norm": 0.6555297374725342,
      "learning_rate": 1.4325124421002207e-05,
      "loss": 1.6804,
      "step": 79
    },
    {
      "epoch": 0.39603960396039606,
      "grad_norm": 0.6669710278511047,
      "learning_rate": 1.4574673767843407e-05,
      "loss": 1.7015,
      "step": 80
    },
    {
      "epoch": 0.400990099009901,
      "grad_norm": 0.6669710278511047,
      "learning_rate": 1.4574673767843407e-05,
      "loss": 1.7281,
      "step": 81
    },
    {
      "epoch": 0.40594059405940597,
      "grad_norm": 0.6669710278511047,
      "learning_rate": 1.4574673767843407e-05,
      "loss": 1.7246,
      "step": 82
    },
    {
      "epoch": 0.41089108910891087,
      "grad_norm": 0.6669710278511047,
      "learning_rate": 1.4574673767843407e-05,
      "loss": 1.7016,
      "step": 83
    },
    {
      "epoch": 0.4158415841584158,
      "grad_norm": 0.5633140802383423,
      "learning_rate": 1.4812045021652206e-05,
      "loss": 1.7358,
      "step": 84
    },
    {
      "epoch": 0.4207920792079208,
      "grad_norm": 0.5633140802383423,
      "learning_rate": 1.4812045021652206e-05,
      "loss": 1.7006,
      "step": 85
    },
    {
      "epoch": 0.42574257425742573,
      "grad_norm": 0.5633140802383423,
      "learning_rate": 1.4812045021652206e-05,
      "loss": 1.6776,
      "step": 86
    },
    {
      "epoch": 0.4306930693069307,
      "grad_norm": 0.5633140802383423,
      "learning_rate": 1.4812045021652206e-05,
      "loss": 1.6905,
      "step": 87
    },
    {
      "epoch": 0.43564356435643564,
      "grad_norm": 0.5185403227806091,
      "learning_rate": 1.5038371672247447e-05,
      "loss": 1.7344,
      "step": 88
    },
    {
      "epoch": 0.4405940594059406,
      "grad_norm": 0.5185403227806091,
      "learning_rate": 1.5038371672247447e-05,
      "loss": 1.6872,
      "step": 89
    },
    {
      "epoch": 0.44554455445544555,
      "grad_norm": 0.5185403227806091,
      "learning_rate": 1.5038371672247447e-05,
      "loss": 1.6632,
      "step": 90
    },
    {
      "epoch": 0.4504950495049505,
      "grad_norm": 0.5185403227806091,
      "learning_rate": 1.5038371672247447e-05,
      "loss": 1.7155,
      "step": 91
    },
    {
      "epoch": 0.45544554455445546,
      "grad_norm": 0.4828384518623352,
      "learning_rate": 1.5254635970494278e-05,
      "loss": 1.6871,
      "step": 92
    },
    {
      "epoch": 0.4603960396039604,
      "grad_norm": 0.4828384518623352,
      "learning_rate": 1.5254635970494278e-05,
      "loss": 1.6906,
      "step": 93
    },
    {
      "epoch": 0.46534653465346537,
      "grad_norm": 0.4828384518623352,
      "learning_rate": 1.5254635970494278e-05,
      "loss": 1.6122,
      "step": 94
    },
    {
      "epoch": 0.47029702970297027,
      "grad_norm": 0.4828384518623352,
      "learning_rate": 1.5254635970494278e-05,
      "loss": 1.7374,
      "step": 95
    },
    {
      "epoch": 0.4752475247524752,
      "grad_norm": 0.4721103310585022,
      "learning_rate": 1.5461694692435165e-05,
      "loss": 1.6655,
      "step": 96
    },
    {
      "epoch": 0.4801980198019802,
      "grad_norm": 0.4721103310585022,
      "learning_rate": 1.5461694692435165e-05,
      "loss": 1.5872,
      "step": 97
    },
    {
      "epoch": 0.48514851485148514,
      "grad_norm": 0.4721103310585022,
      "learning_rate": 1.5461694692435165e-05,
      "loss": 1.6449,
      "step": 98
    },
    {
      "epoch": 0.4900990099009901,
      "grad_norm": 0.4721103310585022,
      "learning_rate": 1.5461694692435165e-05,
      "loss": 1.6695,
      "step": 99
    },
    {
      "epoch": 0.49504950495049505,
      "grad_norm": 0.45932599902153015,
      "learning_rate": 1.5660299640526725e-05,
      "loss": 1.6836,
      "step": 100
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.45932599902153015,
      "learning_rate": 1.5660299640526725e-05,
      "loss": 1.6853,
      "step": 101
    },
    {
      "epoch": 0.504950495049505,
      "grad_norm": 0.45932599902153015,
      "learning_rate": 1.5660299640526725e-05,
      "loss": 1.6404,
      "step": 102
    },
    {
      "epoch": 0.5099009900990099,
      "grad_norm": 0.45932599902153015,
      "learning_rate": 1.5660299640526725e-05,
      "loss": 1.7104,
      "step": 103
    },
    {
      "epoch": 0.5148514851485149,
      "grad_norm": 0.44053927063941956,
      "learning_rate": 1.5851114121579497e-05,
      "loss": 1.6309,
      "step": 104
    },
    {
      "epoch": 0.5198019801980198,
      "grad_norm": 0.44053927063941956,
      "learning_rate": 1.5851114121579497e-05,
      "loss": 1.6296,
      "step": 105
    },
    {
      "epoch": 0.5247524752475248,
      "grad_norm": 0.44053927063941956,
      "learning_rate": 1.5851114121579497e-05,
      "loss": 1.685,
      "step": 106
    },
    {
      "epoch": 0.5297029702970297,
      "grad_norm": 0.44053927063941956,
      "learning_rate": 1.5851114121579497e-05,
      "loss": 1.6317,
      "step": 107
    },
    {
      "epoch": 0.5346534653465347,
      "grad_norm": 0.4426601827144623,
      "learning_rate": 1.603472631319529e-05,
      "loss": 1.621,
      "step": 108
    },
    {
      "epoch": 0.5396039603960396,
      "grad_norm": 0.4426601827144623,
      "learning_rate": 1.603472631319529e-05,
      "loss": 1.5765,
      "step": 109
    },
    {
      "epoch": 0.5445544554455446,
      "grad_norm": 0.4426601827144623,
      "learning_rate": 1.603472631319529e-05,
      "loss": 1.6865,
      "step": 110
    },
    {
      "epoch": 0.5495049504950495,
      "grad_norm": 0.4426601827144623,
      "learning_rate": 1.603472631319529e-05,
      "loss": 1.586,
      "step": 111
    },
    {
      "epoch": 0.5544554455445545,
      "grad_norm": 0.43527039885520935,
      "learning_rate": 1.6211660198167156e-05,
      "loss": 1.6683,
      "step": 112
    },
    {
      "epoch": 0.5594059405940595,
      "grad_norm": 0.43527039885520935,
      "learning_rate": 1.6211660198167156e-05,
      "loss": 1.5759,
      "step": 113
    },
    {
      "epoch": 0.5643564356435643,
      "grad_norm": 0.43527039885520935,
      "learning_rate": 1.6211660198167156e-05,
      "loss": 1.6003,
      "step": 114
    },
    {
      "epoch": 0.5693069306930693,
      "grad_norm": 0.43527039885520935,
      "learning_rate": 1.6211660198167156e-05,
      "loss": 1.5873,
      "step": 115
    },
    {
      "epoch": 0.5742574257425742,
      "grad_norm": 0.4649295508861542,
      "learning_rate": 1.6382384579263326e-05,
      "loss": 1.6703,
      "step": 116
    },
    {
      "epoch": 0.5792079207920792,
      "grad_norm": 0.4649295508861542,
      "learning_rate": 1.6382384579263326e-05,
      "loss": 1.6352,
      "step": 117
    },
    {
      "epoch": 0.5841584158415841,
      "grad_norm": 0.4649295508861542,
      "learning_rate": 1.6382384579263326e-05,
      "loss": 1.6522,
      "step": 118
    },
    {
      "epoch": 0.5891089108910891,
      "grad_norm": 0.4649295508861542,
      "learning_rate": 1.6382384579263326e-05,
      "loss": 1.6246,
      "step": 119
    },
    {
      "epoch": 0.594059405940594,
      "grad_norm": 0.42726704478263855,
      "learning_rate": 1.6547320565118483e-05,
      "loss": 1.6479,
      "step": 120
    },
    {
      "epoch": 0.599009900990099,
      "grad_norm": 0.42726704478263855,
      "learning_rate": 1.6547320565118483e-05,
      "loss": 1.6093,
      "step": 121
    },
    {
      "epoch": 0.6039603960396039,
      "grad_norm": 0.42726704478263855,
      "learning_rate": 1.6547320565118483e-05,
      "loss": 1.5866,
      "step": 122
    },
    {
      "epoch": 0.6089108910891089,
      "grad_norm": 0.42726704478263855,
      "learning_rate": 1.6547320565118483e-05,
      "loss": 1.646,
      "step": 123
    },
    {
      "epoch": 0.6138613861386139,
      "grad_norm": 0.4181938171386719,
      "learning_rate": 1.670684782820849e-05,
      "loss": 1.6139,
      "step": 124
    },
    {
      "epoch": 0.6188118811881188,
      "grad_norm": 0.4181938171386719,
      "learning_rate": 1.670684782820849e-05,
      "loss": 1.6139,
      "step": 125
    },
    {
      "epoch": 0.6237623762376238,
      "grad_norm": 0.4181938171386719,
      "learning_rate": 1.670684782820849e-05,
      "loss": 1.6142,
      "step": 126
    },
    {
      "epoch": 0.6287128712871287,
      "grad_norm": 0.4181938171386719,
      "learning_rate": 1.670684782820849e-05,
      "loss": 1.6083,
      "step": 127
    },
    {
      "epoch": 0.6336633663366337,
      "grad_norm": 0.40746423602104187,
      "learning_rate": 1.6861309868950113e-05,
      "loss": 1.5874,
      "step": 128
    },
    {
      "epoch": 0.6386138613861386,
      "grad_norm": 0.40746423602104187,
      "learning_rate": 1.6861309868950113e-05,
      "loss": 1.6076,
      "step": 129
    },
    {
      "epoch": 0.6435643564356436,
      "grad_norm": 0.40746423602104187,
      "learning_rate": 1.6861309868950113e-05,
      "loss": 1.578,
      "step": 130
    },
    {
      "epoch": 0.6485148514851485,
      "grad_norm": 0.40746423602104187,
      "learning_rate": 1.6861309868950113e-05,
      "loss": 1.5159,
      "step": 131
    },
    {
      "epoch": 0.6534653465346535,
      "grad_norm": 0.4084773659706116,
      "learning_rate": 1.701101846952252e-05,
      "loss": 1.5989,
      "step": 132
    },
    {
      "epoch": 0.6584158415841584,
      "grad_norm": 0.4084773659706116,
      "learning_rate": 1.701101846952252e-05,
      "loss": 1.6173,
      "step": 133
    },
    {
      "epoch": 0.6633663366336634,
      "grad_norm": 0.4084773659706116,
      "learning_rate": 1.701101846952252e-05,
      "loss": 1.6067,
      "step": 134
    },
    {
      "epoch": 0.6683168316831684,
      "grad_norm": 0.4084773659706116,
      "learning_rate": 1.701101846952252e-05,
      "loss": 1.571,
      "step": 135
    },
    {
      "epoch": 0.6732673267326733,
      "grad_norm": 0.3729560971260071,
      "learning_rate": 1.7156257482618265e-05,
      "loss": 1.561,
      "step": 136
    },
    {
      "epoch": 0.6782178217821783,
      "grad_norm": 0.3729560971260071,
      "learning_rate": 1.7156257482618265e-05,
      "loss": 1.5984,
      "step": 137
    },
    {
      "epoch": 0.6831683168316832,
      "grad_norm": 0.3729560971260071,
      "learning_rate": 1.7156257482618265e-05,
      "loss": 1.5748,
      "step": 138
    },
    {
      "epoch": 0.6881188118811881,
      "grad_norm": 0.3729560971260071,
      "learning_rate": 1.7156257482618265e-05,
      "loss": 1.5892,
      "step": 139
    },
    {
      "epoch": 0.693069306930693,
      "grad_norm": 0.36557894945144653,
      "learning_rate": 1.7297286070850474e-05,
      "loss": 1.5574,
      "step": 140
    },
    {
      "epoch": 0.698019801980198,
      "grad_norm": 0.36557894945144653,
      "learning_rate": 1.7297286070850474e-05,
      "loss": 1.5777,
      "step": 141
    },
    {
      "epoch": 0.7029702970297029,
      "grad_norm": 0.36557894945144653,
      "learning_rate": 1.7297286070850474e-05,
      "loss": 1.4983,
      "step": 142
    },
    {
      "epoch": 0.7079207920792079,
      "grad_norm": 0.36557894945144653,
      "learning_rate": 1.7297286070850474e-05,
      "loss": 1.5714,
      "step": 143
    },
    {
      "epoch": 0.7128712871287128,
      "grad_norm": 0.3284773528575897,
      "learning_rate": 1.7434341489710237e-05,
      "loss": 1.5375,
      "step": 144
    },
    {
      "epoch": 0.7178217821782178,
      "grad_norm": 0.3284773528575897,
      "learning_rate": 1.7434341489710237e-05,
      "loss": 1.5888,
      "step": 145
    },
    {
      "epoch": 0.7227722772277227,
      "grad_norm": 0.3284773528575897,
      "learning_rate": 1.7434341489710237e-05,
      "loss": 1.5763,
      "step": 146
    },
    {
      "epoch": 0.7277227722772277,
      "grad_norm": 0.3284773528575897,
      "learning_rate": 1.7434341489710237e-05,
      "loss": 1.5336,
      "step": 147
    },
    {
      "epoch": 0.7326732673267327,
      "grad_norm": 0.3244377672672272,
      "learning_rate": 1.7567641489142956e-05,
      "loss": 1.5122,
      "step": 148
    },
    {
      "epoch": 0.7376237623762376,
      "grad_norm": 0.3244377672672272,
      "learning_rate": 1.7567641489142956e-05,
      "loss": 1.5999,
      "step": 149
    },
    {
      "epoch": 0.7425742574257426,
      "grad_norm": 0.3244377672672272,
      "learning_rate": 1.7567641489142956e-05,
      "loss": 1.5643,
      "step": 150
    },
    {
      "epoch": 0.7475247524752475,
      "grad_norm": 0.3244377672672272,
      "learning_rate": 1.7567641489142956e-05,
      "loss": 1.5742,
      "step": 151
    },
    {
      "epoch": 0.7524752475247525,
      "grad_norm": 0.33302003145217896,
      "learning_rate": 1.769738639479223e-05,
      "loss": 1.6125,
      "step": 152
    },
    {
      "epoch": 0.7574257425742574,
      "grad_norm": 0.33302003145217896,
      "learning_rate": 1.769738639479223e-05,
      "loss": 1.5294,
      "step": 153
    },
    {
      "epoch": 0.7623762376237624,
      "grad_norm": 0.33302003145217896,
      "learning_rate": 1.769738639479223e-05,
      "loss": 1.6211,
      "step": 154
    },
    {
      "epoch": 0.7673267326732673,
      "grad_norm": 0.33302003145217896,
      "learning_rate": 1.769738639479223e-05,
      "loss": 1.5182,
      "step": 155
    },
    {
      "epoch": 0.7722772277227723,
      "grad_norm": 0.31815779209136963,
      "learning_rate": 1.782376091885457e-05,
      "loss": 1.5432,
      "step": 156
    },
    {
      "epoch": 0.7772277227722773,
      "grad_norm": 0.31815779209136963,
      "learning_rate": 1.782376091885457e-05,
      "loss": 1.6547,
      "step": 157
    },
    {
      "epoch": 0.7821782178217822,
      "grad_norm": 0.31815779209136963,
      "learning_rate": 1.782376091885457e-05,
      "loss": 1.5951,
      "step": 158
    },
    {
      "epoch": 0.7871287128712872,
      "grad_norm": 0.31815779209136963,
      "learning_rate": 1.782376091885457e-05,
      "loss": 1.5594,
      "step": 159
    },
    {
      "epoch": 0.7920792079207921,
      "grad_norm": 0.30909690260887146,
      "learning_rate": 1.794693574163343e-05,
      "loss": 1.5609,
      "step": 160
    },
    {
      "epoch": 0.7970297029702971,
      "grad_norm": 0.30909690260887146,
      "learning_rate": 1.794693574163343e-05,
      "loss": 1.5435,
      "step": 161
    },
    {
      "epoch": 0.801980198019802,
      "grad_norm": 0.30909690260887146,
      "learning_rate": 1.794693574163343e-05,
      "loss": 1.584,
      "step": 162
    },
    {
      "epoch": 0.806930693069307,
      "grad_norm": 0.30909690260887146,
      "learning_rate": 1.794693574163343e-05,
      "loss": 1.5258,
      "step": 163
    },
    {
      "epoch": 0.8118811881188119,
      "grad_norm": 0.3037148118019104,
      "learning_rate": 1.8067068897776073e-05,
      "loss": 1.6069,
      "step": 164
    },
    {
      "epoch": 0.8168316831683168,
      "grad_norm": 0.3037148118019104,
      "learning_rate": 1.8067068897776073e-05,
      "loss": 1.5888,
      "step": 165
    },
    {
      "epoch": 0.8217821782178217,
      "grad_norm": 0.3037148118019104,
      "learning_rate": 1.8067068897776073e-05,
      "loss": 1.5755,
      "step": 166
    },
    {
      "epoch": 0.8267326732673267,
      "grad_norm": 0.3037148118019104,
      "learning_rate": 1.8067068897776073e-05,
      "loss": 1.6131,
      "step": 167
    },
    {
      "epoch": 0.8316831683168316,
      "grad_norm": 0.30614173412323,
      "learning_rate": 1.8184306995442228e-05,
      "loss": 1.5445,
      "step": 168
    },
    {
      "epoch": 0.8366336633663366,
      "grad_norm": 0.30614173412323,
      "learning_rate": 1.8184306995442228e-05,
      "loss": 1.5598,
      "step": 169
    },
    {
      "epoch": 0.8415841584158416,
      "grad_norm": 0.30614173412323,
      "learning_rate": 1.8184306995442228e-05,
      "loss": 1.5947,
      "step": 170
    },
    {
      "epoch": 0.8465346534653465,
      "grad_norm": 0.30614173412323,
      "learning_rate": 1.8184306995442228e-05,
      "loss": 1.5464,
      "step": 171
    },
    {
      "epoch": 0.8514851485148515,
      "grad_norm": 0.2994905412197113,
      "learning_rate": 1.829878629199893e-05,
      "loss": 1.5968,
      "step": 172
    },
    {
      "epoch": 0.8564356435643564,
      "grad_norm": 0.2994905412197113,
      "learning_rate": 1.829878629199893e-05,
      "loss": 1.5762,
      "step": 173
    },
    {
      "epoch": 0.8613861386138614,
      "grad_norm": 0.2994905412197113,
      "learning_rate": 1.829878629199893e-05,
      "loss": 1.6057,
      "step": 174
    },
    {
      "epoch": 0.8663366336633663,
      "grad_norm": 0.2994905412197113,
      "learning_rate": 1.829878629199893e-05,
      "loss": 1.5344,
      "step": 175
    },
    {
      "epoch": 0.8712871287128713,
      "grad_norm": 0.29959049820899963,
      "learning_rate": 1.8410633646037467e-05,
      "loss": 1.645,
      "step": 176
    },
    {
      "epoch": 0.8762376237623762,
      "grad_norm": 0.29959049820899963,
      "learning_rate": 1.8410633646037467e-05,
      "loss": 1.527,
      "step": 177
    },
    {
      "epoch": 0.8811881188118812,
      "grad_norm": 0.29959049820899963,
      "learning_rate": 1.8410633646037467e-05,
      "loss": 1.5353,
      "step": 178
    },
    {
      "epoch": 0.8861386138613861,
      "grad_norm": 0.29959049820899963,
      "learning_rate": 1.8410633646037467e-05,
      "loss": 1.528,
      "step": 179
    },
    {
      "epoch": 0.8910891089108911,
      "grad_norm": 0.2880352735519409,
      "learning_rate": 1.8519967362393555e-05,
      "loss": 1.5629,
      "step": 180
    },
    {
      "epoch": 0.8960396039603961,
      "grad_norm": 0.2880352735519409,
      "learning_rate": 1.8519967362393555e-05,
      "loss": 1.5101,
      "step": 181
    },
    {
      "epoch": 0.900990099009901,
      "grad_norm": 0.2880352735519409,
      "learning_rate": 1.8519967362393555e-05,
      "loss": 1.531,
      "step": 182
    },
    {
      "epoch": 0.905940594059406,
      "grad_norm": 0.2880352735519409,
      "learning_rate": 1.8519967362393555e-05,
      "loss": 1.4733,
      "step": 183
    },
    {
      "epoch": 0.9108910891089109,
      "grad_norm": 0.3000313341617584,
      "learning_rate": 1.86268979442843e-05,
      "loss": 1.5998,
      "step": 184
    },
    {
      "epoch": 0.9158415841584159,
      "grad_norm": 0.3000313341617584,
      "learning_rate": 1.86268979442843e-05,
      "loss": 1.5109,
      "step": 185
    },
    {
      "epoch": 0.9207920792079208,
      "grad_norm": 0.3000313341617584,
      "learning_rate": 1.86268979442843e-05,
      "loss": 1.5017,
      "step": 186
    },
    {
      "epoch": 0.9257425742574258,
      "grad_norm": 0.3000313341617584,
      "learning_rate": 1.86268979442843e-05,
      "loss": 1.5368,
      "step": 187
    },
    {
      "epoch": 0.9306930693069307,
      "grad_norm": 0.2862202227115631,
      "learning_rate": 1.8731528764550483e-05,
      "loss": 1.4827,
      "step": 188
    },
    {
      "epoch": 0.9356435643564357,
      "grad_norm": 0.2862202227115631,
      "learning_rate": 1.8731528764550483e-05,
      "loss": 1.549,
      "step": 189
    },
    {
      "epoch": 0.9405940594059405,
      "grad_norm": 0.2862202227115631,
      "learning_rate": 1.8731528764550483e-05,
      "loss": 1.5179,
      "step": 190
    },
    {
      "epoch": 0.9455445544554455,
      "grad_norm": 0.2862202227115631,
      "learning_rate": 1.8731528764550483e-05,
      "loss": 1.5286,
      "step": 191
    },
    {
      "epoch": 0.9504950495049505,
      "grad_norm": 0.3076187074184418,
      "learning_rate": 1.883395666622519e-05,
      "loss": 1.6123,
      "step": 192
    },
    {
      "epoch": 0.9554455445544554,
      "grad_norm": 0.3076187074184418,
      "learning_rate": 1.883395666622519e-05,
      "loss": 1.4996,
      "step": 193
    },
    {
      "epoch": 0.9603960396039604,
      "grad_norm": 0.3076187074184418,
      "learning_rate": 1.883395666622519e-05,
      "loss": 1.5736,
      "step": 194
    },
    {
      "epoch": 0.9653465346534653,
      "grad_norm": 0.3076187074184418,
      "learning_rate": 1.883395666622519e-05,
      "loss": 1.5538,
      "step": 195
    },
    {
      "epoch": 0.9702970297029703,
      "grad_norm": 0.2966230511665344,
      "learning_rate": 1.893427250117422e-05,
      "loss": 1.5405,
      "step": 196
    },
    {
      "epoch": 0.9752475247524752,
      "grad_norm": 0.2966230511665344,
      "learning_rate": 1.893427250117422e-05,
      "loss": 1.5288,
      "step": 197
    },
    {
      "epoch": 0.9801980198019802,
      "grad_norm": 0.2966230511665344,
      "learning_rate": 1.893427250117422e-05,
      "loss": 1.5328,
      "step": 198
    },
    {
      "epoch": 0.9851485148514851,
      "grad_norm": 0.2966230511665344,
      "learning_rate": 1.893427250117422e-05,
      "loss": 1.5754,
      "step": 199
    },
    {
      "epoch": 0.9900990099009901,
      "grad_norm": 0.3104676604270935,
      "learning_rate": 1.903256161431675e-05,
      "loss": 1.5506,
      "step": 200
    },
    {
      "epoch": 0.995049504950495,
      "grad_norm": 0.3104676604270935,
      "learning_rate": 1.903256161431675e-05,
      "loss": 1.5486,
      "step": 201
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.3104676604270935,
      "learning_rate": 1.903256161431675e-05,
      "loss": 1.4932,
      "step": 202
    },
    {
      "epoch": 1.004950495049505,
      "grad_norm": 0.3104676604270935,
      "learning_rate": 1.903256161431675e-05,
      "loss": 1.4559,
      "step": 203
    },
    {
      "epoch": 1.00990099009901,
      "grad_norm": 0.305254191160202,
      "learning_rate": 1.912890427989334e-05,
      "loss": 1.4787,
      "step": 204
    },
    {
      "epoch": 1.0148514851485149,
      "grad_norm": 0.305254191160202,
      "learning_rate": 1.912890427989334e-05,
      "loss": 1.5086,
      "step": 205
    },
    {
      "epoch": 1.0198019801980198,
      "grad_norm": 0.305254191160202,
      "learning_rate": 1.912890427989334e-05,
      "loss": 1.4966,
      "step": 206
    },
    {
      "epoch": 1.0247524752475248,
      "grad_norm": 0.305254191160202,
      "learning_rate": 1.912890427989334e-05,
      "loss": 1.4825,
      "step": 207
    },
    {
      "epoch": 1.0297029702970297,
      "grad_norm": 0.2713533341884613,
      "learning_rate": 1.922337609536952e-05,
      "loss": 1.5957,
      "step": 208
    },
    {
      "epoch": 1.0346534653465347,
      "grad_norm": 0.2713533341884613,
      "learning_rate": 1.922337609536952e-05,
      "loss": 1.463,
      "step": 209
    },
    {
      "epoch": 1.0396039603960396,
      "grad_norm": 0.2713533341884613,
      "learning_rate": 1.922337609536952e-05,
      "loss": 1.5056,
      "step": 210
    },
    {
      "epoch": 1.0445544554455446,
      "grad_norm": 0.2713533341884613,
      "learning_rate": 1.922337609536952e-05,
      "loss": 1.5194,
      "step": 211
    },
    {
      "epoch": 1.0495049504950495,
      "grad_norm": 0.30546605587005615,
      "learning_rate": 1.9316048337817536e-05,
      "loss": 1.518,
      "step": 212
    },
    {
      "epoch": 1.0544554455445545,
      "grad_norm": 0.30546605587005615,
      "learning_rate": 1.9316048337817536e-05,
      "loss": 1.4553,
      "step": 213
    },
    {
      "epoch": 1.0594059405940595,
      "grad_norm": 0.30546605587005615,
      "learning_rate": 1.9316048337817536e-05,
      "loss": 1.5377,
      "step": 214
    },
    {
      "epoch": 1.0643564356435644,
      "grad_norm": 0.30546605587005615,
      "learning_rate": 1.9316048337817536e-05,
      "loss": 1.5212,
      "step": 215
    },
    {
      "epoch": 1.0693069306930694,
      "grad_norm": 0.299664705991745,
      "learning_rate": 1.9406988286985313e-05,
      "loss": 1.4681,
      "step": 216
    },
    {
      "epoch": 1.0742574257425743,
      "grad_norm": 0.299664705991745,
      "learning_rate": 1.9406988286985313e-05,
      "loss": 1.4718,
      "step": 217
    },
    {
      "epoch": 1.0792079207920793,
      "grad_norm": 0.299664705991745,
      "learning_rate": 1.9406988286985313e-05,
      "loss": 1.4783,
      "step": 218
    },
    {
      "epoch": 1.0841584158415842,
      "grad_norm": 0.299664705991745,
      "learning_rate": 1.9406988286985313e-05,
      "loss": 1.4054,
      "step": 219
    },
    {
      "epoch": 1.0891089108910892,
      "grad_norm": 0.2781918942928314,
      "learning_rate": 1.9496259518720788e-05,
      "loss": 1.4988,
      "step": 220
    },
    {
      "epoch": 1.0940594059405941,
      "grad_norm": 0.2781918942928314,
      "learning_rate": 1.9496259518720788e-05,
      "loss": 1.4985,
      "step": 221
    },
    {
      "epoch": 1.099009900990099,
      "grad_norm": 0.2781918942928314,
      "learning_rate": 1.9496259518720788e-05,
      "loss": 1.4837,
      "step": 222
    },
    {
      "epoch": 1.103960396039604,
      "grad_norm": 0.2781918942928314,
      "learning_rate": 1.9496259518720788e-05,
      "loss": 1.526,
      "step": 223
    },
    {
      "epoch": 1.108910891089109,
      "grad_norm": 0.284312903881073,
      "learning_rate": 1.958392217195718e-05,
      "loss": 1.4355,
      "step": 224
    },
    {
      "epoch": 1.113861386138614,
      "grad_norm": 0.284312903881073,
      "learning_rate": 1.958392217195718e-05,
      "loss": 1.4934,
      "step": 225
    },
    {
      "epoch": 1.118811881188119,
      "grad_norm": 0.284312903881073,
      "learning_rate": 1.958392217195718e-05,
      "loss": 1.5242,
      "step": 226
    },
    {
      "epoch": 1.1237623762376239,
      "grad_norm": 0.284312903881073,
      "learning_rate": 1.958392217195718e-05,
      "loss": 1.5275,
      "step": 227
    },
    {
      "epoch": 1.1287128712871288,
      "grad_norm": 0.2917441725730896,
      "learning_rate": 1.9670033192067303e-05,
      "loss": 1.4717,
      "step": 228
    },
    {
      "epoch": 1.1336633663366338,
      "grad_norm": 0.2917441725730896,
      "learning_rate": 1.9670033192067303e-05,
      "loss": 1.4589,
      "step": 229
    },
    {
      "epoch": 1.1386138613861387,
      "grad_norm": 0.2917441725730896,
      "learning_rate": 1.9670033192067303e-05,
      "loss": 1.4698,
      "step": 230
    },
    {
      "epoch": 1.1435643564356435,
      "grad_norm": 0.2917441725730896,
      "learning_rate": 1.9670033192067303e-05,
      "loss": 1.5495,
      "step": 231
    },
    {
      "epoch": 1.1485148514851484,
      "grad_norm": 0.27202197909355164,
      "learning_rate": 1.9754646553053346e-05,
      "loss": 1.5051,
      "step": 232
    },
    {
      "epoch": 1.1534653465346534,
      "grad_norm": 0.27202197909355164,
      "learning_rate": 1.9754646553053346e-05,
      "loss": 1.4921,
      "step": 233
    },
    {
      "epoch": 1.1584158415841583,
      "grad_norm": 0.27202197909355164,
      "learning_rate": 1.9754646553053346e-05,
      "loss": 1.5619,
      "step": 234
    },
    {
      "epoch": 1.1633663366336633,
      "grad_norm": 0.27202197909355164,
      "learning_rate": 1.9754646553053346e-05,
      "loss": 1.5186,
      "step": 235
    },
    {
      "epoch": 1.1683168316831682,
      "grad_norm": 0.28283485770225525,
      "learning_rate": 1.983781346074312e-05,
      "loss": 1.5185,
      "step": 236
    },
    {
      "epoch": 1.1732673267326732,
      "grad_norm": 0.28283485770225525,
      "learning_rate": 1.983781346074312e-05,
      "loss": 1.4358,
      "step": 237
    },
    {
      "epoch": 1.1782178217821782,
      "grad_norm": 0.28283485770225525,
      "learning_rate": 1.983781346074312e-05,
      "loss": 1.5415,
      "step": 238
    },
    {
      "epoch": 1.183168316831683,
      "grad_norm": 0.28283485770225525,
      "learning_rate": 1.983781346074312e-05,
      "loss": 1.5427,
      "step": 239
    },
    {
      "epoch": 1.188118811881188,
      "grad_norm": 0.28792083263397217,
      "learning_rate": 1.9919582538908503e-05,
      "loss": 1.456,
      "step": 240
    },
    {
      "epoch": 1.193069306930693,
      "grad_norm": 0.28792083263397217,
      "learning_rate": 1.9919582538908503e-05,
      "loss": 1.4901,
      "step": 241
    },
    {
      "epoch": 1.198019801980198,
      "grad_norm": 0.28792083263397217,
      "learning_rate": 1.9919582538908503e-05,
      "loss": 1.438,
      "step": 242
    },
    {
      "epoch": 1.202970297029703,
      "grad_norm": 0.28792083263397217,
      "learning_rate": 1.9919582538908503e-05,
      "loss": 1.5229,
      "step": 243
    },
    {
      "epoch": 1.2079207920792079,
      "grad_norm": 0.2743774354457855,
      "learning_rate": 2e-05,
      "loss": 1.5547,
      "step": 244
    },
    {
      "epoch": 1.2128712871287128,
      "grad_norm": 0.2743774354457855,
      "learning_rate": 2e-05,
      "loss": 1.5215,
      "step": 245
    },
    {
      "epoch": 1.2178217821782178,
      "grad_norm": 0.2743774354457855,
      "learning_rate": 2e-05,
      "loss": 1.452,
      "step": 246
    },
    {
      "epoch": 1.2227722772277227,
      "grad_norm": 0.2743774354457855,
      "learning_rate": 2e-05,
      "loss": 1.4676,
      "step": 247
    },
    {
      "epoch": 1.2277227722772277,
      "grad_norm": 0.2722848951816559,
      "learning_rate": 2e-05,
      "loss": 1.4569,
      "step": 248
    },
    {
      "epoch": 1.2326732673267327,
      "grad_norm": 0.2722848951816559,
      "learning_rate": 2e-05,
      "loss": 1.5278,
      "step": 249
    },
    {
      "epoch": 1.2376237623762376,
      "grad_norm": 0.2722848951816559,
      "learning_rate": 2e-05,
      "loss": 1.4752,
      "step": 250
    },
    {
      "epoch": 1.2425742574257426,
      "grad_norm": 0.2722848951816559,
      "learning_rate": 2e-05,
      "loss": 1.5639,
      "step": 251
    },
    {
      "epoch": 1.2475247524752475,
      "grad_norm": 0.2844066917896271,
      "learning_rate": 2e-05,
      "loss": 1.5385,
      "step": 252
    },
    {
      "epoch": 1.2524752475247525,
      "grad_norm": 0.2844066917896271,
      "learning_rate": 2e-05,
      "loss": 1.5141,
      "step": 253
    },
    {
      "epoch": 1.2574257425742574,
      "grad_norm": 0.2844066917896271,
      "learning_rate": 2e-05,
      "loss": 1.4676,
      "step": 254
    },
    {
      "epoch": 1.2623762376237624,
      "grad_norm": 0.2844066917896271,
      "learning_rate": 2e-05,
      "loss": 1.4632,
      "step": 255
    },
    {
      "epoch": 1.2673267326732673,
      "grad_norm": 0.2843905985355377,
      "learning_rate": 2e-05,
      "loss": 1.4868,
      "step": 256
    },
    {
      "epoch": 1.2722772277227723,
      "grad_norm": 0.2843905985355377,
      "learning_rate": 2e-05,
      "loss": 1.5614,
      "step": 257
    },
    {
      "epoch": 1.2772277227722773,
      "grad_norm": 0.2843905985355377,
      "learning_rate": 2e-05,
      "loss": 1.5404,
      "step": 258
    },
    {
      "epoch": 1.2821782178217822,
      "grad_norm": 0.2843905985355377,
      "learning_rate": 2e-05,
      "loss": 1.4836,
      "step": 259
    },
    {
      "epoch": 1.2871287128712872,
      "grad_norm": 0.2771119773387909,
      "learning_rate": 2e-05,
      "loss": 1.5339,
      "step": 260
    },
    {
      "epoch": 1.2920792079207921,
      "grad_norm": 0.2771119773387909,
      "learning_rate": 2e-05,
      "loss": 1.4168,
      "step": 261
    },
    {
      "epoch": 1.297029702970297,
      "grad_norm": 0.2771119773387909,
      "learning_rate": 2e-05,
      "loss": 1.4641,
      "step": 262
    },
    {
      "epoch": 1.301980198019802,
      "grad_norm": 0.2771119773387909,
      "learning_rate": 2e-05,
      "loss": 1.4616,
      "step": 263
    },
    {
      "epoch": 1.306930693069307,
      "grad_norm": 0.27680638432502747,
      "learning_rate": 2e-05,
      "loss": 1.4962,
      "step": 264
    },
    {
      "epoch": 1.311881188118812,
      "grad_norm": 0.27680638432502747,
      "learning_rate": 2e-05,
      "loss": 1.455,
      "step": 265
    },
    {
      "epoch": 1.316831683168317,
      "grad_norm": 0.27680638432502747,
      "learning_rate": 2e-05,
      "loss": 1.4401,
      "step": 266
    },
    {
      "epoch": 1.3217821782178218,
      "grad_norm": 0.27680638432502747,
      "learning_rate": 2e-05,
      "loss": 1.571,
      "step": 267
    },
    {
      "epoch": 1.3267326732673268,
      "grad_norm": 0.2692127823829651,
      "learning_rate": 2e-05,
      "loss": 1.5209,
      "step": 268
    },
    {
      "epoch": 1.3316831683168318,
      "grad_norm": 0.2692127823829651,
      "learning_rate": 2e-05,
      "loss": 1.5326,
      "step": 269
    },
    {
      "epoch": 1.3366336633663367,
      "grad_norm": 0.2692127823829651,
      "learning_rate": 2e-05,
      "loss": 1.496,
      "step": 270
    },
    {
      "epoch": 1.3415841584158417,
      "grad_norm": 0.2692127823829651,
      "learning_rate": 2e-05,
      "loss": 1.4306,
      "step": 271
    },
    {
      "epoch": 1.3465346534653464,
      "grad_norm": 0.26907190680503845,
      "learning_rate": 2e-05,
      "loss": 1.4874,
      "step": 272
    },
    {
      "epoch": 1.3514851485148514,
      "grad_norm": 0.26907190680503845,
      "learning_rate": 2e-05,
      "loss": 1.4535,
      "step": 273
    },
    {
      "epoch": 1.3564356435643563,
      "grad_norm": 0.26907190680503845,
      "learning_rate": 2e-05,
      "loss": 1.4721,
      "step": 274
    },
    {
      "epoch": 1.3613861386138613,
      "grad_norm": 0.26907190680503845,
      "learning_rate": 2e-05,
      "loss": 1.3758,
      "step": 275
    },
    {
      "epoch": 1.3663366336633662,
      "grad_norm": 0.26644808053970337,
      "learning_rate": 2e-05,
      "loss": 1.4439,
      "step": 276
    },
    {
      "epoch": 1.3712871287128712,
      "grad_norm": 0.26644808053970337,
      "learning_rate": 2e-05,
      "loss": 1.4347,
      "step": 277
    },
    {
      "epoch": 1.3762376237623761,
      "grad_norm": 0.26644808053970337,
      "learning_rate": 2e-05,
      "loss": 1.4604,
      "step": 278
    },
    {
      "epoch": 1.381188118811881,
      "grad_norm": 0.26644808053970337,
      "learning_rate": 2e-05,
      "loss": 1.4547,
      "step": 279
    },
    {
      "epoch": 1.386138613861386,
      "grad_norm": 0.2679310441017151,
      "learning_rate": 2e-05,
      "loss": 1.518,
      "step": 280
    },
    {
      "epoch": 1.391089108910891,
      "grad_norm": 0.2679310441017151,
      "learning_rate": 2e-05,
      "loss": 1.4687,
      "step": 281
    },
    {
      "epoch": 1.396039603960396,
      "grad_norm": 0.2679310441017151,
      "learning_rate": 2e-05,
      "loss": 1.448,
      "step": 282
    },
    {
      "epoch": 1.400990099009901,
      "grad_norm": 0.2679310441017151,
      "learning_rate": 2e-05,
      "loss": 1.5097,
      "step": 283
    },
    {
      "epoch": 1.4059405940594059,
      "grad_norm": 0.2934781014919281,
      "learning_rate": 2e-05,
      "loss": 1.4957,
      "step": 284
    },
    {
      "epoch": 1.4108910891089108,
      "grad_norm": 0.2934781014919281,
      "learning_rate": 2e-05,
      "loss": 1.4581,
      "step": 285
    },
    {
      "epoch": 1.4158415841584158,
      "grad_norm": 0.2934781014919281,
      "learning_rate": 2e-05,
      "loss": 1.4936,
      "step": 286
    },
    {
      "epoch": 1.4207920792079207,
      "grad_norm": 0.2934781014919281,
      "learning_rate": 2e-05,
      "loss": 1.4964,
      "step": 287
    },
    {
      "epoch": 1.4257425742574257,
      "grad_norm": 0.2842659056186676,
      "learning_rate": 2e-05,
      "loss": 1.3665,
      "step": 288
    },
    {
      "epoch": 1.4306930693069306,
      "grad_norm": 0.2842659056186676,
      "learning_rate": 2e-05,
      "loss": 1.4713,
      "step": 289
    },
    {
      "epoch": 1.4356435643564356,
      "grad_norm": 0.2842659056186676,
      "learning_rate": 2e-05,
      "loss": 1.4624,
      "step": 290
    },
    {
      "epoch": 1.4405940594059405,
      "grad_norm": 0.2842659056186676,
      "learning_rate": 2e-05,
      "loss": 1.4777,
      "step": 291
    },
    {
      "epoch": 1.4455445544554455,
      "grad_norm": 0.27639469504356384,
      "learning_rate": 2e-05,
      "loss": 1.4559,
      "step": 292
    },
    {
      "epoch": 1.4504950495049505,
      "grad_norm": 0.27639469504356384,
      "learning_rate": 2e-05,
      "loss": 1.4857,
      "step": 293
    },
    {
      "epoch": 1.4554455445544554,
      "grad_norm": 0.27639469504356384,
      "learning_rate": 2e-05,
      "loss": 1.4328,
      "step": 294
    },
    {
      "epoch": 1.4603960396039604,
      "grad_norm": 0.27639469504356384,
      "learning_rate": 2e-05,
      "loss": 1.4704,
      "step": 295
    },
    {
      "epoch": 1.4653465346534653,
      "grad_norm": 0.2794305086135864,
      "learning_rate": 2e-05,
      "loss": 1.4608,
      "step": 296
    },
    {
      "epoch": 1.4702970297029703,
      "grad_norm": 0.2794305086135864,
      "learning_rate": 2e-05,
      "loss": 1.458,
      "step": 297
    },
    {
      "epoch": 1.4752475247524752,
      "grad_norm": 0.2794305086135864,
      "learning_rate": 2e-05,
      "loss": 1.4629,
      "step": 298
    },
    {
      "epoch": 1.4801980198019802,
      "grad_norm": 0.2794305086135864,
      "learning_rate": 2e-05,
      "loss": 1.4899,
      "step": 299
    },
    {
      "epoch": 1.4851485148514851,
      "grad_norm": 0.29005393385887146,
      "learning_rate": 2e-05,
      "loss": 1.4461,
      "step": 300
    },
    {
      "epoch": 1.49009900990099,
      "grad_norm": 0.29005393385887146,
      "learning_rate": 2e-05,
      "loss": 1.395,
      "step": 301
    },
    {
      "epoch": 1.495049504950495,
      "grad_norm": 0.29005393385887146,
      "learning_rate": 2e-05,
      "loss": 1.5044,
      "step": 302
    },
    {
      "epoch": 1.5,
      "grad_norm": 0.29005393385887146,
      "learning_rate": 2e-05,
      "loss": 1.4857,
      "step": 303
    },
    {
      "epoch": 1.504950495049505,
      "grad_norm": 0.28084444999694824,
      "learning_rate": 2e-05,
      "loss": 1.4932,
      "step": 304
    },
    {
      "epoch": 1.50990099009901,
      "grad_norm": 0.28084444999694824,
      "learning_rate": 2e-05,
      "loss": 1.4494,
      "step": 305
    },
    {
      "epoch": 1.5148514851485149,
      "grad_norm": 0.28084444999694824,
      "learning_rate": 2e-05,
      "loss": 1.4542,
      "step": 306
    },
    {
      "epoch": 1.5198019801980198,
      "grad_norm": 0.28084444999694824,
      "learning_rate": 2e-05,
      "loss": 1.4893,
      "step": 307
    },
    {
      "epoch": 1.5247524752475248,
      "grad_norm": 0.283642441034317,
      "learning_rate": 2e-05,
      "loss": 1.5155,
      "step": 308
    },
    {
      "epoch": 1.5297029702970297,
      "grad_norm": 0.283642441034317,
      "learning_rate": 2e-05,
      "loss": 1.3895,
      "step": 309
    },
    {
      "epoch": 1.5346534653465347,
      "grad_norm": 0.283642441034317,
      "learning_rate": 2e-05,
      "loss": 1.488,
      "step": 310
    },
    {
      "epoch": 1.5396039603960396,
      "grad_norm": 0.283642441034317,
      "learning_rate": 2e-05,
      "loss": 1.5099,
      "step": 311
    },
    {
      "epoch": 1.5445544554455446,
      "grad_norm": 0.2822302579879761,
      "learning_rate": 2e-05,
      "loss": 1.4694,
      "step": 312
    },
    {
      "epoch": 1.5495049504950495,
      "grad_norm": 0.2822302579879761,
      "learning_rate": 2e-05,
      "loss": 1.4307,
      "step": 313
    },
    {
      "epoch": 1.5544554455445545,
      "grad_norm": 0.2822302579879761,
      "learning_rate": 2e-05,
      "loss": 1.4705,
      "step": 314
    },
    {
      "epoch": 1.5594059405940595,
      "grad_norm": 0.2822302579879761,
      "learning_rate": 2e-05,
      "loss": 1.407,
      "step": 315
    },
    {
      "epoch": 1.5643564356435644,
      "grad_norm": 0.2674373984336853,
      "learning_rate": 2e-05,
      "loss": 1.5169,
      "step": 316
    },
    {
      "epoch": 1.5693069306930694,
      "grad_norm": 0.2674373984336853,
      "learning_rate": 2e-05,
      "loss": 1.4667,
      "step": 317
    },
    {
      "epoch": 1.5742574257425743,
      "grad_norm": 0.2674373984336853,
      "learning_rate": 2e-05,
      "loss": 1.4669,
      "step": 318
    },
    {
      "epoch": 1.5792079207920793,
      "grad_norm": 0.2674373984336853,
      "learning_rate": 2e-05,
      "loss": 1.4217,
      "step": 319
    },
    {
      "epoch": 1.5841584158415842,
      "grad_norm": 0.27052974700927734,
      "learning_rate": 2e-05,
      "loss": 1.4244,
      "step": 320
    },
    {
      "epoch": 1.5891089108910892,
      "grad_norm": 0.27052974700927734,
      "learning_rate": 2e-05,
      "loss": 1.461,
      "step": 321
    },
    {
      "epoch": 1.5940594059405941,
      "grad_norm": 0.27052974700927734,
      "learning_rate": 2e-05,
      "loss": 1.497,
      "step": 322
    },
    {
      "epoch": 1.599009900990099,
      "grad_norm": 0.27052974700927734,
      "learning_rate": 2e-05,
      "loss": 1.503,
      "step": 323
    },
    {
      "epoch": 1.603960396039604,
      "grad_norm": 0.2938457429409027,
      "learning_rate": 2e-05,
      "loss": 1.4156,
      "step": 324
    },
    {
      "epoch": 1.608910891089109,
      "grad_norm": 0.2938457429409027,
      "learning_rate": 2e-05,
      "loss": 1.4303,
      "step": 325
    },
    {
      "epoch": 1.613861386138614,
      "grad_norm": 0.2938457429409027,
      "learning_rate": 2e-05,
      "loss": 1.471,
      "step": 326
    },
    {
      "epoch": 1.618811881188119,
      "grad_norm": 0.2938457429409027,
      "learning_rate": 2e-05,
      "loss": 1.4311,
      "step": 327
    },
    {
      "epoch": 1.6237623762376239,
      "grad_norm": 0.2872541844844818,
      "learning_rate": 2e-05,
      "loss": 1.4992,
      "step": 328
    },
    {
      "epoch": 1.6287128712871288,
      "grad_norm": 0.2872541844844818,
      "learning_rate": 2e-05,
      "loss": 1.4592,
      "step": 329
    },
    {
      "epoch": 1.6336633663366338,
      "grad_norm": 0.2872541844844818,
      "learning_rate": 2e-05,
      "loss": 1.4477,
      "step": 330
    },
    {
      "epoch": 1.6386138613861387,
      "grad_norm": 0.2872541844844818,
      "learning_rate": 2e-05,
      "loss": 1.3685,
      "step": 331
    },
    {
      "epoch": 1.6435643564356437,
      "grad_norm": 0.2954404056072235,
      "learning_rate": 2e-05,
      "loss": 1.4691,
      "step": 332
    },
    {
      "epoch": 1.6485148514851486,
      "grad_norm": 0.2954404056072235,
      "learning_rate": 2e-05,
      "loss": 1.4652,
      "step": 333
    },
    {
      "epoch": 1.6534653465346536,
      "grad_norm": 0.2954404056072235,
      "learning_rate": 2e-05,
      "loss": 1.4048,
      "step": 334
    },
    {
      "epoch": 1.6584158415841586,
      "grad_norm": 0.2954404056072235,
      "learning_rate": 2e-05,
      "loss": 1.4597,
      "step": 335
    },
    {
      "epoch": 1.6633663366336635,
      "grad_norm": 0.2781623899936676,
      "learning_rate": 2e-05,
      "loss": 1.4382,
      "step": 336
    },
    {
      "epoch": 1.6683168316831685,
      "grad_norm": 0.2781623899936676,
      "learning_rate": 2e-05,
      "loss": 1.5407,
      "step": 337
    },
    {
      "epoch": 1.6732673267326734,
      "grad_norm": 0.2781623899936676,
      "learning_rate": 2e-05,
      "loss": 1.4404,
      "step": 338
    },
    {
      "epoch": 1.6782178217821784,
      "grad_norm": 0.2781623899936676,
      "learning_rate": 2e-05,
      "loss": 1.4698,
      "step": 339
    },
    {
      "epoch": 1.6831683168316833,
      "grad_norm": 0.3247792422771454,
      "learning_rate": 2e-05,
      "loss": 1.4563,
      "step": 340
    },
    {
      "epoch": 1.688118811881188,
      "grad_norm": 0.3247792422771454,
      "learning_rate": 2e-05,
      "loss": 1.3681,
      "step": 341
    },
    {
      "epoch": 1.693069306930693,
      "grad_norm": 0.3247792422771454,
      "learning_rate": 2e-05,
      "loss": 1.4239,
      "step": 342
    },
    {
      "epoch": 1.698019801980198,
      "grad_norm": 0.3247792422771454,
      "learning_rate": 2e-05,
      "loss": 1.4099,
      "step": 343
    },
    {
      "epoch": 1.702970297029703,
      "grad_norm": 0.2891463339328766,
      "learning_rate": 2e-05,
      "loss": 1.4827,
      "step": 344
    },
    {
      "epoch": 1.7079207920792079,
      "grad_norm": 0.2891463339328766,
      "learning_rate": 2e-05,
      "loss": 1.4642,
      "step": 345
    },
    {
      "epoch": 1.7128712871287128,
      "grad_norm": 0.2891463339328766,
      "learning_rate": 2e-05,
      "loss": 1.5105,
      "step": 346
    },
    {
      "epoch": 1.7178217821782178,
      "grad_norm": 0.2891463339328766,
      "learning_rate": 2e-05,
      "loss": 1.4188,
      "step": 347
    },
    {
      "epoch": 1.7227722772277227,
      "grad_norm": 0.2906363308429718,
      "learning_rate": 2e-05,
      "loss": 1.4342,
      "step": 348
    },
    {
      "epoch": 1.7277227722772277,
      "grad_norm": 0.2906363308429718,
      "learning_rate": 2e-05,
      "loss": 1.4774,
      "step": 349
    },
    {
      "epoch": 1.7326732673267327,
      "grad_norm": 0.2906363308429718,
      "learning_rate": 2e-05,
      "loss": 1.5289,
      "step": 350
    },
    {
      "epoch": 1.7376237623762376,
      "grad_norm": 0.2906363308429718,
      "learning_rate": 2e-05,
      "loss": 1.4149,
      "step": 351
    },
    {
      "epoch": 1.7425742574257426,
| "grad_norm": 0.2809044420719147, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4506, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 1.7475247524752475, | |
| "grad_norm": 0.2809044420719147, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4838, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 1.7524752475247525, | |
| "grad_norm": 0.2809044420719147, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4642, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 1.7574257425742574, | |
| "grad_norm": 0.2809044420719147, | |
| "learning_rate": 2e-05, | |
| "loss": 1.428, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 1.7623762376237624, | |
| "grad_norm": 0.2897462248802185, | |
| "learning_rate": 2e-05, | |
| "loss": 1.459, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 1.7673267326732673, | |
| "grad_norm": 0.2897462248802185, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4475, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 1.7722772277227723, | |
| "grad_norm": 0.2897462248802185, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4933, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 1.7772277227722773, | |
| "grad_norm": 0.2897462248802185, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4318, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 1.7821782178217822, | |
| "grad_norm": 0.2832702696323395, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3858, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 1.7871287128712872, | |
| "grad_norm": 0.2832702696323395, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4198, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 1.7920792079207921, | |
| "grad_norm": 0.2832702696323395, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4236, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 1.797029702970297, | |
| "grad_norm": 0.2832702696323395, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4393, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 1.801980198019802, | |
| "grad_norm": 0.2876037657260895, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4811, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 1.806930693069307, | |
| "grad_norm": 0.2876037657260895, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4662, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 1.811881188118812, | |
| "grad_norm": 0.2876037657260895, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4535, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 1.8168316831683167, | |
| "grad_norm": 0.2876037657260895, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4433, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 1.8217821782178216, | |
| "grad_norm": 0.2914987802505493, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4859, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 1.8267326732673266, | |
| "grad_norm": 0.2914987802505493, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4385, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 1.8316831683168315, | |
| "grad_norm": 0.2914987802505493, | |
| "learning_rate": 2e-05, | |
| "loss": 1.42, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 1.8366336633663365, | |
| "grad_norm": 0.2914987802505493, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4572, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 1.8415841584158414, | |
| "grad_norm": 0.2872907221317291, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4007, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 1.8465346534653464, | |
| "grad_norm": 0.2872907221317291, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4527, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 1.8514851485148514, | |
| "grad_norm": 0.2872907221317291, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4357, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 1.8564356435643563, | |
| "grad_norm": 0.2872907221317291, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4271, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 1.8613861386138613, | |
| "grad_norm": 0.28607606887817383, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4886, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 1.8663366336633662, | |
| "grad_norm": 0.28607606887817383, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4755, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.8712871287128712, | |
| "grad_norm": 0.28607606887817383, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4275, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 1.8762376237623761, | |
| "grad_norm": 0.28607606887817383, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4568, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 1.881188118811881, | |
| "grad_norm": 0.28154703974723816, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4347, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.886138613861386, | |
| "grad_norm": 0.28154703974723816, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4496, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 1.891089108910891, | |
| "grad_norm": 0.28154703974723816, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4331, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 1.896039603960396, | |
| "grad_norm": 0.28154703974723816, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4467, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 1.900990099009901, | |
| "grad_norm": 0.30775487422943115, | |
| "learning_rate": 2e-05, | |
| "loss": 1.5031, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 1.9059405940594059, | |
| "grad_norm": 0.30775487422943115, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4401, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 1.9108910891089108, | |
| "grad_norm": 0.30775487422943115, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4608, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 1.9158415841584158, | |
| "grad_norm": 0.30775487422943115, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3846, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 1.9207920792079207, | |
| "grad_norm": 0.28769612312316895, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3951, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 1.9257425742574257, | |
| "grad_norm": 0.28769612312316895, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4128, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 1.9306930693069306, | |
| "grad_norm": 0.28769612312316895, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4753, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 1.9356435643564356, | |
| "grad_norm": 0.28769612312316895, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4264, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 1.9405940594059405, | |
| "grad_norm": 0.3035818636417389, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4908, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 1.9455445544554455, | |
| "grad_norm": 0.3035818636417389, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4205, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 1.9504950495049505, | |
| "grad_norm": 0.3035818636417389, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4378, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 1.9554455445544554, | |
| "grad_norm": 0.3035818636417389, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4151, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 1.9603960396039604, | |
| "grad_norm": 0.3105930685997009, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4885, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 1.9653465346534653, | |
| "grad_norm": 0.3105930685997009, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4208, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 1.9702970297029703, | |
| "grad_norm": 0.3105930685997009, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4401, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 1.9752475247524752, | |
| "grad_norm": 0.3105930685997009, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4563, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 1.9801980198019802, | |
| "grad_norm": 0.28015851974487305, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4198, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.9851485148514851, | |
| "grad_norm": 0.28015851974487305, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4868, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 1.99009900990099, | |
| "grad_norm": 0.28015851974487305, | |
| "learning_rate": 2e-05, | |
| "loss": 1.431, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 1.995049504950495, | |
| "grad_norm": 0.28015851974487305, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4577, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.28616049885749817, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3554, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 2.004950495049505, | |
| "grad_norm": 0.28616049885749817, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4254, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 2.00990099009901, | |
| "grad_norm": 0.28616049885749817, | |
| "learning_rate": 2e-05, | |
| "loss": 1.341, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 2.014851485148515, | |
| "grad_norm": 0.28616049885749817, | |
| "learning_rate": 2e-05, | |
| "loss": 1.423, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 2.01980198019802, | |
| "grad_norm": 0.317325234413147, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3991, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 2.0247524752475248, | |
| "grad_norm": 0.317325234413147, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3587, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 2.0297029702970297, | |
| "grad_norm": 0.317325234413147, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4268, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 2.0346534653465347, | |
| "grad_norm": 0.317325234413147, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4146, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 2.0396039603960396, | |
| "grad_norm": 0.3060872256755829, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4125, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 2.0445544554455446, | |
| "grad_norm": 0.3060872256755829, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4337, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 2.0495049504950495, | |
| "grad_norm": 0.3060872256755829, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4261, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 2.0544554455445545, | |
| "grad_norm": 0.3060872256755829, | |
| "learning_rate": 2e-05, | |
| "loss": 1.36, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 2.0594059405940595, | |
| "grad_norm": 0.2912452518939972, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4411, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 2.0643564356435644, | |
| "grad_norm": 0.2912452518939972, | |
| "learning_rate": 2e-05, | |
| "loss": 1.463, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 2.0693069306930694, | |
| "grad_norm": 0.2912452518939972, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4353, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 2.0742574257425743, | |
| "grad_norm": 0.2912452518939972, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4165, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 2.0792079207920793, | |
| "grad_norm": 0.29850682616233826, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4432, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 2.0841584158415842, | |
| "grad_norm": 0.29850682616233826, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4062, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 2.089108910891089, | |
| "grad_norm": 0.29850682616233826, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4478, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 2.094059405940594, | |
| "grad_norm": 0.29850682616233826, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3477, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 2.099009900990099, | |
| "grad_norm": 0.27797263860702515, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3811, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 2.103960396039604, | |
| "grad_norm": 0.27797263860702515, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3697, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 2.108910891089109, | |
| "grad_norm": 0.27797263860702515, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3936, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 2.113861386138614, | |
| "grad_norm": 0.27797263860702515, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3869, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 2.118811881188119, | |
| "grad_norm": 0.2979559302330017, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4054, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 2.123762376237624, | |
| "grad_norm": 0.2979559302330017, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3993, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 2.128712871287129, | |
| "grad_norm": 0.2979559302330017, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4286, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 2.133663366336634, | |
| "grad_norm": 0.2979559302330017, | |
| "learning_rate": 2e-05, | |
| "loss": 1.417, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 2.1386138613861387, | |
| "grad_norm": 0.28421610593795776, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3958, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 2.1435643564356437, | |
| "grad_norm": 0.28421610593795776, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4134, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 2.1485148514851486, | |
| "grad_norm": 0.28421610593795776, | |
| "learning_rate": 2e-05, | |
| "loss": 1.418, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 2.1534653465346536, | |
| "grad_norm": 0.28421610593795776, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3599, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 2.1584158415841586, | |
| "grad_norm": 0.29607468843460083, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4297, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 2.1633663366336635, | |
| "grad_norm": 0.29607468843460083, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3682, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 2.1683168316831685, | |
| "grad_norm": 0.29607468843460083, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4431, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 2.1732673267326734, | |
| "grad_norm": 0.29607468843460083, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4598, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 2.1782178217821784, | |
| "grad_norm": 0.30585789680480957, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3998, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 2.1831683168316833, | |
| "grad_norm": 0.30585789680480957, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3854, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 2.1881188118811883, | |
| "grad_norm": 0.30585789680480957, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3953, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 2.1930693069306932, | |
| "grad_norm": 0.30585789680480957, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3797, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 2.198019801980198, | |
| "grad_norm": 0.28262895345687866, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3731, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 2.202970297029703, | |
| "grad_norm": 0.28262895345687866, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4521, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 2.207920792079208, | |
| "grad_norm": 0.28262895345687866, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4159, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 2.212871287128713, | |
| "grad_norm": 0.28262895345687866, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4149, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 2.217821782178218, | |
| "grad_norm": 0.2773006558418274, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3996, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 2.222772277227723, | |
| "grad_norm": 0.2773006558418274, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3302, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 2.227722772277228, | |
| "grad_norm": 0.2773006558418274, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3364, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 2.232673267326733, | |
| "grad_norm": 0.2773006558418274, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4101, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 2.237623762376238, | |
| "grad_norm": 0.2921248972415924, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3372, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 2.2425742574257423, | |
| "grad_norm": 0.2921248972415924, | |
| "learning_rate": 2e-05, | |
| "loss": 1.433, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 2.2475247524752477, | |
| "grad_norm": 0.2921248972415924, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3784, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 2.2524752475247523, | |
| "grad_norm": 0.2921248972415924, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3843, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 2.2574257425742577, | |
| "grad_norm": 0.2769683599472046, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3851, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 2.262376237623762, | |
| "grad_norm": 0.2769683599472046, | |
| "learning_rate": 2e-05, | |
| "loss": 1.416, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 2.2673267326732676, | |
| "grad_norm": 0.2769683599472046, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3678, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 2.272277227722772, | |
| "grad_norm": 0.2769683599472046, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3452, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 2.2772277227722775, | |
| "grad_norm": 0.285204142332077, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4036, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 2.282178217821782, | |
| "grad_norm": 0.285204142332077, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3349, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 2.287128712871287, | |
| "grad_norm": 0.285204142332077, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4302, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 2.292079207920792, | |
| "grad_norm": 0.285204142332077, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3687, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 2.297029702970297, | |
| "grad_norm": 0.27696162462234497, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4433, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 2.301980198019802, | |
| "grad_norm": 0.27696162462234497, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4201, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 2.3069306930693068, | |
| "grad_norm": 0.27696162462234497, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4355, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 2.3118811881188117, | |
| "grad_norm": 0.27696162462234497, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4331, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 2.3168316831683167, | |
| "grad_norm": 0.2807972729206085, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3849, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 2.3217821782178216, | |
| "grad_norm": 0.2807972729206085, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3458, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 2.3267326732673266, | |
| "grad_norm": 0.2807972729206085, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4451, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 2.3316831683168315, | |
| "grad_norm": 0.2807972729206085, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3855, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 2.3366336633663365, | |
| "grad_norm": 0.27739667892456055, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4201, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 2.3415841584158414, | |
| "grad_norm": 0.27739667892456055, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3699, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 2.3465346534653464, | |
| "grad_norm": 0.27739667892456055, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3877, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 2.3514851485148514, | |
| "grad_norm": 0.27739667892456055, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3925, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 2.3564356435643563, | |
| "grad_norm": 0.2919694483280182, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3989, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 2.3613861386138613, | |
| "grad_norm": 0.2919694483280182, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4233, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 2.366336633663366, | |
| "grad_norm": 0.2919694483280182, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3902, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 2.371287128712871, | |
| "grad_norm": 0.2919694483280182, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4339, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 2.376237623762376, | |
| "grad_norm": 0.276358962059021, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3965, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 2.381188118811881, | |
| "grad_norm": 0.276358962059021, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3959, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 2.386138613861386, | |
| "grad_norm": 0.276358962059021, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3689, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 2.391089108910891, | |
| "grad_norm": 0.276358962059021, | |
| "learning_rate": 2e-05, | |
| "loss": 1.411, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 2.396039603960396, | |
| "grad_norm": 0.2838156223297119, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4069, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 2.400990099009901, | |
| "grad_norm": 0.2838156223297119, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3636, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 2.405940594059406, | |
| "grad_norm": 0.2838156223297119, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3428, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 2.410891089108911, | |
| "grad_norm": 0.2838156223297119, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3681, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 2.4158415841584158, | |
| "grad_norm": 0.2741771340370178, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3872, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 2.4207920792079207, | |
| "grad_norm": 0.2741771340370178, | |
| "learning_rate": 2e-05, | |
| "loss": 1.357, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 2.4257425742574257, | |
| "grad_norm": 0.2741771340370178, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3631, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 2.4306930693069306, | |
| "grad_norm": 0.2741771340370178, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4068, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 2.4356435643564356, | |
| "grad_norm": 0.2892126441001892, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3679, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 2.4405940594059405, | |
| "grad_norm": 0.2892126441001892, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3479, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 2.4455445544554455, | |
| "grad_norm": 0.2892126441001892, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4556, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 2.4504950495049505, | |
| "grad_norm": 0.2892126441001892, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3408, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 2.4554455445544554, | |
| "grad_norm": 0.28148889541625977, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3008, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 2.4603960396039604, | |
| "grad_norm": 0.28148889541625977, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4694, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 2.4653465346534653, | |
| "grad_norm": 0.28148889541625977, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3694, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 2.4702970297029703, | |
| "grad_norm": 0.28148889541625977, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4251, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 2.4752475247524752, | |
| "grad_norm": 0.29076042771339417, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3562, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 2.48019801980198, | |
| "grad_norm": 0.29076042771339417, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3657, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 2.485148514851485, | |
| "grad_norm": 0.29076042771339417, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3946, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 2.49009900990099, | |
| "grad_norm": 0.29076042771339417, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3492, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 2.495049504950495, | |
| "grad_norm": 0.27691689133644104, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3678, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "grad_norm": 0.27691689133644104, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4146, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 2.504950495049505, | |
| "grad_norm": 0.27691689133644104, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4215, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 2.50990099009901, | |
| "grad_norm": 0.27691689133644104, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3759, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 2.514851485148515, | |
| "grad_norm": 0.27186834812164307, | |
| "learning_rate": 2e-05, | |
| "loss": 1.339, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 2.51980198019802, | |
| "grad_norm": 0.27186834812164307, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3958, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 2.5247524752475248, | |
| "grad_norm": 0.27186834812164307, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3675, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 2.5297029702970297, | |
| "grad_norm": 0.27186834812164307, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3766, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 2.5346534653465347, | |
| "grad_norm": 0.3018421232700348, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3505, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 2.5396039603960396, | |
| "grad_norm": 0.3018421232700348, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3001, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 2.5445544554455446, | |
| "grad_norm": 0.3018421232700348, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3361, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 2.5495049504950495, | |
| "grad_norm": 0.3018421232700348, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4107, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 2.5544554455445545, | |
| "grad_norm": 0.2854144871234894, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4126, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 2.5594059405940595, | |
| "grad_norm": 0.2854144871234894, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3747, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 2.5643564356435644, | |
| "grad_norm": 0.2854144871234894, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3408, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 2.5693069306930694, | |
| "grad_norm": 0.2854144871234894, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3512, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 2.5742574257425743, | |
| "grad_norm": 0.2874051332473755, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4058, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 2.5792079207920793, | |
| "grad_norm": 0.2874051332473755, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4259, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 2.5841584158415842, | |
| "grad_norm": 0.2874051332473755, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3752, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 2.589108910891089, | |
| "grad_norm": 0.2874051332473755, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3503, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 2.594059405940594, | |
| "grad_norm": 0.27819254994392395, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3721, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 2.599009900990099, | |
| "grad_norm": 0.27819254994392395, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4515, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 2.603960396039604, | |
| "grad_norm": 0.27819254994392395, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4081, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 2.608910891089109, | |
| "grad_norm": 0.27819254994392395, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3186, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 2.613861386138614, | |
| "grad_norm": 0.27905669808387756, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3056, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 2.618811881188119, | |
| "grad_norm": 0.27905669808387756, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3097, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 2.623762376237624, | |
| "grad_norm": 0.27905669808387756, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3153, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 2.628712871287129, | |
| "grad_norm": 0.27905669808387756, | |
| "learning_rate": 2e-05, | |
| "loss": 1.345, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 2.633663366336634, | |
| "grad_norm": 0.29262369871139526, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3759, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 2.6386138613861387, | |
| "grad_norm": 0.29262369871139526, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3587, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 2.6435643564356437, | |
| "grad_norm": 0.29262369871139526, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3036, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 2.6485148514851486, | |
| "grad_norm": 0.29262369871139526, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3814, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 2.6534653465346536, | |
| "grad_norm": 0.26941347122192383, | |
| "learning_rate": 2e-05, | |
| "loss": 1.2526, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 2.6584158415841586, | |
| "grad_norm": 0.26941347122192383, | |
| "learning_rate": 2e-05, | |
| "loss": 1.323, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 2.6633663366336635, | |
| "grad_norm": 0.26941347122192383, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3945, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 2.6683168316831685, | |
| "grad_norm": 0.26941347122192383, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3668, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 2.6732673267326734, | |
| "grad_norm": 0.2864089012145996, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3519, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 2.6782178217821784, | |
| "grad_norm": 0.2864089012145996, | |
| "learning_rate": 2e-05, | |
| "loss": 1.402, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 2.6831683168316833, | |
| "grad_norm": 0.2864089012145996, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3737, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 2.6881188118811883, | |
| "grad_norm": 0.2864089012145996, | |
| "learning_rate": 2e-05, | |
| "loss": 1.345, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 2.693069306930693, | |
| "grad_norm": 0.2727324664592743, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3741, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 2.698019801980198, | |
| "grad_norm": 0.2727324664592743, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3779, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 2.7029702970297027, | |
| "grad_norm": 0.2727324664592743, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4057, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 2.707920792079208, | |
| "grad_norm": 0.2727324664592743, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3702, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 2.7128712871287126, | |
| "grad_norm": 0.2906956970691681, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4395, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 2.717821782178218, | |
| "grad_norm": 0.2906956970691681, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3275, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 2.7227722772277225, | |
| "grad_norm": 0.2906956970691681, | |
| "learning_rate": 2e-05, | |
| "loss": 1.396, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 2.727722772277228, | |
| "grad_norm": 0.2906956970691681, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3898, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 2.7326732673267324, | |
| "grad_norm": 0.2827296555042267, | |
| "learning_rate": 2e-05, | |
| "loss": 1.46, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 2.737623762376238, | |
| "grad_norm": 0.2827296555042267, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3724, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 2.7425742574257423, | |
| "grad_norm": 0.2827296555042267, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4224, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 2.7475247524752477, | |
| "grad_norm": 0.2827296555042267, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3255, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 2.7524752475247523, | |
| "grad_norm": 0.2894042730331421, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3698, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 2.7574257425742577, | |
| "grad_norm": 0.2894042730331421, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3767, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 2.762376237623762, | |
| "grad_norm": 0.2894042730331421, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3385, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 2.7673267326732676, | |
| "grad_norm": 0.2894042730331421, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3326, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 2.772277227722772, | |
| "grad_norm": 0.28423652052879333, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3213, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 2.7772277227722775, | |
| "grad_norm": 0.28423652052879333, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3641, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 2.782178217821782, | |
| "grad_norm": 0.28423652052879333, | |
| "learning_rate": 2e-05, | |
| "loss": 1.37, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 2.7871287128712874, | |
| "grad_norm": 0.28423652052879333, | |
| "learning_rate": 2e-05, | |
| "loss": 1.321, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 2.792079207920792, | |
| "grad_norm": 0.27789822220802307, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4119, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 2.7970297029702973, | |
| "grad_norm": 0.27789822220802307, | |
| "learning_rate": 2e-05, | |
| "loss": 1.351, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 2.801980198019802, | |
| "grad_norm": 0.27789822220802307, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3619, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 2.806930693069307, | |
| "grad_norm": 0.27789822220802307, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3856, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 2.8118811881188117, | |
| "grad_norm": 0.28625088930130005, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3674, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 2.8168316831683167, | |
| "grad_norm": 0.28625088930130005, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4114, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 2.8217821782178216, | |
| "grad_norm": 0.28625088930130005, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4165, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 2.8267326732673266, | |
| "grad_norm": 0.28625088930130005, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3547, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 2.8316831683168315, | |
| "grad_norm": 0.30029016733169556, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3799, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 2.8366336633663365, | |
| "grad_norm": 0.30029016733169556, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4145, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 2.8415841584158414, | |
| "grad_norm": 0.30029016733169556, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3959, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 2.8465346534653464, | |
| "grad_norm": 0.30029016733169556, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3416, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 2.8514851485148514, | |
| "grad_norm": 0.29112035036087036, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3418, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 2.8564356435643563, | |
| "grad_norm": 0.29112035036087036, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3522, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 2.8613861386138613, | |
| "grad_norm": 0.29112035036087036, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3506, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 2.866336633663366, | |
| "grad_norm": 0.29112035036087036, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3808, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 2.871287128712871, | |
| "grad_norm": 0.2819567024707794, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3871, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 2.876237623762376, | |
| "grad_norm": 0.2819567024707794, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3984, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 2.881188118811881, | |
| "grad_norm": 0.2819567024707794, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3584, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 2.886138613861386, | |
| "grad_norm": 0.2819567024707794, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3971, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 2.891089108910891, | |
| "grad_norm": 0.28793245553970337, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4094, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 2.896039603960396, | |
| "grad_norm": 0.28793245553970337, | |
| "learning_rate": 2e-05, | |
| "loss": 1.396, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 2.900990099009901, | |
| "grad_norm": 0.28793245553970337, | |
| "learning_rate": 2e-05, | |
| "loss": 1.323, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 2.905940594059406, | |
| "grad_norm": 0.28793245553970337, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3936, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 2.910891089108911, | |
| "grad_norm": 0.28640323877334595, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3721, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 2.9158415841584158, | |
| "grad_norm": 0.28640323877334595, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3501, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 2.9207920792079207, | |
| "grad_norm": 0.28640323877334595, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3953, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 2.9257425742574257, | |
| "grad_norm": 0.28640323877334595, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3597, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 2.9306930693069306, | |
| "grad_norm": 0.28491514921188354, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3689, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 2.9356435643564356, | |
| "grad_norm": 0.28491514921188354, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3209, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 2.9405940594059405, | |
| "grad_norm": 0.28491514921188354, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4021, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 2.9455445544554455, | |
| "grad_norm": 0.28491514921188354, | |
| "learning_rate": 2e-05, | |
| "loss": 1.36, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 2.9504950495049505, | |
| "grad_norm": 0.2926208972930908, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3803, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 2.9554455445544554, | |
| "grad_norm": 0.2926208972930908, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3806, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 2.9603960396039604, | |
| "grad_norm": 0.2926208972930908, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3569, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 2.9653465346534653, | |
| "grad_norm": 0.2926208972930908, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3718, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 2.9702970297029703, | |
| "grad_norm": 0.2797441780567169, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3376, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 2.9752475247524752, | |
| "grad_norm": 0.2797441780567169, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3455, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 2.98019801980198, | |
| "grad_norm": 0.2797441780567169, | |
| "learning_rate": 2e-05, | |
| "loss": 1.4179, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 2.985148514851485, | |
| "grad_norm": 0.2797441780567169, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3065, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 2.99009900990099, | |
| "grad_norm": 0.29282307624816895, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3871, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 2.995049504950495, | |
| "grad_norm": 0.29282307624816895, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3707, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "grad_norm": 0.29282307624816895, | |
| "learning_rate": 2e-05, | |
| "loss": 1.3113, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "step": 606, | |
| "total_flos": 3.877969000755364e+17, | |
| "train_loss": 1.531072208000095, | |
| "train_runtime": 382.5341, | |
| "train_samples_per_second": 403.737, | |
| "train_steps_per_second": 1.584 | |
| } | |
| ], | |
| "logging_steps": 1.0, | |
| "max_steps": 606, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 3000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 3.877969000755364e+17, | |
| "train_batch_size": 32, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
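
As a usage note, the record above is a standard Hugging Face Trainer state: `log_history` holds one entry per logged step (here `logging_steps` is 1.0), and the trailer carries run-level fields such as `total_flos`, `train_batch_size`, and the `TrainerControl` callback state. Below is a minimal sketch of reading such a file and summarizing loss per epoch. It assumes the raw JSON has been saved as `trainer_state.json` (the filename is illustrative, and the table-cell wrapping shown in this rendering would need to be stripped to recover valid JSON); the per-epoch grouping and the comparison against the reported `train_loss` are added for illustration and are not part of the log itself.

```python
# Minimal sketch: parse a Hugging Face trainer_state.json and summarize loss.
# Assumes the raw JSON file is available at the (hypothetical) path below.
import json
import math
from collections import defaultdict

with open("trainer_state.json") as f:
    state = json.load(f)

# Group step losses by the epoch they fall in. Fractional "epoch" values like
# 2.0148 belong to epoch 3's pass over the data only after 2.0; using ceil()
# assigns e.g. epoch 1.9901 -> 2 and the boundary entry 2.0 -> 2.
by_epoch = defaultdict(list)
for entry in state["log_history"]:
    # The final summary entry reports "train_loss" rather than "loss"; skip it.
    if "loss" in entry:
        by_epoch[math.ceil(entry["epoch"])].append(entry["loss"])

for epoch, losses in sorted(by_epoch.items()):
    print(f"epoch {epoch}: mean loss {sum(losses) / len(losses):.4f} "
          f"over {len(losses)} logged steps")

# The mean over all logged steps should land near the reported train_loss
# (about 1.531 in this run), since every step is logged here.
all_losses = [e["loss"] for e in state["log_history"] if "loss" in e]
print(f"overall mean of logged losses: {sum(all_losses) / len(all_losses):.4f}")
```

One detail worth noting when reading the raw entries: `grad_norm` repeats in blocks of four consecutive steps, which is the pattern produced when the norm is only refreshed once per optimizer update (e.g. under gradient accumulation), so identical consecutive values are expected rather than a logging error.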