{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.5005500550055005,
  "eval_steps": 500,
  "global_step": 455,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0011001100110011,
      "grad_norm": 6.46875,
      "learning_rate": 0.0,
      "loss": 2.144,
      "step": 1
    },
    {
      "epoch": 0.0022002200220022,
      "grad_norm": 7.53125,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 3.057,
      "step": 2
    },
    {
      "epoch": 0.0033003300330033004,
      "grad_norm": 3.28125,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 1.7201,
      "step": 3
    },
    {
      "epoch": 0.0044004400440044,
      "grad_norm": 4.09375,
      "learning_rate": 4.8e-05,
      "loss": 1.6661,
      "step": 4
    },
    {
      "epoch": 0.005500550055005501,
      "grad_norm": 5.21875,
      "learning_rate": 6.400000000000001e-05,
      "loss": 2.2751,
      "step": 5
    },
    {
      "epoch": 0.006600660066006601,
      "grad_norm": 4.5,
      "learning_rate": 8e-05,
      "loss": 1.8133,
      "step": 6
    },
    {
      "epoch": 0.007700770077007701,
      "grad_norm": 4.34375,
      "learning_rate": 7.99706098457017e-05,
      "loss": 2.1569,
      "step": 7
    },
    {
      "epoch": 0.0088008800880088,
      "grad_norm": 4.65625,
      "learning_rate": 7.994121969140339e-05,
      "loss": 1.7213,
      "step": 8
    },
    {
      "epoch": 0.009900990099009901,
      "grad_norm": 4.46875,
      "learning_rate": 7.991182953710508e-05,
      "loss": 1.9531,
      "step": 9
    },
    {
      "epoch": 0.011001100110011002,
      "grad_norm": 3.828125,
      "learning_rate": 7.988243938280677e-05,
      "loss": 1.9228,
      "step": 10
    },
    {
      "epoch": 0.0121012101210121,
      "grad_norm": 3.4375,
      "learning_rate": 7.985304922850846e-05,
      "loss": 2.028,
      "step": 11
    },
    {
      "epoch": 0.013201320132013201,
      "grad_norm": 3.90625,
      "learning_rate": 7.982365907421014e-05,
      "loss": 2.0012,
      "step": 12
    },
    {
      "epoch": 0.014301430143014302,
      "grad_norm": 4.40625,
      "learning_rate": 7.979426891991184e-05,
      "loss": 2.0968,
      "step": 13
    },
    {
      "epoch": 0.015401540154015401,
      "grad_norm": 3.78125,
      "learning_rate": 7.976487876561353e-05,
      "loss": 1.5875,
      "step": 14
    },
    {
      "epoch": 0.0165016501650165,
      "grad_norm": 4.15625,
      "learning_rate": 7.973548861131522e-05,
      "loss": 1.9844,
      "step": 15
    },
    {
      "epoch": 0.0176017601760176,
      "grad_norm": 4.15625,
      "learning_rate": 7.97060984570169e-05,
      "loss": 1.5483,
      "step": 16
    },
    {
      "epoch": 0.0187018701870187,
      "grad_norm": 4.03125,
      "learning_rate": 7.96767083027186e-05,
      "loss": 1.801,
      "step": 17
    },
    {
      "epoch": 0.019801980198019802,
      "grad_norm": 3.453125,
      "learning_rate": 7.964731814842029e-05,
      "loss": 1.9239,
      "step": 18
    },
    {
      "epoch": 0.020902090209020903,
      "grad_norm": 3.40625,
      "learning_rate": 7.961792799412197e-05,
      "loss": 1.8882,
      "step": 19
    },
    {
      "epoch": 0.022002200220022004,
      "grad_norm": 3.6875,
      "learning_rate": 7.958853783982366e-05,
      "loss": 1.4882,
      "step": 20
    },
    {
      "epoch": 0.0231023102310231,
      "grad_norm": 3.28125,
      "learning_rate": 7.955914768552536e-05,
      "loss": 1.7706,
      "step": 21
    },
    {
      "epoch": 0.0242024202420242,
      "grad_norm": 2.84375,
      "learning_rate": 7.952975753122704e-05,
      "loss": 1.4345,
      "step": 22
    },
    {
      "epoch": 0.025302530253025302,
      "grad_norm": 3.953125,
      "learning_rate": 7.950036737692873e-05,
      "loss": 1.9439,
      "step": 23
    },
    {
      "epoch": 0.026402640264026403,
      "grad_norm": 3.25,
      "learning_rate": 7.947097722263042e-05,
      "loss": 1.5659,
      "step": 24
    },
    {
      "epoch": 0.027502750275027504,
      "grad_norm": 4.15625,
      "learning_rate": 7.944158706833211e-05,
      "loss": 1.4751,
      "step": 25
    },
    {
      "epoch": 0.028602860286028604,
      "grad_norm": 3.203125,
      "learning_rate": 7.94121969140338e-05,
      "loss": 1.7868,
      "step": 26
    },
    {
      "epoch": 0.0297029702970297,
      "grad_norm": 3.421875,
      "learning_rate": 7.93828067597355e-05,
      "loss": 1.5937,
      "step": 27
    },
    {
      "epoch": 0.030803080308030802,
      "grad_norm": 3.71875,
      "learning_rate": 7.935341660543718e-05,
      "loss": 1.375,
      "step": 28
    },
    {
      "epoch": 0.0319031903190319,
      "grad_norm": 3.390625,
      "learning_rate": 7.932402645113887e-05,
      "loss": 1.5409,
      "step": 29
    },
    {
      "epoch": 0.033003300330033,
      "grad_norm": 3.5625,
      "learning_rate": 7.929463629684057e-05,
      "loss": 2.0894,
      "step": 30
    },
    {
      "epoch": 0.034103410341034104,
      "grad_norm": 2.953125,
      "learning_rate": 7.926524614254226e-05,
      "loss": 1.5313,
      "step": 31
    },
    {
      "epoch": 0.0352035203520352,
      "grad_norm": 3.65625,
      "learning_rate": 7.923585598824395e-05,
      "loss": 1.532,
      "step": 32
    },
    {
      "epoch": 0.036303630363036306,
      "grad_norm": 3.0625,
      "learning_rate": 7.920646583394564e-05,
      "loss": 1.7818,
      "step": 33
    },
    {
      "epoch": 0.0374037403740374,
      "grad_norm": 3.21875,
      "learning_rate": 7.917707567964733e-05,
      "loss": 1.6455,
      "step": 34
    },
    {
      "epoch": 0.03850385038503851,
      "grad_norm": 3.484375,
      "learning_rate": 7.914768552534902e-05,
      "loss": 1.8688,
      "step": 35
    },
    {
      "epoch": 0.039603960396039604,
      "grad_norm": 3.15625,
      "learning_rate": 7.911829537105071e-05,
      "loss": 1.5657,
      "step": 36
    },
    {
      "epoch": 0.0407040704070407,
      "grad_norm": 2.671875,
      "learning_rate": 7.90889052167524e-05,
      "loss": 1.9176,
      "step": 37
    },
    {
      "epoch": 0.041804180418041806,
      "grad_norm": 3.625,
      "learning_rate": 7.905951506245409e-05,
      "loss": 2.0095,
      "step": 38
    },
    {
      "epoch": 0.0429042904290429,
      "grad_norm": 3.234375,
      "learning_rate": 7.903012490815578e-05,
      "loss": 1.6829,
      "step": 39
    },
    {
      "epoch": 0.04400440044004401,
      "grad_norm": 2.703125,
      "learning_rate": 7.900073475385747e-05,
      "loss": 1.4805,
      "step": 40
    },
    {
      "epoch": 0.045104510451045104,
      "grad_norm": 3.0625,
      "learning_rate": 7.897134459955915e-05,
      "loss": 1.7129,
      "step": 41
    },
    {
      "epoch": 0.0462046204620462,
      "grad_norm": 2.75,
      "learning_rate": 7.894195444526084e-05,
      "loss": 1.846,
      "step": 42
    },
    {
      "epoch": 0.047304730473047306,
      "grad_norm": 2.90625,
      "learning_rate": 7.891256429096254e-05,
      "loss": 1.5123,
      "step": 43
    },
    {
      "epoch": 0.0484048404840484,
      "grad_norm": 3.203125,
      "learning_rate": 7.888317413666422e-05,
      "loss": 1.5001,
      "step": 44
    },
    {
      "epoch": 0.04950495049504951,
      "grad_norm": 2.96875,
      "learning_rate": 7.885378398236591e-05,
      "loss": 1.7146,
      "step": 45
    },
    {
      "epoch": 0.050605060506050605,
      "grad_norm": 2.75,
      "learning_rate": 7.88243938280676e-05,
      "loss": 1.5773,
      "step": 46
    },
    {
      "epoch": 0.0517051705170517,
      "grad_norm": 3.28125,
      "learning_rate": 7.87950036737693e-05,
      "loss": 1.5799,
      "step": 47
    },
    {
      "epoch": 0.052805280528052806,
      "grad_norm": 3.21875,
      "learning_rate": 7.876561351947098e-05,
      "loss": 1.8769,
      "step": 48
    },
    {
      "epoch": 0.0539053905390539,
      "grad_norm": 2.90625,
      "learning_rate": 7.873622336517267e-05,
      "loss": 1.8068,
      "step": 49
    },
    {
      "epoch": 0.05500550055005501,
      "grad_norm": 2.96875,
      "learning_rate": 7.870683321087436e-05,
      "loss": 1.8403,
      "step": 50
    },
    {
      "epoch": 0.056105610561056105,
      "grad_norm": 3.125,
      "learning_rate": 7.867744305657605e-05,
      "loss": 1.3685,
      "step": 51
    },
    {
      "epoch": 0.05720572057205721,
      "grad_norm": 3.484375,
      "learning_rate": 7.864805290227774e-05,
      "loss": 1.983,
      "step": 52
    },
    {
      "epoch": 0.058305830583058306,
      "grad_norm": 2.75,
      "learning_rate": 7.861866274797943e-05,
      "loss": 1.7412,
      "step": 53
    },
    {
      "epoch": 0.0594059405940594,
      "grad_norm": 2.703125,
      "learning_rate": 7.858927259368112e-05,
      "loss": 1.8109,
      "step": 54
    },
    {
      "epoch": 0.06050605060506051,
      "grad_norm": 2.875,
      "learning_rate": 7.855988243938281e-05,
      "loss": 1.7846,
      "step": 55
    },
    {
      "epoch": 0.061606160616061605,
      "grad_norm": 2.84375,
      "learning_rate": 7.85304922850845e-05,
      "loss": 1.4929,
      "step": 56
    },
    {
      "epoch": 0.0627062706270627,
      "grad_norm": 2.96875,
      "learning_rate": 7.85011021307862e-05,
      "loss": 1.5486,
      "step": 57
    },
    {
      "epoch": 0.0638063806380638,
      "grad_norm": 2.734375,
      "learning_rate": 7.847171197648789e-05,
      "loss": 1.7472,
      "step": 58
    },
    {
      "epoch": 0.06490649064906491,
      "grad_norm": 2.96875,
      "learning_rate": 7.844232182218956e-05,
      "loss": 1.9393,
      "step": 59
    },
    {
      "epoch": 0.066006600660066,
      "grad_norm": 3.40625,
      "learning_rate": 7.841293166789127e-05,
      "loss": 1.6674,
      "step": 60
    },
    {
      "epoch": 0.0671067106710671,
      "grad_norm": 3.078125,
      "learning_rate": 7.838354151359296e-05,
      "loss": 1.5629,
      "step": 61
    },
    {
      "epoch": 0.06820682068206821,
      "grad_norm": 2.890625,
      "learning_rate": 7.835415135929465e-05,
      "loss": 1.4875,
      "step": 62
    },
    {
      "epoch": 0.06930693069306931,
      "grad_norm": 2.953125,
      "learning_rate": 7.832476120499632e-05,
      "loss": 1.7542,
      "step": 63
    },
    {
      "epoch": 0.0704070407040704,
      "grad_norm": 2.859375,
      "learning_rate": 7.829537105069803e-05,
      "loss": 1.3666,
      "step": 64
    },
    {
      "epoch": 0.07150715071507151,
      "grad_norm": 3.421875,
      "learning_rate": 7.826598089639972e-05,
      "loss": 1.8975,
      "step": 65
    },
    {
      "epoch": 0.07260726072607261,
      "grad_norm": 2.953125,
      "learning_rate": 7.82365907421014e-05,
      "loss": 1.6946,
      "step": 66
    },
    {
      "epoch": 0.0737073707370737,
      "grad_norm": 2.625,
      "learning_rate": 7.820720058780309e-05,
      "loss": 1.9786,
      "step": 67
    },
    {
      "epoch": 0.0748074807480748,
      "grad_norm": 3.1875,
      "learning_rate": 7.817781043350479e-05,
      "loss": 1.5855,
      "step": 68
    },
    {
      "epoch": 0.07590759075907591,
      "grad_norm": 2.421875,
      "learning_rate": 7.814842027920647e-05,
      "loss": 1.4395,
      "step": 69
    },
    {
      "epoch": 0.07700770077007701,
      "grad_norm": 3.15625,
      "learning_rate": 7.811903012490816e-05,
      "loss": 1.7063,
      "step": 70
    },
    {
      "epoch": 0.0781078107810781,
      "grad_norm": 2.90625,
      "learning_rate": 7.808963997060985e-05,
      "loss": 1.7596,
      "step": 71
    },
    {
      "epoch": 0.07920792079207921,
      "grad_norm": 2.953125,
      "learning_rate": 7.806024981631155e-05,
      "loss": 1.6308,
      "step": 72
    },
    {
      "epoch": 0.08030803080308031,
      "grad_norm": 2.9375,
      "learning_rate": 7.803085966201323e-05,
      "loss": 1.513,
      "step": 73
    },
    {
      "epoch": 0.0814081408140814,
      "grad_norm": 2.609375,
      "learning_rate": 7.800146950771492e-05,
      "loss": 1.8351,
      "step": 74
    },
    {
      "epoch": 0.08250825082508251,
      "grad_norm": 3.484375,
      "learning_rate": 7.797207935341661e-05,
      "loss": 1.9086,
      "step": 75
    },
    {
      "epoch": 0.08360836083608361,
      "grad_norm": 2.453125,
      "learning_rate": 7.79426891991183e-05,
      "loss": 1.7254,
      "step": 76
    },
    {
      "epoch": 0.0847084708470847,
      "grad_norm": 2.859375,
      "learning_rate": 7.791329904481999e-05,
      "loss": 1.5895,
      "step": 77
    },
    {
      "epoch": 0.0858085808580858,
      "grad_norm": 2.78125,
      "learning_rate": 7.788390889052168e-05,
      "loss": 1.409,
      "step": 78
    },
    {
      "epoch": 0.08690869086908691,
      "grad_norm": 2.90625,
      "learning_rate": 7.785451873622337e-05,
      "loss": 1.912,
      "step": 79
    },
    {
      "epoch": 0.08800880088008801,
      "grad_norm": 3.015625,
      "learning_rate": 7.782512858192506e-05,
      "loss": 1.8979,
      "step": 80
    },
    {
      "epoch": 0.0891089108910891,
      "grad_norm": 3.1875,
      "learning_rate": 7.779573842762675e-05,
      "loss": 1.772,
      "step": 81
    },
    {
      "epoch": 0.09020902090209021,
      "grad_norm": 2.5625,
      "learning_rate": 7.776634827332844e-05,
      "loss": 1.7706,
      "step": 82
    },
    {
      "epoch": 0.09130913091309131,
      "grad_norm": 2.90625,
      "learning_rate": 7.773695811903013e-05,
      "loss": 1.7933,
      "step": 83
    },
    {
      "epoch": 0.0924092409240924,
      "grad_norm": 2.5625,
      "learning_rate": 7.770756796473181e-05,
      "loss": 1.4408,
      "step": 84
    },
    {
      "epoch": 0.09350935093509351,
      "grad_norm": 2.515625,
      "learning_rate": 7.767817781043352e-05,
      "loss": 1.6825,
      "step": 85
    },
    {
      "epoch": 0.09460946094609461,
      "grad_norm": 2.859375,
      "learning_rate": 7.76487876561352e-05,
      "loss": 1.9553,
      "step": 86
    },
    {
      "epoch": 0.09570957095709572,
      "grad_norm": 2.78125,
      "learning_rate": 7.76193975018369e-05,
      "loss": 1.3714,
      "step": 87
    },
    {
      "epoch": 0.0968096809680968,
      "grad_norm": 2.703125,
      "learning_rate": 7.759000734753857e-05,
      "loss": 1.8458,
      "step": 88
    },
    {
      "epoch": 0.09790979097909791,
      "grad_norm": 2.859375,
      "learning_rate": 7.756061719324028e-05,
      "loss": 1.7907,
      "step": 89
    },
    {
      "epoch": 0.09900990099009901,
      "grad_norm": 3.390625,
      "learning_rate": 7.753122703894197e-05,
      "loss": 1.7021,
      "step": 90
    },
    {
      "epoch": 0.1001100110011001,
      "grad_norm": 3.203125,
      "learning_rate": 7.750183688464365e-05,
      "loss": 1.7753,
      "step": 91
    },
    {
      "epoch": 0.10121012101210121,
      "grad_norm": 2.4375,
      "learning_rate": 7.747244673034534e-05,
      "loss": 1.7924,
      "step": 92
    },
    {
      "epoch": 0.10231023102310231,
      "grad_norm": 2.796875,
      "learning_rate": 7.744305657604703e-05,
      "loss": 1.7719,
      "step": 93
    },
    {
      "epoch": 0.1034103410341034,
      "grad_norm": 2.484375,
      "learning_rate": 7.741366642174872e-05,
      "loss": 1.476,
      "step": 94
    },
    {
      "epoch": 0.10451045104510451,
      "grad_norm": 2.65625,
      "learning_rate": 7.738427626745041e-05,
      "loss": 1.588,
      "step": 95
    },
    {
      "epoch": 0.10561056105610561,
      "grad_norm": 2.828125,
      "learning_rate": 7.73548861131521e-05,
      "loss": 1.614,
      "step": 96
    },
    {
      "epoch": 0.10671067106710672,
      "grad_norm": 2.40625,
      "learning_rate": 7.732549595885379e-05,
      "loss": 1.3961,
      "step": 97
    },
    {
      "epoch": 0.1078107810781078,
      "grad_norm": 2.5625,
      "learning_rate": 7.729610580455548e-05,
      "loss": 1.6342,
      "step": 98
    },
    {
      "epoch": 0.10891089108910891,
      "grad_norm": 2.65625,
      "learning_rate": 7.726671565025717e-05,
      "loss": 1.672,
      "step": 99
    },
    {
      "epoch": 0.11001100110011001,
      "grad_norm": 3.484375,
      "learning_rate": 7.723732549595886e-05,
      "loss": 1.5051,
      "step": 100
    },
    {
      "epoch": 0.1111111111111111,
      "grad_norm": 2.78125,
      "learning_rate": 7.720793534166055e-05,
      "loss": 1.9235,
      "step": 101
    },
    {
      "epoch": 0.11221122112211221,
      "grad_norm": 2.5,
      "learning_rate": 7.717854518736224e-05,
      "loss": 1.4361,
      "step": 102
    },
    {
      "epoch": 0.11331133113311331,
      "grad_norm": 3.0,
      "learning_rate": 7.714915503306393e-05,
      "loss": 1.6646,
      "step": 103
    },
    {
      "epoch": 0.11441144114411442,
      "grad_norm": 2.578125,
      "learning_rate": 7.711976487876562e-05,
      "loss": 1.4909,
      "step": 104
    },
    {
      "epoch": 0.11551155115511551,
      "grad_norm": 3.109375,
      "learning_rate": 7.709037472446731e-05,
      "loss": 1.6798,
      "step": 105
    },
    {
      "epoch": 0.11661166116611661,
      "grad_norm": 2.828125,
      "learning_rate": 7.7060984570169e-05,
      "loss": 1.9486,
      "step": 106
    },
    {
      "epoch": 0.11771177117711772,
      "grad_norm": 2.53125,
      "learning_rate": 7.70315944158707e-05,
      "loss": 1.5624,
      "step": 107
    },
    {
      "epoch": 0.1188118811881188,
      "grad_norm": 2.375,
      "learning_rate": 7.700220426157238e-05,
      "loss": 1.4138,
      "step": 108
    },
    {
      "epoch": 0.11991199119911991,
      "grad_norm": 2.28125,
      "learning_rate": 7.697281410727407e-05,
      "loss": 1.4676,
      "step": 109
    },
    {
      "epoch": 0.12101210121012101,
      "grad_norm": 2.828125,
      "learning_rate": 7.694342395297575e-05,
      "loss": 1.6737,
      "step": 110
    },
    {
      "epoch": 0.12211221122112212,
      "grad_norm": 2.578125,
      "learning_rate": 7.691403379867746e-05,
      "loss": 1.3377,
      "step": 111
    },
    {
      "epoch": 0.12321232123212321,
      "grad_norm": 2.40625,
      "learning_rate": 7.688464364437915e-05,
      "loss": 1.6819,
      "step": 112
    },
    {
      "epoch": 0.12431243124312431,
      "grad_norm": 2.984375,
      "learning_rate": 7.685525349008082e-05,
      "loss": 1.4974,
      "step": 113
    },
    {
      "epoch": 0.1254125412541254,
      "grad_norm": 2.796875,
      "learning_rate": 7.682586333578251e-05,
      "loss": 1.61,
      "step": 114
    },
    {
      "epoch": 0.1265126512651265,
      "grad_norm": 2.625,
      "learning_rate": 7.679647318148422e-05,
      "loss": 1.5181,
      "step": 115
    },
    {
      "epoch": 0.1276127612761276,
      "grad_norm": 2.5625,
      "learning_rate": 7.67670830271859e-05,
      "loss": 1.5267,
      "step": 116
    },
    {
      "epoch": 0.12871287128712872,
      "grad_norm": 2.828125,
      "learning_rate": 7.673769287288758e-05,
      "loss": 1.5479,
      "step": 117
    },
    {
      "epoch": 0.12981298129812982,
      "grad_norm": 2.5625,
      "learning_rate": 7.670830271858928e-05,
      "loss": 1.8229,
      "step": 118
    },
    {
      "epoch": 0.13091309130913092,
      "grad_norm": 2.546875,
      "learning_rate": 7.667891256429098e-05,
      "loss": 1.6233,
      "step": 119
    },
    {
      "epoch": 0.132013201320132,
      "grad_norm": 2.6875,
      "learning_rate": 7.664952240999266e-05,
      "loss": 2.0874,
      "step": 120
    },
    {
      "epoch": 0.1331133113311331,
      "grad_norm": 3.203125,
      "learning_rate": 7.662013225569435e-05,
      "loss": 1.7253,
      "step": 121
    },
    {
      "epoch": 0.1342134213421342,
      "grad_norm": 2.640625,
      "learning_rate": 7.659074210139604e-05,
      "loss": 1.4686,
      "step": 122
    },
    {
      "epoch": 0.1353135313531353,
      "grad_norm": 2.40625,
      "learning_rate": 7.656135194709773e-05,
      "loss": 1.7246,
      "step": 123
    },
    {
      "epoch": 0.13641364136413642,
      "grad_norm": 2.890625,
      "learning_rate": 7.653196179279942e-05,
      "loss": 1.5427,
      "step": 124
    },
    {
      "epoch": 0.13751375137513752,
      "grad_norm": 2.640625,
      "learning_rate": 7.650257163850111e-05,
      "loss": 1.7827,
      "step": 125
    },
    {
      "epoch": 0.13861386138613863,
      "grad_norm": 2.828125,
      "learning_rate": 7.64731814842028e-05,
      "loss": 1.8431,
      "step": 126
    },
    {
      "epoch": 0.1397139713971397,
      "grad_norm": 2.34375,
      "learning_rate": 7.644379132990449e-05,
      "loss": 1.5337,
      "step": 127
    },
    {
      "epoch": 0.1408140814081408,
      "grad_norm": 2.625,
      "learning_rate": 7.641440117560618e-05,
      "loss": 1.7567,
      "step": 128
    },
    {
      "epoch": 0.1419141914191419,
      "grad_norm": 3.046875,
      "learning_rate": 7.638501102130787e-05,
      "loss": 1.8852,
      "step": 129
    },
    {
      "epoch": 0.14301430143014301,
      "grad_norm": 2.4375,
      "learning_rate": 7.635562086700956e-05,
      "loss": 1.6625,
      "step": 130
    },
    {
      "epoch": 0.14411441144114412,
      "grad_norm": 2.890625,
      "learning_rate": 7.632623071271124e-05,
      "loss": 1.9057,
      "step": 131
    },
    {
      "epoch": 0.14521452145214522,
      "grad_norm": 2.28125,
      "learning_rate": 7.629684055841294e-05,
      "loss": 1.228,
      "step": 132
    },
    {
      "epoch": 0.14631463146314633,
      "grad_norm": 2.265625,
      "learning_rate": 7.626745040411463e-05,
      "loss": 1.8245,
      "step": 133
    },
    {
      "epoch": 0.1474147414741474,
      "grad_norm": 2.34375,
      "learning_rate": 7.623806024981632e-05,
      "loss": 1.5367,
      "step": 134
    },
    {
      "epoch": 0.1485148514851485,
      "grad_norm": 2.515625,
      "learning_rate": 7.6208670095518e-05,
      "loss": 1.8345,
      "step": 135
    },
    {
      "epoch": 0.1496149614961496,
      "grad_norm": 2.984375,
      "learning_rate": 7.61792799412197e-05,
      "loss": 1.8428,
      "step": 136
    },
    {
      "epoch": 0.15071507150715072,
      "grad_norm": 2.796875,
      "learning_rate": 7.61498897869214e-05,
      "loss": 2.0396,
      "step": 137
    },
    {
      "epoch": 0.15181518151815182,
      "grad_norm": 2.796875,
      "learning_rate": 7.612049963262307e-05,
      "loss": 1.7412,
      "step": 138
    },
    {
      "epoch": 0.15291529152915292,
      "grad_norm": 2.359375,
      "learning_rate": 7.609110947832476e-05,
      "loss": 1.6583,
      "step": 139
    },
    {
      "epoch": 0.15401540154015403,
      "grad_norm": 2.828125,
      "learning_rate": 7.606171932402647e-05,
      "loss": 2.0174,
      "step": 140
    },
    {
      "epoch": 0.1551155115511551,
      "grad_norm": 2.375,
      "learning_rate": 7.603232916972814e-05,
      "loss": 1.5039,
      "step": 141
    },
    {
      "epoch": 0.1562156215621562,
      "grad_norm": 2.609375,
      "learning_rate": 7.600293901542983e-05,
      "loss": 1.7029,
      "step": 142
    },
    {
      "epoch": 0.1573157315731573,
      "grad_norm": 2.8125,
      "learning_rate": 7.597354886113152e-05,
      "loss": 2.1031,
      "step": 143
    },
    {
      "epoch": 0.15841584158415842,
      "grad_norm": 2.6875,
      "learning_rate": 7.594415870683321e-05,
      "loss": 1.7631,
      "step": 144
    },
    {
      "epoch": 0.15951595159515952,
      "grad_norm": 2.71875,
      "learning_rate": 7.59147685525349e-05,
      "loss": 1.5917,
      "step": 145
    },
    {
      "epoch": 0.16061606160616063,
      "grad_norm": 2.6875,
      "learning_rate": 7.58853783982366e-05,
      "loss": 1.5669,
      "step": 146
    },
    {
      "epoch": 0.1617161716171617,
      "grad_norm": 2.625,
      "learning_rate": 7.585598824393829e-05,
      "loss": 1.5334,
      "step": 147
    },
    {
      "epoch": 0.1628162816281628,
      "grad_norm": 2.625,
      "learning_rate": 7.582659808963998e-05,
      "loss": 1.6525,
      "step": 148
    },
    {
      "epoch": 0.1639163916391639,
      "grad_norm": 2.578125,
      "learning_rate": 7.579720793534167e-05,
      "loss": 1.6358,
      "step": 149
    },
    {
      "epoch": 0.16501650165016502,
      "grad_norm": 2.609375,
      "learning_rate": 7.576781778104336e-05,
      "loss": 1.6799,
      "step": 150
    },
    {
      "epoch": 0.16611661166116612,
      "grad_norm": 2.421875,
      "learning_rate": 7.573842762674505e-05,
      "loss": 1.5884,
      "step": 151
    },
    {
      "epoch": 0.16721672167216722,
      "grad_norm": 2.734375,
      "learning_rate": 7.570903747244674e-05,
      "loss": 1.6021,
      "step": 152
    },
    {
      "epoch": 0.16831683168316833,
      "grad_norm": 2.84375,
      "learning_rate": 7.567964731814843e-05,
      "loss": 1.9629,
      "step": 153
    },
    {
      "epoch": 0.1694169416941694,
      "grad_norm": 2.71875,
      "learning_rate": 7.565025716385012e-05,
      "loss": 1.6163,
      "step": 154
    },
    {
      "epoch": 0.1705170517051705,
      "grad_norm": 2.28125,
      "learning_rate": 7.562086700955181e-05,
      "loss": 1.7119,
      "step": 155
    },
    {
      "epoch": 0.1716171617161716,
      "grad_norm": 2.453125,
      "learning_rate": 7.559147685525349e-05,
      "loss": 1.5513,
      "step": 156
    },
    {
      "epoch": 0.17271727172717272,
      "grad_norm": 2.328125,
      "learning_rate": 7.556208670095519e-05,
      "loss": 1.5609,
      "step": 157
    },
    {
      "epoch": 0.17381738173817382,
      "grad_norm": 3.109375,
      "learning_rate": 7.553269654665688e-05,
      "loss": 1.7749,
      "step": 158
    },
    {
      "epoch": 0.17491749174917492,
      "grad_norm": 2.8125,
      "learning_rate": 7.550330639235857e-05,
      "loss": 1.6209,
      "step": 159
    },
    {
      "epoch": 0.17601760176017603,
      "grad_norm": 2.640625,
      "learning_rate": 7.547391623806025e-05,
      "loss": 1.7637,
      "step": 160
    },
    {
      "epoch": 0.1771177117711771,
      "grad_norm": 2.25,
      "learning_rate": 7.544452608376194e-05,
      "loss": 1.1374,
      "step": 161
    },
    {
      "epoch": 0.1782178217821782,
      "grad_norm": 2.421875,
      "learning_rate": 7.541513592946364e-05,
      "loss": 1.3335,
      "step": 162
    },
    {
      "epoch": 0.1793179317931793,
      "grad_norm": 2.453125,
      "learning_rate": 7.538574577516532e-05,
      "loss": 1.727,
      "step": 163
    },
    {
      "epoch": 0.18041804180418042,
      "grad_norm": 2.578125,
      "learning_rate": 7.535635562086701e-05,
      "loss": 1.6084,
      "step": 164
    },
    {
      "epoch": 0.18151815181518152,
      "grad_norm": 2.828125,
      "learning_rate": 7.53269654665687e-05,
      "loss": 2.068,
      "step": 165
    },
    {
      "epoch": 0.18261826182618263,
      "grad_norm": 2.515625,
      "learning_rate": 7.529757531227039e-05,
      "loss": 1.4939,
      "step": 166
    },
    {
      "epoch": 0.18371837183718373,
      "grad_norm": 2.453125,
      "learning_rate": 7.526818515797208e-05,
      "loss": 1.4844,
      "step": 167
    },
    {
      "epoch": 0.1848184818481848,
      "grad_norm": 2.75,
      "learning_rate": 7.523879500367377e-05,
      "loss": 1.4863,
      "step": 168
    },
    {
      "epoch": 0.1859185918591859,
      "grad_norm": 2.359375,
      "learning_rate": 7.520940484937546e-05,
      "loss": 1.5916,
      "step": 169
    },
    {
      "epoch": 0.18701870187018702,
      "grad_norm": 2.875,
      "learning_rate": 7.518001469507715e-05,
      "loss": 1.6779,
      "step": 170
    },
    {
      "epoch": 0.18811881188118812,
      "grad_norm": 2.765625,
      "learning_rate": 7.515062454077884e-05,
      "loss": 1.4509,
      "step": 171
    },
    {
      "epoch": 0.18921892189218922,
      "grad_norm": 2.46875,
      "learning_rate": 7.512123438648054e-05,
      "loss": 1.5561,
      "step": 172
    },
    {
      "epoch": 0.19031903190319033,
      "grad_norm": 2.4375,
      "learning_rate": 7.509184423218223e-05,
      "loss": 1.5905,
      "step": 173
    },
    {
      "epoch": 0.19141914191419143,
      "grad_norm": 2.515625,
      "learning_rate": 7.506245407788392e-05,
      "loss": 1.604,
      "step": 174
    },
    {
      "epoch": 0.1925192519251925,
      "grad_norm": 2.359375,
      "learning_rate": 7.503306392358561e-05,
      "loss": 1.5499,
      "step": 175
    },
    {
      "epoch": 0.1936193619361936,
      "grad_norm": 2.296875,
      "learning_rate": 7.50036737692873e-05,
      "loss": 2.0668,
      "step": 176
    },
    {
      "epoch": 0.19471947194719472,
      "grad_norm": 2.546875,
      "learning_rate": 7.497428361498899e-05,
      "loss": 1.9587,
      "step": 177
    },
    {
      "epoch": 0.19581958195819582,
      "grad_norm": 2.40625,
      "learning_rate": 7.494489346069066e-05,
      "loss": 1.7427,
      "step": 178
    },
    {
      "epoch": 0.19691969196919692,
      "grad_norm": 2.515625,
      "learning_rate": 7.491550330639237e-05,
      "loss": 1.9622,
      "step": 179
    },
    {
      "epoch": 0.19801980198019803,
      "grad_norm": 2.390625,
      "learning_rate": 7.488611315209406e-05,
      "loss": 1.4033,
      "step": 180
    },
    {
      "epoch": 0.19911991199119913,
      "grad_norm": 2.796875,
      "learning_rate": 7.485672299779575e-05,
      "loss": 1.5019,
      "step": 181
    },
    {
      "epoch": 0.2002200220022002,
      "grad_norm": 2.453125,
      "learning_rate": 7.482733284349743e-05,
      "loss": 1.9215,
      "step": 182
    },
    {
      "epoch": 0.20132013201320131,
      "grad_norm": 2.53125,
      "learning_rate": 7.479794268919913e-05,
      "loss": 1.7523,
      "step": 183
    },
    {
      "epoch": 0.20242024202420242,
      "grad_norm": 2.765625,
      "learning_rate": 7.476855253490082e-05,
      "loss": 1.7564,
      "step": 184
    },
    {
      "epoch": 0.20352035203520352,
      "grad_norm": 2.3125,
      "learning_rate": 7.47391623806025e-05,
      "loss": 1.6221,
      "step": 185
    },
    {
      "epoch": 0.20462046204620463,
      "grad_norm": 2.4375,
      "learning_rate": 7.470977222630419e-05,
      "loss": 1.5086,
      "step": 186
    },
    {
      "epoch": 0.20572057205720573,
      "grad_norm": 3.28125,
      "learning_rate": 7.468038207200589e-05,
      "loss": 1.881,
      "step": 187
    },
    {
      "epoch": 0.2068206820682068,
      "grad_norm": 2.328125,
      "learning_rate": 7.465099191770757e-05,
      "loss": 1.4287,
      "step": 188
    },
    {
      "epoch": 0.2079207920792079,
      "grad_norm": 2.40625,
      "learning_rate": 7.462160176340926e-05,
      "loss": 1.6612,
      "step": 189
    },
    {
      "epoch": 0.20902090209020902,
      "grad_norm": 2.078125,
      "learning_rate": 7.459221160911095e-05,
      "loss": 1.5406,
      "step": 190
    },
    {
      "epoch": 0.21012101210121012,
      "grad_norm": 2.515625,
      "learning_rate": 7.456282145481264e-05,
      "loss": 1.7948,
      "step": 191
    },
    {
      "epoch": 0.21122112211221122,
      "grad_norm": 2.21875,
      "learning_rate": 7.453343130051433e-05,
      "loss": 1.7622,
      "step": 192
    },
    {
      "epoch": 0.21232123212321233,
      "grad_norm": 2.46875,
      "learning_rate": 7.450404114621602e-05,
      "loss": 1.5527,
      "step": 193
    },
    {
      "epoch": 0.21342134213421343,
      "grad_norm": 2.75,
      "learning_rate": 7.447465099191771e-05,
      "loss": 1.9561,
      "step": 194
    },
    {
      "epoch": 0.2145214521452145,
      "grad_norm": 2.296875,
      "learning_rate": 7.44452608376194e-05,
      "loss": 1.7265,
      "step": 195
    },
    {
      "epoch": 0.2156215621562156,
      "grad_norm": 2.265625,
      "learning_rate": 7.44158706833211e-05,
      "loss": 1.6651,
      "step": 196
    },
    {
      "epoch": 0.21672167216721672,
      "grad_norm": 2.21875,
      "learning_rate": 7.438648052902278e-05,
      "loss": 1.4983,
      "step": 197
    },
    {
      "epoch": 0.21782178217821782,
      "grad_norm": 2.375,
      "learning_rate": 7.435709037472447e-05,
      "loss": 1.718,
      "step": 198
    },
    {
      "epoch": 0.21892189218921893,
      "grad_norm": 2.5,
      "learning_rate": 7.432770022042617e-05,
      "loss": 1.7679,
      "step": 199
    },
    {
      "epoch": 0.22002200220022003,
      "grad_norm": 2.359375,
      "learning_rate": 7.429831006612786e-05,
      "loss": 1.6781,
      "step": 200
    },
    {
      "epoch": 0.22112211221122113,
      "grad_norm": 2.3125,
      "learning_rate": 7.426891991182955e-05,
      "loss": 1.637,
      "step": 201
    },
    {
      "epoch": 0.2222222222222222,
      "grad_norm": 2.53125,
      "learning_rate": 7.423952975753124e-05,
      "loss": 1.834,
      "step": 202
    },
    {
      "epoch": 0.22332233223322331,
      "grad_norm": 2.25,
      "learning_rate": 7.421013960323291e-05,
      "loss": 1.5641,
      "step": 203
    },
    {
      "epoch": 0.22442244224422442,
      "grad_norm": 2.59375,
      "learning_rate": 7.418074944893462e-05,
      "loss": 1.3664,
      "step": 204
    },
    {
      "epoch": 0.22552255225522552,
      "grad_norm": 2.5,
      "learning_rate": 7.415135929463631e-05,
      "loss": 1.5489,
      "step": 205
    },
    {
      "epoch": 0.22662266226622663,
      "grad_norm": 2.359375,
      "learning_rate": 7.4121969140338e-05,
      "loss": 1.6292,
      "step": 206
    },
    {
      "epoch": 0.22772277227722773,
      "grad_norm": 2.75,
      "learning_rate": 7.409257898603968e-05,
      "loss": 1.5634,
      "step": 207
    },
    {
      "epoch": 0.22882288228822883,
      "grad_norm": 2.34375,
      "learning_rate": 7.406318883174137e-05,
      "loss": 1.5732,
      "step": 208
    },
    {
      "epoch": 0.2299229922992299,
      "grad_norm": 2.65625,
      "learning_rate": 7.403379867744307e-05,
      "loss": 1.5683,
      "step": 209
    },
    {
      "epoch": 0.23102310231023102,
      "grad_norm": 2.234375,
      "learning_rate": 7.400440852314475e-05,
      "loss": 1.8769,
      "step": 210
    },
    {
      "epoch": 0.23212321232123212,
      "grad_norm": 2.328125,
      "learning_rate": 7.397501836884644e-05,
      "loss": 1.6611,
      "step": 211
    },
    {
      "epoch": 0.23322332233223322,
      "grad_norm": 2.84375,
      "learning_rate": 7.394562821454813e-05,
      "loss": 1.518,
      "step": 212
    },
    {
      "epoch": 0.23432343234323433,
      "grad_norm": 2.359375,
      "learning_rate": 7.391623806024982e-05,
      "loss": 1.8108,
      "step": 213
    },
    {
      "epoch": 0.23542354235423543,
      "grad_norm": 2.625,
      "learning_rate": 7.388684790595151e-05,
      "loss": 1.5755,
      "step": 214
    },
    {
      "epoch": 0.23652365236523654,
      "grad_norm": 2.265625,
      "learning_rate": 7.38574577516532e-05,
      "loss": 1.7679,
      "step": 215
    },
    {
      "epoch": 0.2376237623762376,
      "grad_norm": 2.4375,
      "learning_rate": 7.382806759735489e-05,
      "loss": 1.8811,
      "step": 216
    },
    {
      "epoch": 0.23872387238723872,
      "grad_norm": 2.34375,
      "learning_rate": 7.379867744305658e-05,
      "loss": 1.7181,
      "step": 217
    },
    {
      "epoch": 0.23982398239823982,
      "grad_norm": 2.1875,
      "learning_rate": 7.376928728875827e-05,
      "loss": 1.5724,
      "step": 218
    },
    {
      "epoch": 0.24092409240924093,
      "grad_norm": 2.46875,
      "learning_rate": 7.373989713445996e-05,
      "loss": 1.6301,
      "step": 219
    },
    {
      "epoch": 0.24202420242024203,
      "grad_norm": 2.625,
      "learning_rate": 7.371050698016165e-05,
      "loss": 2.0522,
      "step": 220
    },
    {
      "epoch": 0.24312431243124313,
      "grad_norm": 2.546875,
      "learning_rate": 7.368111682586334e-05,
      "loss": 1.9336,
      "step": 221
    },
    {
      "epoch": 0.24422442244224424,
      "grad_norm": 2.4375,
      "learning_rate": 7.365172667156503e-05,
      "loss": 1.873,
      "step": 222
    },
    {
      "epoch": 0.24532453245324531,
      "grad_norm": 2.078125,
      "learning_rate": 7.362233651726672e-05,
      "loss": 1.2278,
      "step": 223
    },
    {
      "epoch": 0.24642464246424642,
      "grad_norm": 2.390625,
      "learning_rate": 7.359294636296841e-05,
      "loss": 1.7754,
      "step": 224
    },
    {
      "epoch": 0.24752475247524752,
      "grad_norm": 2.140625,
      "learning_rate": 7.356355620867009e-05,
      "loss": 1.6586,
      "step": 225
    },
    {
      "epoch": 0.24862486248624863,
      "grad_norm": 2.28125,
      "learning_rate": 7.35341660543718e-05,
      "loss": 1.7999,
      "step": 226
    },
    {
      "epoch": 0.24972497249724973,
      "grad_norm": 2.15625,
      "learning_rate": 7.350477590007349e-05,
      "loss": 1.7571,
      "step": 227
    },
    {
      "epoch": 0.2508250825082508,
      "grad_norm": 2.71875,
      "learning_rate": 7.347538574577516e-05,
      "loss": 1.7123,
      "step": 228
    },
    {
      "epoch": 0.25192519251925194,
      "grad_norm": 2.25,
      "learning_rate": 7.344599559147685e-05,
      "loss": 1.4597,
      "step": 229
    },
    {
      "epoch": 0.253025302530253,
      "grad_norm": 2.328125,
      "learning_rate": 7.341660543717856e-05,
      "loss": 1.6806,
      "step": 230
    },
    {
      "epoch": 0.25412541254125415,
      "grad_norm": 2.59375,
      "learning_rate": 7.338721528288025e-05,
      "loss": 1.5726,
      "step": 231
    },
    {
      "epoch": 0.2552255225522552,
      "grad_norm": 2.34375,
      "learning_rate": 7.335782512858192e-05,
      "loss": 1.7357,
      "step": 232
    },
    {
      "epoch": 0.2563256325632563,
      "grad_norm": 3.25,
      "learning_rate": 7.332843497428362e-05,
      "loss": 1.5015,
      "step": 233
    },
    {
      "epoch": 0.25742574257425743,
      "grad_norm": 2.359375,
      "learning_rate": 7.329904481998532e-05,
      "loss": 1.5673,
      "step": 234
    },
    {
      "epoch": 0.2585258525852585,
      "grad_norm": 2.671875,
      "learning_rate": 7.3269654665687e-05,
      "loss": 1.3667,
      "step": 235
    },
    {
      "epoch": 0.25962596259625964,
      "grad_norm": 2.453125,
      "learning_rate": 7.324026451138869e-05,
      "loss": 1.8071,
      "step": 236
    },
    {
      "epoch": 0.2607260726072607,
      "grad_norm": 2.671875,
      "learning_rate": 7.321087435709038e-05,
      "loss": 1.7789,
      "step": 237
    },
    {
      "epoch": 0.26182618261826185,
      "grad_norm": 1.96875,
      "learning_rate": 7.318148420279207e-05,
      "loss": 1.2831,
      "step": 238
    },
    {
      "epoch": 0.2629262926292629,
      "grad_norm": 2.546875,
      "learning_rate": 7.315209404849376e-05,
      "loss": 1.5218,
      "step": 239
    },
    {
      "epoch": 0.264026402640264,
      "grad_norm": 2.65625,
      "learning_rate": 7.312270389419545e-05,
      "loss": 1.6039,
      "step": 240
    },
    {
      "epoch": 0.26512651265126513,
      "grad_norm": 2.3125,
      "learning_rate": 7.309331373989714e-05,
      "loss": 1.4841,
      "step": 241
    },
    {
      "epoch": 0.2662266226622662,
      "grad_norm": 2.171875,
      "learning_rate": 7.306392358559883e-05,
      "loss": 1.696,
      "step": 242
    },
    {
      "epoch": 0.26732673267326734,
      "grad_norm": 2.5625,
      "learning_rate": 7.303453343130052e-05,
      "loss": 1.5164,
      "step": 243
    },
    {
      "epoch": 0.2684268426842684,
      "grad_norm": 2.21875,
      "learning_rate": 7.300514327700221e-05,
      "loss": 1.4525,
      "step": 244
    },
    {
      "epoch": 0.26952695269526955,
      "grad_norm": 2.390625,
      "learning_rate": 7.29757531227039e-05,
      "loss": 1.573,
      "step": 245
    },
    {
      "epoch": 0.2706270627062706,
      "grad_norm": 2.296875,
      "learning_rate": 7.294636296840559e-05,
      "loss": 1.8129,
      "step": 246
    },
    {
      "epoch": 0.2717271727172717,
      "grad_norm": 2.484375,
      "learning_rate": 7.291697281410728e-05,
      "loss": 1.7507,
      "step": 247
    },
    {
      "epoch": 0.27282728272827284,
      "grad_norm": 2.15625,
      "learning_rate": 7.288758265980897e-05,
      "loss": 1.5875,
      "step": 248
    },
    {
      "epoch": 0.2739273927392739,
      "grad_norm": 2.484375,
      "learning_rate": 7.285819250551066e-05,
      "loss": 1.6278,
      "step": 249
    },
    {
      "epoch": 0.27502750275027504,
      "grad_norm": 2.375,
      "learning_rate": 7.282880235121234e-05,
      "loss": 1.6012,
      "step": 250
    },
    {
      "epoch": 0.2761276127612761,
      "grad_norm": 2.203125,
      "learning_rate": 7.279941219691404e-05,
      "loss": 1.9209,
      "step": 251
    },
    {
      "epoch": 0.27722772277227725,
      "grad_norm": 2.609375,
      "learning_rate": 7.277002204261574e-05,
      "loss": 1.7311,
      "step": 252
    },
    {
      "epoch": 0.27832783278327833,
      "grad_norm": 2.59375,
      "learning_rate": 7.274063188831743e-05,
      "loss": 1.6439,
      "step": 253
    },
    {
      "epoch": 0.2794279427942794,
      "grad_norm": 2.28125,
      "learning_rate": 7.27112417340191e-05,
      "loss": 1.7193,
      "step": 254
    },
    {
      "epoch": 0.28052805280528054,
      "grad_norm": 2.40625,
      "learning_rate": 7.26818515797208e-05,
      "loss": 1.5784,
      "step": 255
    },
    {
      "epoch": 0.2816281628162816,
      "grad_norm": 2.71875,
      "learning_rate": 7.26524614254225e-05,
      "loss": 1.5997,
      "step": 256
    },
    {
      "epoch": 0.28272827282728275,
      "grad_norm": 2.1875,
      "learning_rate": 7.262307127112417e-05,
      "loss": 1.4495,
      "step": 257
    },
    {
      "epoch": 0.2838283828382838,
      "grad_norm": 2.390625,
      "learning_rate": 7.259368111682586e-05,
      "loss": 1.8039,
      "step": 258
    },
    {
      "epoch": 0.28492849284928495,
      "grad_norm": 2.5625,
      "learning_rate": 7.256429096252755e-05,
      "loss": 1.7299,
      "step": 259
    },
    {
      "epoch": 0.28602860286028603,
      "grad_norm": 2.328125,
      "learning_rate": 7.253490080822925e-05,
      "loss": 1.6577,
      "step": 260
    },
    {
      "epoch": 0.2871287128712871,
      "grad_norm": 2.546875,
      "learning_rate": 7.250551065393094e-05,
      "loss": 1.5686,
      "step": 261
    },
    {
      "epoch": 0.28822882288228824,
      "grad_norm": 2.59375,
      "learning_rate": 7.247612049963263e-05,
      "loss": 1.6936,
      "step": 262
    },
    {
      "epoch": 0.2893289328932893,
      "grad_norm": 2.515625,
      "learning_rate": 7.244673034533432e-05,
      "loss": 1.6112,
      "step": 263
    },
    {
      "epoch": 0.29042904290429045,
      "grad_norm": 2.578125,
      "learning_rate": 7.241734019103601e-05,
      "loss": 1.6046,
      "step": 264
    },
    {
      "epoch": 0.2915291529152915,
      "grad_norm": 2.484375,
      "learning_rate": 7.23879500367377e-05,
      "loss": 1.5808,
      "step": 265
    },
    {
      "epoch": 0.29262926292629265,
      "grad_norm": 2.5625,
      "learning_rate": 7.235855988243939e-05,
      "loss": 1.6693,
      "step": 266
    },
    {
      "epoch": 0.29372937293729373,
      "grad_norm": 2.46875,
      "learning_rate": 7.232916972814108e-05,
      "loss": 1.5833,
      "step": 267
    },
    {
      "epoch": 0.2948294829482948,
      "grad_norm": 2.453125,
      "learning_rate": 7.229977957384277e-05,
      "loss": 1.4142,
      "step": 268
    },
    {
      "epoch": 0.29592959295929594,
      "grad_norm": 2.1875,
      "learning_rate": 7.227038941954446e-05,
      "loss": 1.7591,
      "step": 269
    },
    {
      "epoch": 0.297029702970297,
      "grad_norm": 2.40625,
      "learning_rate": 7.224099926524615e-05,
      "loss": 1.5419,
      "step": 270
    },
    {
      "epoch": 0.29812981298129815,
      "grad_norm": 2.203125,
      "learning_rate": 7.221160911094784e-05,
      "loss": 1.7169,
      "step": 271
    },
    {
      "epoch": 0.2992299229922992,
      "grad_norm": 2.390625,
      "learning_rate": 7.218221895664953e-05,
      "loss": 1.5188,
      "step": 272
    },
    {
      "epoch": 0.30033003300330036,
      "grad_norm": 2.390625,
      "learning_rate": 7.215282880235122e-05,
      "loss": 1.8599,
      "step": 273
    },
    {
      "epoch": 0.30143014301430143,
      "grad_norm": 2.265625,
      "learning_rate": 7.212343864805291e-05,
      "loss": 1.7104,
      "step": 274
    },
    {
      "epoch": 0.3025302530253025,
      "grad_norm": 2.34375,
      "learning_rate": 7.209404849375459e-05,
      "loss": 1.5099,
      "step": 275
    },
    {
      "epoch": 0.30363036303630364,
      "grad_norm": 2.453125,
      "learning_rate": 7.206465833945628e-05,
      "loss": 1.5086,
      "step": 276
    },
    {
      "epoch": 0.3047304730473047,
      "grad_norm": 2.515625,
      "learning_rate": 7.203526818515798e-05,
      "loss": 1.734,
      "step": 277
    },
    {
      "epoch": 0.30583058305830585,
      "grad_norm": 2.390625,
      "learning_rate": 7.200587803085967e-05,
      "loss": 1.7063,
      "step": 278
    },
    {
      "epoch": 0.3069306930693069,
      "grad_norm": 2.25,
      "learning_rate": 7.197648787656135e-05,
      "loss": 1.5324,
      "step": 279
    },
    {
      "epoch": 0.30803080308030806,
      "grad_norm": 2.703125,
      "learning_rate": 7.194709772226304e-05,
      "loss": 1.5914,
      "step": 280
    },
    {
      "epoch": 0.30913091309130913,
      "grad_norm": 2.078125,
      "learning_rate": 7.191770756796475e-05,
      "loss": 1.6797,
      "step": 281
    },
    {
      "epoch": 0.3102310231023102,
      "grad_norm": 2.203125,
      "learning_rate": 7.188831741366642e-05,
      "loss": 1.4601,
      "step": 282
    },
    {
      "epoch": 0.31133113311331134,
      "grad_norm": 2.453125,
      "learning_rate": 7.185892725936811e-05,
      "loss": 1.5889,
      "step": 283
    },
    {
      "epoch": 0.3124312431243124,
      "grad_norm": 2.375,
      "learning_rate": 7.18295371050698e-05,
      "loss": 1.8779,
      "step": 284
    },
    {
      "epoch": 0.31353135313531355,
      "grad_norm": 2.375,
      "learning_rate": 7.18001469507715e-05,
      "loss": 1.5158,
      "step": 285
    },
    {
      "epoch": 0.3146314631463146,
      "grad_norm": 2.703125,
      "learning_rate": 7.177075679647319e-05,
      "loss": 1.4873,
      "step": 286
    },
    {
      "epoch": 0.31573157315731576,
      "grad_norm": 2.328125,
      "learning_rate": 7.174136664217488e-05,
      "loss": 1.7513,
      "step": 287
    },
    {
      "epoch": 0.31683168316831684,
      "grad_norm": 2.3125,
      "learning_rate": 7.171197648787657e-05,
      "loss": 1.5886,
      "step": 288
    },
    {
      "epoch": 0.3179317931793179,
      "grad_norm": 2.453125,
      "learning_rate": 7.168258633357826e-05,
      "loss": 1.7757,
      "step": 289
    },
    {
      "epoch": 0.31903190319031904,
      "grad_norm": 2.78125,
      "learning_rate": 7.165319617927995e-05,
      "loss": 1.6159,
      "step": 290
    },
    {
      "epoch": 0.3201320132013201,
      "grad_norm": 2.53125,
      "learning_rate": 7.162380602498164e-05,
      "loss": 1.7434,
      "step": 291
    },
    {
      "epoch": 0.32123212321232125,
      "grad_norm": 2.453125,
      "learning_rate": 7.159441587068333e-05,
      "loss": 1.7193,
      "step": 292
    },
    {
      "epoch": 0.32233223322332233,
      "grad_norm": 2.4375,
      "learning_rate": 7.156502571638502e-05,
      "loss": 1.6798,
      "step": 293
    },
    {
      "epoch": 0.3234323432343234,
      "grad_norm": 2.453125,
      "learning_rate": 7.153563556208671e-05,
      "loss": 1.6219,
      "step": 294
    },
    {
      "epoch": 0.32453245324532454,
      "grad_norm": 2.40625,
      "learning_rate": 7.15062454077884e-05,
      "loss": 1.4986,
      "step": 295
    },
    {
      "epoch": 0.3256325632563256,
      "grad_norm": 2.359375,
      "learning_rate": 7.147685525349009e-05,
      "loss": 1.6969,
      "step": 296
    },
    {
      "epoch": 0.32673267326732675,
      "grad_norm": 2.421875,
      "learning_rate": 7.144746509919177e-05,
      "loss": 1.7282,
      "step": 297
    },
    {
      "epoch": 0.3278327832783278,
      "grad_norm": 2.484375,
      "learning_rate": 7.141807494489347e-05,
      "loss": 1.6309,
      "step": 298
    },
    {
      "epoch": 0.32893289328932895,
      "grad_norm": 2.3125,
      "learning_rate": 7.138868479059516e-05,
      "loss": 1.6723,
      "step": 299
    },
    {
      "epoch": 0.33003300330033003,
      "grad_norm": 2.625,
      "learning_rate": 7.135929463629684e-05,
      "loss": 1.6562,
      "step": 300
    },
    {
      "epoch": 0.3311331133113311,
      "grad_norm": 2.359375,
      "learning_rate": 7.132990448199853e-05,
      "loss": 1.8531,
      "step": 301
    },
    {
      "epoch": 0.33223322332233224,
      "grad_norm": 2.171875,
      "learning_rate": 7.130051432770023e-05,
      "loss": 1.5951,
      "step": 302
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 2.796875,
      "learning_rate": 7.127112417340192e-05,
      "loss": 1.8763,
      "step": 303
    },
    {
      "epoch": 0.33443344334433445,
      "grad_norm": 2.40625,
      "learning_rate": 7.12417340191036e-05,
      "loss": 1.5892,
      "step": 304
    },
    {
      "epoch": 0.3355335533553355,
      "grad_norm": 2.609375,
      "learning_rate": 7.121234386480529e-05,
      "loss": 1.6187,
      "step": 305
    },
    {
      "epoch": 0.33663366336633666,
      "grad_norm": 2.5,
      "learning_rate": 7.1182953710507e-05,
      "loss": 1.6265,
      "step": 306
    },
    {
      "epoch": 0.33773377337733773,
      "grad_norm": 2.328125,
      "learning_rate": 7.115356355620867e-05,
      "loss": 1.7981,
      "step": 307
    },
    {
      "epoch": 0.3388338833883388,
      "grad_norm": 3.015625,
      "learning_rate": 7.112417340191036e-05,
      "loss": 1.6605,
      "step": 308
    },
    {
      "epoch": 0.33993399339933994,
      "grad_norm": 2.484375,
      "learning_rate": 7.109478324761205e-05,
      "loss": 1.3162,
      "step": 309
    },
    {
      "epoch": 0.341034103410341,
      "grad_norm": 2.546875,
      "learning_rate": 7.106539309331374e-05,
      "loss": 1.6,
      "step": 310
    },
    {
      "epoch": 0.34213421342134215,
      "grad_norm": 2.4375,
      "learning_rate": 7.103600293901543e-05,
      "loss": 1.7578,
      "step": 311
    },
    {
      "epoch": 0.3432343234323432,
      "grad_norm": 2.375,
      "learning_rate": 7.100661278471712e-05,
      "loss": 1.8671,
      "step": 312
    },
    {
      "epoch": 0.34433443344334436,
      "grad_norm": 2.625,
      "learning_rate": 7.097722263041882e-05,
      "loss": 1.5863,
      "step": 313
    },
    {
      "epoch": 0.34543454345434543,
      "grad_norm": 2.328125,
      "learning_rate": 7.09478324761205e-05,
      "loss": 1.4452,
      "step": 314
    },
    {
      "epoch": 0.3465346534653465,
      "grad_norm": 2.296875,
      "learning_rate": 7.09184423218222e-05,
      "loss": 1.7327,
      "step": 315
    },
    {
      "epoch": 0.34763476347634764,
      "grad_norm": 2.421875,
      "learning_rate": 7.088905216752389e-05,
      "loss": 1.9633,
      "step": 316
    },
    {
      "epoch": 0.3487348734873487,
      "grad_norm": 2.1875,
      "learning_rate": 7.085966201322558e-05,
      "loss": 1.6835,
      "step": 317
    },
    {
      "epoch": 0.34983498349834985,
      "grad_norm": 2.40625,
      "learning_rate": 7.083027185892727e-05,
      "loss": 1.699,
      "step": 318
    },
    {
      "epoch": 0.3509350935093509,
      "grad_norm": 2.375,
      "learning_rate": 7.080088170462896e-05,
      "loss": 1.4307,
      "step": 319
    },
    {
      "epoch": 0.35203520352035206,
      "grad_norm": 2.71875,
      "learning_rate": 7.077149155033065e-05,
      "loss": 1.4165,
      "step": 320
    },
    {
      "epoch": 0.35313531353135313,
      "grad_norm": 2.453125,
      "learning_rate": 7.074210139603234e-05,
      "loss": 1.5924,
      "step": 321
    },
    {
      "epoch": 0.3542354235423542,
      "grad_norm": 2.5,
      "learning_rate": 7.071271124173402e-05,
      "loss": 1.7753,
      "step": 322
    },
    {
      "epoch": 0.35533553355335534,
      "grad_norm": 2.25,
      "learning_rate": 7.068332108743572e-05,
      "loss": 1.9986,
      "step": 323
    },
    {
      "epoch": 0.3564356435643564,
      "grad_norm": 2.59375,
      "learning_rate": 7.065393093313741e-05,
      "loss": 1.6479,
      "step": 324
    },
    {
      "epoch": 0.35753575357535755,
      "grad_norm": 2.109375,
      "learning_rate": 7.06245407788391e-05,
      "loss": 1.7177,
      "step": 325
    },
    {
      "epoch": 0.3586358635863586,
      "grad_norm": 2.265625,
      "learning_rate": 7.059515062454078e-05,
      "loss": 1.8747,
      "step": 326
    },
    {
      "epoch": 0.35973597359735976,
      "grad_norm": 2.359375,
      "learning_rate": 7.056576047024247e-05,
      "loss": 1.5444,
      "step": 327
    },
    {
      "epoch": 0.36083608360836084,
      "grad_norm": 3.34375,
      "learning_rate": 7.053637031594417e-05,
      "loss": 1.6521,
      "step": 328
    },
    {
      "epoch": 0.3619361936193619,
      "grad_norm": 3.09375,
      "learning_rate": 7.050698016164585e-05,
      "loss": 1.6102,
      "step": 329
    },
    {
      "epoch": 0.36303630363036304,
      "grad_norm": 2.421875,
      "learning_rate": 7.047759000734754e-05,
      "loss": 1.514,
      "step": 330
    },
    {
      "epoch": 0.3641364136413641,
      "grad_norm": 2.6875,
      "learning_rate": 7.044819985304923e-05,
      "loss": 1.4824,
      "step": 331
    },
    {
      "epoch": 0.36523652365236525,
      "grad_norm": 2.140625,
      "learning_rate": 7.041880969875092e-05,
      "loss": 1.3038,
      "step": 332
    },
    {
      "epoch": 0.36633663366336633,
      "grad_norm": 2.40625,
      "learning_rate": 7.038941954445261e-05,
      "loss": 2.0307,
      "step": 333
    },
    {
      "epoch": 0.36743674367436746,
      "grad_norm": 2.421875,
      "learning_rate": 7.03600293901543e-05,
      "loss": 1.7726,
      "step": 334
    },
    {
      "epoch": 0.36853685368536854,
      "grad_norm": 2.203125,
      "learning_rate": 7.033063923585599e-05,
      "loss": 1.6265,
      "step": 335
    },
    {
      "epoch": 0.3696369636963696,
      "grad_norm": 2.484375,
      "learning_rate": 7.030124908155768e-05,
      "loss": 1.67,
      "step": 336
    },
    {
      "epoch": 0.37073707370737075,
      "grad_norm": 2.296875,
      "learning_rate": 7.027185892725937e-05,
      "loss": 1.7445,
      "step": 337
    },
    {
      "epoch": 0.3718371837183718,
      "grad_norm": 2.140625,
      "learning_rate": 7.024246877296106e-05,
      "loss": 1.5664,
      "step": 338
    },
    {
      "epoch": 0.37293729372937295,
      "grad_norm": 2.390625,
      "learning_rate": 7.021307861866275e-05,
      "loss": 1.395,
      "step": 339
    },
    {
      "epoch": 0.37403740374037403,
      "grad_norm": 2.484375,
      "learning_rate": 7.018368846436445e-05,
      "loss": 1.5827,
      "step": 340
    },
    {
      "epoch": 0.37513751375137516,
      "grad_norm": 2.4375,
      "learning_rate": 7.015429831006614e-05,
      "loss": 1.3384,
      "step": 341
    },
    {
      "epoch": 0.37623762376237624,
      "grad_norm": 2.265625,
      "learning_rate": 7.012490815576783e-05,
      "loss": 1.5161,
      "step": 342
    },
    {
      "epoch": 0.3773377337733773,
      "grad_norm": 2.53125,
      "learning_rate": 7.009551800146952e-05,
      "loss": 1.9063,
      "step": 343
    },
    {
      "epoch": 0.37843784378437845,
      "grad_norm": 2.328125,
      "learning_rate": 7.00661278471712e-05,
      "loss": 1.6597,
      "step": 344
    },
    {
      "epoch": 0.3795379537953795,
      "grad_norm": 2.296875,
      "learning_rate": 7.00367376928729e-05,
      "loss": 1.5507,
      "step": 345
    },
    {
      "epoch": 0.38063806380638066,
      "grad_norm": 2.53125,
      "learning_rate": 7.000734753857459e-05,
      "loss": 1.6965,
      "step": 346
    },
    {
      "epoch": 0.38173817381738173,
      "grad_norm": 2.625,
      "learning_rate": 6.997795738427627e-05,
      "loss": 1.4365,
      "step": 347
    },
    {
      "epoch": 0.38283828382838286,
      "grad_norm": 2.46875,
      "learning_rate": 6.994856722997796e-05,
      "loss": 1.6257,
      "step": 348
    },
    {
      "epoch": 0.38393839383938394,
      "grad_norm": 2.46875,
      "learning_rate": 6.991917707567966e-05,
      "loss": 1.4717,
      "step": 349
    },
    {
      "epoch": 0.385038503850385,
      "grad_norm": 2.421875,
      "learning_rate": 6.988978692138135e-05,
      "loss": 1.7631,
      "step": 350
    },
    {
      "epoch": 0.38613861386138615,
      "grad_norm": 2.328125,
      "learning_rate": 6.986039676708303e-05,
      "loss": 1.4739,
      "step": 351
    },
    {
      "epoch": 0.3872387238723872,
      "grad_norm": 2.453125,
      "learning_rate": 6.983100661278472e-05,
      "loss": 1.5912,
      "step": 352
    },
    {
      "epoch": 0.38833883388338836,
      "grad_norm": 2.46875,
      "learning_rate": 6.980161645848642e-05,
      "loss": 1.8626,
      "step": 353
    },
    {
      "epoch": 0.38943894389438943,
      "grad_norm": 2.28125,
      "learning_rate": 6.97722263041881e-05,
      "loss": 1.6295,
      "step": 354
    },
    {
      "epoch": 0.39053905390539057,
      "grad_norm": 2.46875,
      "learning_rate": 6.974283614988979e-05,
      "loss": 1.4841,
      "step": 355
    },
    {
      "epoch": 0.39163916391639164,
      "grad_norm": 2.328125,
      "learning_rate": 6.971344599559148e-05,
      "loss": 1.5922,
      "step": 356
    },
    {
      "epoch": 0.3927392739273927,
      "grad_norm": 2.3125,
      "learning_rate": 6.968405584129317e-05,
      "loss": 1.556,
      "step": 357
    },
    {
      "epoch": 0.39383938393839385,
      "grad_norm": 2.265625,
      "learning_rate": 6.965466568699486e-05,
      "loss": 1.7782,
      "step": 358
    },
    {
      "epoch": 0.3949394939493949,
      "grad_norm": 2.359375,
      "learning_rate": 6.962527553269655e-05,
      "loss": 1.4288,
      "step": 359
    },
    {
      "epoch": 0.39603960396039606,
      "grad_norm": 2.625,
      "learning_rate": 6.959588537839824e-05,
      "loss": 1.6686,
      "step": 360
    },
    {
      "epoch": 0.39713971397139713,
      "grad_norm": 2.515625,
      "learning_rate": 6.956649522409993e-05,
      "loss": 1.6766,
      "step": 361
    },
    {
      "epoch": 0.39823982398239827,
      "grad_norm": 2.359375,
      "learning_rate": 6.953710506980162e-05,
      "loss": 1.4964,
      "step": 362
    },
    {
| "epoch": 0.39933993399339934, | |
| "grad_norm": 2.5, | |
| "learning_rate": 6.950771491550331e-05, | |
| "loss": 1.8088, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.4004400440044004, | |
| "grad_norm": 2.265625, | |
| "learning_rate": 6.9478324761205e-05, | |
| "loss": 1.746, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.40154015401540155, | |
| "grad_norm": 2.109375, | |
| "learning_rate": 6.94489346069067e-05, | |
| "loss": 1.8511, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.40264026402640263, | |
| "grad_norm": 2.640625, | |
| "learning_rate": 6.941954445260838e-05, | |
| "loss": 1.5927, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.40374037403740376, | |
| "grad_norm": 2.640625, | |
| "learning_rate": 6.939015429831008e-05, | |
| "loss": 1.6274, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.40484048404840484, | |
| "grad_norm": 2.609375, | |
| "learning_rate": 6.936076414401177e-05, | |
| "loss": 1.7494, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.40594059405940597, | |
| "grad_norm": 2.0625, | |
| "learning_rate": 6.933137398971344e-05, | |
| "loss": 1.3937, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.40704070407040704, | |
| "grad_norm": 2.390625, | |
| "learning_rate": 6.930198383541515e-05, | |
| "loss": 1.6917, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.4081408140814081, | |
| "grad_norm": 2.15625, | |
| "learning_rate": 6.927259368111684e-05, | |
| "loss": 1.7453, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.40924092409240925, | |
| "grad_norm": 2.421875, | |
| "learning_rate": 6.924320352681853e-05, | |
| "loss": 1.6261, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.41034103410341033, | |
| "grad_norm": 2.375, | |
| "learning_rate": 6.92138133725202e-05, | |
| "loss": 1.7451, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.41144114411441146, | |
| "grad_norm": 2.671875, | |
| "learning_rate": 6.918442321822191e-05, | |
| "loss": 1.5594, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.41254125412541254, | |
| "grad_norm": 2.390625, | |
| "learning_rate": 6.91550330639236e-05, | |
| "loss": 1.6174, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.4136413641364136, | |
| "grad_norm": 2.578125, | |
| "learning_rate": 6.912564290962528e-05, | |
| "loss": 1.6909, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.41474147414741475, | |
| "grad_norm": 2.40625, | |
| "learning_rate": 6.909625275532697e-05, | |
| "loss": 1.4864, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.4158415841584158, | |
| "grad_norm": 2.453125, | |
| "learning_rate": 6.906686260102866e-05, | |
| "loss": 1.6935, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.41694169416941695, | |
| "grad_norm": 2.296875, | |
| "learning_rate": 6.903747244673035e-05, | |
| "loss": 1.3741, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.41804180418041803, | |
| "grad_norm": 2.265625, | |
| "learning_rate": 6.900808229243204e-05, | |
| "loss": 1.6303, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.41914191419141916, | |
| "grad_norm": 2.515625, | |
| "learning_rate": 6.897869213813373e-05, | |
| "loss": 1.7352, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.42024202420242024, | |
| "grad_norm": 2.359375, | |
| "learning_rate": 6.894930198383542e-05, | |
| "loss": 1.7933, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.4213421342134213, | |
| "grad_norm": 2.5, | |
| "learning_rate": 6.891991182953711e-05, | |
| "loss": 1.7936, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.42244224422442245, | |
| "grad_norm": 2.671875, | |
| "learning_rate": 6.88905216752388e-05, | |
| "loss": 1.8693, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.4235423542354235, | |
| "grad_norm": 2.390625, | |
| "learning_rate": 6.886113152094049e-05, | |
| "loss": 1.5018, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.42464246424642466, | |
| "grad_norm": 2.09375, | |
| "learning_rate": 6.883174136664218e-05, | |
| "loss": 1.8069, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.42574257425742573, | |
| "grad_norm": 2.609375, | |
| "learning_rate": 6.880235121234387e-05, | |
| "loss": 1.8319, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.42684268426842686, | |
| "grad_norm": 2.546875, | |
| "learning_rate": 6.877296105804556e-05, | |
| "loss": 1.5653, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.42794279427942794, | |
| "grad_norm": 2.25, | |
| "learning_rate": 6.874357090374725e-05, | |
| "loss": 1.6618, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.429042904290429, | |
| "grad_norm": 2.03125, | |
| "learning_rate": 6.871418074944894e-05, | |
| "loss": 1.8716, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.43014301430143015, | |
| "grad_norm": 2.40625, | |
| "learning_rate": 6.868479059515063e-05, | |
| "loss": 1.5454, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.4312431243124312, | |
| "grad_norm": 2.46875, | |
| "learning_rate": 6.865540044085232e-05, | |
| "loss": 1.8307, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.43234323432343236, | |
| "grad_norm": 2.203125, | |
| "learning_rate": 6.862601028655401e-05, | |
| "loss": 1.6726, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.43344334433443343, | |
| "grad_norm": 2.625, | |
| "learning_rate": 6.859662013225569e-05, | |
| "loss": 1.7123, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.43454345434543457, | |
| "grad_norm": 2.296875, | |
| "learning_rate": 6.856722997795738e-05, | |
| "loss": 1.5337, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.43564356435643564, | |
| "grad_norm": 2.296875, | |
| "learning_rate": 6.853783982365909e-05, | |
| "loss": 1.7549, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.4367436743674367, | |
| "grad_norm": 2.75, | |
| "learning_rate": 6.850844966936078e-05, | |
| "loss": 1.5537, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.43784378437843785, | |
| "grad_norm": 2.25, | |
| "learning_rate": 6.847905951506245e-05, | |
| "loss": 1.6732, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.4389438943894389, | |
| "grad_norm": 2.46875, | |
| "learning_rate": 6.844966936076414e-05, | |
| "loss": 1.8663, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.44004400440044006, | |
| "grad_norm": 2.359375, | |
| "learning_rate": 6.842027920646585e-05, | |
| "loss": 1.7585, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.44114411441144114, | |
| "grad_norm": 2.140625, | |
| "learning_rate": 6.839088905216753e-05, | |
| "loss": 1.6756, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.44224422442244227, | |
| "grad_norm": 2.46875, | |
| "learning_rate": 6.836149889786922e-05, | |
| "loss": 1.7382, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.44334433443344334, | |
| "grad_norm": 2.1875, | |
| "learning_rate": 6.83321087435709e-05, | |
| "loss": 1.6183, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.4444444444444444, | |
| "grad_norm": 2.71875, | |
| "learning_rate": 6.83027185892726e-05, | |
| "loss": 1.6384, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.44554455445544555, | |
| "grad_norm": 2.4375, | |
| "learning_rate": 6.827332843497429e-05, | |
| "loss": 1.6465, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.44664466446644663, | |
| "grad_norm": 2.828125, | |
| "learning_rate": 6.824393828067598e-05, | |
| "loss": 1.6681, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.44774477447744776, | |
| "grad_norm": 2.359375, | |
| "learning_rate": 6.821454812637767e-05, | |
| "loss": 1.5549, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.44884488448844884, | |
| "grad_norm": 2.421875, | |
| "learning_rate": 6.818515797207936e-05, | |
| "loss": 1.7307, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.44994499449944997, | |
| "grad_norm": 2.75, | |
| "learning_rate": 6.815576781778105e-05, | |
| "loss": 1.4536, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.45104510451045104, | |
| "grad_norm": 2.15625, | |
| "learning_rate": 6.812637766348274e-05, | |
| "loss": 1.5995, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.4521452145214521, | |
| "grad_norm": 2.28125, | |
| "learning_rate": 6.809698750918443e-05, | |
| "loss": 1.3394, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.45324532453245325, | |
| "grad_norm": 2.515625, | |
| "learning_rate": 6.806759735488612e-05, | |
| "loss": 1.6423, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.45434543454345433, | |
| "grad_norm": 2.625, | |
| "learning_rate": 6.803820720058781e-05, | |
| "loss": 1.5503, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.45544554455445546, | |
| "grad_norm": 2.484375, | |
| "learning_rate": 6.80088170462895e-05, | |
| "loss": 1.6648, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.45654565456545654, | |
| "grad_norm": 2.25, | |
| "learning_rate": 6.797942689199119e-05, | |
| "loss": 1.6802, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.45764576457645767, | |
| "grad_norm": 2.109375, | |
| "learning_rate": 6.795003673769287e-05, | |
| "loss": 1.5534, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.45874587458745875, | |
| "grad_norm": 2.453125, | |
| "learning_rate": 6.792064658339457e-05, | |
| "loss": 1.8407, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.4598459845984598, | |
| "grad_norm": 2.328125, | |
| "learning_rate": 6.789125642909626e-05, | |
| "loss": 1.7772, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.46094609460946095, | |
| "grad_norm": 2.640625, | |
| "learning_rate": 6.786186627479794e-05, | |
| "loss": 1.4179, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.46204620462046203, | |
| "grad_norm": 2.3125, | |
| "learning_rate": 6.783247612049963e-05, | |
| "loss": 1.526, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.46314631463146316, | |
| "grad_norm": 2.28125, | |
| "learning_rate": 6.780308596620134e-05, | |
| "loss": 1.3918, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.46424642464246424, | |
| "grad_norm": 2.296875, | |
| "learning_rate": 6.777369581190303e-05, | |
| "loss": 1.5415, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.46534653465346537, | |
| "grad_norm": 2.203125, | |
| "learning_rate": 6.77443056576047e-05, | |
| "loss": 1.434, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.46644664466446645, | |
| "grad_norm": 2.171875, | |
| "learning_rate": 6.77149155033064e-05, | |
| "loss": 1.7102, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.4675467546754675, | |
| "grad_norm": 2.4375, | |
| "learning_rate": 6.76855253490081e-05, | |
| "loss": 1.787, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.46864686468646866, | |
| "grad_norm": 2.421875, | |
| "learning_rate": 6.765613519470977e-05, | |
| "loss": 1.6816, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.46974697469746973, | |
| "grad_norm": 2.28125, | |
| "learning_rate": 6.762674504041146e-05, | |
| "loss": 1.4694, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.47084708470847086, | |
| "grad_norm": 2.296875, | |
| "learning_rate": 6.759735488611316e-05, | |
| "loss": 1.5861, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.47194719471947194, | |
| "grad_norm": 2.421875, | |
| "learning_rate": 6.756796473181485e-05, | |
| "loss": 1.8261, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.4730473047304731, | |
| "grad_norm": 2.625, | |
| "learning_rate": 6.753857457751654e-05, | |
| "loss": 1.6531, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.47414741474147415, | |
| "grad_norm": 2.328125, | |
| "learning_rate": 6.750918442321823e-05, | |
| "loss": 1.5926, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.4752475247524752, | |
| "grad_norm": 2.21875, | |
| "learning_rate": 6.747979426891992e-05, | |
| "loss": 1.8642, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.47634763476347636, | |
| "grad_norm": 2.234375, | |
| "learning_rate": 6.745040411462161e-05, | |
| "loss": 1.4616, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.47744774477447743, | |
| "grad_norm": 2.453125, | |
| "learning_rate": 6.74210139603233e-05, | |
| "loss": 1.6287, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.47854785478547857, | |
| "grad_norm": 2.53125, | |
| "learning_rate": 6.739162380602499e-05, | |
| "loss": 1.6636, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.47964796479647964, | |
| "grad_norm": 2.25, | |
| "learning_rate": 6.736223365172668e-05, | |
| "loss": 1.4892, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.4807480748074808, | |
| "grad_norm": 2.28125, | |
| "learning_rate": 6.733284349742837e-05, | |
| "loss": 1.5949, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.48184818481848185, | |
| "grad_norm": 2.4375, | |
| "learning_rate": 6.730345334313006e-05, | |
| "loss": 1.697, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.4829482948294829, | |
| "grad_norm": 2.484375, | |
| "learning_rate": 6.727406318883175e-05, | |
| "loss": 1.4523, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.48404840484048406, | |
| "grad_norm": 2.734375, | |
| "learning_rate": 6.724467303453344e-05, | |
| "loss": 1.7106, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.48514851485148514, | |
| "grad_norm": 2.265625, | |
| "learning_rate": 6.721528288023512e-05, | |
| "loss": 1.4608, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.48624862486248627, | |
| "grad_norm": 2.265625, | |
| "learning_rate": 6.718589272593682e-05, | |
| "loss": 1.6342, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.48734873487348734, | |
| "grad_norm": 2.328125, | |
| "learning_rate": 6.715650257163851e-05, | |
| "loss": 1.7359, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.4884488448844885, | |
| "grad_norm": 2.484375, | |
| "learning_rate": 6.71271124173402e-05, | |
| "loss": 1.7394, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.48954895489548955, | |
| "grad_norm": 2.1875, | |
| "learning_rate": 6.709772226304188e-05, | |
| "loss": 1.4434, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.49064906490649063, | |
| "grad_norm": 2.453125, | |
| "learning_rate": 6.706833210874357e-05, | |
| "loss": 1.6439, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.49174917491749176, | |
| "grad_norm": 2.203125, | |
| "learning_rate": 6.703894195444527e-05, | |
| "loss": 1.5515, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.49284928492849284, | |
| "grad_norm": 2.015625, | |
| "learning_rate": 6.700955180014695e-05, | |
| "loss": 1.9121, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.49394939493949397, | |
| "grad_norm": 2.109375, | |
| "learning_rate": 6.698016164584864e-05, | |
| "loss": 1.442, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.49504950495049505, | |
| "grad_norm": 2.890625, | |
| "learning_rate": 6.695077149155033e-05, | |
| "loss": 2.0113, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.4961496149614962, | |
| "grad_norm": 2.6875, | |
| "learning_rate": 6.692138133725202e-05, | |
| "loss": 1.4442, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.49724972497249725, | |
| "grad_norm": 2.53125, | |
| "learning_rate": 6.689199118295371e-05, | |
| "loss": 1.552, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.49834983498349833, | |
| "grad_norm": 2.25, | |
| "learning_rate": 6.68626010286554e-05, | |
| "loss": 1.3807, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.49944994499449946, | |
| "grad_norm": 2.390625, | |
| "learning_rate": 6.68332108743571e-05, | |
| "loss": 1.5911, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.5005500550055005, | |
| "grad_norm": 2.484375, | |
| "learning_rate": 6.680382072005879e-05, | |
| "loss": 1.6458, | |
| "step": 455 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 2727, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 455, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 2.7410153055922176e+16, | |
| "train_batch_size": 16, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
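
The state above is the standard checkpoint log written by the Hugging Face `Trainer`; each `log_history` entry records `epoch`, `grad_norm`, `learning_rate`, `loss`, and `step` (here, a brief warmup followed by linear learning-rate decay toward `max_steps` = 2727). Below is a minimal sketch, not part of the checkpoint itself, showing how this file can be inspected programmatically. The path `trainer_state.json` is an assumption: substitute the actual location inside the checkpoint directory (e.g. `checkpoint-455/trainer_state.json`).

```python
# Minimal sketch: load a Trainer checkpoint state and plot the logged
# loss curve and learning-rate schedule. Assumes the JSON above is
# saved as "trainer_state.json" (the Trainer's default filename).
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Each log_history entry holds the per-step metrics seen above:
# epoch, grad_norm, learning_rate, loss, step.
history = state["log_history"]
steps = [entry["step"] for entry in history]
losses = [entry["loss"] for entry in history]
lrs = [entry["learning_rate"] for entry in history]

fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True)
ax_loss.plot(steps, losses)
ax_loss.set_ylabel("training loss")
ax_lr.plot(steps, lrs)
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("step")
plt.tight_layout()
plt.show()
```

Because `logging_steps` is 1, every optimizer step is present, so the plot reflects the raw (unsmoothed) loss; applying a moving average before plotting is a common refinement.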