{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.5879301400657099,
  "eval_steps": 500,
  "global_step": 850,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0006916825177243646,
      "grad_norm": 0.2954893708229065,
      "learning_rate": 0.0,
      "loss": 2.3812,
      "step": 1
    },
    {
      "epoch": 0.0013833650354487291,
      "grad_norm": 0.3372870683670044,
      "learning_rate": 1.3698630136986302e-06,
      "loss": 2.6562,
      "step": 2
    },
    {
      "epoch": 0.0020750475531730937,
      "grad_norm": 0.41708794236183167,
      "learning_rate": 2.7397260273972604e-06,
      "loss": 2.779,
      "step": 3
    },
    {
      "epoch": 0.0027667300708974583,
      "grad_norm": 0.48284393548965454,
      "learning_rate": 4.10958904109589e-06,
      "loss": 2.83,
      "step": 4
    },
    {
      "epoch": 0.0034584125886218224,
      "grad_norm": 0.328521728515625,
      "learning_rate": 5.479452054794521e-06,
      "loss": 2.3607,
      "step": 5
    },
    {
      "epoch": 0.004150095106346187,
      "grad_norm": 0.3413754403591156,
      "learning_rate": 6.849315068493151e-06,
      "loss": 1.9744,
      "step": 6
    },
    {
      "epoch": 0.0048417776240705515,
      "grad_norm": 0.43425995111465454,
      "learning_rate": 8.21917808219178e-06,
      "loss": 1.915,
      "step": 7
    },
    {
      "epoch": 0.0055334601417949165,
      "grad_norm": 0.45184195041656494,
      "learning_rate": 9.589041095890411e-06,
      "loss": 2.0002,
      "step": 8
    },
    {
      "epoch": 0.006225142659519281,
      "grad_norm": 0.3451146185398102,
      "learning_rate": 1.0958904109589042e-05,
      "loss": 2.5739,
      "step": 9
    },
    {
      "epoch": 0.006916825177243645,
      "grad_norm": 0.39400503039360046,
      "learning_rate": 1.2328767123287671e-05,
      "loss": 2.6139,
      "step": 10
    },
    {
      "epoch": 0.00760850769496801,
      "grad_norm": 0.4555888772010803,
      "learning_rate": 1.3698630136986302e-05,
      "loss": 2.4346,
      "step": 11
    },
    {
      "epoch": 0.008300190212692375,
      "grad_norm": 0.5475619435310364,
      "learning_rate": 1.5068493150684931e-05,
      "loss": 1.485,
      "step": 12
    },
    {
      "epoch": 0.008991872730416739,
      "grad_norm": 1.219076156616211,
      "learning_rate": 1.643835616438356e-05,
      "loss": 2.0555,
      "step": 13
    },
    {
      "epoch": 0.009683555248141103,
      "grad_norm": 0.3271433711051941,
      "learning_rate": 1.780821917808219e-05,
      "loss": 2.8418,
      "step": 14
    },
    {
      "epoch": 0.010375237765865467,
      "grad_norm": 0.3261394500732422,
      "learning_rate": 1.9178082191780822e-05,
      "loss": 2.287,
      "step": 15
    },
    {
      "epoch": 0.011066920283589833,
      "grad_norm": 0.39713796973228455,
      "learning_rate": 2.0547945205479453e-05,
      "loss": 2.7331,
      "step": 16
    },
    {
      "epoch": 0.011758602801314197,
      "grad_norm": 0.3226444125175476,
      "learning_rate": 2.1917808219178083e-05,
      "loss": 2.7307,
      "step": 17
    },
    {
      "epoch": 0.012450285319038561,
      "grad_norm": 0.4153122007846832,
      "learning_rate": 2.328767123287671e-05,
      "loss": 2.3229,
      "step": 18
    },
    {
      "epoch": 0.013141967836762926,
      "grad_norm": 0.29302462935447693,
      "learning_rate": 2.4657534246575342e-05,
      "loss": 2.2093,
      "step": 19
    },
    {
      "epoch": 0.01383365035448729,
      "grad_norm": 0.42535701394081116,
      "learning_rate": 2.6027397260273973e-05,
      "loss": 2.3698,
      "step": 20
    },
    {
      "epoch": 0.014525332872211656,
      "grad_norm": 0.33567023277282715,
      "learning_rate": 2.7397260273972603e-05,
      "loss": 2.6456,
      "step": 21
    },
    {
      "epoch": 0.01521701538993602,
      "grad_norm": 0.37375620007514954,
      "learning_rate": 2.8767123287671234e-05,
      "loss": 2.4148,
      "step": 22
    },
    {
      "epoch": 0.015908697907660384,
      "grad_norm": 0.2704203128814697,
      "learning_rate": 3.0136986301369862e-05,
      "loss": 1.6722,
      "step": 23
    },
    {
      "epoch": 0.01660038042538475,
      "grad_norm": 0.3946782946586609,
      "learning_rate": 3.1506849315068496e-05,
      "loss": 2.6759,
      "step": 24
    },
    {
      "epoch": 0.017292062943109112,
      "grad_norm": 0.368335098028183,
      "learning_rate": 3.287671232876712e-05,
      "loss": 2.6882,
      "step": 25
    },
    {
      "epoch": 0.017983745460833478,
      "grad_norm": 0.38029783964157104,
      "learning_rate": 3.424657534246575e-05,
      "loss": 2.6915,
      "step": 26
    },
    {
      "epoch": 0.01867542797855784,
      "grad_norm": 0.36253222823143005,
      "learning_rate": 3.561643835616438e-05,
      "loss": 2.1426,
      "step": 27
    },
    {
      "epoch": 0.019367110496282206,
      "grad_norm": 0.35769322514533997,
      "learning_rate": 3.698630136986301e-05,
      "loss": 2.6634,
      "step": 28
    },
    {
      "epoch": 0.020058793014006572,
      "grad_norm": 0.44577229022979736,
      "learning_rate": 3.8356164383561644e-05,
      "loss": 2.5996,
      "step": 29
    },
    {
      "epoch": 0.020750475531730934,
      "grad_norm": 0.5225628614425659,
      "learning_rate": 3.9726027397260274e-05,
      "loss": 2.5993,
      "step": 30
    },
    {
      "epoch": 0.0214421580494553,
      "grad_norm": 0.45649707317352295,
      "learning_rate": 4.1095890410958905e-05,
      "loss": 2.3344,
      "step": 31
    },
    {
      "epoch": 0.022133840567179666,
      "grad_norm": 0.31408172845840454,
      "learning_rate": 4.2465753424657536e-05,
      "loss": 2.343,
      "step": 32
    },
    {
      "epoch": 0.02282552308490403,
      "grad_norm": 0.3498693108558655,
      "learning_rate": 4.383561643835617e-05,
      "loss": 1.7196,
      "step": 33
    },
    {
      "epoch": 0.023517205602628394,
      "grad_norm": 0.5255292654037476,
      "learning_rate": 4.520547945205479e-05,
      "loss": 1.5354,
      "step": 34
    },
    {
      "epoch": 0.024208888120352757,
      "grad_norm": 0.3460487425327301,
      "learning_rate": 4.657534246575342e-05,
      "loss": 2.1435,
      "step": 35
    },
    {
      "epoch": 0.024900570638077123,
      "grad_norm": 0.47466233372688293,
      "learning_rate": 4.794520547945205e-05,
      "loss": 2.7025,
      "step": 36
    },
    {
      "epoch": 0.02559225315580149,
      "grad_norm": 0.39667466282844543,
      "learning_rate": 4.9315068493150684e-05,
      "loss": 2.2736,
      "step": 37
    },
    {
      "epoch": 0.02628393567352585,
      "grad_norm": 0.40798133611679077,
      "learning_rate": 5.068493150684932e-05,
      "loss": 1.4074,
      "step": 38
    },
    {
      "epoch": 0.026975618191250217,
      "grad_norm": 0.39603471755981445,
      "learning_rate": 5.2054794520547945e-05,
      "loss": 2.4354,
      "step": 39
    },
    {
      "epoch": 0.02766730070897458,
      "grad_norm": 0.5650593042373657,
      "learning_rate": 5.342465753424658e-05,
      "loss": 1.7307,
      "step": 40
    },
    {
      "epoch": 0.028358983226698945,
      "grad_norm": 0.7711919546127319,
      "learning_rate": 5.479452054794521e-05,
      "loss": 1.2067,
      "step": 41
    },
    {
      "epoch": 0.02905066574442331,
      "grad_norm": 0.46603119373321533,
      "learning_rate": 5.616438356164384e-05,
      "loss": 1.0539,
      "step": 42
    },
    {
      "epoch": 0.029742348262147673,
      "grad_norm": 0.2997550666332245,
      "learning_rate": 5.753424657534247e-05,
      "loss": 1.9997,
      "step": 43
    },
    {
      "epoch": 0.03043403077987204,
      "grad_norm": 0.4058617651462555,
      "learning_rate": 5.89041095890411e-05,
      "loss": 2.3323,
      "step": 44
    },
    {
      "epoch": 0.0311257132975964,
      "grad_norm": 0.3763635754585266,
      "learning_rate": 6.0273972602739724e-05,
      "loss": 1.8749,
      "step": 45
    },
    {
      "epoch": 0.03181739581532077,
      "grad_norm": 0.3507993221282959,
      "learning_rate": 6.164383561643835e-05,
      "loss": 2.3828,
      "step": 46
    },
    {
      "epoch": 0.03250907833304513,
      "grad_norm": 0.34240859746932983,
      "learning_rate": 6.301369863013699e-05,
      "loss": 2.3809,
      "step": 47
    },
    {
      "epoch": 0.0332007608507695,
      "grad_norm": 0.4183844029903412,
      "learning_rate": 6.438356164383562e-05,
      "loss": 2.2065,
      "step": 48
    },
    {
      "epoch": 0.03389244336849386,
      "grad_norm": 0.5209120512008667,
      "learning_rate": 6.575342465753424e-05,
      "loss": 2.061,
      "step": 49
    },
    {
      "epoch": 0.034584125886218224,
      "grad_norm": 0.6568111181259155,
      "learning_rate": 6.712328767123288e-05,
      "loss": 2.1576,
      "step": 50
    },
    {
      "epoch": 0.03527580840394259,
      "grad_norm": 0.5327390432357788,
      "learning_rate": 6.84931506849315e-05,
      "loss": 1.5562,
      "step": 51
    },
    {
      "epoch": 0.035967490921666956,
      "grad_norm": 0.6389997005462646,
      "learning_rate": 6.986301369863014e-05,
      "loss": 2.3966,
      "step": 52
    },
    {
      "epoch": 0.03665917343939132,
      "grad_norm": 0.6374388933181763,
      "learning_rate": 7.123287671232876e-05,
      "loss": 2.4013,
      "step": 53
    },
    {
      "epoch": 0.03735085595711568,
      "grad_norm": 0.4383719563484192,
      "learning_rate": 7.26027397260274e-05,
      "loss": 2.061,
      "step": 54
    },
    {
      "epoch": 0.03804253847484005,
      "grad_norm": 0.4306814670562744,
      "learning_rate": 7.397260273972603e-05,
      "loss": 2.4931,
      "step": 55
    },
    {
      "epoch": 0.03873422099256441,
      "grad_norm": 0.5693032145500183,
      "learning_rate": 7.534246575342466e-05,
      "loss": 2.2842,
      "step": 56
    },
    {
      "epoch": 0.039425903510288775,
      "grad_norm": 0.7792916297912598,
      "learning_rate": 7.671232876712329e-05,
      "loss": 1.8452,
      "step": 57
    },
    {
      "epoch": 0.040117586028013144,
      "grad_norm": 0.4936494827270508,
      "learning_rate": 7.808219178082192e-05,
      "loss": 1.8668,
      "step": 58
    },
    {
      "epoch": 0.040809268545737507,
      "grad_norm": 0.49626269936561584,
      "learning_rate": 7.945205479452055e-05,
      "loss": 1.9564,
      "step": 59
    },
    {
      "epoch": 0.04150095106346187,
      "grad_norm": 0.4702766537666321,
      "learning_rate": 8.082191780821919e-05,
      "loss": 2.419,
      "step": 60
    },
    {
      "epoch": 0.04219263358118624,
      "grad_norm": 0.6959900259971619,
      "learning_rate": 8.219178082191781e-05,
      "loss": 1.5835,
      "step": 61
    },
    {
      "epoch": 0.0428843160989106,
      "grad_norm": 0.5963976979255676,
      "learning_rate": 8.356164383561645e-05,
      "loss": 2.2932,
      "step": 62
    },
    {
      "epoch": 0.04357599861663496,
      "grad_norm": 0.4605785310268402,
      "learning_rate": 8.493150684931507e-05,
      "loss": 2.397,
      "step": 63
    },
    {
      "epoch": 0.04426768113435933,
      "grad_norm": 0.4479796588420868,
      "learning_rate": 8.630136986301371e-05,
      "loss": 2.0257,
      "step": 64
    },
    {
      "epoch": 0.044959363652083695,
      "grad_norm": 0.4342099726200104,
      "learning_rate": 8.767123287671233e-05,
      "loss": 1.9476,
      "step": 65
    },
    {
      "epoch": 0.04565104616980806,
      "grad_norm": 0.44643279910087585,
      "learning_rate": 8.904109589041096e-05,
      "loss": 2.4669,
      "step": 66
    },
    {
      "epoch": 0.04634272868753242,
      "grad_norm": 0.513254702091217,
      "learning_rate": 9.041095890410958e-05,
      "loss": 2.2581,
      "step": 67
    },
    {
      "epoch": 0.04703441120525679,
      "grad_norm": 0.6621958613395691,
      "learning_rate": 9.178082191780822e-05,
      "loss": 1.3013,
      "step": 68
    },
    {
      "epoch": 0.04772609372298115,
      "grad_norm": 0.43388229608535767,
      "learning_rate": 9.315068493150684e-05,
      "loss": 2.5325,
      "step": 69
    },
    {
      "epoch": 0.048417776240705514,
      "grad_norm": 0.539757490158081,
      "learning_rate": 9.452054794520548e-05,
      "loss": 1.76,
      "step": 70
    },
    {
      "epoch": 0.04910945875842988,
      "grad_norm": 0.33598029613494873,
      "learning_rate": 9.58904109589041e-05,
      "loss": 1.6931,
      "step": 71
    },
    {
      "epoch": 0.049801141276154245,
      "grad_norm": 0.8661749958992004,
      "learning_rate": 9.726027397260274e-05,
      "loss": 2.1326,
      "step": 72
    },
    {
      "epoch": 0.05049282379387861,
      "grad_norm": 0.5956202149391174,
      "learning_rate": 9.863013698630137e-05,
      "loss": 2.0554,
      "step": 73
    },
    {
      "epoch": 0.05118450631160298,
      "grad_norm": 0.7723128199577332,
      "learning_rate": 0.0001,
      "loss": 1.4837,
      "step": 74
    },
    {
      "epoch": 0.05187618882932734,
      "grad_norm": 0.5357879996299744,
      "learning_rate": 9.992716678805537e-05,
      "loss": 2.1688,
      "step": 75
    },
    {
      "epoch": 0.0525678713470517,
      "grad_norm": 0.48623108863830566,
      "learning_rate": 9.985433357611071e-05,
      "loss": 2.1567,
      "step": 76
    },
    {
      "epoch": 0.053259553864776064,
      "grad_norm": 0.4244164228439331,
      "learning_rate": 9.978150036416607e-05,
      "loss": 1.8235,
      "step": 77
    },
    {
      "epoch": 0.053951236382500434,
      "grad_norm": 0.6056337356567383,
      "learning_rate": 9.970866715222141e-05,
      "loss": 2.0154,
      "step": 78
    },
    {
      "epoch": 0.054642918900224796,
      "grad_norm": 0.5280382037162781,
      "learning_rate": 9.963583394027677e-05,
      "loss": 1.9959,
      "step": 79
    },
    {
      "epoch": 0.05533460141794916,
      "grad_norm": 0.44745901226997375,
      "learning_rate": 9.956300072833212e-05,
      "loss": 1.2171,
      "step": 80
    },
    {
      "epoch": 0.05602628393567353,
      "grad_norm": 0.7180289030075073,
      "learning_rate": 9.949016751638748e-05,
      "loss": 1.7036,
      "step": 81
    },
    {
      "epoch": 0.05671796645339789,
      "grad_norm": 0.6465222239494324,
      "learning_rate": 9.941733430444284e-05,
      "loss": 1.8992,
      "step": 82
    },
    {
      "epoch": 0.05740964897112225,
      "grad_norm": 0.4792153239250183,
      "learning_rate": 9.934450109249819e-05,
      "loss": 2.0712,
      "step": 83
    },
    {
      "epoch": 0.05810133148884662,
      "grad_norm": 0.47839808464050293,
      "learning_rate": 9.927166788055353e-05,
      "loss": 1.7548,
      "step": 84
    },
    {
      "epoch": 0.058793014006570984,
      "grad_norm": 0.7118557095527649,
      "learning_rate": 9.919883466860888e-05,
      "loss": 1.5835,
      "step": 85
    },
    {
      "epoch": 0.05948469652429535,
      "grad_norm": 0.5179592370986938,
      "learning_rate": 9.912600145666424e-05,
      "loss": 1.621,
      "step": 86
    },
    {
      "epoch": 0.060176379042019716,
      "grad_norm": 0.9235703349113464,
      "learning_rate": 9.905316824471959e-05,
      "loss": 0.8581,
      "step": 87
    },
    {
      "epoch": 0.06086806155974408,
      "grad_norm": 0.8734163045883179,
      "learning_rate": 9.898033503277495e-05,
      "loss": 2.2878,
      "step": 88
    },
    {
      "epoch": 0.06155974407746844,
      "grad_norm": 0.45049625635147095,
      "learning_rate": 9.890750182083031e-05,
      "loss": 2.2186,
      "step": 89
    },
    {
      "epoch": 0.0622514265951928,
      "grad_norm": 0.6119429469108582,
      "learning_rate": 9.883466860888566e-05,
      "loss": 2.2461,
      "step": 90
    },
    {
      "epoch": 0.06294310911291717,
      "grad_norm": 0.5095340013504028,
      "learning_rate": 9.876183539694101e-05,
      "loss": 2.2954,
      "step": 91
    },
    {
      "epoch": 0.06363479163064154,
      "grad_norm": 0.6094918251037598,
      "learning_rate": 9.868900218499635e-05,
      "loss": 2.3478,
      "step": 92
    },
    {
      "epoch": 0.0643264741483659,
      "grad_norm": 1.8420301675796509,
      "learning_rate": 9.861616897305172e-05,
      "loss": 2.213,
      "step": 93
    },
    {
      "epoch": 0.06501815666609026,
      "grad_norm": 0.6151532530784607,
      "learning_rate": 9.854333576110706e-05,
      "loss": 1.8737,
      "step": 94
    },
    {
      "epoch": 0.06570983918381462,
      "grad_norm": 0.5210903286933899,
      "learning_rate": 9.847050254916242e-05,
      "loss": 2.019,
      "step": 95
    },
    {
      "epoch": 0.066401521701539,
      "grad_norm": 0.535746693611145,
      "learning_rate": 9.839766933721779e-05,
      "loss": 2.399,
      "step": 96
    },
    {
      "epoch": 0.06709320421926336,
      "grad_norm": 0.8154505491256714,
      "learning_rate": 9.832483612527313e-05,
      "loss": 1.9442,
      "step": 97
    },
    {
      "epoch": 0.06778488673698772,
      "grad_norm": 0.521247923374176,
      "learning_rate": 9.825200291332848e-05,
      "loss": 2.5264,
      "step": 98
    },
    {
      "epoch": 0.06847656925471209,
      "grad_norm": 0.41544124484062195,
      "learning_rate": 9.817916970138383e-05,
      "loss": 1.8267,
      "step": 99
    },
    {
      "epoch": 0.06916825177243645,
      "grad_norm": 0.538253128528595,
      "learning_rate": 9.810633648943919e-05,
      "loss": 1.6463,
      "step": 100
    },
    {
      "epoch": 0.06985993429016081,
      "grad_norm": 0.8509485125541687,
      "learning_rate": 9.803350327749454e-05,
      "loss": 1.5877,
      "step": 101
    },
    {
      "epoch": 0.07055161680788519,
      "grad_norm": 0.47714293003082275,
      "learning_rate": 9.79606700655499e-05,
      "loss": 1.5043,
      "step": 102
    },
    {
      "epoch": 0.07124329932560955,
      "grad_norm": 0.5433914065361023,
      "learning_rate": 9.788783685360526e-05,
      "loss": 1.6454,
      "step": 103
    },
    {
      "epoch": 0.07193498184333391,
      "grad_norm": 0.6400073766708374,
      "learning_rate": 9.781500364166059e-05,
      "loss": 1.6994,
      "step": 104
    },
    {
      "epoch": 0.07262666436105827,
      "grad_norm": 0.6944612860679626,
      "learning_rate": 9.774217042971595e-05,
      "loss": 2.2063,
      "step": 105
    },
    {
      "epoch": 0.07331834687878264,
      "grad_norm": 0.6701168417930603,
      "learning_rate": 9.76693372177713e-05,
      "loss": 1.2487,
      "step": 106
    },
    {
      "epoch": 0.074010029396507,
      "grad_norm": 0.7888538837432861,
      "learning_rate": 9.759650400582666e-05,
      "loss": 1.8784,
      "step": 107
    },
    {
      "epoch": 0.07470171191423136,
      "grad_norm": 0.7106865048408508,
      "learning_rate": 9.752367079388201e-05,
      "loss": 2.3673,
      "step": 108
    },
    {
      "epoch": 0.07539339443195574,
      "grad_norm": 0.49588626623153687,
      "learning_rate": 9.745083758193737e-05,
      "loss": 1.8638,
      "step": 109
    },
    {
      "epoch": 0.0760850769496801,
      "grad_norm": 0.48046427965164185,
      "learning_rate": 9.737800436999273e-05,
      "loss": 2.333,
      "step": 110
    },
    {
      "epoch": 0.07677675946740446,
      "grad_norm": 0.6239060163497925,
      "learning_rate": 9.730517115804807e-05,
      "loss": 1.8378,
      "step": 111
    },
    {
      "epoch": 0.07746844198512882,
      "grad_norm": 0.6083034873008728,
      "learning_rate": 9.723233794610343e-05,
      "loss": 1.9826,
      "step": 112
    },
    {
      "epoch": 0.07816012450285319,
      "grad_norm": 0.8679235577583313,
      "learning_rate": 9.715950473415877e-05,
      "loss": 1.3375,
      "step": 113
    },
    {
      "epoch": 0.07885180702057755,
      "grad_norm": 0.542757511138916,
      "learning_rate": 9.708667152221414e-05,
      "loss": 1.6349,
      "step": 114
    },
    {
      "epoch": 0.07954348953830193,
      "grad_norm": 0.5968992114067078,
      "learning_rate": 9.701383831026948e-05,
      "loss": 1.3598,
      "step": 115
    },
    {
      "epoch": 0.08023517205602629,
      "grad_norm": 0.5693714022636414,
      "learning_rate": 9.694100509832484e-05,
      "loss": 2.1527,
      "step": 116
    },
    {
      "epoch": 0.08092685457375065,
      "grad_norm": 0.6307690143585205,
      "learning_rate": 9.68681718863802e-05,
      "loss": 1.7178,
      "step": 117
    },
    {
      "epoch": 0.08161853709147501,
      "grad_norm": 0.5192516446113586,
      "learning_rate": 9.679533867443554e-05,
      "loss": 1.8115,
      "step": 118
    },
    {
      "epoch": 0.08231021960919938,
      "grad_norm": 1.036036729812622,
      "learning_rate": 9.67225054624909e-05,
      "loss": 2.2423,
      "step": 119
    },
    {
      "epoch": 0.08300190212692374,
      "grad_norm": 0.6450679302215576,
      "learning_rate": 9.664967225054625e-05,
      "loss": 1.9298,
      "step": 120
    },
    {
      "epoch": 0.0836935846446481,
      "grad_norm": 0.8683953881263733,
      "learning_rate": 9.657683903860161e-05,
      "loss": 2.0791,
      "step": 121
    },
    {
      "epoch": 0.08438526716237248,
      "grad_norm": 0.7013575434684753,
      "learning_rate": 9.650400582665696e-05,
      "loss": 2.0814,
      "step": 122
    },
    {
      "epoch": 0.08507694968009684,
      "grad_norm": 0.6849325299263,
      "learning_rate": 9.643117261471232e-05,
      "loss": 1.83,
      "step": 123
    },
    {
      "epoch": 0.0857686321978212,
      "grad_norm": 0.9820392727851868,
      "learning_rate": 9.635833940276767e-05,
      "loss": 1.5665,
      "step": 124
    },
    {
      "epoch": 0.08646031471554556,
      "grad_norm": 0.5546866655349731,
      "learning_rate": 9.628550619082301e-05,
      "loss": 1.7215,
      "step": 125
    },
    {
      "epoch": 0.08715199723326993,
      "grad_norm": 0.6748363971710205,
      "learning_rate": 9.621267297887837e-05,
      "loss": 1.6532,
      "step": 126
    },
    {
      "epoch": 0.08784367975099429,
      "grad_norm": 0.4760434627532959,
      "learning_rate": 9.613983976693372e-05,
      "loss": 1.3476,
      "step": 127
    },
    {
      "epoch": 0.08853536226871866,
      "grad_norm": 0.5710906386375427,
      "learning_rate": 9.606700655498908e-05,
      "loss": 2.025,
      "step": 128
    },
    {
      "epoch": 0.08922704478644303,
      "grad_norm": 0.5950302481651306,
      "learning_rate": 9.599417334304443e-05,
      "loss": 2.6324,
      "step": 129
    },
    {
      "epoch": 0.08991872730416739,
      "grad_norm": 0.4994860887527466,
      "learning_rate": 9.592134013109979e-05,
      "loss": 2.0315,
      "step": 130
    },
    {
      "epoch": 0.09061040982189175,
      "grad_norm": 0.6400249004364014,
      "learning_rate": 9.584850691915514e-05,
      "loss": 2.3628,
      "step": 131
    },
    {
      "epoch": 0.09130209233961611,
      "grad_norm": 1.0873314142227173,
      "learning_rate": 9.577567370721049e-05,
      "loss": 1.2664,
      "step": 132
    },
    {
      "epoch": 0.09199377485734048,
      "grad_norm": 0.5430288314819336,
      "learning_rate": 9.570284049526585e-05,
      "loss": 1.7676,
      "step": 133
    },
    {
      "epoch": 0.09268545737506484,
      "grad_norm": 0.5969283580780029,
      "learning_rate": 9.56300072833212e-05,
      "loss": 2.0824,
      "step": 134
    },
    {
      "epoch": 0.09337713989278922,
      "grad_norm": 0.6228020191192627,
      "learning_rate": 9.555717407137656e-05,
      "loss": 2.0687,
      "step": 135
    },
    {
      "epoch": 0.09406882241051358,
      "grad_norm": 0.6905536651611328,
      "learning_rate": 9.54843408594319e-05,
      "loss": 2.2948,
      "step": 136
    },
    {
      "epoch": 0.09476050492823794,
      "grad_norm": 0.6397948861122131,
      "learning_rate": 9.541150764748726e-05,
      "loss": 1.4147,
      "step": 137
    },
    {
      "epoch": 0.0954521874459623,
      "grad_norm": 0.5489908456802368,
      "learning_rate": 9.533867443554261e-05,
      "loss": 2.0582,
      "step": 138
    },
    {
      "epoch": 0.09614386996368667,
      "grad_norm": 0.5713220834732056,
      "learning_rate": 9.526584122359796e-05,
      "loss": 2.1708,
      "step": 139
    },
    {
      "epoch": 0.09683555248141103,
      "grad_norm": 0.6736558079719543,
      "learning_rate": 9.519300801165332e-05,
      "loss": 1.6808,
      "step": 140
    },
    {
      "epoch": 0.0975272349991354,
      "grad_norm": 1.1222553253173828,
      "learning_rate": 9.512017479970867e-05,
      "loss": 1.563,
      "step": 141
    },
    {
      "epoch": 0.09821891751685977,
      "grad_norm": 0.6231582760810852,
      "learning_rate": 9.504734158776403e-05,
      "loss": 1.6458,
      "step": 142
    },
    {
      "epoch": 0.09891060003458413,
      "grad_norm": 0.716089129447937,
      "learning_rate": 9.497450837581938e-05,
      "loss": 1.7055,
      "step": 143
    },
    {
      "epoch": 0.09960228255230849,
      "grad_norm": 0.49629953503608704,
      "learning_rate": 9.490167516387472e-05,
      "loss": 1.2985,
      "step": 144
    },
    {
      "epoch": 0.10029396507003285,
      "grad_norm": 0.8354843258857727,
      "learning_rate": 9.482884195193008e-05,
      "loss": 1.8557,
      "step": 145
    },
    {
      "epoch": 0.10098564758775722,
      "grad_norm": 0.7507709860801697,
      "learning_rate": 9.475600873998543e-05,
      "loss": 1.1816,
      "step": 146
    },
    {
      "epoch": 0.10167733010548158,
      "grad_norm": 0.8973211050033569,
      "learning_rate": 9.46831755280408e-05,
      "loss": 1.4495,
      "step": 147
    },
    {
      "epoch": 0.10236901262320595,
      "grad_norm": 0.7720927000045776,
      "learning_rate": 9.461034231609614e-05,
      "loss": 2.1743,
      "step": 148
    },
    {
      "epoch": 0.10306069514093032,
      "grad_norm": 0.511075496673584,
      "learning_rate": 9.45375091041515e-05,
      "loss": 1.8267,
      "step": 149
    },
    {
      "epoch": 0.10375237765865468,
      "grad_norm": 0.6523205041885376,
      "learning_rate": 9.446467589220685e-05,
      "loss": 1.9682,
      "step": 150
    },
    {
      "epoch": 0.10444406017637904,
      "grad_norm": 0.703565239906311,
      "learning_rate": 9.43918426802622e-05,
      "loss": 2.3272,
      "step": 151
    },
    {
      "epoch": 0.1051357426941034,
      "grad_norm": 0.5773376226425171,
      "learning_rate": 9.431900946831756e-05,
      "loss": 1.9753,
      "step": 152
    },
    {
      "epoch": 0.10582742521182777,
      "grad_norm": 0.6234054565429688,
      "learning_rate": 9.42461762563729e-05,
      "loss": 2.5085,
      "step": 153
    },
    {
      "epoch": 0.10651910772955213,
      "grad_norm": 0.7076170444488525,
      "learning_rate": 9.417334304442827e-05,
      "loss": 1.9139,
      "step": 154
    },
    {
      "epoch": 0.1072107902472765,
      "grad_norm": 0.5035070776939392,
      "learning_rate": 9.410050983248361e-05,
      "loss": 2.0164,
      "step": 155
    },
    {
      "epoch": 0.10790247276500087,
      "grad_norm": 1.046075701713562,
      "learning_rate": 9.402767662053898e-05,
      "loss": 1.2478,
      "step": 156
    },
    {
      "epoch": 0.10859415528272523,
      "grad_norm": 0.5620921850204468,
      "learning_rate": 9.395484340859432e-05,
      "loss": 1.565,
      "step": 157
    },
    {
      "epoch": 0.10928583780044959,
      "grad_norm": 0.8284108638763428,
      "learning_rate": 9.388201019664967e-05,
      "loss": 2.0823,
      "step": 158
    },
    {
      "epoch": 0.10997752031817395,
      "grad_norm": 0.584373950958252,
      "learning_rate": 9.380917698470503e-05,
      "loss": 1.6352,
      "step": 159
    },
    {
      "epoch": 0.11066920283589832,
      "grad_norm": 0.7779399752616882,
      "learning_rate": 9.373634377276038e-05,
      "loss": 1.83,
      "step": 160
    },
    {
      "epoch": 0.1113608853536227,
      "grad_norm": 0.6635437607765198,
      "learning_rate": 9.366351056081574e-05,
      "loss": 1.8935,
      "step": 161
    },
    {
      "epoch": 0.11205256787134706,
      "grad_norm": 0.6441919207572937,
      "learning_rate": 9.359067734887109e-05,
      "loss": 1.9566,
      "step": 162
    },
    {
      "epoch": 0.11274425038907142,
      "grad_norm": 1.20408296585083,
      "learning_rate": 9.351784413692645e-05,
      "loss": 1.2529,
      "step": 163
    },
    {
      "epoch": 0.11343593290679578,
      "grad_norm": 0.6320855021476746,
      "learning_rate": 9.34450109249818e-05,
      "loss": 1.8071,
      "step": 164
    },
    {
      "epoch": 0.11412761542452014,
      "grad_norm": 0.5197457075119019,
      "learning_rate": 9.337217771303714e-05,
      "loss": 1.7303,
      "step": 165
    },
    {
      "epoch": 0.1148192979422445,
      "grad_norm": 0.6230719089508057,
      "learning_rate": 9.32993445010925e-05,
      "loss": 2.0685,
      "step": 166
    },
    {
      "epoch": 0.11551098045996887,
      "grad_norm": 0.6185227036476135,
      "learning_rate": 9.322651128914785e-05,
      "loss": 1.9485,
      "step": 167
    },
    {
      "epoch": 0.11620266297769324,
      "grad_norm": 0.7290086150169373,
      "learning_rate": 9.315367807720321e-05,
      "loss": 1.6923,
      "step": 168
    },
    {
      "epoch": 0.1168943454954176,
      "grad_norm": 0.9122477769851685,
      "learning_rate": 9.308084486525856e-05,
      "loss": 1.3395,
      "step": 169
    },
    {
      "epoch": 0.11758602801314197,
      "grad_norm": 0.5755578875541687,
      "learning_rate": 9.300801165331392e-05,
      "loss": 1.0775,
      "step": 170
    },
    {
      "epoch": 0.11827771053086633,
      "grad_norm": 0.6516758799552917,
      "learning_rate": 9.293517844136927e-05,
      "loss": 1.6174,
      "step": 171
    },
    {
      "epoch": 0.1189693930485907,
      "grad_norm": 0.5685858726501465,
      "learning_rate": 9.286234522942462e-05,
      "loss": 1.851,
      "step": 172
    },
    {
      "epoch": 0.11966107556631506,
      "grad_norm": 0.5784198641777039,
      "learning_rate": 9.278951201747998e-05,
      "loss": 1.712,
      "step": 173
    },
    {
      "epoch": 0.12035275808403943,
      "grad_norm": 0.7357056736946106,
      "learning_rate": 9.271667880553533e-05,
      "loss": 2.2286,
      "step": 174
    },
    {
      "epoch": 0.1210444406017638,
      "grad_norm": 0.5122838616371155,
      "learning_rate": 9.264384559359069e-05,
      "loss": 1.5581,
      "step": 175
    },
    {
      "epoch": 0.12173612311948816,
      "grad_norm": 0.6864873170852661,
      "learning_rate": 9.257101238164603e-05,
      "loss": 1.2737,
      "step": 176
    },
    {
      "epoch": 0.12242780563721252,
      "grad_norm": 0.441771000623703,
      "learning_rate": 9.24981791697014e-05,
      "loss": 1.2291,
      "step": 177
    },
    {
      "epoch": 0.12311948815493688,
      "grad_norm": 0.634203314781189,
      "learning_rate": 9.242534595775674e-05,
      "loss": 2.1797,
      "step": 178
    },
    {
      "epoch": 0.12381117067266124,
      "grad_norm": 0.6232409477233887,
      "learning_rate": 9.235251274581209e-05,
      "loss": 1.5954,
      "step": 179
    },
    {
      "epoch": 0.1245028531903856,
      "grad_norm": 0.7475627660751343,
      "learning_rate": 9.227967953386745e-05,
      "loss": 2.3014,
      "step": 180
    },
    {
      "epoch": 0.12519453570810998,
      "grad_norm": 0.7436168789863586,
      "learning_rate": 9.22068463219228e-05,
      "loss": 2.1053,
      "step": 181
    },
    {
      "epoch": 0.12588621822583435,
      "grad_norm": 0.754830002784729,
      "learning_rate": 9.213401310997816e-05,
      "loss": 2.2968,
      "step": 182
    },
    {
      "epoch": 0.1265779007435587,
      "grad_norm": 0.7968791723251343,
      "learning_rate": 9.206117989803351e-05,
      "loss": 1.9433,
      "step": 183
    },
    {
      "epoch": 0.12726958326128307,
      "grad_norm": 0.7031124234199524,
      "learning_rate": 9.198834668608885e-05,
      "loss": 1.6162,
      "step": 184
    },
    {
      "epoch": 0.12796126577900743,
      "grad_norm": 1.5919851064682007,
      "learning_rate": 9.191551347414422e-05,
      "loss": 2.3995,
      "step": 185
    },
    {
      "epoch": 0.1286529482967318,
      "grad_norm": 0.8990558385848999,
      "learning_rate": 9.184268026219956e-05,
      "loss": 2.1597,
      "step": 186
    },
    {
      "epoch": 0.12934463081445616,
      "grad_norm": 0.6514426469802856,
      "learning_rate": 9.176984705025492e-05,
      "loss": 2.2172,
      "step": 187
    },
    {
      "epoch": 0.13003631333218052,
      "grad_norm": 0.5041804313659668,
      "learning_rate": 9.169701383831027e-05,
      "loss": 1.6619,
      "step": 188
    },
    {
      "epoch": 0.13072799584990488,
      "grad_norm": 0.5292882323265076,
      "learning_rate": 9.162418062636563e-05,
      "loss": 1.3204,
      "step": 189
    },
    {
      "epoch": 0.13141967836762924,
      "grad_norm": 0.9279568791389465,
      "learning_rate": 9.155134741442098e-05,
      "loss": 1.2023,
      "step": 190
    },
    {
      "epoch": 0.13211136088535363,
      "grad_norm": 0.8143125772476196,
      "learning_rate": 9.147851420247633e-05,
      "loss": 1.5768,
      "step": 191
    },
    {
      "epoch": 0.132803043403078,
      "grad_norm": 0.9918676614761353,
      "learning_rate": 9.140568099053169e-05,
      "loss": 1.3813,
      "step": 192
    },
    {
      "epoch": 0.13349472592080236,
      "grad_norm": 0.6452721357345581,
      "learning_rate": 9.133284777858704e-05,
      "loss": 2.4017,
      "step": 193
    },
    {
      "epoch": 0.13418640843852672,
      "grad_norm": 0.7932286858558655,
      "learning_rate": 9.12600145666424e-05,
      "loss": 1.1203,
      "step": 194
    },
    {
      "epoch": 0.13487809095625108,
      "grad_norm": 0.5378614068031311,
      "learning_rate": 9.118718135469774e-05,
      "loss": 1.9103,
      "step": 195
    },
    {
      "epoch": 0.13556977347397545,
      "grad_norm": 0.6193660497665405,
      "learning_rate": 9.11143481427531e-05,
      "loss": 2.0729,
      "step": 196
    },
    {
      "epoch": 0.1362614559916998,
      "grad_norm": 0.8437771797180176,
      "learning_rate": 9.104151493080845e-05,
      "loss": 2.4214,
      "step": 197
    },
    {
      "epoch": 0.13695313850942417,
      "grad_norm": 0.8080397248268127,
      "learning_rate": 9.09686817188638e-05,
      "loss": 1.2323,
      "step": 198
    },
    {
      "epoch": 0.13764482102714853,
      "grad_norm": 0.5907486081123352,
      "learning_rate": 9.089584850691916e-05,
      "loss": 1.6866,
      "step": 199
    },
    {
      "epoch": 0.1383365035448729,
      "grad_norm": 0.6559591293334961,
      "learning_rate": 9.082301529497451e-05,
      "loss": 2.1268,
      "step": 200
    },
    {
      "epoch": 0.13902818606259726,
      "grad_norm": 0.6248623728752136,
      "learning_rate": 9.075018208302987e-05,
      "loss": 1.2631,
      "step": 201
    },
    {
      "epoch": 0.13971986858032162,
      "grad_norm": 0.5625057220458984,
      "learning_rate": 9.067734887108522e-05,
      "loss": 1.1348,
      "step": 202
    },
    {
      "epoch": 0.14041155109804598,
      "grad_norm": 0.5977293252944946,
      "learning_rate": 9.060451565914058e-05,
      "loss": 1.136,
      "step": 203
    },
    {
      "epoch": 0.14110323361577037,
      "grad_norm": 0.6973084211349487,
      "learning_rate": 9.053168244719593e-05,
      "loss": 2.0764,
      "step": 204
    },
    {
      "epoch": 0.14179491613349474,
      "grad_norm": 0.761034369468689,
      "learning_rate": 9.045884923525127e-05,
      "loss": 2.4017,
      "step": 205
    },
    {
      "epoch": 0.1424865986512191,
      "grad_norm": 0.7098332047462463,
      "learning_rate": 9.038601602330664e-05,
      "loss": 1.6705,
      "step": 206
    },
    {
      "epoch": 0.14317828116894346,
      "grad_norm": 0.6458803415298462,
      "learning_rate": 9.031318281136198e-05,
      "loss": 1.2114,
      "step": 207
    },
    {
      "epoch": 0.14386996368666782,
      "grad_norm": 0.6271463632583618,
      "learning_rate": 9.024034959941734e-05,
      "loss": 1.6756,
      "step": 208
    },
    {
      "epoch": 0.14456164620439219,
      "grad_norm": 0.6152657866477966,
      "learning_rate": 9.016751638747269e-05,
      "loss": 1.5943,
      "step": 209
    },
    {
      "epoch": 0.14525332872211655,
      "grad_norm": 0.6465914845466614,
      "learning_rate": 9.009468317552805e-05,
      "loss": 1.8369,
      "step": 210
    },
    {
      "epoch": 0.1459450112398409,
      "grad_norm": 2.220533847808838,
      "learning_rate": 9.00218499635834e-05,
      "loss": 2.7842,
      "step": 211
    },
    {
      "epoch": 0.14663669375756527,
      "grad_norm": 0.562271773815155,
      "learning_rate": 8.994901675163875e-05,
      "loss": 1.9363,
      "step": 212
    },
    {
      "epoch": 0.14732837627528964,
      "grad_norm": 0.646512508392334,
      "learning_rate": 8.987618353969411e-05,
      "loss": 1.9429,
      "step": 213
    },
    {
      "epoch": 0.148020058793014,
      "grad_norm": 0.8589336276054382,
      "learning_rate": 8.980335032774946e-05,
      "loss": 1.6978,
      "step": 214
    },
    {
      "epoch": 0.14871174131073836,
      "grad_norm": 0.7746891975402832,
      "learning_rate": 8.973051711580482e-05,
      "loss": 1.1804,
      "step": 215
    },
    {
      "epoch": 0.14940342382846272,
      "grad_norm": 0.809123158454895,
      "learning_rate": 8.965768390386016e-05,
      "loss": 1.5176,
      "step": 216
    },
    {
      "epoch": 0.1500951063461871,
      "grad_norm": 0.6705849170684814,
      "learning_rate": 8.958485069191553e-05,
      "loss": 2.1671,
      "step": 217
    },
    {
      "epoch": 0.15078678886391148,
      "grad_norm": 0.5576826333999634,
      "learning_rate": 8.951201747997087e-05,
      "loss": 1.2096,
      "step": 218
    },
    {
      "epoch": 0.15147847138163584,
      "grad_norm": 0.8005768060684204,
      "learning_rate": 8.943918426802622e-05,
      "loss": 2.3528,
      "step": 219
    },
    {
      "epoch": 0.1521701538993602,
      "grad_norm": 0.7752550840377808,
      "learning_rate": 8.936635105608158e-05,
      "loss": 1.7417,
      "step": 220
    },
    {
      "epoch": 0.15286183641708456,
      "grad_norm": 0.819672703742981,
      "learning_rate": 8.929351784413693e-05,
      "loss": 2.0707,
      "step": 221
    },
    {
      "epoch": 0.15355351893480892,
      "grad_norm": 1.488770842552185,
      "learning_rate": 8.922068463219229e-05,
      "loss": 1.7984,
      "step": 222
    },
    {
      "epoch": 0.1542452014525333,
      "grad_norm": 0.7640134692192078,
      "learning_rate": 8.914785142024764e-05,
      "loss": 1.858,
      "step": 223
    },
    {
      "epoch": 0.15493688397025765,
      "grad_norm": 0.8346788883209229,
      "learning_rate": 8.907501820830299e-05,
      "loss": 1.7544,
      "step": 224
    },
    {
      "epoch": 0.155628566487982,
      "grad_norm": 0.5825446844100952,
      "learning_rate": 8.900218499635835e-05,
      "loss": 1.8188,
      "step": 225
    },
    {
      "epoch": 0.15632024900570637,
      "grad_norm": 0.7010117173194885,
      "learning_rate": 8.89293517844137e-05,
      "loss": 1.9475,
      "step": 226
    },
    {
      "epoch": 0.15701193152343074,
      "grad_norm": 0.5123314261436462,
      "learning_rate": 8.885651857246906e-05,
      "loss": 2.1235,
      "step": 227
    },
    {
      "epoch": 0.1577036140411551,
      "grad_norm": 0.62752366065979,
      "learning_rate": 8.87836853605244e-05,
      "loss": 1.872,
      "step": 228
    },
    {
      "epoch": 0.15839529655887946,
      "grad_norm": 0.6518980264663696,
      "learning_rate": 8.871085214857976e-05,
      "loss": 1.4556,
      "step": 229
    },
    {
      "epoch": 0.15908697907660385,
      "grad_norm": 0.7556332349777222,
      "learning_rate": 8.863801893663511e-05,
      "loss": 2.0122,
      "step": 230
    },
    {
      "epoch": 0.15977866159432821,
      "grad_norm": 0.6503921747207642,
      "learning_rate": 8.856518572469046e-05,
      "loss": 2.0475,
      "step": 231
    },
    {
      "epoch": 0.16047034411205258,
      "grad_norm": 0.7834317088127136,
      "learning_rate": 8.849235251274582e-05,
      "loss": 1.8671,
      "step": 232
    },
    {
      "epoch": 0.16116202662977694,
      "grad_norm": 2.123568534851074,
      "learning_rate": 8.841951930080117e-05,
      "loss": 1.4212,
      "step": 233
    },
    {
      "epoch": 0.1618537091475013,
      "grad_norm": 0.5151258111000061,
      "learning_rate": 8.834668608885653e-05,
      "loss": 1.1815,
      "step": 234
    },
    {
      "epoch": 0.16254539166522566,
      "grad_norm": 0.6445286273956299,
      "learning_rate": 8.827385287691188e-05,
      "loss": 2.1009,
      "step": 235
    },
    {
      "epoch": 0.16323707418295003,
      "grad_norm": 0.9389359951019287,
      "learning_rate": 8.820101966496724e-05,
      "loss": 2.3638,
      "step": 236
    },
    {
      "epoch": 0.1639287567006744,
      "grad_norm": 0.7288535833358765,
      "learning_rate": 8.812818645302258e-05,
      "loss": 1.5602,
      "step": 237
    },
    {
      "epoch": 0.16462043921839875,
      "grad_norm": 0.5802240967750549,
      "learning_rate": 8.805535324107793e-05,
      "loss": 1.8588,
      "step": 238
    },
    {
      "epoch": 0.1653121217361231,
      "grad_norm": 0.6751568913459778,
      "learning_rate": 8.798252002913329e-05,
      "loss": 2.0206,
      "step": 239
    },
    {
      "epoch": 0.16600380425384748,
      "grad_norm": 0.6931467056274414,
      "learning_rate": 8.790968681718864e-05,
      "loss": 1.9663,
      "step": 240
    },
    {
      "epoch": 0.16669548677157184,
      "grad_norm": 0.6931905150413513,
      "learning_rate": 8.7836853605244e-05,
      "loss": 1.5422,
      "step": 241
    },
    {
      "epoch": 0.1673871692892962,
      "grad_norm": 0.7654033899307251,
      "learning_rate": 8.776402039329935e-05,
      "loss": 2.2165,
      "step": 242
    },
    {
      "epoch": 0.1680788518070206,
      "grad_norm": 0.8461145162582397,
      "learning_rate": 8.769118718135471e-05,
      "loss": 2.1441,
      "step": 243
    },
    {
      "epoch": 0.16877053432474495,
      "grad_norm": 0.6745240092277527,
      "learning_rate": 8.761835396941004e-05,
      "loss": 2.0226,
      "step": 244
    },
    {
      "epoch": 0.16946221684246932,
      "grad_norm": 0.9059496521949768,
      "learning_rate": 8.75455207574654e-05,
      "loss": 1.3178,
      "step": 245
    },
    {
      "epoch": 0.17015389936019368,
      "grad_norm": 0.7393327951431274,
      "learning_rate": 8.747268754552075e-05,
      "loss": 1.7787,
      "step": 246
    },
    {
      "epoch": 0.17084558187791804,
      "grad_norm": 1.1192548274993896,
      "learning_rate": 8.739985433357611e-05,
      "loss": 1.2677,
      "step": 247
    },
    {
      "epoch": 0.1715372643956424,
      "grad_norm": 0.500165581703186,
      "learning_rate": 8.732702112163147e-05,
      "loss": 1.6749,
      "step": 248
    },
    {
      "epoch": 0.17222894691336676,
      "grad_norm": 0.7118367552757263,
      "learning_rate": 8.725418790968682e-05,
      "loss": 1.6768,
      "step": 249
    },
    {
      "epoch": 0.17292062943109113,
      "grad_norm": 0.6697540283203125,
      "learning_rate": 8.718135469774218e-05,
      "loss": 1.7236,
      "step": 250
    },
    {
      "epoch": 0.1736123119488155,
      "grad_norm": 0.621541440486908,
      "learning_rate": 8.710852148579752e-05,
      "loss": 1.1761,
      "step": 251
    },
    {
      "epoch": 0.17430399446653985,
      "grad_norm": 1.05433189868927,
      "learning_rate": 8.703568827385288e-05,
      "loss": 1.5783,
      "step": 252
    },
    {
      "epoch": 0.17499567698426421,
      "grad_norm": 0.7111071944236755,
      "learning_rate": 8.696285506190823e-05,
      "loss": 1.9291,
      "step": 253
    },
    {
      "epoch": 0.17568735950198858,
      "grad_norm": 0.7538565397262573,
      "learning_rate": 8.689002184996359e-05,
      "loss": 1.5598,
      "step": 254
    },
    {
      "epoch": 0.17637904201971294,
      "grad_norm": 0.7115334868431091,
      "learning_rate": 8.681718863801895e-05,
      "loss": 1.7721,
      "step": 255
    },
    {
      "epoch": 0.17707072453743733,
      "grad_norm": 0.8842628002166748,
      "learning_rate": 8.67443554260743e-05,
      "loss": 1.8147,
      "step": 256
    },
    {
      "epoch": 0.1777624070551617,
      "grad_norm": 0.6481782793998718,
      "learning_rate": 8.667152221412966e-05,
      "loss": 1.9606,
      "step": 257
    },
    {
      "epoch": 0.17845408957288605,
      "grad_norm": 0.719637393951416,
      "learning_rate": 8.659868900218499e-05,
      "loss": 1.2452,
      "step": 258
    },
    {
      "epoch": 0.17914577209061042,
      "grad_norm": 0.6171206831932068,
      "learning_rate": 8.652585579024035e-05,
      "loss": 1.367,
      "step": 259
    },
    {
      "epoch": 0.17983745460833478,
      "grad_norm": 0.618354320526123,
      "learning_rate": 8.64530225782957e-05,
      "loss": 1.9576,
      "step": 260
    },
    {
      "epoch": 0.18052913712605914,
      "grad_norm": 0.6816132664680481,
      "learning_rate": 8.638018936635106e-05,
      "loss": 1.1272,
      "step": 261
    },
    {
      "epoch": 0.1812208196437835,
      "grad_norm": 0.6745633482933044,
      "learning_rate": 8.630735615440642e-05,
      "loss": 2.1625,
      "step": 262
    },
    {
      "epoch": 0.18191250216150787,
      "grad_norm": 0.8869469165802002,
      "learning_rate": 8.623452294246177e-05,
      "loss": 2.0639,
      "step": 263
    },
    {
      "epoch": 0.18260418467923223,
      "grad_norm": 0.9394845366477966,
      "learning_rate": 8.616168973051712e-05,
      "loss": 1.6026,
      "step": 264
    },
    {
      "epoch": 0.1832958671969566,
      "grad_norm": 0.6131375432014465,
      "learning_rate": 8.608885651857246e-05,
      "loss": 2.0913,
      "step": 265
    },
    {
      "epoch": 0.18398754971468095,
      "grad_norm": 0.6758245229721069,
      "learning_rate": 8.601602330662782e-05,
      "loss": 1.2526,
      "step": 266
    },
    {
      "epoch": 0.18467923223240532,
      "grad_norm": 0.7595621943473816,
      "learning_rate": 8.594319009468317e-05,
      "loss": 2.1043,
      "step": 267
    },
    {
      "epoch": 0.18537091475012968,
      "grad_norm": 0.8051510453224182,
      "learning_rate": 8.587035688273853e-05,
      "loss": 1.5743,
      "step": 268
    },
    {
      "epoch": 0.18606259726785407,
      "grad_norm": 0.6870526671409607,
      "learning_rate": 8.57975236707939e-05,
      "loss": 1.475,
      "step": 269
    },
    {
      "epoch": 0.18675427978557843,
      "grad_norm": 0.6781574487686157,
      "learning_rate": 8.572469045884924e-05,
      "loss": 2.1719,
      "step": 270
    },
    {
      "epoch": 0.1874459623033028,
      "grad_norm": 0.7067494988441467,
      "learning_rate": 8.565185724690459e-05,
      "loss": 2.3096,
      "step": 271
    },
    {
      "epoch": 0.18813764482102716,
      "grad_norm": 0.8800467848777771,
      "learning_rate": 8.557902403495994e-05,
      "loss": 1.9574,
      "step": 272
    },
    {
      "epoch": 0.18882932733875152,
      "grad_norm": 0.5349634885787964,
      "learning_rate": 8.55061908230153e-05,
      "loss": 1.4169,
      "step": 273
    },
    {
      "epoch": 0.18952100985647588,
      "grad_norm": 0.6334154009819031,
      "learning_rate": 8.543335761107065e-05,
      "loss": 1.9563,
      "step": 274
    },
    {
      "epoch": 0.19021269237420024,
      "grad_norm": 0.8906685709953308,
      "learning_rate": 8.536052439912601e-05,
      "loss": 2.1072,
      "step": 275
    },
    {
      "epoch": 0.1909043748919246,
      "grad_norm": 0.7288912534713745,
      "learning_rate": 8.528769118718137e-05,
      "loss": 1.9452,
      "step": 276
    },
    {
      "epoch": 0.19159605740964897,
      "grad_norm": 1.1331645250320435,
      "learning_rate": 8.521485797523672e-05,
      "loss": 1.9652,
      "step": 277
    },
    {
      "epoch": 0.19228773992737333,
      "grad_norm": 0.6032379269599915,
      "learning_rate": 8.514202476329206e-05,
      "loss": 1.9178,
      "step": 278
    },
    {
      "epoch": 0.1929794224450977,
      "grad_norm": 0.6424663662910461,
      "learning_rate": 8.506919155134741e-05,
      "loss": 1.9931,
      "step": 279
    },
    {
      "epoch": 0.19367110496282205,
      "grad_norm": 0.5931838750839233,
      "learning_rate": 8.499635833940277e-05,
      "loss": 1.2334,
      "step": 280
    },
    {
      "epoch": 0.19436278748054642,
      "grad_norm": 0.7451290488243103,
      "learning_rate": 8.492352512745812e-05,
      "loss": 1.9906,
      "step": 281
    },
    {
      "epoch": 0.1950544699982708,
      "grad_norm": 0.7306643128395081,
      "learning_rate": 8.485069191551348e-05,
      "loss": 2.1793,
      "step": 282
    },
    {
      "epoch": 0.19574615251599517,
      "grad_norm": 0.6340318918228149,
      "learning_rate": 8.477785870356884e-05,
      "loss": 1.3639,
      "step": 283
    },
    {
      "epoch": 0.19643783503371953,
      "grad_norm": 0.9806922674179077,
      "learning_rate": 8.470502549162418e-05,
      "loss": 1.5363,
      "step": 284
    },
    {
      "epoch": 0.1971295175514439,
      "grad_norm": 0.6847662925720215,
      "learning_rate": 8.463219227967954e-05,
      "loss": 1.4774,
      "step": 285
    },
    {
      "epoch": 0.19782120006916826,
      "grad_norm": 0.5814263224601746,
      "learning_rate": 8.455935906773488e-05,
      "loss": 1.4903,
      "step": 286
    },
    {
      "epoch": 0.19851288258689262,
      "grad_norm": 0.582711935043335,
      "learning_rate": 8.448652585579024e-05,
      "loss": 1.479,
      "step": 287
    },
    {
      "epoch": 0.19920456510461698,
      "grad_norm": 0.8541072607040405,
      "learning_rate": 8.441369264384559e-05,
      "loss": 1.6464,
      "step": 288
    },
    {
      "epoch": 0.19989624762234134,
      "grad_norm": 0.6419352889060974,
      "learning_rate": 8.434085943190095e-05,
      "loss": 1.9266,
      "step": 289
    },
    {
      "epoch": 0.2005879301400657,
      "grad_norm": 0.694685161113739,
      "learning_rate": 8.426802621995631e-05,
      "loss": 1.5357,
      "step": 290
    },
    {
      "epoch": 0.20127961265779007,
      "grad_norm": 0.7046255469322205,
      "learning_rate": 8.419519300801165e-05,
      "loss": 1.4103,
      "step": 291
    },
    {
      "epoch": 0.20197129517551443,
      "grad_norm": 1.0252553224563599,
      "learning_rate": 8.412235979606701e-05,
      "loss": 1.8172,
      "step": 292
    },
    {
      "epoch": 0.2026629776932388,
      "grad_norm": 0.7682600021362305,
      "learning_rate": 8.404952658412236e-05,
      "loss": 1.455,
      "step": 293
    },
    {
      "epoch": 0.20335466021096316,
      "grad_norm": 0.9454272389411926,
      "learning_rate": 8.397669337217772e-05,
      "loss": 1.5882,
      "step": 294
    },
    {
      "epoch": 0.20404634272868752,
      "grad_norm": 1.333929419517517,
      "learning_rate": 8.390386016023307e-05,
      "loss": 1.6718,
      "step": 295
    },
    {
      "epoch": 0.2047380252464119,
      "grad_norm": 0.9410814046859741,
      "learning_rate": 8.383102694828843e-05,
      "loss": 2.0226,
      "step": 296
    },
    {
      "epoch": 0.20542970776413627,
      "grad_norm": 0.7768377065658569,
      "learning_rate": 8.375819373634379e-05,
      "loss": 2.0137,
      "step": 297
    },
    {
      "epoch": 0.20612139028186063,
      "grad_norm": 1.2727937698364258,
      "learning_rate": 8.368536052439912e-05,
      "loss": 1.9412,
      "step": 298
    },
    {
      "epoch": 0.206813072799585,
      "grad_norm": 0.9015520811080933,
      "learning_rate": 8.361252731245448e-05,
      "loss": 1.6939,
      "step": 299
    },
    {
      "epoch": 0.20750475531730936,
      "grad_norm": 0.5938074588775635,
      "learning_rate": 8.353969410050983e-05,
      "loss": 1.3246,
      "step": 300
    },
    {
      "epoch": 0.20819643783503372,
      "grad_norm": 0.7636774182319641,
      "learning_rate": 8.346686088856519e-05,
      "loss": 1.687,
      "step": 301
    },
    {
      "epoch": 0.20888812035275808,
      "grad_norm": 0.8219266533851624,
      "learning_rate": 8.339402767662054e-05,
      "loss": 1.9761,
      "step": 302
    },
    {
      "epoch": 0.20957980287048245,
      "grad_norm": 0.8183760643005371,
      "learning_rate": 8.33211944646759e-05,
      "loss": 2.2887,
      "step": 303
    },
    {
      "epoch": 0.2102714853882068,
      "grad_norm": 0.8675097227096558,
      "learning_rate": 8.324836125273125e-05,
      "loss": 2.0024,
      "step": 304
    },
    {
      "epoch": 0.21096316790593117,
      "grad_norm": 0.5965743660926819,
      "learning_rate": 8.31755280407866e-05,
      "loss": 1.4569,
      "step": 305
    },
    {
      "epoch": 0.21165485042365553,
      "grad_norm": 0.8379791378974915,
      "learning_rate": 8.310269482884196e-05,
      "loss": 1.9685,
      "step": 306
    },
    {
      "epoch": 0.2123465329413799,
      "grad_norm": 0.5711637735366821,
      "learning_rate": 8.30298616168973e-05,
      "loss": 1.8149,
      "step": 307
    },
    {
      "epoch": 0.21303821545910426,
      "grad_norm": 0.8288681507110596,
      "learning_rate": 8.295702840495266e-05,
      "loss": 2.0087,
      "step": 308
    },
    {
      "epoch": 0.21372989797682865,
      "grad_norm": 0.7129418849945068,
      "learning_rate": 8.288419519300801e-05,
      "loss": 1.7503,
      "step": 309
    },
    {
      "epoch": 0.214421580494553,
      "grad_norm": 0.7114739418029785,
      "learning_rate": 8.281136198106337e-05,
      "loss": 1.6055,
      "step": 310
    },
    {
      "epoch": 0.21511326301227737,
      "grad_norm": 0.821076512336731,
      "learning_rate": 8.273852876911872e-05,
      "loss": 1.6553,
      "step": 311
    },
    {
      "epoch": 0.21580494553000173,
      "grad_norm": 0.7488536238670349,
      "learning_rate": 8.266569555717407e-05,
      "loss": 1.9126,
      "step": 312
    },
    {
      "epoch": 0.2164966280477261,
      "grad_norm": 0.822170615196228,
      "learning_rate": 8.259286234522943e-05,
      "loss": 2.0362,
      "step": 313
    },
    {
      "epoch": 0.21718831056545046,
      "grad_norm": 0.7749188542366028,
      "learning_rate": 8.252002913328478e-05,
      "loss": 1.6924,
      "step": 314
    },
    {
      "epoch": 0.21787999308317482,
      "grad_norm": 0.7008628845214844,
      "learning_rate": 8.244719592134014e-05,
      "loss": 2.1542,
      "step": 315
    },
    {
      "epoch": 0.21857167560089918,
      "grad_norm": 0.8683955669403076,
      "learning_rate": 8.237436270939549e-05,
      "loss": 1.6697,
      "step": 316
    },
    {
      "epoch": 0.21926335811862355,
      "grad_norm": 0.7530885338783264,
      "learning_rate": 8.230152949745085e-05,
      "loss": 1.2485,
      "step": 317
    },
    {
      "epoch": 0.2199550406363479,
      "grad_norm": 0.787668764591217,
      "learning_rate": 8.22286962855062e-05,
      "loss": 1.1665,
      "step": 318
    },
    {
      "epoch": 0.22064672315407227,
      "grad_norm": 0.7304548025131226,
      "learning_rate": 8.215586307356154e-05,
      "loss": 2.0012,
      "step": 319
    },
    {
      "epoch": 0.22133840567179663,
      "grad_norm": 0.6933902502059937,
      "learning_rate": 8.20830298616169e-05,
      "loss": 1.8554,
      "step": 320
    },
    {
      "epoch": 0.222030088189521,
      "grad_norm": 1.5794916152954102,
      "learning_rate": 8.201019664967225e-05,
      "loss": 1.6497,
      "step": 321
    },
    {
      "epoch": 0.2227217707072454,
      "grad_norm": 0.788654625415802,
      "learning_rate": 8.193736343772761e-05,
      "loss": 1.9427,
      "step": 322
    },
    {
      "epoch": 0.22341345322496975,
      "grad_norm": 0.6310513019561768,
      "learning_rate": 8.186453022578296e-05,
      "loss": 1.1892,
      "step": 323
    },
    {
      "epoch": 0.2241051357426941,
      "grad_norm": 0.7976911664009094,
      "learning_rate": 8.17916970138383e-05,
      "loss": 1.8037,
      "step": 324
    },
    {
      "epoch": 0.22479681826041847,
      "grad_norm": 0.8486120104789734,
      "learning_rate": 8.171886380189367e-05,
      "loss": 0.8769,
      "step": 325
    },
    {
      "epoch": 0.22548850077814284,
      "grad_norm": 0.6910994648933411,
      "learning_rate": 8.164603058994901e-05,
      "loss": 1.2035,
      "step": 326
    },
    {
      "epoch": 0.2261801832958672,
      "grad_norm": 0.7402215003967285,
      "learning_rate": 8.157319737800438e-05,
      "loss": 1.8359,
      "step": 327
    },
    {
      "epoch": 0.22687186581359156,
      "grad_norm": 0.8741387724876404,
      "learning_rate": 8.150036416605972e-05,
      "loss": 2.1804,
      "step": 328
    },
    {
      "epoch": 0.22756354833131592,
      "grad_norm": 0.7987048625946045,
      "learning_rate": 8.142753095411508e-05,
      "loss": 1.8705,
      "step": 329
    },
    {
      "epoch": 0.22825523084904029,
      "grad_norm": 0.9527040123939514,
      "learning_rate": 8.135469774217043e-05,
      "loss": 1.9079,
      "step": 330
    },
    {
      "epoch": 0.22894691336676465,
      "grad_norm": 0.7261909246444702,
      "learning_rate": 8.128186453022578e-05,
      "loss": 1.9903,
      "step": 331
    },
    {
      "epoch": 0.229638595884489,
      "grad_norm": 0.7130205035209656,
      "learning_rate": 8.120903131828114e-05,
      "loss": 1.9952,
      "step": 332
    },
    {
      "epoch": 0.23033027840221337,
      "grad_norm": 0.6477673649787903,
      "learning_rate": 8.113619810633649e-05,
      "loss": 1.4532,
      "step": 333
    },
    {
      "epoch": 0.23102196091993774,
      "grad_norm": 0.7342897653579712,
      "learning_rate": 8.106336489439185e-05,
      "loss": 1.9345,
      "step": 334
    },
    {
      "epoch": 0.23171364343766213,
      "grad_norm": 0.9584718346595764,
      "learning_rate": 8.09905316824472e-05,
      "loss": 2.1087,
      "step": 335
    },
    {
      "epoch": 0.2324053259553865,
      "grad_norm": 0.7027872204780579,
      "learning_rate": 8.091769847050256e-05,
      "loss": 1.9372,
      "step": 336
    },
    {
      "epoch": 0.23309700847311085,
      "grad_norm": 0.5855789184570312,
      "learning_rate": 8.08448652585579e-05,
      "loss": 1.7773,
      "step": 337
    },
    {
      "epoch": 0.2337886909908352,
      "grad_norm": 0.6886003017425537,
      "learning_rate": 8.077203204661325e-05,
      "loss": 1.7705,
      "step": 338
    },
    {
      "epoch": 0.23448037350855958,
      "grad_norm": 0.6997604966163635,
      "learning_rate": 8.069919883466861e-05,
      "loss": 1.3358,
      "step": 339
    },
    {
      "epoch": 0.23517205602628394,
| "grad_norm": 0.6345038414001465, | |
| "learning_rate": 8.062636562272396e-05, | |
| "loss": 1.2833, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.2358637385440083, | |
| "grad_norm": 0.6772000789642334, | |
| "learning_rate": 8.055353241077932e-05, | |
| "loss": 1.5009, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 0.23655542106173266, | |
| "grad_norm": 0.7856854200363159, | |
| "learning_rate": 8.048069919883467e-05, | |
| "loss": 1.8073, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.23724710357945702, | |
| "grad_norm": 0.729583740234375, | |
| "learning_rate": 8.040786598689003e-05, | |
| "loss": 2.3742, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.2379387860971814, | |
| "grad_norm": 0.6015090346336365, | |
| "learning_rate": 8.033503277494538e-05, | |
| "loss": 1.0806, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.23863046861490575, | |
| "grad_norm": 0.6671364307403564, | |
| "learning_rate": 8.026219956300073e-05, | |
| "loss": 1.3656, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.2393221511326301, | |
| "grad_norm": 0.7358818054199219, | |
| "learning_rate": 8.018936635105609e-05, | |
| "loss": 1.769, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.24001383365035447, | |
| "grad_norm": 0.697853147983551, | |
| "learning_rate": 8.011653313911143e-05, | |
| "loss": 1.596, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.24070551616807886, | |
| "grad_norm": 0.6559710502624512, | |
| "learning_rate": 8.00436999271668e-05, | |
| "loss": 1.4413, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.24139719868580323, | |
| "grad_norm": 0.770010232925415, | |
| "learning_rate": 7.997086671522214e-05, | |
| "loss": 2.2503, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.2420888812035276, | |
| "grad_norm": 0.6468133926391602, | |
| "learning_rate": 7.98980335032775e-05, | |
| "loss": 2.0731, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.24278056372125195, | |
| "grad_norm": 0.8330799341201782, | |
| "learning_rate": 7.982520029133285e-05, | |
| "loss": 1.5362, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.24347224623897631, | |
| "grad_norm": 1.0678739547729492, | |
| "learning_rate": 7.97523670793882e-05, | |
| "loss": 1.814, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.24416392875670068, | |
| "grad_norm": 0.8693723678588867, | |
| "learning_rate": 7.967953386744356e-05, | |
| "loss": 2.3669, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.24485561127442504, | |
| "grad_norm": 0.7095674872398376, | |
| "learning_rate": 7.960670065549891e-05, | |
| "loss": 1.2215, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.2455472937921494, | |
| "grad_norm": 0.7264676094055176, | |
| "learning_rate": 7.953386744355427e-05, | |
| "loss": 1.7455, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.24623897630987376, | |
| "grad_norm": 0.7226927280426025, | |
| "learning_rate": 7.946103423160962e-05, | |
| "loss": 1.722, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.24693065882759813, | |
| "grad_norm": 0.8582272529602051, | |
| "learning_rate": 7.938820101966498e-05, | |
| "loss": 2.2391, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.2476223413453225, | |
| "grad_norm": 0.633518636226654, | |
| "learning_rate": 7.931536780772032e-05, | |
| "loss": 1.8529, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.24831402386304685, | |
| "grad_norm": 0.9155897498130798, | |
| "learning_rate": 7.924253459577567e-05, | |
| "loss": 1.5049, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.2490057063807712, | |
| "grad_norm": 0.8335781097412109, | |
| "learning_rate": 7.916970138383103e-05, | |
| "loss": 2.0309, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.2496973888984956, | |
| "grad_norm": 0.7219622731208801, | |
| "learning_rate": 7.909686817188638e-05, | |
| "loss": 1.1584, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.25038907141621997, | |
| "grad_norm": 0.8952975869178772, | |
| "learning_rate": 7.902403495994174e-05, | |
| "loss": 2.1489, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.2510807539339443, | |
| "grad_norm": 0.7409587502479553, | |
| "learning_rate": 7.895120174799709e-05, | |
| "loss": 2.0858, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.2517724364516687, | |
| "grad_norm": 0.5998899936676025, | |
| "learning_rate": 7.887836853605244e-05, | |
| "loss": 1.0058, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.252464118969393, | |
| "grad_norm": 0.7379968762397766, | |
| "learning_rate": 7.88055353241078e-05, | |
| "loss": 1.2178, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.2531558014871174, | |
| "grad_norm": 1.2425731420516968, | |
| "learning_rate": 7.873270211216315e-05, | |
| "loss": 1.5815, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.25384748400484175, | |
| "grad_norm": 0.7375080585479736, | |
| "learning_rate": 7.86598689002185e-05, | |
| "loss": 2.2113, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.25453916652256614, | |
| "grad_norm": 1.1793482303619385, | |
| "learning_rate": 7.858703568827385e-05, | |
| "loss": 1.7387, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.25523084904029053, | |
| "grad_norm": 1.397199273109436, | |
| "learning_rate": 7.851420247632922e-05, | |
| "loss": 1.288, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.25592253155801487, | |
| "grad_norm": 0.7143409252166748, | |
| "learning_rate": 7.844136926438456e-05, | |
| "loss": 1.6329, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.25661421407573926, | |
| "grad_norm": 0.6984686255455017, | |
| "learning_rate": 7.836853605243991e-05, | |
| "loss": 1.7279, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.2573058965934636, | |
| "grad_norm": 0.7117382287979126, | |
| "learning_rate": 7.829570284049527e-05, | |
| "loss": 1.8195, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.257997579111188, | |
| "grad_norm": 0.7039769887924194, | |
| "learning_rate": 7.822286962855062e-05, | |
| "loss": 1.8619, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.2586892616289123, | |
| "grad_norm": 1.0917625427246094, | |
| "learning_rate": 7.815003641660598e-05, | |
| "loss": 1.8889, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.2593809441466367, | |
| "grad_norm": 0.6653533577919006, | |
| "learning_rate": 7.807720320466133e-05, | |
| "loss": 1.8691, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.26007262666436104, | |
| "grad_norm": 0.7204722166061401, | |
| "learning_rate": 7.800436999271669e-05, | |
| "loss": 1.4544, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.26076430918208543, | |
| "grad_norm": 0.523810863494873, | |
| "learning_rate": 7.793153678077204e-05, | |
| "loss": 0.9303, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.26145599169980976, | |
| "grad_norm": 0.6838625073432922, | |
| "learning_rate": 7.785870356882738e-05, | |
| "loss": 1.6911, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.26214767421753415, | |
| "grad_norm": 0.7531947493553162, | |
| "learning_rate": 7.778587035688274e-05, | |
| "loss": 1.8134, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.2628393567352585, | |
| "grad_norm": 0.7090226411819458, | |
| "learning_rate": 7.771303714493809e-05, | |
| "loss": 2.1142, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.2635310392529829, | |
| "grad_norm": 0.7452750205993652, | |
| "learning_rate": 7.764020393299345e-05, | |
| "loss": 1.3778, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.26422272177070727, | |
| "grad_norm": 0.709818422794342, | |
| "learning_rate": 7.75673707210488e-05, | |
| "loss": 1.0387, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.2649144042884316, | |
| "grad_norm": 0.7260245680809021, | |
| "learning_rate": 7.749453750910416e-05, | |
| "loss": 1.5823, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.265606086806156, | |
| "grad_norm": 1.0123919248580933, | |
| "learning_rate": 7.742170429715951e-05, | |
| "loss": 1.7737, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.26629776932388033, | |
| "grad_norm": 0.92912757396698, | |
| "learning_rate": 7.734887108521486e-05, | |
| "loss": 1.0985, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.2669894518416047, | |
| "grad_norm": 0.7285180687904358, | |
| "learning_rate": 7.727603787327022e-05, | |
| "loss": 1.6727, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.26768113435932905, | |
| "grad_norm": 0.9794962406158447, | |
| "learning_rate": 7.720320466132557e-05, | |
| "loss": 2.2499, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.26837281687705344, | |
| "grad_norm": 0.8266851902008057, | |
| "learning_rate": 7.713037144938093e-05, | |
| "loss": 1.7979, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.2690644993947778, | |
| "grad_norm": 0.8111055493354797, | |
| "learning_rate": 7.705753823743627e-05, | |
| "loss": 1.6586, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.26975618191250217, | |
| "grad_norm": 0.76368248462677, | |
| "learning_rate": 7.698470502549163e-05, | |
| "loss": 1.6407, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.2704478644302265, | |
| "grad_norm": 0.7116588950157166, | |
| "learning_rate": 7.691187181354698e-05, | |
| "loss": 2.0222, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.2711395469479509, | |
| "grad_norm": 0.7411038875579834, | |
| "learning_rate": 7.683903860160233e-05, | |
| "loss": 1.9575, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.27183122946567523, | |
| "grad_norm": 0.6649416089057922, | |
| "learning_rate": 7.676620538965769e-05, | |
| "loss": 1.0947, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.2725229119833996, | |
| "grad_norm": 0.8183586597442627, | |
| "learning_rate": 7.669337217771304e-05, | |
| "loss": 1.5777, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.273214594501124, | |
| "grad_norm": 0.7400631904602051, | |
| "learning_rate": 7.66205389657684e-05, | |
| "loss": 1.9312, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.27390627701884834, | |
| "grad_norm": 0.6919555068016052, | |
| "learning_rate": 7.654770575382375e-05, | |
| "loss": 1.8406, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.27459795953657273, | |
| "grad_norm": 1.2769502401351929, | |
| "learning_rate": 7.647487254187911e-05, | |
| "loss": 2.2262, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.27528964205429707, | |
| "grad_norm": 0.9513121247291565, | |
| "learning_rate": 7.640203932993446e-05, | |
| "loss": 2.0865, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.27598132457202146, | |
| "grad_norm": 0.7086082100868225, | |
| "learning_rate": 7.63292061179898e-05, | |
| "loss": 1.8785, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.2766730070897458, | |
| "grad_norm": 0.7725284695625305, | |
| "learning_rate": 7.625637290604516e-05, | |
| "loss": 1.6341, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.2773646896074702, | |
| "grad_norm": 0.819273829460144, | |
| "learning_rate": 7.618353969410051e-05, | |
| "loss": 1.6705, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.2780563721251945, | |
| "grad_norm": 0.7302514910697937, | |
| "learning_rate": 7.611070648215587e-05, | |
| "loss": 2.2798, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.2787480546429189, | |
| "grad_norm": 0.7020559906959534, | |
| "learning_rate": 7.603787327021122e-05, | |
| "loss": 1.549, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.27943973716064324, | |
| "grad_norm": 0.6759452819824219, | |
| "learning_rate": 7.596504005826657e-05, | |
| "loss": 0.9358, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.28013141967836763, | |
| "grad_norm": 0.8225228190422058, | |
| "learning_rate": 7.589220684632193e-05, | |
| "loss": 0.764, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.28082310219609197, | |
| "grad_norm": 0.6742099523544312, | |
| "learning_rate": 7.581937363437728e-05, | |
| "loss": 1.9551, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.28151478471381636, | |
| "grad_norm": 0.6638647317886353, | |
| "learning_rate": 7.574654042243264e-05, | |
| "loss": 1.1788, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.28220646723154075, | |
| "grad_norm": 0.8848326802253723, | |
| "learning_rate": 7.567370721048798e-05, | |
| "loss": 1.2803, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.2828981497492651, | |
| "grad_norm": 0.8638424873352051, | |
| "learning_rate": 7.560087399854335e-05, | |
| "loss": 2.0904, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.28358983226698947, | |
| "grad_norm": 0.8653358221054077, | |
| "learning_rate": 7.55280407865987e-05, | |
| "loss": 1.5278, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.2842815147847138, | |
| "grad_norm": 0.9951801896095276, | |
| "learning_rate": 7.545520757465404e-05, | |
| "loss": 1.7283, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.2849731973024382, | |
| "grad_norm": 0.6709397435188293, | |
| "learning_rate": 7.53823743627094e-05, | |
| "loss": 1.7356, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.28566487982016253, | |
| "grad_norm": 0.6480190753936768, | |
| "learning_rate": 7.530954115076475e-05, | |
| "loss": 1.4348, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.2863565623378869, | |
| "grad_norm": 0.9461554288864136, | |
| "learning_rate": 7.523670793882011e-05, | |
| "loss": 1.7292, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.28704824485561126, | |
| "grad_norm": 0.632386326789856, | |
| "learning_rate": 7.516387472687546e-05, | |
| "loss": 1.7186, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.28773992737333565, | |
| "grad_norm": 0.702663004398346, | |
| "learning_rate": 7.509104151493082e-05, | |
| "loss": 1.9939, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.28843160989106, | |
| "grad_norm": 0.8615918755531311, | |
| "learning_rate": 7.501820830298617e-05, | |
| "loss": 1.195, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.28912329240878437, | |
| "grad_norm": 0.9229804277420044, | |
| "learning_rate": 7.494537509104151e-05, | |
| "loss": 1.155, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.2898149749265087, | |
| "grad_norm": 0.8323528170585632, | |
| "learning_rate": 7.487254187909688e-05, | |
| "loss": 1.9316, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.2905066574442331, | |
| "grad_norm": 1.3782216310501099, | |
| "learning_rate": 7.479970866715222e-05, | |
| "loss": 1.0946, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.2911983399619575, | |
| "grad_norm": 0.6939939856529236, | |
| "learning_rate": 7.472687545520758e-05, | |
| "loss": 0.9669, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.2918900224796818, | |
| "grad_norm": 0.8428712487220764, | |
| "learning_rate": 7.465404224326293e-05, | |
| "loss": 1.0747, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.2925817049974062, | |
| "grad_norm": 0.8275536298751831, | |
| "learning_rate": 7.458120903131829e-05, | |
| "loss": 1.5087, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.29327338751513055, | |
| "grad_norm": 0.6948786973953247, | |
| "learning_rate": 7.450837581937363e-05, | |
| "loss": 1.604, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.29396507003285494, | |
| "grad_norm": 0.807960569858551, | |
| "learning_rate": 7.443554260742899e-05, | |
| "loss": 2.1098, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.29465675255057927, | |
| "grad_norm": 0.7478271722793579, | |
| "learning_rate": 7.436270939548435e-05, | |
| "loss": 2.1171, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.29534843506830366, | |
| "grad_norm": 0.6956362128257751, | |
| "learning_rate": 7.42898761835397e-05, | |
| "loss": 1.8568, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.296040117586028, | |
| "grad_norm": 1.215596079826355, | |
| "learning_rate": 7.421704297159506e-05, | |
| "loss": 1.6761, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.2967318001037524, | |
| "grad_norm": 0.623933732509613, | |
| "learning_rate": 7.41442097596504e-05, | |
| "loss": 1.807, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.2974234826214767, | |
| "grad_norm": 0.9105553030967712, | |
| "learning_rate": 7.407137654770577e-05, | |
| "loss": 1.8098, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.2981151651392011, | |
| "grad_norm": 0.7315935492515564, | |
| "learning_rate": 7.39985433357611e-05, | |
| "loss": 1.6137, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.29880684765692545, | |
| "grad_norm": 0.6291669011116028, | |
| "learning_rate": 7.392571012381646e-05, | |
| "loss": 1.3534, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.29949853017464984, | |
| "grad_norm": 0.8852145671844482, | |
| "learning_rate": 7.385287691187182e-05, | |
| "loss": 1.8691, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.3001902126923742, | |
| "grad_norm": 0.8524838089942932, | |
| "learning_rate": 7.378004369992717e-05, | |
| "loss": 1.8757, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.30088189521009856, | |
| "grad_norm": 0.6716812252998352, | |
| "learning_rate": 7.370721048798253e-05, | |
| "loss": 1.6939, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.30157357772782295, | |
| "grad_norm": 0.9623708128929138, | |
| "learning_rate": 7.363437727603788e-05, | |
| "loss": 1.969, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.3022652602455473, | |
| "grad_norm": 0.8178302049636841, | |
| "learning_rate": 7.356154406409324e-05, | |
| "loss": 1.9667, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.3029569427632717, | |
| "grad_norm": 0.8692168593406677, | |
| "learning_rate": 7.348871085214857e-05, | |
| "loss": 1.8676, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.303648625280996, | |
| "grad_norm": 0.7447508573532104, | |
| "learning_rate": 7.341587764020393e-05, | |
| "loss": 1.1265, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.3043403077987204, | |
| "grad_norm": 1.3428897857666016, | |
| "learning_rate": 7.33430444282593e-05, | |
| "loss": 1.7997, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.30503199031644473, | |
| "grad_norm": 0.813104510307312, | |
| "learning_rate": 7.327021121631464e-05, | |
| "loss": 2.0517, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.3057236728341691, | |
| "grad_norm": 0.6551740169525146, | |
| "learning_rate": 7.319737800437e-05, | |
| "loss": 1.533, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.30641535535189346, | |
| "grad_norm": 0.7581228613853455, | |
| "learning_rate": 7.312454479242535e-05, | |
| "loss": 1.0951, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.30710703786961785, | |
| "grad_norm": 0.8519687652587891, | |
| "learning_rate": 7.30517115804807e-05, | |
| "loss": 1.8467, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.3077987203873422, | |
| "grad_norm": 0.5666621923446655, | |
| "learning_rate": 7.297887836853605e-05, | |
| "loss": 1.2154, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.3084904029050666, | |
| "grad_norm": 0.9918830990791321, | |
| "learning_rate": 7.290604515659141e-05, | |
| "loss": 1.3048, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.30918208542279096, | |
| "grad_norm": 0.6438243985176086, | |
| "learning_rate": 7.283321194464677e-05, | |
| "loss": 1.5674, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.3098737679405153, | |
| "grad_norm": 0.9330650568008423, | |
| "learning_rate": 7.276037873270212e-05, | |
| "loss": 1.4974, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.3105654504582397, | |
| "grad_norm": 0.6821238398551941, | |
| "learning_rate": 7.268754552075748e-05, | |
| "loss": 1.651, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.311257132975964, | |
| "grad_norm": 0.7768619656562805, | |
| "learning_rate": 7.261471230881282e-05, | |
| "loss": 2.1974, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.3119488154936884, | |
| "grad_norm": 0.8299420475959778, | |
| "learning_rate": 7.254187909686817e-05, | |
| "loss": 2.0038, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.31264049801141275, | |
| "grad_norm": 0.7438697814941406, | |
| "learning_rate": 7.246904588492352e-05, | |
| "loss": 1.4635, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.31333218052913714, | |
| "grad_norm": 1.1031484603881836, | |
| "learning_rate": 7.239621267297888e-05, | |
| "loss": 1.6887, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.3140238630468615, | |
| "grad_norm": 0.7802501916885376, | |
| "learning_rate": 7.232337946103424e-05, | |
| "loss": 1.9786, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.31471554556458586, | |
| "grad_norm": 0.6793453693389893, | |
| "learning_rate": 7.225054624908959e-05, | |
| "loss": 1.7345, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.3154072280823102, | |
| "grad_norm": 0.9189644455909729, | |
| "learning_rate": 7.217771303714495e-05, | |
| "loss": 1.7558, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.3160989106000346, | |
| "grad_norm": 0.8324854373931885, | |
| "learning_rate": 7.21048798252003e-05, | |
| "loss": 1.5798, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 0.3167905931177589, | |
| "grad_norm": 0.8758043050765991, | |
| "learning_rate": 7.203204661325565e-05, | |
| "loss": 1.2295, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 0.3174822756354833, | |
| "grad_norm": 0.9270045757293701, | |
| "learning_rate": 7.195921340131099e-05, | |
| "loss": 1.7192, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 0.3181739581532077, | |
| "grad_norm": 0.7827816605567932, | |
| "learning_rate": 7.188638018936635e-05, | |
| "loss": 1.2111, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.31886564067093204, | |
| "grad_norm": 0.8331306576728821, | |
| "learning_rate": 7.181354697742171e-05, | |
| "loss": 1.177, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 0.31955732318865643, | |
| "grad_norm": 0.8764622807502747, | |
| "learning_rate": 7.174071376547706e-05, | |
| "loss": 1.9659, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 0.32024900570638076, | |
| "grad_norm": 1.100085735321045, | |
| "learning_rate": 7.166788055353242e-05, | |
| "loss": 1.1225, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 0.32094068822410515, | |
| "grad_norm": 0.7537538409233093, | |
| "learning_rate": 7.159504734158776e-05, | |
| "loss": 1.9381, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 0.3216323707418295, | |
| "grad_norm": 0.7370760440826416, | |
| "learning_rate": 7.152221412964312e-05, | |
| "loss": 2.0892, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.3223240532595539, | |
| "grad_norm": 0.6847136616706848, | |
| "learning_rate": 7.144938091769847e-05, | |
| "loss": 2.1787, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 0.3230157357772782, | |
| "grad_norm": 0.644542396068573, | |
| "learning_rate": 7.137654770575383e-05, | |
| "loss": 1.0054, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 0.3237074182950026, | |
| "grad_norm": 0.7460236549377441, | |
| "learning_rate": 7.130371449380919e-05, | |
| "loss": 1.3674, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 0.32439910081272694, | |
| "grad_norm": 0.7521992325782776, | |
| "learning_rate": 7.123088128186454e-05, | |
| "loss": 1.5823, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 0.3250907833304513, | |
| "grad_norm": 0.6718364357948303, | |
| "learning_rate": 7.11580480699199e-05, | |
| "loss": 1.7935, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.32578246584817566, | |
| "grad_norm": 0.888697624206543, | |
| "learning_rate": 7.108521485797523e-05, | |
| "loss": 0.3041, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 0.32647414836590005, | |
| "grad_norm": 1.017770767211914, | |
| "learning_rate": 7.101238164603059e-05, | |
| "loss": 2.0955, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 0.32716583088362444, | |
| "grad_norm": 0.7191439867019653, | |
| "learning_rate": 7.093954843408594e-05, | |
| "loss": 2.1291, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 0.3278575134013488, | |
| "grad_norm": 0.6852924227714539, | |
| "learning_rate": 7.08667152221413e-05, | |
| "loss": 1.9207, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 0.32854919591907317, | |
| "grad_norm": 0.8421714901924133, | |
| "learning_rate": 7.079388201019666e-05, | |
| "loss": 1.9089, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.3292408784367975, | |
| "grad_norm": 0.8098534941673279, | |
| "learning_rate": 7.072104879825201e-05, | |
| "loss": 1.9783, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 0.3299325609545219, | |
| "grad_norm": 0.8071694374084473, | |
| "learning_rate": 7.064821558630737e-05, | |
| "loss": 2.2857, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 0.3306242434722462, | |
| "grad_norm": 0.7348088622093201, | |
| "learning_rate": 7.05753823743627e-05, | |
| "loss": 1.8574, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 0.3313159259899706, | |
| "grad_norm": 0.7593449950218201, | |
| "learning_rate": 7.050254916241806e-05, | |
| "loss": 1.8705, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 0.33200760850769495, | |
| "grad_norm": 0.9493029117584229, | |
| "learning_rate": 7.042971595047341e-05, | |
| "loss": 1.3423, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.33269929102541934, | |
| "grad_norm": 0.7410417199134827, | |
| "learning_rate": 7.035688273852877e-05, | |
| "loss": 1.5451, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 0.3333909735431437, | |
| "grad_norm": 0.7440536618232727, | |
| "learning_rate": 7.028404952658413e-05, | |
| "loss": 1.1216, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 0.33408265606086807, | |
| "grad_norm": 1.203395128250122, | |
| "learning_rate": 7.021121631463948e-05, | |
| "loss": 1.5371, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 0.3347743385785924, | |
| "grad_norm": 0.8062857985496521, | |
| "learning_rate": 7.013838310269483e-05, | |
| "loss": 1.9374, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 0.3354660210963168, | |
| "grad_norm": 1.429983139038086, | |
| "learning_rate": 7.006554989075018e-05, | |
| "loss": 2.1104, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.3361577036140412, | |
| "grad_norm": 1.058760404586792, | |
| "learning_rate": 6.999271667880554e-05, | |
| "loss": 1.6387, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 0.3368493861317655, | |
| "grad_norm": 0.8884686231613159, | |
| "learning_rate": 6.991988346686089e-05, | |
| "loss": 2.1725, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 0.3375410686494899, | |
| "grad_norm": 0.7855187058448792, | |
| "learning_rate": 6.984705025491625e-05, | |
| "loss": 1.4707, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 0.33823275116721424, | |
| "grad_norm": 0.8604965209960938, | |
| "learning_rate": 6.977421704297161e-05, | |
| "loss": 2.0402, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 0.33892443368493863, | |
| "grad_norm": 1.5640790462493896, | |
| "learning_rate": 6.970138383102696e-05, | |
| "loss": 1.1966, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.33961611620266297, | |
| "grad_norm": 0.8234649896621704, | |
| "learning_rate": 6.96285506190823e-05, | |
| "loss": 1.4036, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 0.34030779872038736, | |
| "grad_norm": 0.6905968189239502, | |
| "learning_rate": 6.955571740713765e-05, | |
| "loss": 1.1985, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 0.3409994812381117, | |
| "grad_norm": 0.6831912398338318, | |
| "learning_rate": 6.948288419519301e-05, | |
| "loss": 1.941, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 0.3416911637558361, | |
| "grad_norm": 1.2796226739883423, | |
| "learning_rate": 6.941005098324836e-05, | |
| "loss": 1.7139, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 0.3423828462735604, | |
| "grad_norm": 0.7864359021186829, | |
| "learning_rate": 6.933721777130372e-05, | |
| "loss": 1.6839, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.3430745287912848, | |
| "grad_norm": 0.7287671566009521, | |
| "learning_rate": 6.926438455935908e-05, | |
| "loss": 1.8232, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 0.34376621130900914, | |
| "grad_norm": 0.8624921441078186, | |
| "learning_rate": 6.919155134741443e-05, | |
| "loss": 1.993, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 0.34445789382673353, | |
| "grad_norm": 0.7365380525588989, | |
| "learning_rate": 6.911871813546978e-05, | |
| "loss": 1.6436, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 0.3451495763444579, | |
| "grad_norm": 0.7367970943450928, | |
| "learning_rate": 6.904588492352512e-05, | |
| "loss": 2.151, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 0.34584125886218225, | |
| "grad_norm": 0.7151849865913391, | |
| "learning_rate": 6.897305171158048e-05, | |
| "loss": 1.1134, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.34584125886218225, | |
| "eval_loss": 1.5177690982818604, | |
| "eval_runtime": 853.3383, | |
| "eval_samples_per_second": 1.506, | |
| "eval_steps_per_second": 0.754, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.34653294137990664, | |
| "grad_norm": 0.8107964992523193, | |
| "learning_rate": 6.890021849963583e-05, | |
| "loss": 2.1715, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 0.347224623897631, | |
| "grad_norm": 0.7223472595214844, | |
| "learning_rate": 6.882738528769119e-05, | |
| "loss": 1.8998, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 0.34791630641535537, | |
| "grad_norm": 0.7550078630447388, | |
| "learning_rate": 6.875455207574655e-05, | |
| "loss": 1.8598, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 0.3486079889330797, | |
| "grad_norm": 0.809738039970398, | |
| "learning_rate": 6.868171886380189e-05, | |
| "loss": 1.2291, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 0.3492996714508041, | |
| "grad_norm": 0.7400919795036316, | |
| "learning_rate": 6.860888565185725e-05, | |
| "loss": 1.6067, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.34999135396852843, | |
| "grad_norm": 0.6700140833854675, | |
| "learning_rate": 6.85360524399126e-05, | |
| "loss": 1.7842, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 0.3506830364862528, | |
| "grad_norm": 0.7716772556304932, | |
| "learning_rate": 6.846321922796796e-05, | |
| "loss": 1.2516, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 0.35137471900397715, | |
| "grad_norm": 0.6294199824333191, | |
| "learning_rate": 6.83903860160233e-05, | |
| "loss": 1.2296, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 0.35206640152170154, | |
| "grad_norm": 0.7753986716270447, | |
| "learning_rate": 6.831755280407867e-05, | |
| "loss": 1.4373, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 0.3527580840394259, | |
| "grad_norm": 0.7429059147834778, | |
| "learning_rate": 6.824471959213403e-05, | |
| "loss": 0.8905, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.35344976655715027, | |
| "grad_norm": 0.6748328804969788, | |
| "learning_rate": 6.817188638018936e-05, | |
| "loss": 1.4384, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 0.35414144907487466, | |
| "grad_norm": 1.1439710855484009, | |
| "learning_rate": 6.809905316824472e-05, | |
| "loss": 1.5329, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.354833131592599, | |
| "grad_norm": 0.7162743806838989, | |
| "learning_rate": 6.802621995630007e-05, | |
| "loss": 2.1022, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 0.3555248141103234, | |
| "grad_norm": 0.7660753726959229, | |
| "learning_rate": 6.795338674435543e-05, | |
| "loss": 2.1194, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 0.3562164966280477, | |
| "grad_norm": 0.7181832194328308, | |
| "learning_rate": 6.788055353241078e-05, | |
| "loss": 2.1352, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.3569081791457721, | |
| "grad_norm": 2.123023748397827, | |
| "learning_rate": 6.780772032046614e-05, | |
| "loss": 2.1801, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 0.35759986166349644, | |
| "grad_norm": 0.776202380657196, | |
| "learning_rate": 6.77348871085215e-05, | |
| "loss": 1.4564, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 0.35829154418122083, | |
| "grad_norm": 0.7404360175132751, | |
| "learning_rate": 6.766205389657683e-05, | |
| "loss": 1.6543, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 0.35898322669894517, | |
| "grad_norm": 0.807712197303772, | |
| "learning_rate": 6.75892206846322e-05, | |
| "loss": 2.0634, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 0.35967490921666956, | |
| "grad_norm": 0.8026816248893738, | |
| "learning_rate": 6.751638747268754e-05, | |
| "loss": 1.3129, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.3603665917343939, | |
| "grad_norm": 0.724224328994751, | |
| "learning_rate": 6.74435542607429e-05, | |
| "loss": 1.5048, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 0.3610582742521183, | |
| "grad_norm": 0.7268713116645813, | |
| "learning_rate": 6.737072104879825e-05, | |
| "loss": 1.3965, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 0.3617499567698426, | |
| "grad_norm": 0.6612991690635681, | |
| "learning_rate": 6.729788783685361e-05, | |
| "loss": 1.9812, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 0.362441639287567, | |
| "grad_norm": 1.0223232507705688, | |
| "learning_rate": 6.722505462490896e-05, | |
| "loss": 1.3578, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 0.3631333218052914, | |
| "grad_norm": 0.9744395613670349, | |
| "learning_rate": 6.715222141296431e-05, | |
| "loss": 1.6046, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.36382500432301573, | |
| "grad_norm": 1.1930111646652222, | |
| "learning_rate": 6.707938820101967e-05, | |
| "loss": 2.1776, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 0.3645166868407401, | |
| "grad_norm": 0.8520130515098572, | |
| "learning_rate": 6.700655498907502e-05, | |
| "loss": 1.9713, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 0.36520836935846446, | |
| "grad_norm": 0.7934679985046387, | |
| "learning_rate": 6.693372177713038e-05, | |
| "loss": 1.5202, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 0.36590005187618885, | |
| "grad_norm": 0.8228335976600647, | |
| "learning_rate": 6.686088856518573e-05, | |
| "loss": 1.8218, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 0.3665917343939132, | |
| "grad_norm": 0.6831315755844116, | |
| "learning_rate": 6.678805535324109e-05, | |
| "loss": 1.3568, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.3672834169116376, | |
| "grad_norm": 0.84079509973526, | |
| "learning_rate": 6.671522214129643e-05, | |
| "loss": 1.5994, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 0.3679750994293619, | |
| "grad_norm": 0.7911645770072937, | |
| "learning_rate": 6.664238892935178e-05, | |
| "loss": 1.0892, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 0.3686667819470863, | |
| "grad_norm": 0.7923005819320679, | |
| "learning_rate": 6.656955571740714e-05, | |
| "loss": 1.3006, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 0.36935846446481063, | |
| "grad_norm": 0.7611230611801147, | |
| "learning_rate": 6.649672250546249e-05, | |
| "loss": 1.2649, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 0.370050146982535, | |
| "grad_norm": 0.6713980436325073, | |
| "learning_rate": 6.642388929351785e-05, | |
| "loss": 2.1305, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.37074182950025936, | |
| "grad_norm": 1.0868254899978638, | |
| "learning_rate": 6.63510560815732e-05, | |
| "loss": 1.6372, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 0.37143351201798375, | |
| "grad_norm": 0.7361617684364319, | |
| "learning_rate": 6.627822286962856e-05, | |
| "loss": 1.8807, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 0.37212519453570814, | |
| "grad_norm": 0.7903793454170227, | |
| "learning_rate": 6.620538965768391e-05, | |
| "loss": 2.0083, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 0.37281687705343247, | |
| "grad_norm": 0.8564360737800598, | |
| "learning_rate": 6.613255644573925e-05, | |
| "loss": 2.1507, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 0.37350855957115686, | |
| "grad_norm": 0.6386767029762268, | |
| "learning_rate": 6.605972323379462e-05, | |
| "loss": 1.4482, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.3742002420888812, | |
| "grad_norm": 0.937019944190979, | |
| "learning_rate": 6.598689002184996e-05, | |
| "loss": 2.3786, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 0.3748919246066056, | |
| "grad_norm": 1.052297830581665, | |
| "learning_rate": 6.591405680990532e-05, | |
| "loss": 2.1223, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 0.3755836071243299, | |
| "grad_norm": 0.8783723711967468, | |
| "learning_rate": 6.584122359796067e-05, | |
| "loss": 1.7049, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 0.3762752896420543, | |
| "grad_norm": 0.9165799021720886, | |
| "learning_rate": 6.576839038601602e-05, | |
| "loss": 2.062, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 0.37696697215977865, | |
| "grad_norm": 0.722274661064148, | |
| "learning_rate": 6.569555717407138e-05, | |
| "loss": 2.0566, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.37765865467750304, | |
| "grad_norm": 0.6692977547645569, | |
| "learning_rate": 6.562272396212673e-05, | |
| "loss": 1.8474, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 0.37835033719522737, | |
| "grad_norm": 0.8514979481697083, | |
| "learning_rate": 6.554989075018209e-05, | |
| "loss": 2.1704, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 0.37904201971295176, | |
| "grad_norm": 0.8156755566596985, | |
| "learning_rate": 6.547705753823744e-05, | |
| "loss": 2.0282, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 0.3797337022306761, | |
| "grad_norm": 0.728892982006073, | |
| "learning_rate": 6.54042243262928e-05, | |
| "loss": 1.6018, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 0.3804253847484005, | |
| "grad_norm": 0.6585319638252258, | |
| "learning_rate": 6.533139111434814e-05, | |
| "loss": 1.2368, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.3811170672661249, | |
| "grad_norm": 0.998591423034668, | |
| "learning_rate": 6.525855790240349e-05, | |
| "loss": 2.1364, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 0.3818087497838492, | |
| "grad_norm": 0.7700340151786804, | |
| "learning_rate": 6.518572469045885e-05, | |
| "loss": 1.602, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 0.3825004323015736, | |
| "grad_norm": 0.8582996129989624, | |
| "learning_rate": 6.51128914785142e-05, | |
| "loss": 1.9084, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 0.38319211481929794, | |
| "grad_norm": 0.6509499549865723, | |
| "learning_rate": 6.504005826656956e-05, | |
| "loss": 1.4457, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 0.3838837973370223, | |
| "grad_norm": 0.8587490320205688, | |
| "learning_rate": 6.496722505462491e-05, | |
| "loss": 1.5043, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.38457547985474666, | |
| "grad_norm": 0.8849571943283081, | |
| "learning_rate": 6.489439184268027e-05, | |
| "loss": 1.801, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 0.38526716237247105, | |
| "grad_norm": 0.8862190246582031, | |
| "learning_rate": 6.482155863073562e-05, | |
| "loss": 2.1286, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 0.3859588448901954, | |
| "grad_norm": 1.023720383644104, | |
| "learning_rate": 6.474872541879097e-05, | |
| "loss": 1.2497, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 0.3866505274079198, | |
| "grad_norm": 0.6962110996246338, | |
| "learning_rate": 6.467589220684633e-05, | |
| "loss": 1.8769, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 0.3873422099256441, | |
| "grad_norm": 0.8252090215682983, | |
| "learning_rate": 6.460305899490167e-05, | |
| "loss": 1.8998, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.3880338924433685, | |
| "grad_norm": 0.7156630754470825, | |
| "learning_rate": 6.453022578295704e-05, | |
| "loss": 0.8263, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 0.38872557496109283, | |
| "grad_norm": 0.685234785079956, | |
| "learning_rate": 6.445739257101238e-05, | |
| "loss": 0.7216, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 0.3894172574788172, | |
| "grad_norm": 0.7868284583091736, | |
| "learning_rate": 6.438455935906774e-05, | |
| "loss": 2.0959, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 0.3901089399965416, | |
| "grad_norm": 0.8539921045303345, | |
| "learning_rate": 6.431172614712309e-05, | |
| "loss": 1.3863, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 0.39080062251426595, | |
| "grad_norm": 1.0316025018692017, | |
| "learning_rate": 6.423889293517844e-05, | |
| "loss": 1.0292, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.39149230503199034, | |
| "grad_norm": 0.7655407786369324, | |
| "learning_rate": 6.41660597232338e-05, | |
| "loss": 1.8794, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 0.3921839875497147, | |
| "grad_norm": 0.6936174035072327, | |
| "learning_rate": 6.409322651128915e-05, | |
| "loss": 1.3902, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 0.39287567006743906, | |
| "grad_norm": 0.7873200178146362, | |
| "learning_rate": 6.402039329934451e-05, | |
| "loss": 1.5278, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 0.3935673525851634, | |
| "grad_norm": 1.3935117721557617, | |
| "learning_rate": 6.394756008739986e-05, | |
| "loss": 0.7081, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 0.3942590351028878, | |
| "grad_norm": 0.7773651480674744, | |
| "learning_rate": 6.387472687545522e-05, | |
| "loss": 1.548, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.3949507176206121, | |
| "grad_norm": 0.8644070625305176, | |
| "learning_rate": 6.380189366351056e-05, | |
| "loss": 1.4528, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 0.3956424001383365, | |
| "grad_norm": 0.7735791206359863, | |
| "learning_rate": 6.372906045156591e-05, | |
| "loss": 1.8554, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 0.39633408265606085, | |
| "grad_norm": 0.7515410780906677, | |
| "learning_rate": 6.365622723962127e-05, | |
| "loss": 1.786, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 0.39702576517378524, | |
| "grad_norm": 1.0612385272979736, | |
| "learning_rate": 6.358339402767662e-05, | |
| "loss": 1.3569, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 0.3977174476915096, | |
| "grad_norm": 0.919940173625946, | |
| "learning_rate": 6.351056081573198e-05, | |
| "loss": 2.0286, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.39840913020923396, | |
| "grad_norm": 0.8676942586898804, | |
| "learning_rate": 6.343772760378733e-05, | |
| "loss": 1.2646, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 0.39910081272695835, | |
| "grad_norm": 0.6343767642974854, | |
| "learning_rate": 6.336489439184269e-05, | |
| "loss": 0.9905, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 0.3997924952446827, | |
| "grad_norm": 1.3755874633789062, | |
| "learning_rate": 6.329206117989804e-05, | |
| "loss": 1.4657, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 0.4004841777624071, | |
| "grad_norm": 0.9563671946525574, | |
| "learning_rate": 6.321922796795339e-05, | |
| "loss": 1.6394, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 0.4011758602801314, | |
| "grad_norm": 1.3659626245498657, | |
| "learning_rate": 6.314639475600875e-05, | |
| "loss": 1.3927, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.4018675427978558, | |
| "grad_norm": 0.8047080039978027, | |
| "learning_rate": 6.30735615440641e-05, | |
| "loss": 1.9877, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 0.40255922531558014, | |
| "grad_norm": 0.872704267501831, | |
| "learning_rate": 6.300072833211945e-05, | |
| "loss": 1.7843, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 0.40325090783330453, | |
| "grad_norm": 0.5949119329452515, | |
| "learning_rate": 6.29278951201748e-05, | |
| "loss": 1.041, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 0.40394259035102886, | |
| "grad_norm": 0.8101840615272522, | |
| "learning_rate": 6.285506190823015e-05, | |
| "loss": 1.4963, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 0.40463427286875325, | |
| "grad_norm": 0.8406373858451843, | |
| "learning_rate": 6.278222869628551e-05, | |
| "loss": 1.0127, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.4053259553864776, | |
| "grad_norm": 0.9587801098823547, | |
| "learning_rate": 6.270939548434086e-05, | |
| "loss": 2.0066, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 0.406017637904202, | |
| "grad_norm": 0.9393382668495178, | |
| "learning_rate": 6.263656227239622e-05, | |
| "loss": 1.3456, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 0.4067093204219263, | |
| "grad_norm": 0.7072182297706604, | |
| "learning_rate": 6.256372906045157e-05, | |
| "loss": 1.381, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 0.4074010029396507, | |
| "grad_norm": 0.7435779571533203, | |
| "learning_rate": 6.249089584850693e-05, | |
| "loss": 1.3972, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 0.40809268545737504, | |
| "grad_norm": 0.8602951169013977, | |
| "learning_rate": 6.241806263656228e-05, | |
| "loss": 1.581, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.4087843679750994, | |
| "grad_norm": 0.975732684135437, | |
| "learning_rate": 6.234522942461762e-05, | |
| "loss": 2.0073, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 0.4094760504928238, | |
| "grad_norm": 0.8923630118370056, | |
| "learning_rate": 6.227239621267298e-05, | |
| "loss": 2.0375, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 0.41016773301054815, | |
| "grad_norm": 0.6812586188316345, | |
| "learning_rate": 6.219956300072833e-05, | |
| "loss": 1.3661, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 0.41085941552827254, | |
| "grad_norm": 1.0283271074295044, | |
| "learning_rate": 6.212672978878369e-05, | |
| "loss": 1.7544, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 0.4115510980459969, | |
| "grad_norm": 0.8602912425994873, | |
| "learning_rate": 6.205389657683904e-05, | |
| "loss": 1.9679, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.41224278056372127, | |
| "grad_norm": 0.7977223992347717, | |
| "learning_rate": 6.19810633648944e-05, | |
| "loss": 1.7853, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 0.4129344630814456, | |
| "grad_norm": 0.8174147605895996, | |
| "learning_rate": 6.190823015294975e-05, | |
| "loss": 1.8254, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 0.41362614559917, | |
| "grad_norm": 1.1011167764663696, | |
| "learning_rate": 6.18353969410051e-05, | |
| "loss": 1.6991, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 0.4143178281168943, | |
| "grad_norm": 0.8485236167907715, | |
| "learning_rate": 6.176256372906046e-05, | |
| "loss": 1.709, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 0.4150095106346187, | |
| "grad_norm": 0.8971522450447083, | |
| "learning_rate": 6.16897305171158e-05, | |
| "loss": 1.7905, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.41570119315234305, | |
| "grad_norm": 0.8392210602760315, | |
| "learning_rate": 6.161689730517117e-05, | |
| "loss": 1.5102, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 0.41639287567006744, | |
| "grad_norm": 0.7831077575683594, | |
| "learning_rate": 6.154406409322651e-05, | |
| "loss": 1.1469, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 0.4170845581877918, | |
| "grad_norm": 0.7571394443511963, | |
| "learning_rate": 6.147123088128187e-05, | |
| "loss": 1.1778, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 0.41777624070551617, | |
| "grad_norm": 0.7868969440460205, | |
| "learning_rate": 6.139839766933721e-05, | |
| "loss": 1.8391, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 0.41846792322324056, | |
| "grad_norm": 0.967659592628479, | |
| "learning_rate": 6.132556445739257e-05, | |
| "loss": 1.9437, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.4191596057409649, | |
| "grad_norm": 1.1932754516601562, | |
| "learning_rate": 6.125273124544793e-05, | |
| "loss": 2.0343, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 0.4198512882586893, | |
| "grad_norm": 0.692387044429779, | |
| "learning_rate": 6.117989803350328e-05, | |
| "loss": 1.2014, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 0.4205429707764136, | |
| "grad_norm": 0.9984387755393982, | |
| "learning_rate": 6.110706482155864e-05, | |
| "loss": 1.1817, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 0.421234653294138, | |
| "grad_norm": 1.1018468141555786, | |
| "learning_rate": 6.103423160961399e-05, | |
| "loss": 1.6788, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 0.42192633581186234, | |
| "grad_norm": 0.9406405091285706, | |
| "learning_rate": 6.096139839766935e-05, | |
| "loss": 1.5549, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.42261801832958673, | |
| "grad_norm": 0.867577075958252, | |
| "learning_rate": 6.088856518572469e-05, | |
| "loss": 2.139, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 0.42330970084731107, | |
| "grad_norm": 0.7860278487205505, | |
| "learning_rate": 6.081573197378004e-05, | |
| "loss": 1.6854, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 0.42400138336503546, | |
| "grad_norm": 0.884482741355896, | |
| "learning_rate": 6.07428987618354e-05, | |
| "loss": 1.304, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 0.4246930658827598, | |
| "grad_norm": 0.8831844329833984, | |
| "learning_rate": 6.067006554989075e-05, | |
| "loss": 1.9461, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 0.4253847484004842, | |
| "grad_norm": 0.831235945224762, | |
| "learning_rate": 6.0597232337946106e-05, | |
| "loss": 1.9683, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.4260764309182085, | |
| "grad_norm": 0.634988009929657, | |
| "learning_rate": 6.052439912600146e-05, | |
| "loss": 1.6396, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 0.4267681134359329, | |
| "grad_norm": 0.6646054983139038, | |
| "learning_rate": 6.045156591405682e-05, | |
| "loss": 1.1917, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 0.4274597959536573, | |
| "grad_norm": 0.9487079977989197, | |
| "learning_rate": 6.037873270211216e-05, | |
| "loss": 1.9417, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 0.42815147847138163, | |
| "grad_norm": 0.6744237542152405, | |
| "learning_rate": 6.0305899490167516e-05, | |
| "loss": 0.8873, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 0.428843160989106, | |
| "grad_norm": 0.8524720668792725, | |
| "learning_rate": 6.023306627822287e-05, | |
| "loss": 1.8525, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.42953484350683035, | |
| "grad_norm": 0.8825080394744873, | |
| "learning_rate": 6.0160233066278225e-05, | |
| "loss": 1.3985, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 0.43022652602455475, | |
| "grad_norm": 1.0640051364898682, | |
| "learning_rate": 6.008739985433358e-05, | |
| "loss": 2.0577, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 0.4309182085422791, | |
| "grad_norm": 1.0512269735336304, | |
| "learning_rate": 6.001456664238893e-05, | |
| "loss": 1.744, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 0.43160989106000347, | |
| "grad_norm": 0.6345813870429993, | |
| "learning_rate": 5.994173343044428e-05, | |
| "loss": 1.7586, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 0.4323015735777278, | |
| "grad_norm": 1.1025394201278687, | |
| "learning_rate": 5.9868900218499635e-05, | |
| "loss": 1.7011, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.4329932560954522, | |
| "grad_norm": 1.0276751518249512, | |
| "learning_rate": 5.979606700655499e-05, | |
| "loss": 1.8624, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 0.43368493861317653, | |
| "grad_norm": 0.9713005423545837, | |
| "learning_rate": 5.9723233794610344e-05, | |
| "loss": 1.5493, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 0.4343766211309009, | |
| "grad_norm": 0.7826513051986694, | |
| "learning_rate": 5.96504005826657e-05, | |
| "loss": 1.6086, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 0.43506830364862525, | |
| "grad_norm": 0.7218225002288818, | |
| "learning_rate": 5.957756737072105e-05, | |
| "loss": 1.5445, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 0.43575998616634964, | |
| "grad_norm": 0.8310878872871399, | |
| "learning_rate": 5.950473415877641e-05, | |
| "loss": 1.9013, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.43645166868407403, | |
| "grad_norm": 1.0086404085159302, | |
| "learning_rate": 5.9431900946831754e-05, | |
| "loss": 2.1482, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 0.43714335120179837, | |
| "grad_norm": 0.7284911870956421, | |
| "learning_rate": 5.935906773488711e-05, | |
| "loss": 1.5093, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 0.43783503371952276, | |
| "grad_norm": 0.8275542259216309, | |
| "learning_rate": 5.928623452294246e-05, | |
| "loss": 1.5309, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 0.4385267162372471, | |
| "grad_norm": 0.9007952809333801, | |
| "learning_rate": 5.921340131099782e-05, | |
| "loss": 2.0425, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 0.4392183987549715, | |
| "grad_norm": 0.6324974298477173, | |
| "learning_rate": 5.914056809905317e-05, | |
| "loss": 1.1334, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.4399100812726958, | |
| "grad_norm": 0.8025960326194763, | |
| "learning_rate": 5.9067734887108526e-05, | |
| "loss": 1.381, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 0.4406017637904202, | |
| "grad_norm": 1.008012294769287, | |
| "learning_rate": 5.899490167516388e-05, | |
| "loss": 1.7118, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 0.44129344630814454, | |
| "grad_norm": 0.9379268288612366, | |
| "learning_rate": 5.892206846321923e-05, | |
| "loss": 1.7564, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 0.44198512882586893, | |
| "grad_norm": 0.9095461964607239, | |
| "learning_rate": 5.884923525127458e-05, | |
| "loss": 1.7896, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 0.44267681134359327, | |
| "grad_norm": 0.8308787941932678, | |
| "learning_rate": 5.8776402039329936e-05, | |
| "loss": 1.7003, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.44336849386131766, | |
| "grad_norm": 0.7289214730262756, | |
| "learning_rate": 5.870356882738529e-05, | |
| "loss": 1.8206, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 0.444060176379042, | |
| "grad_norm": 0.6933354139328003, | |
| "learning_rate": 5.8630735615440645e-05, | |
| "loss": 1.4336, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 0.4447518588967664, | |
| "grad_norm": 0.6738266944885254, | |
| "learning_rate": 5.8557902403496e-05, | |
| "loss": 1.3322, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 0.4454435414144908, | |
| "grad_norm": 0.6887166500091553, | |
| "learning_rate": 5.8485069191551346e-05, | |
| "loss": 1.2408, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 0.4461352239322151, | |
| "grad_norm": 0.8172757029533386, | |
| "learning_rate": 5.84122359796067e-05, | |
| "loss": 1.0118, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 0.4468269064499395, | |
| "grad_norm": 0.8521438241004944, | |
| "learning_rate": 5.8339402767662055e-05, | |
| "loss": 1.9106, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 0.44751858896766383, | |
| "grad_norm": 0.6485393047332764, | |
| "learning_rate": 5.826656955571741e-05, | |
| "loss": 1.1009, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 0.4482102714853882, | |
| "grad_norm": 0.6865597367286682, | |
| "learning_rate": 5.8193736343772763e-05, | |
| "loss": 1.3842, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 0.44890195400311256, | |
| "grad_norm": 0.8116838335990906, | |
| "learning_rate": 5.812090313182812e-05, | |
| "loss": 1.61, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 0.44959363652083695, | |
| "grad_norm": 0.6286530494689941, | |
| "learning_rate": 5.804806991988347e-05, | |
| "loss": 1.6078, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.4502853190385613, | |
| "grad_norm": 1.3772015571594238, | |
| "learning_rate": 5.797523670793882e-05, | |
| "loss": 1.3162, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 0.4509770015562857, | |
| "grad_norm": 1.0899200439453125, | |
| "learning_rate": 5.7902403495994174e-05, | |
| "loss": 1.7445, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 0.45166868407401, | |
| "grad_norm": 0.8981252908706665, | |
| "learning_rate": 5.782957028404953e-05, | |
| "loss": 1.4101, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 0.4523603665917344, | |
| "grad_norm": 0.8054170608520508, | |
| "learning_rate": 5.775673707210488e-05, | |
| "loss": 1.5026, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 0.45305204910945873, | |
| "grad_norm": 0.7385475635528564, | |
| "learning_rate": 5.768390386016024e-05, | |
| "loss": 1.1069, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 0.4537437316271831, | |
| "grad_norm": 0.7658798098564148, | |
| "learning_rate": 5.761107064821559e-05, | |
| "loss": 1.4528, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 0.4544354141449075, | |
| "grad_norm": 0.8078415989875793, | |
| "learning_rate": 5.7538237436270945e-05, | |
| "loss": 1.9071, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 0.45512709666263185, | |
| "grad_norm": 0.8280815482139587, | |
| "learning_rate": 5.746540422432629e-05, | |
| "loss": 1.7216, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 0.45581877918035624, | |
| "grad_norm": 0.8316612243652344, | |
| "learning_rate": 5.739257101238165e-05, | |
| "loss": 1.8904, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 0.45651046169808057, | |
| "grad_norm": 0.6992987394332886, | |
| "learning_rate": 5.7319737800437e-05, | |
| "loss": 2.0018, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.45720214421580496, | |
| "grad_norm": 1.0973151922225952, | |
| "learning_rate": 5.7246904588492356e-05, | |
| "loss": 1.2894, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 0.4578938267335293, | |
| "grad_norm": 1.1129229068756104, | |
| "learning_rate": 5.717407137654771e-05, | |
| "loss": 2.0258, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 0.4585855092512537, | |
| "grad_norm": 0.8779314756393433, | |
| "learning_rate": 5.7101238164603064e-05, | |
| "loss": 1.887, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 0.459277191768978, | |
| "grad_norm": 0.7320972681045532, | |
| "learning_rate": 5.702840495265841e-05, | |
| "loss": 0.7374, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 0.4599688742867024, | |
| "grad_norm": 0.6837896704673767, | |
| "learning_rate": 5.6955571740713766e-05, | |
| "loss": 1.4664, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 0.46066055680442675, | |
| "grad_norm": 0.9255065321922302, | |
| "learning_rate": 5.688273852876912e-05, | |
| "loss": 1.4429, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 0.46135223932215114, | |
| "grad_norm": 0.7060451507568359, | |
| "learning_rate": 5.6809905316824475e-05, | |
| "loss": 1.8314, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 0.46204392183987547, | |
| "grad_norm": 0.6706486344337463, | |
| "learning_rate": 5.673707210487983e-05, | |
| "loss": 1.2471, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 0.46273560435759986, | |
| "grad_norm": 0.6796824336051941, | |
| "learning_rate": 5.666423889293518e-05, | |
| "loss": 1.9348, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 0.46342728687532425, | |
| "grad_norm": 0.8009390830993652, | |
| "learning_rate": 5.659140568099054e-05, | |
| "loss": 1.0403, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.4641189693930486, | |
| "grad_norm": 1.052715539932251, | |
| "learning_rate": 5.6518572469045885e-05, | |
| "loss": 1.4263, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 0.464810651910773, | |
| "grad_norm": 0.8654411435127258, | |
| "learning_rate": 5.644573925710124e-05, | |
| "loss": 1.5326, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 0.4655023344284973, | |
| "grad_norm": 0.8049682378768921, | |
| "learning_rate": 5.6372906045156594e-05, | |
| "loss": 1.8094, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 0.4661940169462217, | |
| "grad_norm": 0.6251804828643799, | |
| "learning_rate": 5.630007283321195e-05, | |
| "loss": 1.1813, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 0.46688569946394604, | |
| "grad_norm": 0.7386276125907898, | |
| "learning_rate": 5.62272396212673e-05, | |
| "loss": 1.3354, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 0.4675773819816704, | |
| "grad_norm": 0.8355931043624878, | |
| "learning_rate": 5.6154406409322656e-05, | |
| "loss": 1.515, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 0.46826906449939476, | |
| "grad_norm": 0.8459252715110779, | |
| "learning_rate": 5.608157319737801e-05, | |
| "loss": 2.2214, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 0.46896074701711915, | |
| "grad_norm": 0.8006874322891235, | |
| "learning_rate": 5.600873998543336e-05, | |
| "loss": 1.8108, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 0.4696524295348435, | |
| "grad_norm": 1.2057729959487915, | |
| "learning_rate": 5.593590677348871e-05, | |
| "loss": 1.2744, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 0.4703441120525679, | |
| "grad_norm": 0.8938283324241638, | |
| "learning_rate": 5.586307356154407e-05, | |
| "loss": 2.0349, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.4710357945702922, | |
| "grad_norm": 0.8696619272232056, | |
| "learning_rate": 5.579024034959942e-05, | |
| "loss": 1.552, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 0.4717274770880166, | |
| "grad_norm": 0.8940863013267517, | |
| "learning_rate": 5.5717407137654775e-05, | |
| "loss": 1.5838, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 0.472419159605741, | |
| "grad_norm": 0.6525213122367859, | |
| "learning_rate": 5.564457392571013e-05, | |
| "loss": 1.4496, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 0.4731108421234653, | |
| "grad_norm": 0.8376979827880859, | |
| "learning_rate": 5.557174071376548e-05, | |
| "loss": 1.8819, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 0.4738025246411897, | |
| "grad_norm": 1.6936135292053223, | |
| "learning_rate": 5.549890750182083e-05, | |
| "loss": 1.5663, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 0.47449420715891405, | |
| "grad_norm": 0.7069249153137207, | |
| "learning_rate": 5.5426074289876186e-05, | |
| "loss": 1.9715, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 0.47518588967663844, | |
| "grad_norm": 0.7965700626373291, | |
| "learning_rate": 5.535324107793154e-05, | |
| "loss": 1.6389, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 0.4758775721943628, | |
| "grad_norm": 1.397172451019287, | |
| "learning_rate": 5.5280407865986894e-05, | |
| "loss": 1.5286, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 0.47656925471208716, | |
| "grad_norm": 0.7060932517051697, | |
| "learning_rate": 5.520757465404225e-05, | |
| "loss": 1.1942, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 0.4772609372298115, | |
| "grad_norm": 0.9016676545143127, | |
| "learning_rate": 5.51347414420976e-05, | |
| "loss": 1.5552, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.4779526197475359, | |
| "grad_norm": 0.7569277882575989, | |
| "learning_rate": 5.506190823015295e-05, | |
| "loss": 2.1902, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 0.4786443022652602, | |
| "grad_norm": 0.8146937489509583, | |
| "learning_rate": 5.4989075018208305e-05, | |
| "loss": 1.3199, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 0.4793359847829846, | |
| "grad_norm": 0.8721641898155212, | |
| "learning_rate": 5.491624180626366e-05, | |
| "loss": 2.0197, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 0.48002766730070895, | |
| "grad_norm": 0.7318241596221924, | |
| "learning_rate": 5.484340859431901e-05, | |
| "loss": 1.8798, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 0.48071934981843334, | |
| "grad_norm": 0.9212726354598999, | |
| "learning_rate": 5.477057538237437e-05, | |
| "loss": 1.1606, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 0.48141103233615773, | |
| "grad_norm": 0.8294724822044373, | |
| "learning_rate": 5.469774217042972e-05, | |
| "loss": 1.0334, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 0.48210271485388206, | |
| "grad_norm": 0.9914029240608215, | |
| "learning_rate": 5.4624908958485076e-05, | |
| "loss": 1.9953, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 0.48279439737160645, | |
| "grad_norm": 0.6344811916351318, | |
| "learning_rate": 5.4552075746540424e-05, | |
| "loss": 0.8681, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 0.4834860798893308, | |
| "grad_norm": 0.864594578742981, | |
| "learning_rate": 5.447924253459578e-05, | |
| "loss": 1.5566, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 0.4841777624070552, | |
| "grad_norm": 0.6789159178733826, | |
| "learning_rate": 5.440640932265113e-05, | |
| "loss": 0.8052, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.4848694449247795, | |
| "grad_norm": 0.7401624321937561, | |
| "learning_rate": 5.4333576110706487e-05, | |
| "loss": 1.3816, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 0.4855611274425039, | |
| "grad_norm": 0.8231687545776367, | |
| "learning_rate": 5.426074289876184e-05, | |
| "loss": 1.7608, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 0.48625280996022824, | |
| "grad_norm": 0.7512969374656677, | |
| "learning_rate": 5.4187909686817195e-05, | |
| "loss": 0.9139, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 0.48694449247795263, | |
| "grad_norm": 2.862189531326294, | |
| "learning_rate": 5.4115076474872536e-05, | |
| "loss": 1.4689, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 0.48763617499567696, | |
| "grad_norm": 0.8271031975746155, | |
| "learning_rate": 5.40422432629279e-05, | |
| "loss": 2.0143, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 0.48832785751340135, | |
| "grad_norm": 0.8950322270393372, | |
| "learning_rate": 5.396941005098325e-05, | |
| "loss": 1.9843, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 0.4890195400311257, | |
| "grad_norm": 0.6882688999176025, | |
| "learning_rate": 5.3896576839038606e-05, | |
| "loss": 1.2655, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 0.4897112225488501, | |
| "grad_norm": 0.990772545337677, | |
| "learning_rate": 5.382374362709396e-05, | |
| "loss": 2.0287, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 0.49040290506657447, | |
| "grad_norm": 0.8727272152900696, | |
| "learning_rate": 5.3750910415149314e-05, | |
| "loss": 1.5164, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 0.4910945875842988, | |
| "grad_norm": 0.7968826293945312, | |
| "learning_rate": 5.367807720320467e-05, | |
| "loss": 1.6738, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.4917862701020232, | |
| "grad_norm": 0.8723095655441284, | |
| "learning_rate": 5.360524399126001e-05, | |
| "loss": 1.7006, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 0.4924779526197475, | |
| "grad_norm": 0.7931122779846191, | |
| "learning_rate": 5.353241077931537e-05, | |
| "loss": 2.2392, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 0.4931696351374719, | |
| "grad_norm": 0.7815384268760681, | |
| "learning_rate": 5.3459577567370724e-05, | |
| "loss": 1.5135, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 0.49386131765519625, | |
| "grad_norm": 0.6223137974739075, | |
| "learning_rate": 5.338674435542608e-05, | |
| "loss": 0.9653, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 0.49455300017292064, | |
| "grad_norm": 0.8133209347724915, | |
| "learning_rate": 5.331391114348143e-05, | |
| "loss": 1.7424, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 0.495244682690645, | |
| "grad_norm": 0.9321537017822266, | |
| "learning_rate": 5.324107793153679e-05, | |
| "loss": 2.057, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 0.49593636520836937, | |
| "grad_norm": 0.6840877532958984, | |
| "learning_rate": 5.316824471959214e-05, | |
| "loss": 1.814, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 0.4966280477260937, | |
| "grad_norm": 0.833908200263977, | |
| "learning_rate": 5.309541150764748e-05, | |
| "loss": 1.9942, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 0.4973197302438181, | |
| "grad_norm": 0.9567387104034424, | |
| "learning_rate": 5.3022578295702843e-05, | |
| "loss": 2.1152, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 0.4980114127615424, | |
| "grad_norm": 0.7711800336837769, | |
| "learning_rate": 5.29497450837582e-05, | |
| "loss": 2.0442, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.4987030952792668, | |
| "grad_norm": 1.2333884239196777, | |
| "learning_rate": 5.287691187181355e-05, | |
| "loss": 1.5561, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 0.4993947777969912, | |
| "grad_norm": 0.7317585945129395, | |
| "learning_rate": 5.2804078659868906e-05, | |
| "loss": 1.7834, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 0.5000864603147156, | |
| "grad_norm": 0.8020793795585632, | |
| "learning_rate": 5.273124544792426e-05, | |
| "loss": 1.0252, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 0.5007781428324399, | |
| "grad_norm": 0.9253045916557312, | |
| "learning_rate": 5.26584122359796e-05, | |
| "loss": 2.0242, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 0.5014698253501643, | |
| "grad_norm": 0.8840431571006775, | |
| "learning_rate": 5.2585579024034956e-05, | |
| "loss": 1.1137, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 0.5021615078678886, | |
| "grad_norm": 0.9559575319290161, | |
| "learning_rate": 5.251274581209032e-05, | |
| "loss": 1.7501, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 0.502853190385613, | |
| "grad_norm": 1.340638518333435, | |
| "learning_rate": 5.243991260014567e-05, | |
| "loss": 1.9166, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 0.5035448729033374, | |
| "grad_norm": 0.8229387998580933, | |
| "learning_rate": 5.2367079388201025e-05, | |
| "loss": 1.9784, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 0.5042365554210617, | |
| "grad_norm": 0.9030795693397522, | |
| "learning_rate": 5.229424617625638e-05, | |
| "loss": 1.5277, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 0.504928237938786, | |
| "grad_norm": 1.2007991075515747, | |
| "learning_rate": 5.2221412964311734e-05, | |
| "loss": 1.552, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.5056199204565105, | |
| "grad_norm": 0.7516019940376282, | |
| "learning_rate": 5.2148579752367075e-05, | |
| "loss": 1.7162, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 0.5063116029742348, | |
| "grad_norm": 0.8056371212005615, | |
| "learning_rate": 5.207574654042243e-05, | |
| "loss": 1.8927, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 0.5070032854919592, | |
| "grad_norm": 0.839518666267395, | |
| "learning_rate": 5.200291332847779e-05, | |
| "loss": 1.828, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 0.5076949680096835, | |
| "grad_norm": 1.1328129768371582, | |
| "learning_rate": 5.1930080116533144e-05, | |
| "loss": 1.7335, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 0.508386650527408, | |
| "grad_norm": 0.9398472905158997, | |
| "learning_rate": 5.18572469045885e-05, | |
| "loss": 1.602, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 0.5090783330451323, | |
| "grad_norm": 0.9798534512519836, | |
| "learning_rate": 5.178441369264385e-05, | |
| "loss": 2.0447, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 0.5097700155628566, | |
| "grad_norm": 0.7347403168678284, | |
| "learning_rate": 5.171158048069921e-05, | |
| "loss": 1.8348, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 0.5104616980805811, | |
| "grad_norm": 1.5911897420883179, | |
| "learning_rate": 5.163874726875455e-05, | |
| "loss": 2.3152, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 0.5111533805983054, | |
| "grad_norm": 0.8013065457344055, | |
| "learning_rate": 5.15659140568099e-05, | |
| "loss": 1.6716, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 0.5118450631160297, | |
| "grad_norm": 0.7294222712516785, | |
| "learning_rate": 5.149308084486526e-05, | |
| "loss": 0.9302, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.5125367456337541, | |
| "grad_norm": 0.8843238949775696, | |
| "learning_rate": 5.142024763292062e-05, | |
| "loss": 2.0707, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 0.5132284281514785, | |
| "grad_norm": 0.8716109395027161, | |
| "learning_rate": 5.134741442097597e-05, | |
| "loss": 2.0515, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 0.5139201106692028, | |
| "grad_norm": 0.8159763813018799, | |
| "learning_rate": 5.1274581209031326e-05, | |
| "loss": 1.6694, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 0.5146117931869272, | |
| "grad_norm": 0.8231744766235352, | |
| "learning_rate": 5.120174799708667e-05, | |
| "loss": 1.0262, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 0.5153034757046515, | |
| "grad_norm": 0.8191300630569458, | |
| "learning_rate": 5.112891478514202e-05, | |
| "loss": 1.2072, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 0.515995158222376, | |
| "grad_norm": 0.775730550289154, | |
| "learning_rate": 5.1056081573197375e-05, | |
| "loss": 2.1546, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 0.5166868407401003, | |
| "grad_norm": 0.7432054877281189, | |
| "learning_rate": 5.0983248361252736e-05, | |
| "loss": 1.9489, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 0.5173785232578246, | |
| "grad_norm": 0.8312445282936096, | |
| "learning_rate": 5.091041514930809e-05, | |
| "loss": 1.3698, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 0.5180702057755491, | |
| "grad_norm": 0.631904125213623, | |
| "learning_rate": 5.0837581937363445e-05, | |
| "loss": 1.8463, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 0.5187618882932734, | |
| "grad_norm": 0.8167743682861328, | |
| "learning_rate": 5.07647487254188e-05, | |
| "loss": 1.5307, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.5194535708109977, | |
| "grad_norm": 0.8714820742607117, | |
| "learning_rate": 5.069191551347414e-05, | |
| "loss": 1.6496, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 0.5201452533287221, | |
| "grad_norm": 0.7490879893302917, | |
| "learning_rate": 5.0619082301529494e-05, | |
| "loss": 1.4393, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 0.5208369358464465, | |
| "grad_norm": 0.8953008651733398, | |
| "learning_rate": 5.054624908958485e-05, | |
| "loss": 1.3666, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 0.5215286183641709, | |
| "grad_norm": 0.8200691938400269, | |
| "learning_rate": 5.047341587764021e-05, | |
| "loss": 0.9515, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 0.5222203008818952, | |
| "grad_norm": 0.7947381734848022, | |
| "learning_rate": 5.0400582665695564e-05, | |
| "loss": 1.4832, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 0.5229119833996195, | |
| "grad_norm": 0.8643359541893005, | |
| "learning_rate": 5.032774945375092e-05, | |
| "loss": 1.7467, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 0.523603665917344, | |
| "grad_norm": 0.8458116054534912, | |
| "learning_rate": 5.025491624180627e-05, | |
| "loss": 1.789, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 0.5242953484350683, | |
| "grad_norm": 0.9124221205711365, | |
| "learning_rate": 5.018208302986161e-05, | |
| "loss": 1.892, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 0.5249870309527926, | |
| "grad_norm": 0.9957555532455444, | |
| "learning_rate": 5.010924981791697e-05, | |
| "loss": 1.4827, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 0.525678713470517, | |
| "grad_norm": 1.0438709259033203, | |
| "learning_rate": 5.003641660597232e-05, | |
| "loss": 2.1047, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.5263703959882414, | |
| "grad_norm": 0.8911094069480896, | |
| "learning_rate": 4.996358339402768e-05, | |
| "loss": 2.0326, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 0.5270620785059658, | |
| "grad_norm": 0.8831300735473633, | |
| "learning_rate": 4.989075018208304e-05, | |
| "loss": 1.3775, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 0.5277537610236901, | |
| "grad_norm": 0.6428950428962708, | |
| "learning_rate": 4.9817916970138385e-05, | |
| "loss": 1.1614, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 0.5284454435414145, | |
| "grad_norm": 0.7880016565322876, | |
| "learning_rate": 4.974508375819374e-05, | |
| "loss": 1.9676, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 0.5291371260591389, | |
| "grad_norm": 0.8804534077644348, | |
| "learning_rate": 4.967225054624909e-05, | |
| "loss": 1.6124, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 0.5298288085768632, | |
| "grad_norm": 1.2754981517791748, | |
| "learning_rate": 4.959941733430444e-05, | |
| "loss": 1.8053, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 0.5305204910945875, | |
| "grad_norm": 0.8418866395950317, | |
| "learning_rate": 4.9526584122359795e-05, | |
| "loss": 1.8303, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 0.531212173612312, | |
| "grad_norm": 0.8615307807922363, | |
| "learning_rate": 4.9453750910415156e-05, | |
| "loss": 1.456, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 0.5319038561300363, | |
| "grad_norm": 1.1805399656295776, | |
| "learning_rate": 4.9380917698470504e-05, | |
| "loss": 1.6149, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 0.5325955386477607, | |
| "grad_norm": 0.7090519666671753, | |
| "learning_rate": 4.930808448652586e-05, | |
| "loss": 1.297, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.533287221165485, | |
| "grad_norm": 1.0220928192138672, | |
| "learning_rate": 4.923525127458121e-05, | |
| "loss": 1.1299, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 0.5339789036832094, | |
| "grad_norm": 0.7434409856796265, | |
| "learning_rate": 4.9162418062636567e-05, | |
| "loss": 1.5863, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 0.5346705862009338, | |
| "grad_norm": 0.733049213886261, | |
| "learning_rate": 4.9089584850691914e-05, | |
| "loss": 1.4169, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 0.5353622687186581, | |
| "grad_norm": 0.8031397461891174, | |
| "learning_rate": 4.901675163874727e-05, | |
| "loss": 1.8983, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 0.5360539512363826, | |
| "grad_norm": 0.9674841165542603, | |
| "learning_rate": 4.894391842680263e-05, | |
| "loss": 1.8049, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 0.5367456337541069, | |
| "grad_norm": 0.676889181137085, | |
| "learning_rate": 4.887108521485798e-05, | |
| "loss": 1.3293, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 0.5374373162718312, | |
| "grad_norm": 1.0977071523666382, | |
| "learning_rate": 4.879825200291333e-05, | |
| "loss": 1.6395, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 0.5381289987895556, | |
| "grad_norm": 1.1417105197906494, | |
| "learning_rate": 4.8725418790968686e-05, | |
| "loss": 1.4429, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 0.53882068130728, | |
| "grad_norm": 0.7437918186187744, | |
| "learning_rate": 4.865258557902403e-05, | |
| "loss": 1.1679, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 0.5395123638250043, | |
| "grad_norm": 0.770984947681427, | |
| "learning_rate": 4.857975236707939e-05, | |
| "loss": 1.8322, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.5402040463427287, | |
| "grad_norm": 0.6398515701293945, | |
| "learning_rate": 4.850691915513474e-05, | |
| "loss": 1.3439, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 0.540895728860453, | |
| "grad_norm": 0.8483062982559204, | |
| "learning_rate": 4.84340859431901e-05, | |
| "loss": 1.7972, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 0.5415874113781775, | |
| "grad_norm": 1.1934760808944702, | |
| "learning_rate": 4.836125273124545e-05, | |
| "loss": 2.0156, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 0.5422790938959018, | |
| "grad_norm": 0.8203370571136475, | |
| "learning_rate": 4.8288419519300804e-05, | |
| "loss": 1.9463, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 0.5429707764136261, | |
| "grad_norm": 0.952383279800415, | |
| "learning_rate": 4.821558630735616e-05, | |
| "loss": 2.1658, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 0.5436624589313505, | |
| "grad_norm": 0.9334859251976013, | |
| "learning_rate": 4.8142753095411506e-05, | |
| "loss": 2.037, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 0.5443541414490749, | |
| "grad_norm": 0.762305736541748, | |
| "learning_rate": 4.806991988346686e-05, | |
| "loss": 1.4987, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 0.5450458239667992, | |
| "grad_norm": 0.8765081763267517, | |
| "learning_rate": 4.7997086671522215e-05, | |
| "loss": 1.5731, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 0.5457375064845236, | |
| "grad_norm": 0.8487034440040588, | |
| "learning_rate": 4.792425345957757e-05, | |
| "loss": 1.1847, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 0.546429189002248, | |
| "grad_norm": 1.0878641605377197, | |
| "learning_rate": 4.7851420247632923e-05, | |
| "loss": 1.6723, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.5471208715199724, | |
| "grad_norm": 1.0606861114501953, | |
| "learning_rate": 4.777858703568828e-05, | |
| "loss": 1.8039, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 0.5478125540376967, | |
| "grad_norm": 0.7105616927146912, | |
| "learning_rate": 4.770575382374363e-05, | |
| "loss": 1.4779, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 0.548504236555421, | |
| "grad_norm": 1.0841084718704224, | |
| "learning_rate": 4.763292061179898e-05, | |
| "loss": 1.3641, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 0.5491959190731455, | |
| "grad_norm": 1.6941927671432495, | |
| "learning_rate": 4.7560087399854334e-05, | |
| "loss": 1.3551, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 0.5498876015908698, | |
| "grad_norm": 0.5613378882408142, | |
| "learning_rate": 4.748725418790969e-05, | |
| "loss": 0.9728, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 0.5505792841085941, | |
| "grad_norm": 0.949262261390686, | |
| "learning_rate": 4.741442097596504e-05, | |
| "loss": 1.5284, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 0.5512709666263185, | |
| "grad_norm": 0.7887808680534363, | |
| "learning_rate": 4.73415877640204e-05, | |
| "loss": 2.0379, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 0.5519626491440429, | |
| "grad_norm": 1.312240481376648, | |
| "learning_rate": 4.726875455207575e-05, | |
| "loss": 1.3929, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 0.5526543316617673, | |
| "grad_norm": 0.9849413633346558, | |
| "learning_rate": 4.71959213401311e-05, | |
| "loss": 1.8282, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 0.5533460141794916, | |
| "grad_norm": 0.7647308111190796, | |
| "learning_rate": 4.712308812818645e-05, | |
| "loss": 2.0537, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.554037696697216, | |
| "grad_norm": 0.8727704882621765, | |
| "learning_rate": 4.705025491624181e-05, | |
| "loss": 1.9806, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 0.5547293792149404, | |
| "grad_norm": 0.8957968950271606, | |
| "learning_rate": 4.697742170429716e-05, | |
| "loss": 1.8797, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 0.5554210617326647, | |
| "grad_norm": 0.7387208938598633, | |
| "learning_rate": 4.6904588492352516e-05, | |
| "loss": 1.9765, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 0.556112744250389, | |
| "grad_norm": 0.7798844575881958, | |
| "learning_rate": 4.683175528040787e-05, | |
| "loss": 1.6741, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 0.5568044267681135, | |
| "grad_norm": 0.668756365776062, | |
| "learning_rate": 4.6758922068463224e-05, | |
| "loss": 1.4564, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 0.5574961092858378, | |
| "grad_norm": 0.7988947629928589, | |
| "learning_rate": 4.668608885651857e-05, | |
| "loss": 1.9911, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 0.5581877918035621, | |
| "grad_norm": 0.8549099564552307, | |
| "learning_rate": 4.6613255644573926e-05, | |
| "loss": 1.2702, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 0.5588794743212865, | |
| "grad_norm": 0.6838178634643555, | |
| "learning_rate": 4.654042243262928e-05, | |
| "loss": 1.438, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 0.5595711568390109, | |
| "grad_norm": 0.7729970812797546, | |
| "learning_rate": 4.6467589220684635e-05, | |
| "loss": 1.691, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 0.5602628393567353, | |
| "grad_norm": 0.6941683888435364, | |
| "learning_rate": 4.639475600873999e-05, | |
| "loss": 1.9519, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.5609545218744596, | |
| "grad_norm": 0.5844329595565796, | |
| "learning_rate": 4.632192279679534e-05, | |
| "loss": 1.2985, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 0.5616462043921839, | |
| "grad_norm": 0.8213440775871277, | |
| "learning_rate": 4.62490895848507e-05, | |
| "loss": 2.0151, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 0.5623378869099084, | |
| "grad_norm": 0.9131556749343872, | |
| "learning_rate": 4.6176256372906045e-05, | |
| "loss": 2.1183, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 0.5630295694276327, | |
| "grad_norm": 0.8296758532524109, | |
| "learning_rate": 4.61034231609614e-05, | |
| "loss": 1.5903, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 0.563721251945357, | |
| "grad_norm": 0.7798346877098083, | |
| "learning_rate": 4.6030589949016754e-05, | |
| "loss": 1.4633, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 0.5644129344630815, | |
| "grad_norm": 1.0995211601257324, | |
| "learning_rate": 4.595775673707211e-05, | |
| "loss": 1.5795, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 0.5651046169808058, | |
| "grad_norm": 0.8087084889411926, | |
| "learning_rate": 4.588492352512746e-05, | |
| "loss": 1.8313, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 0.5657962994985302, | |
| "grad_norm": 0.8411030173301697, | |
| "learning_rate": 4.5812090313182816e-05, | |
| "loss": 1.9952, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 0.5664879820162545, | |
| "grad_norm": 0.6816720962524414, | |
| "learning_rate": 4.5739257101238164e-05, | |
| "loss": 1.5931, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 0.5671796645339789, | |
| "grad_norm": 0.8486337661743164, | |
| "learning_rate": 4.566642388929352e-05, | |
| "loss": 1.7394, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.5678713470517033, | |
| "grad_norm": 0.863264262676239, | |
| "learning_rate": 4.559359067734887e-05, | |
| "loss": 1.8192, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 0.5685630295694276, | |
| "grad_norm": 0.5527456402778625, | |
| "learning_rate": 4.552075746540423e-05, | |
| "loss": 1.1761, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 0.569254712087152, | |
| "grad_norm": 0.894025981426239, | |
| "learning_rate": 4.544792425345958e-05, | |
| "loss": 2.0718, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 0.5699463946048764, | |
| "grad_norm": 0.9638246893882751, | |
| "learning_rate": 4.5375091041514935e-05, | |
| "loss": 2.0523, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 0.5706380771226007, | |
| "grad_norm": 0.6295531988143921, | |
| "learning_rate": 4.530225782957029e-05, | |
| "loss": 1.1311, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 0.5713297596403251, | |
| "grad_norm": 0.8926659822463989, | |
| "learning_rate": 4.522942461762564e-05, | |
| "loss": 1.8242, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 0.5720214421580495, | |
| "grad_norm": 0.9920228719711304, | |
| "learning_rate": 4.515659140568099e-05, | |
| "loss": 1.4683, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 0.5727131246757738, | |
| "grad_norm": 0.9386698007583618, | |
| "learning_rate": 4.5083758193736346e-05, | |
| "loss": 1.7646, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 0.5734048071934982, | |
| "grad_norm": 0.8631352186203003, | |
| "learning_rate": 4.50109249817917e-05, | |
| "loss": 1.5608, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 0.5740964897112225, | |
| "grad_norm": 0.8916756510734558, | |
| "learning_rate": 4.4938091769847054e-05, | |
| "loss": 1.5689, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.574788172228947, | |
| "grad_norm": 0.7972738146781921, | |
| "learning_rate": 4.486525855790241e-05, | |
| "loss": 1.4542, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 0.5754798547466713, | |
| "grad_norm": 0.8566955924034119, | |
| "learning_rate": 4.479242534595776e-05, | |
| "loss": 1.5176, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 0.5761715372643956, | |
| "grad_norm": 1.0372542142868042, | |
| "learning_rate": 4.471959213401311e-05, | |
| "loss": 1.4787, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 0.57686321978212, | |
| "grad_norm": 0.7778179049491882, | |
| "learning_rate": 4.4646758922068465e-05, | |
| "loss": 1.5001, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 0.5775549022998444, | |
| "grad_norm": 1.0114778280258179, | |
| "learning_rate": 4.457392571012382e-05, | |
| "loss": 1.7393, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 0.5782465848175687, | |
| "grad_norm": 0.8515650629997253, | |
| "learning_rate": 4.450109249817917e-05, | |
| "loss": 1.8581, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 0.5789382673352931, | |
| "grad_norm": 1.318204641342163, | |
| "learning_rate": 4.442825928623453e-05, | |
| "loss": 1.5188, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 0.5796299498530174, | |
| "grad_norm": 1.0010000467300415, | |
| "learning_rate": 4.435542607428988e-05, | |
| "loss": 1.6703, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 0.5803216323707419, | |
| "grad_norm": 0.7508212327957153, | |
| "learning_rate": 4.428259286234523e-05, | |
| "loss": 1.8278, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 0.5810133148884662, | |
| "grad_norm": 0.7435583472251892, | |
| "learning_rate": 4.4209759650400584e-05, | |
| "loss": 1.5339, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.5817049974061905, | |
| "grad_norm": 0.6677370667457581, | |
| "learning_rate": 4.413692643845594e-05, | |
| "loss": 1.7071, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 0.582396679923915, | |
| "grad_norm": 0.6046009659767151, | |
| "learning_rate": 4.406409322651129e-05, | |
| "loss": 1.8311, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 0.5830883624416393, | |
| "grad_norm": 0.8315126895904541, | |
| "learning_rate": 4.3991260014566647e-05, | |
| "loss": 1.9365, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 0.5837800449593636, | |
| "grad_norm": 0.9006065726280212, | |
| "learning_rate": 4.3918426802622e-05, | |
| "loss": 1.4826, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 0.584471727477088, | |
| "grad_norm": 1.6106517314910889, | |
| "learning_rate": 4.3845593590677355e-05, | |
| "loss": 1.3065, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 0.5851634099948124, | |
| "grad_norm": 0.6510477662086487, | |
| "learning_rate": 4.37727603787327e-05, | |
| "loss": 1.291, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 0.5858550925125368, | |
| "grad_norm": 1.2163658142089844, | |
| "learning_rate": 4.369992716678806e-05, | |
| "loss": 1.65, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 0.5865467750302611, | |
| "grad_norm": 0.7138450145721436, | |
| "learning_rate": 4.362709395484341e-05, | |
| "loss": 0.9941, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 0.5872384575479854, | |
| "grad_norm": 1.4511529207229614, | |
| "learning_rate": 4.355426074289876e-05, | |
| "loss": 1.1884, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 0.5879301400657099, | |
| "grad_norm": 0.806222677230835, | |
| "learning_rate": 4.348142753095411e-05, | |
| "loss": 1.5618, | |
| "step": 850 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 1446, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 50, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 2.6963405870874624e+16, | |
| "train_batch_size": 2, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
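
A minimal sketch (not part of the checkpoint itself) showing one way to inspect the state above. Since `logging_steps` is 1, `log_history` holds one entry per optimizer step and the raw `loss` values are noisy, so a moving average is applied before reporting. The filename `trainer_state.json` is an assumption; point it at wherever this file sits in your checkpoint directory.

```python
# Hedged sketch: load and summarize a Hugging Face trainer_state.json.
# Assumes the file above is saved locally as "trainer_state.json".
import json
from statistics import mean

with open("trainer_state.json") as f:
    state = json.load(f)

history = state["log_history"]  # one dict per logged step (logging_steps = 1)

# Moving-average loss over a small window to smooth step-to-step noise.
window = 25
losses = [h["loss"] for h in history if "loss" in h]
smoothed = [mean(losses[max(0, i - window): i + 1]) for i in range(len(losses))]

print(f"steps logged:     {state['global_step']} / {state['max_steps']}")
print(f"epoch reached:    {state['epoch']:.4f}")
print(f"raw loss (last):  {losses[-1]:.4f}")
print(f"smoothed (last):  {smoothed[-1]:.4f}")
print(f"max grad_norm:    {max(h['grad_norm'] for h in history if 'grad_norm' in h):.4f}")
```

The sketch deliberately sticks to the standard library so it runs anywhere; if matplotlib is available, plotting `smoothed` (and, on a second axis, the per-step `learning_rate` values) is a natural next step for eyeballing the decay schedule against the loss curve.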