{
"best_global_step": 606,
"best_metric": 2.5098254680633545,
"best_model_checkpoint": "./gpt-neo-1.3B-qlora/checkpoint-606",
"epoch": 4.0,
"eval_steps": 500,
"global_step": 808,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.004956629491945477,
"grad_norm": 0.5178479552268982,
"learning_rate": 0.0,
"loss": 4.4997,
"step": 1
},
{
"epoch": 0.009913258983890954,
"grad_norm": 0.5379428863525391,
"learning_rate": 8.000000000000001e-06,
"loss": 4.0383,
"step": 2
},
{
"epoch": 0.01486988847583643,
"grad_norm": 0.456396222114563,
"learning_rate": 1.6000000000000003e-05,
"loss": 3.8483,
"step": 3
},
{
"epoch": 0.01982651796778191,
"grad_norm": 0.4702649712562561,
"learning_rate": 2.4e-05,
"loss": 3.7618,
"step": 4
},
{
"epoch": 0.024783147459727387,
"grad_norm": 0.4205624461174011,
"learning_rate": 3.2000000000000005e-05,
"loss": 3.7436,
"step": 5
},
{
"epoch": 0.02973977695167286,
"grad_norm": 0.562913179397583,
"learning_rate": 4e-05,
"loss": 4.3193,
"step": 6
},
{
"epoch": 0.03469640644361834,
"grad_norm": 0.4989069700241089,
"learning_rate": 4.8e-05,
"loss": 3.8805,
"step": 7
},
{
"epoch": 0.03965303593556382,
"grad_norm": 0.5795283913612366,
"learning_rate": 5.6000000000000006e-05,
"loss": 3.751,
"step": 8
},
{
"epoch": 0.04460966542750929,
"grad_norm": 0.4642927348613739,
"learning_rate": 6.400000000000001e-05,
"loss": 3.6798,
"step": 9
},
{
"epoch": 0.04956629491945477,
"grad_norm": 0.49269798398017883,
"learning_rate": 7.2e-05,
"loss": 3.7732,
"step": 10
},
{
"epoch": 0.05452292441140025,
"grad_norm": 0.7436097860336304,
"learning_rate": 8e-05,
"loss": 4.5229,
"step": 11
},
{
"epoch": 0.05947955390334572,
"grad_norm": 0.6071892976760864,
"learning_rate": 8.800000000000001e-05,
"loss": 3.7029,
"step": 12
},
{
"epoch": 0.0644361833952912,
"grad_norm": 0.598869800567627,
"learning_rate": 9.6e-05,
"loss": 4.1203,
"step": 13
},
{
"epoch": 0.06939281288723669,
"grad_norm": 0.5983518958091736,
"learning_rate": 0.00010400000000000001,
"loss": 3.9319,
"step": 14
},
{
"epoch": 0.07434944237918216,
"grad_norm": 0.5899732112884521,
"learning_rate": 0.00011200000000000001,
"loss": 3.7153,
"step": 15
},
{
"epoch": 0.07930607187112763,
"grad_norm": 0.6922647356987,
"learning_rate": 0.00012,
"loss": 3.9409,
"step": 16
},
{
"epoch": 0.08426270136307311,
"grad_norm": 0.6090132594108582,
"learning_rate": 0.00012800000000000002,
"loss": 3.8406,
"step": 17
},
{
"epoch": 0.08921933085501858,
"grad_norm": 0.6153061389923096,
"learning_rate": 0.00013600000000000003,
"loss": 3.5257,
"step": 18
},
{
"epoch": 0.09417596034696406,
"grad_norm": 0.7484270930290222,
"learning_rate": 0.000144,
"loss": 3.6834,
"step": 19
},
{
"epoch": 0.09913258983890955,
"grad_norm": 0.7390730381011963,
"learning_rate": 0.000152,
"loss": 3.5609,
"step": 20
},
{
"epoch": 0.10408921933085502,
"grad_norm": 0.7254196405410767,
"learning_rate": 0.00016,
"loss": 3.0742,
"step": 21
},
{
"epoch": 0.1090458488228005,
"grad_norm": 0.7760971784591675,
"learning_rate": 0.000168,
"loss": 3.336,
"step": 22
},
{
"epoch": 0.11400247831474597,
"grad_norm": 0.7879131436347961,
"learning_rate": 0.00017600000000000002,
"loss": 3.6089,
"step": 23
},
{
"epoch": 0.11895910780669144,
"grad_norm": 0.7201817631721497,
"learning_rate": 0.00018400000000000003,
"loss": 3.4623,
"step": 24
},
{
"epoch": 0.12391573729863693,
"grad_norm": 0.9186316132545471,
"learning_rate": 0.000192,
"loss": 3.7456,
"step": 25
},
{
"epoch": 0.1288723667905824,
"grad_norm": 0.8382372260093689,
"learning_rate": 0.0002,
"loss": 3.5368,
"step": 26
},
{
"epoch": 0.13382899628252787,
"grad_norm": 1.2407952547073364,
"learning_rate": 0.00019974457215836527,
"loss": 3.36,
"step": 27
},
{
"epoch": 0.13878562577447337,
"grad_norm": 0.9732126593589783,
"learning_rate": 0.00019948914431673054,
"loss": 3.5255,
"step": 28
},
{
"epoch": 0.14374225526641884,
"grad_norm": 0.971034586429596,
"learning_rate": 0.0001992337164750958,
"loss": 3.1694,
"step": 29
},
{
"epoch": 0.14869888475836432,
"grad_norm": 1.368577480316162,
"learning_rate": 0.00019897828863346106,
"loss": 3.6517,
"step": 30
},
{
"epoch": 0.1536555142503098,
"grad_norm": 1.1814148426055908,
"learning_rate": 0.00019872286079182633,
"loss": 3.3308,
"step": 31
},
{
"epoch": 0.15861214374225527,
"grad_norm": 1.3007687330245972,
"learning_rate": 0.0001984674329501916,
"loss": 3.4267,
"step": 32
},
{
"epoch": 0.16356877323420074,
"grad_norm": 1.053661823272705,
"learning_rate": 0.00019821200510855685,
"loss": 3.4749,
"step": 33
},
{
"epoch": 0.16852540272614622,
"grad_norm": 1.454937219619751,
"learning_rate": 0.0001979565772669221,
"loss": 3.1697,
"step": 34
},
{
"epoch": 0.1734820322180917,
"grad_norm": 1.151451587677002,
"learning_rate": 0.00019770114942528738,
"loss": 2.8761,
"step": 35
},
{
"epoch": 0.17843866171003717,
"grad_norm": 1.0200417041778564,
"learning_rate": 0.00019744572158365262,
"loss": 3.3697,
"step": 36
},
{
"epoch": 0.18339529120198264,
"grad_norm": 1.136304259300232,
"learning_rate": 0.0001971902937420179,
"loss": 3.41,
"step": 37
},
{
"epoch": 0.18835192069392812,
"grad_norm": 1.0931888818740845,
"learning_rate": 0.00019693486590038314,
"loss": 3.5437,
"step": 38
},
{
"epoch": 0.19330855018587362,
"grad_norm": 1.4094620943069458,
"learning_rate": 0.00019667943805874843,
"loss": 3.6909,
"step": 39
},
{
"epoch": 0.1982651796778191,
"grad_norm": 0.9859973192214966,
"learning_rate": 0.00019642401021711367,
"loss": 3.1319,
"step": 40
},
{
"epoch": 0.20322180916976457,
"grad_norm": 1.330928921699524,
"learning_rate": 0.00019616858237547893,
"loss": 3.671,
"step": 41
},
{
"epoch": 0.20817843866171004,
"grad_norm": 0.8949196338653564,
"learning_rate": 0.0001959131545338442,
"loss": 2.7301,
"step": 42
},
{
"epoch": 0.21313506815365552,
"grad_norm": 0.8063453435897827,
"learning_rate": 0.00019565772669220946,
"loss": 2.7622,
"step": 43
},
{
"epoch": 0.218091697645601,
"grad_norm": 1.093867301940918,
"learning_rate": 0.00019540229885057472,
"loss": 3.0352,
"step": 44
},
{
"epoch": 0.22304832713754646,
"grad_norm": 1.1064800024032593,
"learning_rate": 0.00019514687100893999,
"loss": 3.3366,
"step": 45
},
{
"epoch": 0.22800495662949194,
"grad_norm": 0.9350125193595886,
"learning_rate": 0.00019489144316730525,
"loss": 2.7895,
"step": 46
},
{
"epoch": 0.23296158612143741,
"grad_norm": 1.234007716178894,
"learning_rate": 0.0001946360153256705,
"loss": 3.4759,
"step": 47
},
{
"epoch": 0.2379182156133829,
"grad_norm": 1.1140137910842896,
"learning_rate": 0.00019438058748403575,
"loss": 3.5556,
"step": 48
},
{
"epoch": 0.24287484510532836,
"grad_norm": 1.0842703580856323,
"learning_rate": 0.00019412515964240104,
"loss": 3.7215,
"step": 49
},
{
"epoch": 0.24783147459727387,
"grad_norm": 0.9289716482162476,
"learning_rate": 0.00019386973180076628,
"loss": 2.8763,
"step": 50
},
{
"epoch": 0.2527881040892193,
"grad_norm": 0.9746398329734802,
"learning_rate": 0.00019361430395913157,
"loss": 2.8321,
"step": 51
},
{
"epoch": 0.2577447335811648,
"grad_norm": 1.1200573444366455,
"learning_rate": 0.0001933588761174968,
"loss": 3.1944,
"step": 52
},
{
"epoch": 0.26270136307311026,
"grad_norm": 1.002164363861084,
"learning_rate": 0.0001931034482758621,
"loss": 2.9787,
"step": 53
},
{
"epoch": 0.26765799256505574,
"grad_norm": 1.002467155456543,
"learning_rate": 0.00019284802043422733,
"loss": 3.0303,
"step": 54
},
{
"epoch": 0.27261462205700127,
"grad_norm": 0.8421512842178345,
"learning_rate": 0.0001925925925925926,
"loss": 2.7362,
"step": 55
},
{
"epoch": 0.27757125154894674,
"grad_norm": 0.9212812185287476,
"learning_rate": 0.00019233716475095786,
"loss": 2.7722,
"step": 56
},
{
"epoch": 0.2825278810408922,
"grad_norm": 0.9771202802658081,
"learning_rate": 0.00019208173690932312,
"loss": 2.813,
"step": 57
},
{
"epoch": 0.2874845105328377,
"grad_norm": 0.9651036262512207,
"learning_rate": 0.00019182630906768838,
"loss": 3.1953,
"step": 58
},
{
"epoch": 0.29244114002478316,
"grad_norm": 0.8853598237037659,
"learning_rate": 0.00019157088122605365,
"loss": 2.5714,
"step": 59
},
{
"epoch": 0.29739776951672864,
"grad_norm": 1.213663101196289,
"learning_rate": 0.0001913154533844189,
"loss": 3.4754,
"step": 60
},
{
"epoch": 0.3023543990086741,
"grad_norm": 1.020679235458374,
"learning_rate": 0.00019106002554278417,
"loss": 2.64,
"step": 61
},
{
"epoch": 0.3073110285006196,
"grad_norm": 1.0737823247909546,
"learning_rate": 0.00019080459770114944,
"loss": 2.8907,
"step": 62
},
{
"epoch": 0.31226765799256506,
"grad_norm": 0.8646718263626099,
"learning_rate": 0.0001905491698595147,
"loss": 2.7505,
"step": 63
},
{
"epoch": 0.31722428748451054,
"grad_norm": 1.219244122505188,
"learning_rate": 0.00019029374201787996,
"loss": 3.4013,
"step": 64
},
{
"epoch": 0.322180916976456,
"grad_norm": 1.0713865756988525,
"learning_rate": 0.00019003831417624523,
"loss": 3.3184,
"step": 65
},
{
"epoch": 0.3271375464684015,
"grad_norm": 1.0803613662719727,
"learning_rate": 0.0001897828863346105,
"loss": 2.9004,
"step": 66
},
{
"epoch": 0.33209417596034696,
"grad_norm": 0.9669873714447021,
"learning_rate": 0.00018952745849297575,
"loss": 2.9939,
"step": 67
},
{
"epoch": 0.33705080545229243,
"grad_norm": 1.0816727876663208,
"learning_rate": 0.00018927203065134102,
"loss": 2.9023,
"step": 68
},
{
"epoch": 0.3420074349442379,
"grad_norm": 1.0009043216705322,
"learning_rate": 0.00018901660280970628,
"loss": 2.8928,
"step": 69
},
{
"epoch": 0.3469640644361834,
"grad_norm": 1.0465470552444458,
"learning_rate": 0.00018876117496807154,
"loss": 3.0128,
"step": 70
},
{
"epoch": 0.35192069392812886,
"grad_norm": 1.0517007112503052,
"learning_rate": 0.00018850574712643678,
"loss": 3.0419,
"step": 71
},
{
"epoch": 0.35687732342007433,
"grad_norm": 1.1139233112335205,
"learning_rate": 0.00018825031928480207,
"loss": 2.7776,
"step": 72
},
{
"epoch": 0.3618339529120198,
"grad_norm": 1.0560076236724854,
"learning_rate": 0.0001879948914431673,
"loss": 3.276,
"step": 73
},
{
"epoch": 0.3667905824039653,
"grad_norm": 1.054962396621704,
"learning_rate": 0.0001877394636015326,
"loss": 2.8159,
"step": 74
},
{
"epoch": 0.37174721189591076,
"grad_norm": 1.1142494678497314,
"learning_rate": 0.00018748403575989783,
"loss": 2.829,
"step": 75
},
{
"epoch": 0.37670384138785623,
"grad_norm": 1.9403822422027588,
"learning_rate": 0.00018722860791826312,
"loss": 3.2541,
"step": 76
},
{
"epoch": 0.38166047087980176,
"grad_norm": 1.3220140933990479,
"learning_rate": 0.00018697318007662836,
"loss": 3.5106,
"step": 77
},
{
"epoch": 0.38661710037174724,
"grad_norm": 1.1846554279327393,
"learning_rate": 0.00018671775223499362,
"loss": 3.0229,
"step": 78
},
{
"epoch": 0.3915737298636927,
"grad_norm": 0.9640692472457886,
"learning_rate": 0.00018646232439335889,
"loss": 2.6364,
"step": 79
},
{
"epoch": 0.3965303593556382,
"grad_norm": 0.9236942529678345,
"learning_rate": 0.00018620689655172415,
"loss": 2.8003,
"step": 80
},
{
"epoch": 0.40148698884758366,
"grad_norm": 1.1610803604125977,
"learning_rate": 0.0001859514687100894,
"loss": 3.2429,
"step": 81
},
{
"epoch": 0.40644361833952913,
"grad_norm": 1.1774544715881348,
"learning_rate": 0.00018569604086845468,
"loss": 3.0449,
"step": 82
},
{
"epoch": 0.4114002478314746,
"grad_norm": 2.4880337715148926,
"learning_rate": 0.00018544061302681994,
"loss": 2.8562,
"step": 83
},
{
"epoch": 0.4163568773234201,
"grad_norm": 0.991893470287323,
"learning_rate": 0.0001851851851851852,
"loss": 2.7547,
"step": 84
},
{
"epoch": 0.42131350681536556,
"grad_norm": 1.6182878017425537,
"learning_rate": 0.00018492975734355044,
"loss": 3.5209,
"step": 85
},
{
"epoch": 0.42627013630731103,
"grad_norm": 0.9980579018592834,
"learning_rate": 0.00018467432950191573,
"loss": 2.6094,
"step": 86
},
{
"epoch": 0.4312267657992565,
"grad_norm": 0.8960520625114441,
"learning_rate": 0.00018441890166028097,
"loss": 2.5804,
"step": 87
},
{
"epoch": 0.436183395291202,
"grad_norm": 1.015406847000122,
"learning_rate": 0.00018416347381864626,
"loss": 2.5902,
"step": 88
},
{
"epoch": 0.44114002478314746,
"grad_norm": 0.9714847803115845,
"learning_rate": 0.0001839080459770115,
"loss": 2.841,
"step": 89
},
{
"epoch": 0.44609665427509293,
"grad_norm": 1.069502592086792,
"learning_rate": 0.00018365261813537678,
"loss": 2.7831,
"step": 90
},
{
"epoch": 0.4510532837670384,
"grad_norm": 0.8945897221565247,
"learning_rate": 0.00018339719029374202,
"loss": 2.333,
"step": 91
},
{
"epoch": 0.4560099132589839,
"grad_norm": 1.0069011449813843,
"learning_rate": 0.00018314176245210728,
"loss": 2.9101,
"step": 92
},
{
"epoch": 0.46096654275092935,
"grad_norm": 1.3870095014572144,
"learning_rate": 0.00018288633461047255,
"loss": 2.8714,
"step": 93
},
{
"epoch": 0.46592317224287483,
"grad_norm": 1.0033080577850342,
"learning_rate": 0.0001826309067688378,
"loss": 2.7687,
"step": 94
},
{
"epoch": 0.4708798017348203,
"grad_norm": 1.1281189918518066,
"learning_rate": 0.00018237547892720307,
"loss": 2.7909,
"step": 95
},
{
"epoch": 0.4758364312267658,
"grad_norm": 1.1801944971084595,
"learning_rate": 0.00018212005108556834,
"loss": 2.9649,
"step": 96
},
{
"epoch": 0.48079306071871125,
"grad_norm": 0.9499044418334961,
"learning_rate": 0.0001818646232439336,
"loss": 2.7359,
"step": 97
},
{
"epoch": 0.4857496902106567,
"grad_norm": 1.0616381168365479,
"learning_rate": 0.00018160919540229886,
"loss": 2.6311,
"step": 98
},
{
"epoch": 0.49070631970260226,
"grad_norm": 1.2957072257995605,
"learning_rate": 0.0001813537675606641,
"loss": 3.3982,
"step": 99
},
{
"epoch": 0.49566294919454773,
"grad_norm": 1.4070500135421753,
"learning_rate": 0.0001810983397190294,
"loss": 3.4525,
"step": 100
},
{
"epoch": 0.5006195786864932,
"grad_norm": 1.0367472171783447,
"learning_rate": 0.00018084291187739463,
"loss": 2.5367,
"step": 101
},
{
"epoch": 0.5055762081784386,
"grad_norm": 1.1069153547286987,
"learning_rate": 0.00018058748403575992,
"loss": 2.8477,
"step": 102
},
{
"epoch": 0.5105328376703842,
"grad_norm": 1.6208503246307373,
"learning_rate": 0.00018033205619412515,
"loss": 3.0734,
"step": 103
},
{
"epoch": 0.5154894671623296,
"grad_norm": 1.0800952911376953,
"learning_rate": 0.00018007662835249044,
"loss": 3.0183,
"step": 104
},
{
"epoch": 0.5204460966542751,
"grad_norm": 1.0784571170806885,
"learning_rate": 0.00017982120051085568,
"loss": 2.8266,
"step": 105
},
{
"epoch": 0.5254027261462205,
"grad_norm": 1.3548469543457031,
"learning_rate": 0.00017956577266922094,
"loss": 2.8956,
"step": 106
},
{
"epoch": 0.530359355638166,
"grad_norm": 0.967523455619812,
"learning_rate": 0.0001793103448275862,
"loss": 2.4301,
"step": 107
},
{
"epoch": 0.5353159851301115,
"grad_norm": 1.3245588541030884,
"learning_rate": 0.00017905491698595147,
"loss": 2.6761,
"step": 108
},
{
"epoch": 0.540272614622057,
"grad_norm": 1.3608037233352661,
"learning_rate": 0.00017879948914431673,
"loss": 2.5499,
"step": 109
},
{
"epoch": 0.5452292441140025,
"grad_norm": 1.27790367603302,
"learning_rate": 0.000178544061302682,
"loss": 2.8868,
"step": 110
},
{
"epoch": 0.550185873605948,
"grad_norm": 1.031919002532959,
"learning_rate": 0.00017828863346104726,
"loss": 2.6039,
"step": 111
},
{
"epoch": 0.5551425030978935,
"grad_norm": 1.1233882904052734,
"learning_rate": 0.00017803320561941252,
"loss": 2.7838,
"step": 112
},
{
"epoch": 0.5600991325898389,
"grad_norm": 1.2760884761810303,
"learning_rate": 0.00017777777777777779,
"loss": 3.0326,
"step": 113
},
{
"epoch": 0.5650557620817844,
"grad_norm": 1.0590308904647827,
"learning_rate": 0.00017752234993614305,
"loss": 2.8617,
"step": 114
},
{
"epoch": 0.5700123915737298,
"grad_norm": 1.1054567098617554,
"learning_rate": 0.0001772669220945083,
"loss": 2.8121,
"step": 115
},
{
"epoch": 0.5749690210656754,
"grad_norm": 1.1285626888275146,
"learning_rate": 0.00017701149425287358,
"loss": 2.7076,
"step": 116
},
{
"epoch": 0.5799256505576208,
"grad_norm": 1.1753144264221191,
"learning_rate": 0.00017675606641123884,
"loss": 2.8147,
"step": 117
},
{
"epoch": 0.5848822800495663,
"grad_norm": 1.167649745941162,
"learning_rate": 0.0001765006385696041,
"loss": 2.8485,
"step": 118
},
{
"epoch": 0.5898389095415117,
"grad_norm": 1.1729902029037476,
"learning_rate": 0.00017624521072796937,
"loss": 2.7919,
"step": 119
},
{
"epoch": 0.5947955390334573,
"grad_norm": 1.14457368850708,
"learning_rate": 0.0001759897828863346,
"loss": 2.6986,
"step": 120
},
{
"epoch": 0.5997521685254027,
"grad_norm": 1.020176649093628,
"learning_rate": 0.0001757343550446999,
"loss": 2.3607,
"step": 121
},
{
"epoch": 0.6047087980173482,
"grad_norm": 1.3097666501998901,
"learning_rate": 0.00017547892720306513,
"loss": 2.8609,
"step": 122
},
{
"epoch": 0.6096654275092936,
"grad_norm": 1.139198899269104,
"learning_rate": 0.00017522349936143042,
"loss": 2.8653,
"step": 123
},
{
"epoch": 0.6146220570012392,
"grad_norm": 1.3521591424942017,
"learning_rate": 0.00017496807151979566,
"loss": 2.6918,
"step": 124
},
{
"epoch": 0.6195786864931846,
"grad_norm": 1.0930819511413574,
"learning_rate": 0.00017471264367816095,
"loss": 2.7736,
"step": 125
},
{
"epoch": 0.6245353159851301,
"grad_norm": 1.0400232076644897,
"learning_rate": 0.00017445721583652618,
"loss": 2.376,
"step": 126
},
{
"epoch": 0.6294919454770755,
"grad_norm": 1.5003938674926758,
"learning_rate": 0.00017420178799489145,
"loss": 2.7772,
"step": 127
},
{
"epoch": 0.6344485749690211,
"grad_norm": 1.3468868732452393,
"learning_rate": 0.0001739463601532567,
"loss": 2.7663,
"step": 128
},
{
"epoch": 0.6394052044609665,
"grad_norm": 1.1979831457138062,
"learning_rate": 0.00017369093231162197,
"loss": 2.3247,
"step": 129
},
{
"epoch": 0.644361833952912,
"grad_norm": 1.50636625289917,
"learning_rate": 0.00017343550446998724,
"loss": 3.2306,
"step": 130
},
{
"epoch": 0.6493184634448576,
"grad_norm": 1.3549541234970093,
"learning_rate": 0.0001731800766283525,
"loss": 2.9172,
"step": 131
},
{
"epoch": 0.654275092936803,
"grad_norm": 1.2008872032165527,
"learning_rate": 0.00017292464878671776,
"loss": 2.8561,
"step": 132
},
{
"epoch": 0.6592317224287485,
"grad_norm": 1.166435956954956,
"learning_rate": 0.00017266922094508303,
"loss": 2.6782,
"step": 133
},
{
"epoch": 0.6641883519206939,
"grad_norm": 1.8413958549499512,
"learning_rate": 0.00017241379310344826,
"loss": 2.932,
"step": 134
},
{
"epoch": 0.6691449814126395,
"grad_norm": 1.2101318836212158,
"learning_rate": 0.00017215836526181355,
"loss": 3.0967,
"step": 135
},
{
"epoch": 0.6741016109045849,
"grad_norm": 1.160033106803894,
"learning_rate": 0.0001719029374201788,
"loss": 2.79,
"step": 136
},
{
"epoch": 0.6790582403965304,
"grad_norm": 1.367043375968933,
"learning_rate": 0.00017164750957854408,
"loss": 2.6743,
"step": 137
},
{
"epoch": 0.6840148698884758,
"grad_norm": 0.9590573906898499,
"learning_rate": 0.00017139208173690932,
"loss": 2.6285,
"step": 138
},
{
"epoch": 0.6889714993804213,
"grad_norm": 1.1026114225387573,
"learning_rate": 0.0001711366538952746,
"loss": 2.6158,
"step": 139
},
{
"epoch": 0.6939281288723668,
"grad_norm": 1.2949557304382324,
"learning_rate": 0.00017088122605363984,
"loss": 2.8041,
"step": 140
},
{
"epoch": 0.6988847583643123,
"grad_norm": 1.1252179145812988,
"learning_rate": 0.0001706257982120051,
"loss": 2.6227,
"step": 141
},
{
"epoch": 0.7038413878562577,
"grad_norm": 1.2461496591567993,
"learning_rate": 0.00017037037037037037,
"loss": 2.9447,
"step": 142
},
{
"epoch": 0.7087980173482032,
"grad_norm": 1.0550256967544556,
"learning_rate": 0.00017011494252873563,
"loss": 2.5931,
"step": 143
},
{
"epoch": 0.7137546468401487,
"grad_norm": 1.1814954280853271,
"learning_rate": 0.0001698595146871009,
"loss": 2.6902,
"step": 144
},
{
"epoch": 0.7187112763320942,
"grad_norm": 1.3461589813232422,
"learning_rate": 0.00016960408684546616,
"loss": 3.1877,
"step": 145
},
{
"epoch": 0.7236679058240396,
"grad_norm": 1.5511823892593384,
"learning_rate": 0.00016934865900383142,
"loss": 2.8915,
"step": 146
},
{
"epoch": 0.7286245353159851,
"grad_norm": 1.1968817710876465,
"learning_rate": 0.00016909323116219669,
"loss": 2.9987,
"step": 147
},
{
"epoch": 0.7335811648079306,
"grad_norm": 1.4092822074890137,
"learning_rate": 0.00016883780332056195,
"loss": 2.8595,
"step": 148
},
{
"epoch": 0.7385377942998761,
"grad_norm": 1.397154450416565,
"learning_rate": 0.0001685823754789272,
"loss": 2.8689,
"step": 149
},
{
"epoch": 0.7434944237918215,
"grad_norm": 1.3408515453338623,
"learning_rate": 0.00016832694763729248,
"loss": 2.8842,
"step": 150
},
{
"epoch": 0.748451053283767,
"grad_norm": 1.8174018859863281,
"learning_rate": 0.00016807151979565774,
"loss": 3.3432,
"step": 151
},
{
"epoch": 0.7534076827757125,
"grad_norm": 1.179330587387085,
"learning_rate": 0.000167816091954023,
"loss": 2.5291,
"step": 152
},
{
"epoch": 0.758364312267658,
"grad_norm": 1.308836579322815,
"learning_rate": 0.00016756066411238827,
"loss": 2.7799,
"step": 153
},
{
"epoch": 0.7633209417596035,
"grad_norm": 1.3677353858947754,
"learning_rate": 0.00016730523627075353,
"loss": 2.8872,
"step": 154
},
{
"epoch": 0.7682775712515489,
"grad_norm": 1.2296689748764038,
"learning_rate": 0.0001670498084291188,
"loss": 3.137,
"step": 155
},
{
"epoch": 0.7732342007434945,
"grad_norm": 1.2084614038467407,
"learning_rate": 0.00016679438058748406,
"loss": 2.8232,
"step": 156
},
{
"epoch": 0.7781908302354399,
"grad_norm": 1.1933956146240234,
"learning_rate": 0.0001665389527458493,
"loss": 3.0647,
"step": 157
},
{
"epoch": 0.7831474597273854,
"grad_norm": 1.175970435142517,
"learning_rate": 0.00016628352490421458,
"loss": 2.3696,
"step": 158
},
{
"epoch": 0.7881040892193308,
"grad_norm": 1.1459221839904785,
"learning_rate": 0.00016602809706257982,
"loss": 2.5382,
"step": 159
},
{
"epoch": 0.7930607187112764,
"grad_norm": 1.195381760597229,
"learning_rate": 0.0001657726692209451,
"loss": 2.457,
"step": 160
},
{
"epoch": 0.7980173482032218,
"grad_norm": 1.337165117263794,
"learning_rate": 0.00016551724137931035,
"loss": 2.5121,
"step": 161
},
{
"epoch": 0.8029739776951673,
"grad_norm": 1.3737194538116455,
"learning_rate": 0.00016526181353767564,
"loss": 2.9961,
"step": 162
},
{
"epoch": 0.8079306071871127,
"grad_norm": 1.63533353805542,
"learning_rate": 0.00016500638569604087,
"loss": 3.2549,
"step": 163
},
{
"epoch": 0.8128872366790583,
"grad_norm": 1.2384507656097412,
"learning_rate": 0.00016475095785440614,
"loss": 2.8821,
"step": 164
},
{
"epoch": 0.8178438661710037,
"grad_norm": 1.2741832733154297,
"learning_rate": 0.0001644955300127714,
"loss": 2.8996,
"step": 165
},
{
"epoch": 0.8228004956629492,
"grad_norm": 1.559248685836792,
"learning_rate": 0.00016424010217113666,
"loss": 3.1911,
"step": 166
},
{
"epoch": 0.8277571251548946,
"grad_norm": 1.3192209005355835,
"learning_rate": 0.00016398467432950193,
"loss": 2.6993,
"step": 167
},
{
"epoch": 0.8327137546468402,
"grad_norm": 1.1915833950042725,
"learning_rate": 0.0001637292464878672,
"loss": 2.6233,
"step": 168
},
{
"epoch": 0.8376703841387856,
"grad_norm": 1.2560778856277466,
"learning_rate": 0.00016347381864623245,
"loss": 2.6467,
"step": 169
},
{
"epoch": 0.8426270136307311,
"grad_norm": 1.3174892663955688,
"learning_rate": 0.00016321839080459772,
"loss": 2.4764,
"step": 170
},
{
"epoch": 0.8475836431226765,
"grad_norm": 1.3117812871932983,
"learning_rate": 0.00016296296296296295,
"loss": 2.5733,
"step": 171
},
{
"epoch": 0.8525402726146221,
"grad_norm": 1.240525722503662,
"learning_rate": 0.00016270753512132824,
"loss": 2.6198,
"step": 172
},
{
"epoch": 0.8574969021065675,
"grad_norm": 1.66138756275177,
"learning_rate": 0.00016245210727969348,
"loss": 3.3568,
"step": 173
},
{
"epoch": 0.862453531598513,
"grad_norm": 1.5758461952209473,
"learning_rate": 0.00016219667943805877,
"loss": 2.7601,
"step": 174
},
{
"epoch": 0.8674101610904585,
"grad_norm": 1.1617976427078247,
"learning_rate": 0.000161941251596424,
"loss": 2.5865,
"step": 175
},
{
"epoch": 0.872366790582404,
"grad_norm": 1.435559868812561,
"learning_rate": 0.0001616858237547893,
"loss": 2.8164,
"step": 176
},
{
"epoch": 0.8773234200743495,
"grad_norm": 1.208302617073059,
"learning_rate": 0.00016143039591315453,
"loss": 2.5785,
"step": 177
},
{
"epoch": 0.8822800495662949,
"grad_norm": 1.4940738677978516,
"learning_rate": 0.0001611749680715198,
"loss": 2.7235,
"step": 178
},
{
"epoch": 0.8872366790582404,
"grad_norm": 1.135846495628357,
"learning_rate": 0.00016091954022988506,
"loss": 2.3321,
"step": 179
},
{
"epoch": 0.8921933085501859,
"grad_norm": 1.2706923484802246,
"learning_rate": 0.00016066411238825032,
"loss": 2.5422,
"step": 180
},
{
"epoch": 0.8971499380421314,
"grad_norm": 1.3885927200317383,
"learning_rate": 0.00016040868454661559,
"loss": 2.8623,
"step": 181
},
{
"epoch": 0.9021065675340768,
"grad_norm": 1.4354758262634277,
"learning_rate": 0.00016015325670498085,
"loss": 2.3397,
"step": 182
},
{
"epoch": 0.9070631970260223,
"grad_norm": 1.3210114240646362,
"learning_rate": 0.0001598978288633461,
"loss": 2.5658,
"step": 183
},
{
"epoch": 0.9120198265179678,
"grad_norm": 1.5271670818328857,
"learning_rate": 0.00015964240102171138,
"loss": 2.7153,
"step": 184
},
{
"epoch": 0.9169764560099133,
"grad_norm": 1.3032946586608887,
"learning_rate": 0.00015938697318007664,
"loss": 2.4854,
"step": 185
},
{
"epoch": 0.9219330855018587,
"grad_norm": 1.3772252798080444,
"learning_rate": 0.0001591315453384419,
"loss": 2.5493,
"step": 186
},
{
"epoch": 0.9268897149938042,
"grad_norm": 1.344874382019043,
"learning_rate": 0.00015887611749680717,
"loss": 2.6904,
"step": 187
},
{
"epoch": 0.9318463444857497,
"grad_norm": 1.4352842569351196,
"learning_rate": 0.00015862068965517243,
"loss": 2.7214,
"step": 188
},
{
"epoch": 0.9368029739776952,
"grad_norm": 1.17839515209198,
"learning_rate": 0.0001583652618135377,
"loss": 2.1581,
"step": 189
},
{
"epoch": 0.9417596034696406,
"grad_norm": 1.3339844942092896,
"learning_rate": 0.00015810983397190296,
"loss": 2.4722,
"step": 190
},
{
"epoch": 0.9467162329615861,
"grad_norm": 1.3919591903686523,
"learning_rate": 0.00015785440613026822,
"loss": 2.4386,
"step": 191
},
{
"epoch": 0.9516728624535316,
"grad_norm": 1.4500068426132202,
"learning_rate": 0.00015759897828863346,
"loss": 2.5789,
"step": 192
},
{
"epoch": 0.9566294919454771,
"grad_norm": 1.3232927322387695,
"learning_rate": 0.00015734355044699875,
"loss": 2.5556,
"step": 193
},
{
"epoch": 0.9615861214374225,
"grad_norm": 1.56577467918396,
"learning_rate": 0.00015708812260536398,
"loss": 2.8504,
"step": 194
},
{
"epoch": 0.966542750929368,
"grad_norm": 1.3413156270980835,
"learning_rate": 0.00015683269476372927,
"loss": 2.2172,
"step": 195
},
{
"epoch": 0.9714993804213135,
"grad_norm": 1.5769926309585571,
"learning_rate": 0.0001565772669220945,
"loss": 3.1141,
"step": 196
},
{
"epoch": 0.976456009913259,
"grad_norm": 1.3165156841278076,
"learning_rate": 0.0001563218390804598,
"loss": 2.6951,
"step": 197
},
{
"epoch": 0.9814126394052045,
"grad_norm": 1.384596347808838,
"learning_rate": 0.00015606641123882504,
"loss": 2.8985,
"step": 198
},
{
"epoch": 0.9863692688971499,
"grad_norm": 1.2764040231704712,
"learning_rate": 0.0001558109833971903,
"loss": 2.2731,
"step": 199
},
{
"epoch": 0.9913258983890955,
"grad_norm": 1.3206219673156738,
"learning_rate": 0.00015555555555555556,
"loss": 2.4783,
"step": 200
},
{
"epoch": 0.9962825278810409,
"grad_norm": 1.1644648313522339,
"learning_rate": 0.00015530012771392083,
"loss": 2.3293,
"step": 201
},
{
"epoch": 1.0,
"grad_norm": 1.5087392330169678,
"learning_rate": 0.0001550446998722861,
"loss": 1.9488,
"step": 202
},
{
"epoch": 1.0,
"eval_loss": 2.729416608810425,
"eval_runtime": 22.6846,
"eval_samples_per_second": 17.809,
"eval_steps_per_second": 2.248,
"step": 202
},
{
"epoch": 1.0049566294919454,
"grad_norm": 1.1808276176452637,
"learning_rate": 0.00015478927203065135,
"loss": 2.45,
"step": 203
},
{
"epoch": 1.009913258983891,
"grad_norm": 1.3719933032989502,
"learning_rate": 0.00015453384418901662,
"loss": 2.4187,
"step": 204
},
{
"epoch": 1.0148698884758365,
"grad_norm": 1.4881116151809692,
"learning_rate": 0.00015427841634738188,
"loss": 2.7559,
"step": 205
},
{
"epoch": 1.019826517967782,
"grad_norm": 1.317412257194519,
"learning_rate": 0.00015402298850574712,
"loss": 2.4683,
"step": 206
},
{
"epoch": 1.0247831474597273,
"grad_norm": 1.2034761905670166,
"learning_rate": 0.0001537675606641124,
"loss": 2.3886,
"step": 207
},
{
"epoch": 1.029739776951673,
"grad_norm": 1.5965017080307007,
"learning_rate": 0.00015351213282247764,
"loss": 2.5819,
"step": 208
},
{
"epoch": 1.0346964064436184,
"grad_norm": 1.6514837741851807,
"learning_rate": 0.00015325670498084293,
"loss": 2.7572,
"step": 209
},
{
"epoch": 1.0396530359355638,
"grad_norm": 1.4627822637557983,
"learning_rate": 0.00015300127713920817,
"loss": 2.8254,
"step": 210
},
{
"epoch": 1.0446096654275092,
"grad_norm": 1.5069350004196167,
"learning_rate": 0.00015274584929757346,
"loss": 2.6277,
"step": 211
},
{
"epoch": 1.0495662949194549,
"grad_norm": 1.3964656591415405,
"learning_rate": 0.0001524904214559387,
"loss": 2.458,
"step": 212
},
{
"epoch": 1.0545229244114003,
"grad_norm": 1.5406875610351562,
"learning_rate": 0.00015223499361430396,
"loss": 2.5876,
"step": 213
},
{
"epoch": 1.0594795539033457,
"grad_norm": 1.3873964548110962,
"learning_rate": 0.00015197956577266922,
"loss": 2.1244,
"step": 214
},
{
"epoch": 1.0644361833952911,
"grad_norm": 1.5236468315124512,
"learning_rate": 0.00015172413793103449,
"loss": 2.4087,
"step": 215
},
{
"epoch": 1.0693928128872368,
"grad_norm": 1.5236297845840454,
"learning_rate": 0.00015146871008939975,
"loss": 2.5084,
"step": 216
},
{
"epoch": 1.0743494423791822,
"grad_norm": 1.3550326824188232,
"learning_rate": 0.000151213282247765,
"loss": 2.7363,
"step": 217
},
{
"epoch": 1.0793060718711276,
"grad_norm": 1.3009722232818604,
"learning_rate": 0.00015095785440613028,
"loss": 2.4046,
"step": 218
},
{
"epoch": 1.084262701363073,
"grad_norm": 1.3609213829040527,
"learning_rate": 0.00015070242656449554,
"loss": 2.5477,
"step": 219
},
{
"epoch": 1.0892193308550187,
"grad_norm": 1.5530016422271729,
"learning_rate": 0.00015044699872286078,
"loss": 2.677,
"step": 220
},
{
"epoch": 1.094175960346964,
"grad_norm": 1.566308856010437,
"learning_rate": 0.00015019157088122607,
"loss": 2.6243,
"step": 221
},
{
"epoch": 1.0991325898389095,
"grad_norm": 1.6015573740005493,
"learning_rate": 0.0001499361430395913,
"loss": 2.4592,
"step": 222
},
{
"epoch": 1.104089219330855,
"grad_norm": 2.0257885456085205,
"learning_rate": 0.0001496807151979566,
"loss": 3.2142,
"step": 223
},
{
"epoch": 1.1090458488228006,
"grad_norm": 1.599144458770752,
"learning_rate": 0.00014942528735632183,
"loss": 2.5635,
"step": 224
},
{
"epoch": 1.114002478314746,
"grad_norm": 1.4779815673828125,
"learning_rate": 0.00014916985951468712,
"loss": 2.3515,
"step": 225
},
{
"epoch": 1.1189591078066914,
"grad_norm": 1.6670912504196167,
"learning_rate": 0.00014891443167305236,
"loss": 2.5193,
"step": 226
},
{
"epoch": 1.123915737298637,
"grad_norm": 1.4645159244537354,
"learning_rate": 0.00014865900383141765,
"loss": 2.4852,
"step": 227
},
{
"epoch": 1.1288723667905824,
"grad_norm": 1.39253568649292,
"learning_rate": 0.00014840357598978288,
"loss": 2.7357,
"step": 228
},
{
"epoch": 1.1338289962825279,
"grad_norm": 1.5711032152175903,
"learning_rate": 0.00014814814814814815,
"loss": 2.6649,
"step": 229
},
{
"epoch": 1.1387856257744733,
"grad_norm": 1.3611880540847778,
"learning_rate": 0.0001478927203065134,
"loss": 2.3486,
"step": 230
},
{
"epoch": 1.143742255266419,
"grad_norm": 1.439810872077942,
"learning_rate": 0.00014763729246487867,
"loss": 2.1991,
"step": 231
},
{
"epoch": 1.1486988847583643,
"grad_norm": 1.7450776100158691,
"learning_rate": 0.00014738186462324394,
"loss": 2.7248,
"step": 232
},
{
"epoch": 1.1536555142503098,
"grad_norm": 1.5478737354278564,
"learning_rate": 0.0001471264367816092,
"loss": 2.3492,
"step": 233
},
{
"epoch": 1.1586121437422552,
"grad_norm": 1.4742021560668945,
"learning_rate": 0.00014687100893997446,
"loss": 2.3586,
"step": 234
},
{
"epoch": 1.1635687732342008,
"grad_norm": 1.6907175779342651,
"learning_rate": 0.00014661558109833973,
"loss": 2.8715,
"step": 235
},
{
"epoch": 1.1685254027261462,
"grad_norm": 1.5488353967666626,
"learning_rate": 0.000146360153256705,
"loss": 2.7357,
"step": 236
},
{
"epoch": 1.1734820322180917,
"grad_norm": 1.6616300344467163,
"learning_rate": 0.00014610472541507025,
"loss": 2.3508,
"step": 237
},
{
"epoch": 1.178438661710037,
"grad_norm": 1.592176914215088,
"learning_rate": 0.00014584929757343552,
"loss": 2.432,
"step": 238
},
{
"epoch": 1.1833952912019827,
"grad_norm": 1.5771480798721313,
"learning_rate": 0.00014559386973180078,
"loss": 2.8382,
"step": 239
},
{
"epoch": 1.1883519206939281,
"grad_norm": 1.664546251296997,
"learning_rate": 0.00014533844189016604,
"loss": 2.5342,
"step": 240
},
{
"epoch": 1.1933085501858736,
"grad_norm": 1.9009575843811035,
"learning_rate": 0.0001450830140485313,
"loss": 2.5563,
"step": 241
},
{
"epoch": 1.198265179677819,
"grad_norm": 1.6944457292556763,
"learning_rate": 0.00014482758620689657,
"loss": 2.7315,
"step": 242
},
{
"epoch": 1.2032218091697646,
"grad_norm": 1.4040586948394775,
"learning_rate": 0.0001445721583652618,
"loss": 2.2772,
"step": 243
},
{
"epoch": 1.20817843866171,
"grad_norm": 1.549829363822937,
"learning_rate": 0.0001443167305236271,
"loss": 2.593,
"step": 244
},
{
"epoch": 1.2131350681536555,
"grad_norm": 1.2948358058929443,
"learning_rate": 0.00014406130268199233,
"loss": 2.3032,
"step": 245
},
{
"epoch": 1.218091697645601,
"grad_norm": 1.5653175115585327,
"learning_rate": 0.00014380587484035762,
"loss": 2.2908,
"step": 246
},
{
"epoch": 1.2230483271375465,
"grad_norm": 1.5301649570465088,
"learning_rate": 0.00014355044699872286,
"loss": 2.6958,
"step": 247
},
{
"epoch": 1.228004956629492,
"grad_norm": 1.6121726036071777,
"learning_rate": 0.00014329501915708815,
"loss": 2.5315,
"step": 248
},
{
"epoch": 1.2329615861214374,
"grad_norm": 1.6542530059814453,
"learning_rate": 0.00014303959131545339,
"loss": 2.7279,
"step": 249
},
{
"epoch": 1.2379182156133828,
"grad_norm": 1.5968433618545532,
"learning_rate": 0.00014278416347381865,
"loss": 2.1316,
"step": 250
},
{
"epoch": 1.2428748451053284,
"grad_norm": 1.753065824508667,
"learning_rate": 0.0001425287356321839,
"loss": 2.7854,
"step": 251
},
{
"epoch": 1.2478314745972738,
"grad_norm": 1.5655628442764282,
"learning_rate": 0.00014227330779054918,
"loss": 2.2109,
"step": 252
},
{
"epoch": 1.2527881040892193,
"grad_norm": 1.6386921405792236,
"learning_rate": 0.00014201787994891444,
"loss": 2.6289,
"step": 253
},
{
"epoch": 1.257744733581165,
"grad_norm": 1.389952301979065,
"learning_rate": 0.0001417624521072797,
"loss": 2.2817,
"step": 254
},
{
"epoch": 1.2627013630731103,
"grad_norm": 1.5647423267364502,
"learning_rate": 0.00014150702426564497,
"loss": 2.3569,
"step": 255
},
{
"epoch": 1.2676579925650557,
"grad_norm": 1.5217468738555908,
"learning_rate": 0.00014125159642401023,
"loss": 2.4843,
"step": 256
},
{
"epoch": 1.2726146220570014,
"grad_norm": 1.5946449041366577,
"learning_rate": 0.00014099616858237547,
"loss": 2.482,
"step": 257
},
{
"epoch": 1.2775712515489468,
"grad_norm": 2.6357760429382324,
"learning_rate": 0.00014074074074074076,
"loss": 3.0227,
"step": 258
},
{
"epoch": 1.2825278810408922,
"grad_norm": 1.6973539590835571,
"learning_rate": 0.000140485312899106,
"loss": 2.3939,
"step": 259
},
{
"epoch": 1.2874845105328376,
"grad_norm": 1.8628802299499512,
"learning_rate": 0.00014022988505747128,
"loss": 2.5089,
"step": 260
},
{
"epoch": 1.292441140024783,
"grad_norm": 1.8481550216674805,
"learning_rate": 0.00013997445721583652,
"loss": 2.3706,
"step": 261
},
{
"epoch": 1.2973977695167287,
"grad_norm": 1.4677447080612183,
"learning_rate": 0.0001397190293742018,
"loss": 2.5831,
"step": 262
},
{
"epoch": 1.3023543990086741,
"grad_norm": 1.3649961948394775,
"learning_rate": 0.00013946360153256705,
"loss": 2.2881,
"step": 263
},
{
"epoch": 1.3073110285006195,
"grad_norm": 1.6258682012557983,
"learning_rate": 0.0001392081736909323,
"loss": 2.326,
"step": 264
},
{
"epoch": 1.3122676579925652,
"grad_norm": 1.869107961654663,
"learning_rate": 0.00013895274584929757,
"loss": 2.876,
"step": 265
},
{
"epoch": 1.3172242874845106,
"grad_norm": 1.6073962450027466,
"learning_rate": 0.00013869731800766284,
"loss": 2.3673,
"step": 266
},
{
"epoch": 1.322180916976456,
"grad_norm": 1.6778944730758667,
"learning_rate": 0.0001384418901660281,
"loss": 2.5112,
"step": 267
},
{
"epoch": 1.3271375464684014,
"grad_norm": 1.7758762836456299,
"learning_rate": 0.00013818646232439336,
"loss": 2.5872,
"step": 268
},
{
"epoch": 1.3320941759603468,
"grad_norm": 1.609081745147705,
"learning_rate": 0.00013793103448275863,
"loss": 2.4718,
"step": 269
},
{
"epoch": 1.3370508054522925,
"grad_norm": 1.602964162826538,
"learning_rate": 0.0001376756066411239,
"loss": 2.2268,
"step": 270
},
{
"epoch": 1.342007434944238,
"grad_norm": 1.532798171043396,
"learning_rate": 0.00013742017879948915,
"loss": 2.4378,
"step": 271
},
{
"epoch": 1.3469640644361833,
"grad_norm": 1.4932005405426025,
"learning_rate": 0.00013716475095785442,
"loss": 2.6889,
"step": 272
},
{
"epoch": 1.351920693928129,
"grad_norm": 1.6818208694458008,
"learning_rate": 0.00013690932311621968,
"loss": 2.6538,
"step": 273
},
{
"epoch": 1.3568773234200744,
"grad_norm": 1.4763221740722656,
"learning_rate": 0.00013665389527458494,
"loss": 2.1908,
"step": 274
},
{
"epoch": 1.3618339529120198,
"grad_norm": 1.4615814685821533,
"learning_rate": 0.0001363984674329502,
"loss": 2.2983,
"step": 275
},
{
"epoch": 1.3667905824039652,
"grad_norm": 1.5107206106185913,
"learning_rate": 0.00013614303959131547,
"loss": 2.2305,
"step": 276
},
{
"epoch": 1.3717472118959106,
"grad_norm": 1.4505106210708618,
"learning_rate": 0.00013588761174968073,
"loss": 2.1633,
"step": 277
},
{
"epoch": 1.3767038413878563,
"grad_norm": 1.7257494926452637,
"learning_rate": 0.00013563218390804597,
"loss": 2.3903,
"step": 278
},
{
"epoch": 1.3816604708798017,
"grad_norm": 1.8901737928390503,
"learning_rate": 0.00013537675606641126,
"loss": 2.5404,
"step": 279
},
{
"epoch": 1.3866171003717471,
"grad_norm": 1.5865846872329712,
"learning_rate": 0.0001351213282247765,
"loss": 2.5774,
"step": 280
},
{
"epoch": 1.3915737298636928,
"grad_norm": 1.631974458694458,
"learning_rate": 0.0001348659003831418,
"loss": 2.2341,
"step": 281
},
{
"epoch": 1.3965303593556382,
"grad_norm": 1.7717571258544922,
"learning_rate": 0.00013461047254150702,
"loss": 2.1569,
"step": 282
},
{
"epoch": 1.4014869888475836,
"grad_norm": 1.4758812189102173,
"learning_rate": 0.0001343550446998723,
"loss": 2.0374,
"step": 283
},
{
"epoch": 1.4064436183395292,
"grad_norm": 1.8754217624664307,
"learning_rate": 0.00013409961685823755,
"loss": 2.4035,
"step": 284
},
{
"epoch": 1.4114002478314747,
"grad_norm": 1.8739930391311646,
"learning_rate": 0.0001338441890166028,
"loss": 2.5747,
"step": 285
},
{
"epoch": 1.41635687732342,
"grad_norm": 1.6370917558670044,
"learning_rate": 0.00013358876117496808,
"loss": 2.4051,
"step": 286
},
{
"epoch": 1.4213135068153655,
"grad_norm": 1.6480002403259277,
"learning_rate": 0.00013333333333333334,
"loss": 2.4366,
"step": 287
},
{
"epoch": 1.426270136307311,
"grad_norm": 1.7106711864471436,
"learning_rate": 0.0001330779054916986,
"loss": 2.4036,
"step": 288
},
{
"epoch": 1.4312267657992566,
"grad_norm": 1.5775083303451538,
"learning_rate": 0.00013282247765006387,
"loss": 2.2505,
"step": 289
},
{
"epoch": 1.436183395291202,
"grad_norm": 1.9434142112731934,
"learning_rate": 0.00013256704980842913,
"loss": 2.8395,
"step": 290
},
{
"epoch": 1.4411400247831474,
"grad_norm": 1.7857332229614258,
"learning_rate": 0.0001323116219667944,
"loss": 2.925,
"step": 291
},
{
"epoch": 1.446096654275093,
"grad_norm": 1.8186098337173462,
"learning_rate": 0.00013205619412515963,
"loss": 2.5975,
"step": 292
},
{
"epoch": 1.4510532837670385,
"grad_norm": 1.682557225227356,
"learning_rate": 0.00013180076628352492,
"loss": 2.3786,
"step": 293
},
{
"epoch": 1.4560099132589839,
"grad_norm": 1.7102502584457397,
"learning_rate": 0.00013154533844189016,
"loss": 2.5359,
"step": 294
},
{
"epoch": 1.4609665427509293,
"grad_norm": 1.7254425287246704,
"learning_rate": 0.00013128991060025545,
"loss": 2.4446,
"step": 295
},
{
"epoch": 1.4659231722428747,
"grad_norm": 1.6428650617599487,
"learning_rate": 0.00013103448275862068,
"loss": 2.7514,
"step": 296
},
{
"epoch": 1.4708798017348204,
"grad_norm": 1.7002222537994385,
"learning_rate": 0.00013077905491698597,
"loss": 2.3079,
"step": 297
},
{
"epoch": 1.4758364312267658,
"grad_norm": 2.013287305831909,
"learning_rate": 0.0001305236270753512,
"loss": 2.5104,
"step": 298
},
{
"epoch": 1.4807930607187112,
"grad_norm": 1.5022965669631958,
"learning_rate": 0.00013026819923371647,
"loss": 2.1304,
"step": 299
},
{
"epoch": 1.4857496902106568,
"grad_norm": 1.9274829626083374,
"learning_rate": 0.00013001277139208174,
"loss": 2.6626,
"step": 300
},
{
"epoch": 1.4907063197026023,
"grad_norm": 1.9029258489608765,
"learning_rate": 0.000129757343550447,
"loss": 2.6778,
"step": 301
},
{
"epoch": 1.4956629491945477,
"grad_norm": 1.4256715774536133,
"learning_rate": 0.00012950191570881226,
"loss": 2.4563,
"step": 302
},
{
"epoch": 1.5006195786864933,
"grad_norm": 1.716525673866272,
"learning_rate": 0.00012924648786717753,
"loss": 2.5675,
"step": 303
},
{
"epoch": 1.5055762081784385,
"grad_norm": 1.6712944507598877,
"learning_rate": 0.0001289910600255428,
"loss": 2.2456,
"step": 304
},
{
"epoch": 1.5105328376703842,
"grad_norm": 1.4323303699493408,
"learning_rate": 0.00012873563218390805,
"loss": 2.3167,
"step": 305
},
{
"epoch": 1.5154894671623296,
"grad_norm": 1.562535285949707,
"learning_rate": 0.00012848020434227332,
"loss": 2.2113,
"step": 306
},
{
"epoch": 1.520446096654275,
"grad_norm": 1.8023464679718018,
"learning_rate": 0.00012822477650063858,
"loss": 2.5698,
"step": 307
},
{
"epoch": 1.5254027261462206,
"grad_norm": 1.560618281364441,
"learning_rate": 0.00012796934865900382,
"loss": 2.7733,
"step": 308
},
{
"epoch": 1.530359355638166,
"grad_norm": 1.6257753372192383,
"learning_rate": 0.0001277139208173691,
"loss": 2.5203,
"step": 309
},
{
"epoch": 1.5353159851301115,
"grad_norm": 1.9268074035644531,
"learning_rate": 0.00012745849297573434,
"loss": 2.4339,
"step": 310
},
{
"epoch": 1.5402726146220571,
"grad_norm": 1.728567123413086,
"learning_rate": 0.00012720306513409963,
"loss": 2.3867,
"step": 311
},
{
"epoch": 1.5452292441140025,
"grad_norm": 2.204632043838501,
"learning_rate": 0.00012694763729246487,
"loss": 2.8223,
"step": 312
},
{
"epoch": 1.550185873605948,
"grad_norm": 1.53752863407135,
"learning_rate": 0.00012669220945083016,
"loss": 2.4653,
"step": 313
},
{
"epoch": 1.5551425030978936,
"grad_norm": 1.5143874883651733,
"learning_rate": 0.0001264367816091954,
"loss": 2.1158,
"step": 314
},
{
"epoch": 1.5600991325898388,
"grad_norm": 1.635250449180603,
"learning_rate": 0.00012618135376756066,
"loss": 2.1365,
"step": 315
},
{
"epoch": 1.5650557620817844,
"grad_norm": 1.4665566682815552,
"learning_rate": 0.00012592592592592592,
"loss": 2.3736,
"step": 316
},
{
"epoch": 1.5700123915737298,
"grad_norm": 1.8319354057312012,
"learning_rate": 0.00012567049808429119,
"loss": 2.5109,
"step": 317
},
{
"epoch": 1.5749690210656753,
"grad_norm": 1.8809919357299805,
"learning_rate": 0.00012541507024265645,
"loss": 2.5676,
"step": 318
},
{
"epoch": 1.579925650557621,
"grad_norm": 1.693365216255188,
"learning_rate": 0.0001251596424010217,
"loss": 2.362,
"step": 319
},
{
"epoch": 1.5848822800495663,
"grad_norm": 1.568253517150879,
"learning_rate": 0.00012490421455938698,
"loss": 2.221,
"step": 320
},
{
"epoch": 1.5898389095415117,
"grad_norm": 1.5762134790420532,
"learning_rate": 0.00012464878671775224,
"loss": 2.5011,
"step": 321
},
{
"epoch": 1.5947955390334574,
"grad_norm": 1.5680880546569824,
"learning_rate": 0.0001243933588761175,
"loss": 2.1637,
"step": 322
},
{
"epoch": 1.5997521685254026,
"grad_norm": 1.5295878648757935,
"learning_rate": 0.00012413793103448277,
"loss": 2.1718,
"step": 323
},
{
"epoch": 1.6047087980173482,
"grad_norm": 1.3603239059448242,
"learning_rate": 0.00012388250319284803,
"loss": 2.1767,
"step": 324
},
{
"epoch": 1.6096654275092936,
"grad_norm": 1.6356030702590942,
"learning_rate": 0.0001236270753512133,
"loss": 2.8308,
"step": 325
},
{
"epoch": 1.614622057001239,
"grad_norm": 1.5502631664276123,
"learning_rate": 0.00012337164750957856,
"loss": 2.0726,
"step": 326
},
{
"epoch": 1.6195786864931847,
"grad_norm": 1.564531922340393,
"learning_rate": 0.00012311621966794382,
"loss": 2.283,
"step": 327
},
{
"epoch": 1.6245353159851301,
"grad_norm": 1.775214433670044,
"learning_rate": 0.00012286079182630908,
"loss": 2.3739,
"step": 328
},
{
"epoch": 1.6294919454770755,
"grad_norm": 1.7285168170928955,
"learning_rate": 0.00012260536398467432,
"loss": 2.6,
"step": 329
},
{
"epoch": 1.6344485749690212,
"grad_norm": 1.9098368883132935,
"learning_rate": 0.0001223499361430396,
"loss": 2.6702,
"step": 330
},
{
"epoch": 1.6394052044609664,
"grad_norm": 1.8068279027938843,
"learning_rate": 0.00012209450830140485,
"loss": 2.8159,
"step": 331
},
{
"epoch": 1.644361833952912,
"grad_norm": 1.422575831413269,
"learning_rate": 0.00012183908045977012,
"loss": 1.9348,
"step": 332
},
{
"epoch": 1.6493184634448577,
"grad_norm": 1.873787522315979,
"learning_rate": 0.00012158365261813537,
"loss": 2.6505,
"step": 333
},
{
"epoch": 1.6542750929368029,
"grad_norm": 1.653365135192871,
"learning_rate": 0.00012132822477650065,
"loss": 2.4581,
"step": 334
},
{
"epoch": 1.6592317224287485,
"grad_norm": 1.5894263982772827,
"learning_rate": 0.0001210727969348659,
"loss": 2.4804,
"step": 335
},
{
"epoch": 1.664188351920694,
"grad_norm": 1.6645914316177368,
"learning_rate": 0.00012081736909323116,
"loss": 2.6777,
"step": 336
},
{
"epoch": 1.6691449814126393,
"grad_norm": 1.3606253862380981,
"learning_rate": 0.00012056194125159643,
"loss": 2.1287,
"step": 337
},
{
"epoch": 1.674101610904585,
"grad_norm": 1.9518952369689941,
"learning_rate": 0.00012030651340996169,
"loss": 2.4308,
"step": 338
},
{
"epoch": 1.6790582403965304,
"grad_norm": 1.837501883506775,
"learning_rate": 0.00012005108556832695,
"loss": 2.5096,
"step": 339
},
{
"epoch": 1.6840148698884758,
"grad_norm": 1.681298851966858,
"learning_rate": 0.00011979565772669222,
"loss": 1.9996,
"step": 340
},
{
"epoch": 1.6889714993804215,
"grad_norm": 1.6063026189804077,
"learning_rate": 0.00011954022988505748,
"loss": 1.8223,
"step": 341
},
{
"epoch": 1.6939281288723667,
"grad_norm": 1.7527902126312256,
"learning_rate": 0.00011928480204342274,
"loss": 2.3558,
"step": 342
},
{
"epoch": 1.6988847583643123,
"grad_norm": 2.1108145713806152,
"learning_rate": 0.00011902937420178799,
"loss": 2.4522,
"step": 343
},
{
"epoch": 1.7038413878562577,
"grad_norm": 1.9739495515823364,
"learning_rate": 0.00011877394636015327,
"loss": 2.5235,
"step": 344
},
{
"epoch": 1.7087980173482031,
"grad_norm": null,
"learning_rate": 0.00011851851851851852,
"loss": 2.8147,
"step": 345
},
{
"epoch": 1.7137546468401488,
"grad_norm": 1.6384708881378174,
"learning_rate": 0.00011851851851851852,
"loss": 2.2734,
"step": 346
},
{
"epoch": 1.7187112763320942,
"grad_norm": 1.6661678552627563,
"learning_rate": 0.0001182630906768838,
"loss": 2.4438,
"step": 347
},
{
"epoch": 1.7236679058240396,
"grad_norm": 1.987106442451477,
"learning_rate": 0.00011800766283524905,
"loss": 2.4275,
"step": 348
},
{
"epoch": 1.7286245353159853,
"grad_norm": 1.9400545358657837,
"learning_rate": 0.00011775223499361432,
"loss": 2.446,
"step": 349
},
{
"epoch": 1.7335811648079305,
"grad_norm": 1.7634007930755615,
"learning_rate": 0.00011749680715197957,
"loss": 2.2731,
"step": 350
},
{
"epoch": 1.738537794299876,
"grad_norm": 1.5254379510879517,
"learning_rate": 0.00011724137931034482,
"loss": 1.8172,
"step": 351
},
{
"epoch": 1.7434944237918215,
"grad_norm": 1.9841443300247192,
"learning_rate": 0.0001169859514687101,
"loss": 2.4537,
"step": 352
},
{
"epoch": 1.748451053283767,
"grad_norm": 2.1015896797180176,
"learning_rate": 0.00011673052362707535,
"loss": 2.1979,
"step": 353
},
{
"epoch": 1.7534076827757126,
"grad_norm": 1.8992552757263184,
"learning_rate": 0.00011647509578544063,
"loss": 2.145,
"step": 354
},
{
"epoch": 1.758364312267658,
"grad_norm": 2.1565067768096924,
"learning_rate": 0.00011621966794380588,
"loss": 2.4881,
"step": 355
},
{
"epoch": 1.7633209417596034,
"grad_norm": 1.8605690002441406,
"learning_rate": 0.00011596424010217115,
"loss": 2.5486,
"step": 356
},
{
"epoch": 1.768277571251549,
"grad_norm": 1.8879384994506836,
"learning_rate": 0.0001157088122605364,
"loss": 2.448,
"step": 357
},
{
"epoch": 1.7732342007434945,
"grad_norm": 2.0151591300964355,
"learning_rate": 0.00011545338441890165,
"loss": 2.4967,
"step": 358
},
{
"epoch": 1.77819083023544,
"grad_norm": 1.6546062231063843,
"learning_rate": 0.00011519795657726693,
"loss": 2.5078,
"step": 359
},
{
"epoch": 1.7831474597273855,
"grad_norm": 2.1501290798187256,
"learning_rate": 0.00011494252873563218,
"loss": 2.3383,
"step": 360
},
{
"epoch": 1.7881040892193307,
"grad_norm": 1.8948010206222534,
"learning_rate": 0.00011468710089399746,
"loss": 2.3743,
"step": 361
},
{
"epoch": 1.7930607187112764,
"grad_norm": 1.9624435901641846,
"learning_rate": 0.0001144316730523627,
"loss": 2.3619,
"step": 362
},
{
"epoch": 1.7980173482032218,
"grad_norm": 2.0342910289764404,
"learning_rate": 0.00011417624521072798,
"loss": 2.4056,
"step": 363
},
{
"epoch": 1.8029739776951672,
"grad_norm": 1.964487910270691,
"learning_rate": 0.00011392081736909323,
"loss": 2.2912,
"step": 364
},
{
"epoch": 1.8079306071871128,
"grad_norm": 1.6878917217254639,
"learning_rate": 0.00011366538952745848,
"loss": 2.3647,
"step": 365
},
{
"epoch": 1.8128872366790583,
"grad_norm": 1.959184169769287,
"learning_rate": 0.00011340996168582376,
"loss": 2.0794,
"step": 366
},
{
"epoch": 1.8178438661710037,
"grad_norm": 1.7067630290985107,
"learning_rate": 0.00011315453384418901,
"loss": 2.5082,
"step": 367
},
{
"epoch": 1.8228004956629493,
"grad_norm": 1.7823041677474976,
"learning_rate": 0.00011289910600255429,
"loss": 2.0763,
"step": 368
},
{
"epoch": 1.8277571251548945,
"grad_norm": 1.9278924465179443,
"learning_rate": 0.00011264367816091954,
"loss": 2.5235,
"step": 369
},
{
"epoch": 1.8327137546468402,
"grad_norm": 1.8295596837997437,
"learning_rate": 0.00011238825031928481,
"loss": 2.836,
"step": 370
},
{
"epoch": 1.8376703841387856,
"grad_norm": 1.6599929332733154,
"learning_rate": 0.00011213282247765006,
"loss": 2.3678,
"step": 371
},
{
"epoch": 1.842627013630731,
"grad_norm": 2.075873851776123,
"learning_rate": 0.00011187739463601533,
"loss": 2.6875,
"step": 372
},
{
"epoch": 1.8475836431226766,
"grad_norm": 1.8870905637741089,
"learning_rate": 0.00011162196679438059,
"loss": 2.4988,
"step": 373
},
{
"epoch": 1.852540272614622,
"grad_norm": 1.7830430269241333,
"learning_rate": 0.00011136653895274585,
"loss": 2.0613,
"step": 374
},
{
"epoch": 1.8574969021065675,
"grad_norm": 1.8882161378860474,
"learning_rate": 0.00011111111111111112,
"loss": 2.627,
"step": 375
},
{
"epoch": 1.8624535315985131,
"grad_norm": 1.9912681579589844,
"learning_rate": 0.00011085568326947638,
"loss": 2.7047,
"step": 376
},
{
"epoch": 1.8674101610904585,
"grad_norm": 2.0505921840667725,
"learning_rate": 0.00011060025542784164,
"loss": 2.95,
"step": 377
},
{
"epoch": 1.872366790582404,
"grad_norm": 1.9979755878448486,
"learning_rate": 0.0001103448275862069,
"loss": 2.5598,
"step": 378
},
{
"epoch": 1.8773234200743496,
"grad_norm": 1.5586763620376587,
"learning_rate": 0.00011008939974457216,
"loss": 2.3973,
"step": 379
},
{
"epoch": 1.8822800495662948,
"grad_norm": 1.698810338973999,
"learning_rate": 0.00010983397190293743,
"loss": 2.1451,
"step": 380
},
{
"epoch": 1.8872366790582404,
"grad_norm": 2.1801044940948486,
"learning_rate": 0.00010957854406130268,
"loss": 2.5816,
"step": 381
},
{
"epoch": 1.8921933085501859,
"grad_norm": 1.511141300201416,
"learning_rate": 0.00010932311621966796,
"loss": 2.1513,
"step": 382
},
{
"epoch": 1.8971499380421313,
"grad_norm": 1.9838210344314575,
"learning_rate": 0.00010906768837803321,
"loss": 1.8924,
"step": 383
},
{
"epoch": 1.902106567534077,
"grad_norm": 1.9898924827575684,
"learning_rate": 0.00010881226053639849,
"loss": 2.4117,
"step": 384
},
{
"epoch": 1.9070631970260223,
"grad_norm": 2.090376615524292,
"learning_rate": 0.00010855683269476374,
"loss": 2.4818,
"step": 385
},
{
"epoch": 1.9120198265179678,
"grad_norm": 1.8976398706436157,
"learning_rate": 0.00010830140485312901,
"loss": 2.5878,
"step": 386
},
{
"epoch": 1.9169764560099134,
"grad_norm": 1.813851237297058,
"learning_rate": 0.00010804597701149426,
"loss": 2.1124,
"step": 387
},
{
"epoch": 1.9219330855018586,
"grad_norm": 1.9612343311309814,
"learning_rate": 0.00010779054916985951,
"loss": 2.5307,
"step": 388
},
{
"epoch": 1.9268897149938042,
"grad_norm": 2.0917444229125977,
"learning_rate": 0.00010753512132822479,
"loss": 2.4722,
"step": 389
},
{
"epoch": 1.9318463444857497,
"grad_norm": 1.9183920621871948,
"learning_rate": 0.00010727969348659004,
"loss": 2.2143,
"step": 390
},
{
"epoch": 1.936802973977695,
"grad_norm": 1.6060720682144165,
"learning_rate": 0.00010702426564495532,
"loss": 2.5327,
"step": 391
},
{
"epoch": 1.9417596034696407,
"grad_norm": 1.6848859786987305,
"learning_rate": 0.00010676883780332057,
"loss": 2.3799,
"step": 392
},
{
"epoch": 1.9467162329615861,
"grad_norm": 1.8130978345870972,
"learning_rate": 0.00010651340996168584,
"loss": 2.0972,
"step": 393
},
{
"epoch": 1.9516728624535316,
"grad_norm": 2.161240816116333,
"learning_rate": 0.00010625798212005109,
"loss": 2.5925,
"step": 394
},
{
"epoch": 1.9566294919454772,
"grad_norm": 2.2761476039886475,
"learning_rate": 0.00010600255427841634,
"loss": 2.6694,
"step": 395
},
{
"epoch": 1.9615861214374224,
"grad_norm": 1.6192595958709717,
"learning_rate": 0.00010574712643678162,
"loss": 2.5228,
"step": 396
},
{
"epoch": 1.966542750929368,
"grad_norm": 1.955870270729065,
"learning_rate": 0.00010549169859514687,
"loss": 2.7556,
"step": 397
},
{
"epoch": 1.9714993804213135,
"grad_norm": 1.966259241104126,
"learning_rate": 0.00010523627075351215,
"loss": 2.7568,
"step": 398
},
{
"epoch": 1.9764560099132589,
"grad_norm": 1.7899080514907837,
"learning_rate": 0.0001049808429118774,
"loss": 2.2289,
"step": 399
},
{
"epoch": 1.9814126394052045,
"grad_norm": 1.9266703128814697,
"learning_rate": 0.00010472541507024267,
"loss": 2.8342,
"step": 400
},
{
"epoch": 1.98636926889715,
"grad_norm": 1.9955323934555054,
"learning_rate": 0.00010446998722860792,
"loss": 2.5556,
"step": 401
},
{
"epoch": 1.9913258983890954,
"grad_norm": 1.5379672050476074,
"learning_rate": 0.00010421455938697317,
"loss": 2.32,
"step": 402
},
{
"epoch": 1.996282527881041,
"grad_norm": 1.9460934400558472,
"learning_rate": 0.00010395913154533845,
"loss": 2.4778,
"step": 403
},
{
"epoch": 2.0,
"grad_norm": 1.82111656665802,
"learning_rate": 0.0001037037037037037,
"loss": 2.1362,
"step": 404
},
{
"epoch": 2.0,
"eval_loss": 2.5610225200653076,
"eval_runtime": 22.6572,
"eval_samples_per_second": 17.831,
"eval_steps_per_second": 2.251,
"step": 404
},
{
"epoch": 2.0049566294919456,
"grad_norm": 1.49136221408844,
"learning_rate": 0.00010344827586206898,
"loss": 2.0203,
"step": 405
},
{
"epoch": 2.009913258983891,
"grad_norm": 1.8060108423233032,
"learning_rate": 0.00010319284802043423,
"loss": 2.0393,
"step": 406
},
{
"epoch": 2.0148698884758365,
"grad_norm": 1.7297966480255127,
"learning_rate": 0.0001029374201787995,
"loss": 2.2408,
"step": 407
},
{
"epoch": 2.019826517967782,
"grad_norm": 1.556800365447998,
"learning_rate": 0.00010268199233716475,
"loss": 1.9249,
"step": 408
},
{
"epoch": 2.0247831474597273,
"grad_norm": 1.6368837356567383,
"learning_rate": 0.00010242656449553,
"loss": 2.1408,
"step": 409
},
{
"epoch": 2.029739776951673,
"grad_norm": 1.6964534521102905,
"learning_rate": 0.00010217113665389528,
"loss": 2.0488,
"step": 410
},
{
"epoch": 2.034696406443618,
"grad_norm": 1.8046094179153442,
"learning_rate": 0.00010191570881226053,
"loss": 2.3179,
"step": 411
},
{
"epoch": 2.039653035935564,
"grad_norm": 1.6061656475067139,
"learning_rate": 0.0001016602809706258,
"loss": 2.245,
"step": 412
},
{
"epoch": 2.0446096654275094,
"grad_norm": 1.763981580734253,
"learning_rate": 0.00010140485312899106,
"loss": 2.1131,
"step": 413
},
{
"epoch": 2.0495662949194546,
"grad_norm": 1.6230251789093018,
"learning_rate": 0.00010114942528735633,
"loss": 1.8017,
"step": 414
},
{
"epoch": 2.0545229244114003,
"grad_norm": 2.0794503688812256,
"learning_rate": 0.00010089399744572158,
"loss": 2.0948,
"step": 415
},
{
"epoch": 2.059479553903346,
"grad_norm": 1.9313017129898071,
"learning_rate": 0.00010063856960408685,
"loss": 2.3105,
"step": 416
},
{
"epoch": 2.064436183395291,
"grad_norm": 2.045888662338257,
"learning_rate": 0.00010038314176245211,
"loss": 2.3168,
"step": 417
},
{
"epoch": 2.0693928128872368,
"grad_norm": 1.9172661304473877,
"learning_rate": 0.00010012771392081737,
"loss": 2.2448,
"step": 418
},
{
"epoch": 2.074349442379182,
"grad_norm": 1.7956594228744507,
"learning_rate": 9.987228607918264e-05,
"loss": 2.5406,
"step": 419
},
{
"epoch": 2.0793060718711276,
"grad_norm": 2.210123062133789,
"learning_rate": 9.96168582375479e-05,
"loss": 2.3232,
"step": 420
},
{
"epoch": 2.0842627013630732,
"grad_norm": 1.982399821281433,
"learning_rate": 9.936143039591316e-05,
"loss": 1.9847,
"step": 421
},
{
"epoch": 2.0892193308550184,
"grad_norm": 1.915616750717163,
"learning_rate": 9.910600255427843e-05,
"loss": 2.4827,
"step": 422
},
{
"epoch": 2.094175960346964,
"grad_norm": 2.1594433784484863,
"learning_rate": 9.885057471264369e-05,
"loss": 2.2737,
"step": 423
},
{
"epoch": 2.0991325898389097,
"grad_norm": 1.9990178346633911,
"learning_rate": 9.859514687100895e-05,
"loss": 2.2268,
"step": 424
},
{
"epoch": 2.104089219330855,
"grad_norm": 1.9061527252197266,
"learning_rate": 9.833971902937422e-05,
"loss": 2.0164,
"step": 425
},
{
"epoch": 2.1090458488228006,
"grad_norm": 1.9608116149902344,
"learning_rate": 9.808429118773947e-05,
"loss": 2.2343,
"step": 426
},
{
"epoch": 2.114002478314746,
"grad_norm": 1.7611507177352905,
"learning_rate": 9.782886334610473e-05,
"loss": 2.1769,
"step": 427
},
{
"epoch": 2.1189591078066914,
"grad_norm": 1.9356578588485718,
"learning_rate": 9.757343550446999e-05,
"loss": 2.0807,
"step": 428
},
{
"epoch": 2.123915737298637,
"grad_norm": 2.1335885524749756,
"learning_rate": 9.731800766283526e-05,
"loss": 2.2264,
"step": 429
},
{
"epoch": 2.1288723667905822,
"grad_norm": 1.7350636720657349,
"learning_rate": 9.706257982120052e-05,
"loss": 2.0111,
"step": 430
},
{
"epoch": 2.133828996282528,
"grad_norm": 2.3487319946289062,
"learning_rate": 9.680715197956578e-05,
"loss": 2.634,
"step": 431
},
{
"epoch": 2.1387856257744735,
"grad_norm": 2.1219382286071777,
"learning_rate": 9.655172413793105e-05,
"loss": 2.2604,
"step": 432
},
{
"epoch": 2.1437422552664187,
"grad_norm": 2.3536593914031982,
"learning_rate": 9.62962962962963e-05,
"loss": 2.7046,
"step": 433
},
{
"epoch": 2.1486988847583643,
"grad_norm": 2.1545400619506836,
"learning_rate": 9.604086845466156e-05,
"loss": 1.8161,
"step": 434
},
{
"epoch": 2.15365551425031,
"grad_norm": 1.805413842201233,
"learning_rate": 9.578544061302682e-05,
"loss": 2.0053,
"step": 435
},
{
"epoch": 2.158612143742255,
"grad_norm": 1.9322012662887573,
"learning_rate": 9.553001277139209e-05,
"loss": 2.0946,
"step": 436
},
{
"epoch": 2.163568773234201,
"grad_norm": 1.7978978157043457,
"learning_rate": 9.527458492975735e-05,
"loss": 2.059,
"step": 437
},
{
"epoch": 2.168525402726146,
"grad_norm": 2.3015379905700684,
"learning_rate": 9.501915708812261e-05,
"loss": 2.2307,
"step": 438
},
{
"epoch": 2.1734820322180917,
"grad_norm": 1.9533964395523071,
"learning_rate": 9.476372924648788e-05,
"loss": 1.7493,
"step": 439
},
{
"epoch": 2.1784386617100373,
"grad_norm": 2.048163414001465,
"learning_rate": 9.450830140485314e-05,
"loss": 2.1165,
"step": 440
},
{
"epoch": 2.1833952912019825,
"grad_norm": 1.8135693073272705,
"learning_rate": 9.425287356321839e-05,
"loss": 1.9652,
"step": 441
},
{
"epoch": 2.188351920693928,
"grad_norm": 2.1022565364837646,
"learning_rate": 9.399744572158365e-05,
"loss": 1.8261,
"step": 442
},
{
"epoch": 2.193308550185874,
"grad_norm": 2.4095077514648438,
"learning_rate": 9.374201787994892e-05,
"loss": 2.3235,
"step": 443
},
{
"epoch": 2.198265179677819,
"grad_norm": 2.243868350982666,
"learning_rate": 9.348659003831418e-05,
"loss": 2.1528,
"step": 444
},
{
"epoch": 2.2032218091697646,
"grad_norm": 2.369640350341797,
"learning_rate": 9.323116219667944e-05,
"loss": 2.3696,
"step": 445
},
{
"epoch": 2.20817843866171,
"grad_norm": 1.9481455087661743,
"learning_rate": 9.29757343550447e-05,
"loss": 2.584,
"step": 446
},
{
"epoch": 2.2131350681536555,
"grad_norm": 2.1010377407073975,
"learning_rate": 9.272030651340997e-05,
"loss": 1.97,
"step": 447
},
{
"epoch": 2.218091697645601,
"grad_norm": 2.5184261798858643,
"learning_rate": 9.246487867177522e-05,
"loss": 2.1047,
"step": 448
},
{
"epoch": 2.2230483271375463,
"grad_norm": 2.3073112964630127,
"learning_rate": 9.220945083014048e-05,
"loss": 2.2998,
"step": 449
},
{
"epoch": 2.228004956629492,
"grad_norm": 2.2876088619232178,
"learning_rate": 9.195402298850575e-05,
"loss": 2.452,
"step": 450
},
{
"epoch": 2.2329615861214376,
"grad_norm": 3.126934289932251,
"learning_rate": 9.169859514687101e-05,
"loss": 2.6555,
"step": 451
},
{
"epoch": 2.2379182156133828,
"grad_norm": 2.048002004623413,
"learning_rate": 9.144316730523627e-05,
"loss": 1.8546,
"step": 452
},
{
"epoch": 2.2428748451053284,
"grad_norm": 1.8380862474441528,
"learning_rate": 9.118773946360154e-05,
"loss": 2.6203,
"step": 453
},
{
"epoch": 2.247831474597274,
"grad_norm": 1.662384033203125,
"learning_rate": 9.09323116219668e-05,
"loss": 1.6749,
"step": 454
},
{
"epoch": 2.2527881040892193,
"grad_norm": 1.770814299583435,
"learning_rate": 9.067688378033205e-05,
"loss": 1.9807,
"step": 455
},
{
"epoch": 2.257744733581165,
"grad_norm": 2.3417553901672363,
"learning_rate": 9.042145593869731e-05,
"loss": 2.1493,
"step": 456
},
{
"epoch": 2.26270136307311,
"grad_norm": 2.1765296459198,
"learning_rate": 9.016602809706258e-05,
"loss": 1.9696,
"step": 457
},
{
"epoch": 2.2676579925650557,
"grad_norm": 2.5647733211517334,
"learning_rate": 8.991060025542784e-05,
"loss": 2.4712,
"step": 458
},
{
"epoch": 2.2726146220570014,
"grad_norm": 2.0637850761413574,
"learning_rate": 8.96551724137931e-05,
"loss": 2.4461,
"step": 459
},
{
"epoch": 2.2775712515489466,
"grad_norm": 2.011399030685425,
"learning_rate": 8.939974457215837e-05,
"loss": 2.231,
"step": 460
},
{
"epoch": 2.282527881040892,
"grad_norm": 2.521390676498413,
"learning_rate": 8.914431673052363e-05,
"loss": 2.0845,
"step": 461
},
{
"epoch": 2.287484510532838,
"grad_norm": 2.049290895462036,
"learning_rate": 8.888888888888889e-05,
"loss": 2.1271,
"step": 462
},
{
"epoch": 2.292441140024783,
"grad_norm": 2.2892420291900635,
"learning_rate": 8.863346104725416e-05,
"loss": 2.4772,
"step": 463
},
{
"epoch": 2.2973977695167287,
"grad_norm": 2.2590816020965576,
"learning_rate": 8.837803320561942e-05,
"loss": 2.2858,
"step": 464
},
{
"epoch": 2.3023543990086743,
"grad_norm": 2.2737834453582764,
"learning_rate": 8.812260536398468e-05,
"loss": 2.0891,
"step": 465
},
{
"epoch": 2.3073110285006195,
"grad_norm": 2.037618398666382,
"learning_rate": 8.786717752234995e-05,
"loss": 2.2451,
"step": 466
},
{
"epoch": 2.312267657992565,
"grad_norm": 2.5216243267059326,
"learning_rate": 8.761174968071521e-05,
"loss": 2.5849,
"step": 467
},
{
"epoch": 2.3172242874845104,
"grad_norm": 2.036977529525757,
"learning_rate": 8.735632183908047e-05,
"loss": 1.9554,
"step": 468
},
{
"epoch": 2.322180916976456,
"grad_norm": 2.277539014816284,
"learning_rate": 8.710089399744572e-05,
"loss": 2.3908,
"step": 469
},
{
"epoch": 2.3271375464684017,
"grad_norm": 2.4834628105163574,
"learning_rate": 8.684546615581099e-05,
"loss": 2.7662,
"step": 470
},
{
"epoch": 2.332094175960347,
"grad_norm": 2.456012487411499,
"learning_rate": 8.659003831417625e-05,
"loss": 2.285,
"step": 471
},
{
"epoch": 2.3370508054522925,
"grad_norm": 2.3700196743011475,
"learning_rate": 8.633461047254151e-05,
"loss": 2.3568,
"step": 472
},
{
"epoch": 2.3420074349442377,
"grad_norm": 2.248645067214966,
"learning_rate": 8.607918263090678e-05,
"loss": 1.9046,
"step": 473
},
{
"epoch": 2.3469640644361833,
"grad_norm": 2.076503038406372,
"learning_rate": 8.582375478927204e-05,
"loss": 1.8723,
"step": 474
},
{
"epoch": 2.351920693928129,
"grad_norm": 2.2433621883392334,
"learning_rate": 8.55683269476373e-05,
"loss": 2.2344,
"step": 475
},
{
"epoch": 2.356877323420074,
"grad_norm": 1.9284616708755493,
"learning_rate": 8.531289910600255e-05,
"loss": 1.898,
"step": 476
},
{
"epoch": 2.36183395291202,
"grad_norm": 1.9935495853424072,
"learning_rate": 8.505747126436782e-05,
"loss": 1.879,
"step": 477
},
{
"epoch": 2.3667905824039654,
"grad_norm": 1.9034062623977661,
"learning_rate": 8.480204342273308e-05,
"loss": 2.1425,
"step": 478
},
{
"epoch": 2.3717472118959106,
"grad_norm": 2.1569809913635254,
"learning_rate": 8.454661558109834e-05,
"loss": 1.9005,
"step": 479
},
{
"epoch": 2.3767038413878563,
"grad_norm": 2.1620583534240723,
"learning_rate": 8.42911877394636e-05,
"loss": 2.4077,
"step": 480
},
{
"epoch": 2.381660470879802,
"grad_norm": 2.290148973464966,
"learning_rate": 8.403575989782887e-05,
"loss": 2.3896,
"step": 481
},
{
"epoch": 2.386617100371747,
"grad_norm": 1.9209684133529663,
"learning_rate": 8.378033205619413e-05,
"loss": 2.0921,
"step": 482
},
{
"epoch": 2.3915737298636928,
"grad_norm": 2.356311798095703,
"learning_rate": 8.35249042145594e-05,
"loss": 2.0363,
"step": 483
},
{
"epoch": 2.396530359355638,
"grad_norm": 2.5131113529205322,
"learning_rate": 8.326947637292465e-05,
"loss": 2.7636,
"step": 484
},
{
"epoch": 2.4014869888475836,
"grad_norm": 2.132436752319336,
"learning_rate": 8.301404853128991e-05,
"loss": 2.2943,
"step": 485
},
{
"epoch": 2.4064436183395292,
"grad_norm": 2.254635810852051,
"learning_rate": 8.275862068965517e-05,
"loss": 2.4961,
"step": 486
},
{
"epoch": 2.4114002478314744,
"grad_norm": 2.748410701751709,
"learning_rate": 8.250319284802044e-05,
"loss": 2.6137,
"step": 487
},
{
"epoch": 2.41635687732342,
"grad_norm": 2.170868158340454,
"learning_rate": 8.22477650063857e-05,
"loss": 2.1018,
"step": 488
},
{
"epoch": 2.4213135068153657,
"grad_norm": 2.4472765922546387,
"learning_rate": 8.199233716475096e-05,
"loss": 2.5278,
"step": 489
},
{
"epoch": 2.426270136307311,
"grad_norm": 1.8834490776062012,
"learning_rate": 8.173690932311623e-05,
"loss": 1.8536,
"step": 490
},
{
"epoch": 2.4312267657992566,
"grad_norm": 1.76395583152771,
"learning_rate": 8.148148148148148e-05,
"loss": 1.9397,
"step": 491
},
{
"epoch": 2.436183395291202,
"grad_norm": 2.677208662033081,
"learning_rate": 8.122605363984674e-05,
"loss": 2.1501,
"step": 492
},
{
"epoch": 2.4411400247831474,
"grad_norm": 1.9943513870239258,
"learning_rate": 8.0970625798212e-05,
"loss": 2.3874,
"step": 493
},
{
"epoch": 2.446096654275093,
"grad_norm": 2.273705244064331,
"learning_rate": 8.071519795657727e-05,
"loss": 2.0298,
"step": 494
},
{
"epoch": 2.4510532837670382,
"grad_norm": 2.03985595703125,
"learning_rate": 8.045977011494253e-05,
"loss": 1.9825,
"step": 495
},
{
"epoch": 2.456009913258984,
"grad_norm": 1.9304776191711426,
"learning_rate": 8.020434227330779e-05,
"loss": 1.8176,
"step": 496
},
{
"epoch": 2.4609665427509295,
"grad_norm": 2.278892755508423,
"learning_rate": 7.994891443167306e-05,
"loss": 2.3092,
"step": 497
},
{
"epoch": 2.4659231722428747,
"grad_norm": 2.163693428039551,
"learning_rate": 7.969348659003832e-05,
"loss": 2.195,
"step": 498
},
{
"epoch": 2.4708798017348204,
"grad_norm": 2.5456533432006836,
"learning_rate": 7.943805874840358e-05,
"loss": 2.2795,
"step": 499
},
{
"epoch": 2.4758364312267656,
"grad_norm": 2.565223455429077,
"learning_rate": 7.918263090676885e-05,
"loss": 2.3311,
"step": 500
},
{
"epoch": 2.480793060718711,
"grad_norm": 2.468602180480957,
"learning_rate": 7.892720306513411e-05,
"loss": 2.6716,
"step": 501
},
{
"epoch": 2.485749690210657,
"grad_norm": 2.2904815673828125,
"learning_rate": 7.867177522349937e-05,
"loss": 2.1953,
"step": 502
},
{
"epoch": 2.4907063197026025,
"grad_norm": 2.0819876194000244,
"learning_rate": 7.841634738186464e-05,
"loss": 2.2799,
"step": 503
},
{
"epoch": 2.4956629491945477,
"grad_norm": 2.262396812438965,
"learning_rate": 7.81609195402299e-05,
"loss": 2.3044,
"step": 504
},
{
"epoch": 2.5006195786864933,
"grad_norm": 2.2627463340759277,
"learning_rate": 7.790549169859515e-05,
"loss": 2.3557,
"step": 505
},
{
"epoch": 2.5055762081784385,
"grad_norm": 2.0544724464416504,
"learning_rate": 7.765006385696041e-05,
"loss": 1.9032,
"step": 506
},
{
"epoch": 2.510532837670384,
"grad_norm": 2.1277432441711426,
"learning_rate": 7.739463601532568e-05,
"loss": 2.3961,
"step": 507
},
{
"epoch": 2.51548946716233,
"grad_norm": 2.0343055725097656,
"learning_rate": 7.713920817369094e-05,
"loss": 2.037,
"step": 508
},
{
"epoch": 2.520446096654275,
"grad_norm": 2.4001617431640625,
"learning_rate": 7.68837803320562e-05,
"loss": 2.4522,
"step": 509
},
{
"epoch": 2.5254027261462206,
"grad_norm": 1.8327491283416748,
"learning_rate": 7.662835249042147e-05,
"loss": 1.9539,
"step": 510
},
{
"epoch": 2.530359355638166,
"grad_norm": 1.9295401573181152,
"learning_rate": 7.637292464878673e-05,
"loss": 1.9554,
"step": 511
},
{
"epoch": 2.5353159851301115,
"grad_norm": 2.409006118774414,
"learning_rate": 7.611749680715198e-05,
"loss": 1.8266,
"step": 512
},
{
"epoch": 2.540272614622057,
"grad_norm": 2.3173720836639404,
"learning_rate": 7.586206896551724e-05,
"loss": 2.4242,
"step": 513
},
{
"epoch": 2.5452292441140028,
"grad_norm": 2.0689756870269775,
"learning_rate": 7.56066411238825e-05,
"loss": 2.1029,
"step": 514
},
{
"epoch": 2.550185873605948,
"grad_norm": 2.1280126571655273,
"learning_rate": 7.535121328224777e-05,
"loss": 2.1136,
"step": 515
},
{
"epoch": 2.5551425030978936,
"grad_norm": 2.9854607582092285,
"learning_rate": 7.509578544061303e-05,
"loss": 2.8267,
"step": 516
},
{
"epoch": 2.560099132589839,
"grad_norm": 2.0449581146240234,
"learning_rate": 7.48403575989783e-05,
"loss": 2.1204,
"step": 517
},
{
"epoch": 2.5650557620817844,
"grad_norm": 2.152194023132324,
"learning_rate": 7.458492975734356e-05,
"loss": 1.6798,
"step": 518
},
{
"epoch": 2.57001239157373,
"grad_norm": 2.301673412322998,
"learning_rate": 7.432950191570882e-05,
"loss": 2.2609,
"step": 519
},
{
"epoch": 2.5749690210656753,
"grad_norm": 2.390002489089966,
"learning_rate": 7.407407407407407e-05,
"loss": 2.4318,
"step": 520
},
{
"epoch": 2.579925650557621,
"grad_norm": 3.2430877685546875,
"learning_rate": 7.381864623243934e-05,
"loss": 2.4938,
"step": 521
},
{
"epoch": 2.584882280049566,
"grad_norm": 2.227306842803955,
"learning_rate": 7.35632183908046e-05,
"loss": 2.1597,
"step": 522
},
{
"epoch": 2.5898389095415117,
"grad_norm": 2.1724979877471924,
"learning_rate": 7.330779054916986e-05,
"loss": 2.1702,
"step": 523
},
{
"epoch": 2.5947955390334574,
"grad_norm": 2.4228322505950928,
"learning_rate": 7.305236270753513e-05,
"loss": 2.4855,
"step": 524
},
{
"epoch": 2.5997521685254026,
"grad_norm": 2.368286609649658,
"learning_rate": 7.279693486590039e-05,
"loss": 2.5217,
"step": 525
},
{
"epoch": 2.6047087980173482,
"grad_norm": 1.8513636589050293,
"learning_rate": 7.254150702426565e-05,
"loss": 2.2185,
"step": 526
},
{
"epoch": 2.6096654275092934,
"grad_norm": 2.2481653690338135,
"learning_rate": 7.22860791826309e-05,
"loss": 2.1565,
"step": 527
},
{
"epoch": 2.614622057001239,
"grad_norm": 2.042464256286621,
"learning_rate": 7.203065134099617e-05,
"loss": 2.2602,
"step": 528
},
{
"epoch": 2.6195786864931847,
"grad_norm": 2.1742701530456543,
"learning_rate": 7.177522349936143e-05,
"loss": 2.0985,
"step": 529
},
{
"epoch": 2.6245353159851303,
"grad_norm": 2.0808088779449463,
"learning_rate": 7.151979565772669e-05,
"loss": 2.0583,
"step": 530
},
{
"epoch": 2.6294919454770755,
"grad_norm": 1.9378777742385864,
"learning_rate": 7.126436781609196e-05,
"loss": 1.7537,
"step": 531
},
{
"epoch": 2.634448574969021,
"grad_norm": 2.5020270347595215,
"learning_rate": 7.100893997445722e-05,
"loss": 2.3168,
"step": 532
},
{
"epoch": 2.6394052044609664,
"grad_norm": 2.423002004623413,
"learning_rate": 7.075351213282248e-05,
"loss": 2.2957,
"step": 533
},
{
"epoch": 2.644361833952912,
"grad_norm": 1.9235918521881104,
"learning_rate": 7.049808429118773e-05,
"loss": 2.1923,
"step": 534
},
{
"epoch": 2.6493184634448577,
"grad_norm": 2.0859873294830322,
"learning_rate": 7.0242656449553e-05,
"loss": 2.1833,
"step": 535
},
{
"epoch": 2.654275092936803,
"grad_norm": 2.3581674098968506,
"learning_rate": 6.998722860791826e-05,
"loss": 2.3217,
"step": 536
},
{
"epoch": 2.6592317224287485,
"grad_norm": 2.184673309326172,
"learning_rate": 6.973180076628352e-05,
"loss": 2.2694,
"step": 537
},
{
"epoch": 2.6641883519206937,
"grad_norm": 2.373626708984375,
"learning_rate": 6.947637292464879e-05,
"loss": 2.2407,
"step": 538
},
{
"epoch": 2.6691449814126393,
"grad_norm": 2.328784704208374,
"learning_rate": 6.922094508301405e-05,
"loss": 2.6027,
"step": 539
},
{
"epoch": 2.674101610904585,
"grad_norm": 2.451972007751465,
"learning_rate": 6.896551724137931e-05,
"loss": 2.1135,
"step": 540
},
{
"epoch": 2.6790582403965306,
"grad_norm": 2.210793972015381,
"learning_rate": 6.871008939974458e-05,
"loss": 2.0632,
"step": 541
},
{
"epoch": 2.684014869888476,
"grad_norm": 2.022038698196411,
"learning_rate": 6.845466155810984e-05,
"loss": 1.7921,
"step": 542
},
{
"epoch": 2.6889714993804215,
"grad_norm": 2.6344008445739746,
"learning_rate": 6.81992337164751e-05,
"loss": 2.2798,
"step": 543
},
{
"epoch": 2.6939281288723667,
"grad_norm": 2.514261484146118,
"learning_rate": 6.794380587484037e-05,
"loss": 2.424,
"step": 544
},
{
"epoch": 2.6988847583643123,
"grad_norm": 2.2451043128967285,
"learning_rate": 6.768837803320563e-05,
"loss": 1.9677,
"step": 545
},
{
"epoch": 2.703841387856258,
"grad_norm": 2.241933822631836,
"learning_rate": 6.74329501915709e-05,
"loss": 2.4206,
"step": 546
},
{
"epoch": 2.708798017348203,
"grad_norm": 2.3536107540130615,
"learning_rate": 6.717752234993616e-05,
"loss": 2.5814,
"step": 547
},
{
"epoch": 2.7137546468401488,
"grad_norm": 2.215730905532837,
"learning_rate": 6.69220945083014e-05,
"loss": 1.8021,
"step": 548
},
{
"epoch": 2.718711276332094,
"grad_norm": 2.3178601264953613,
"learning_rate": 6.666666666666667e-05,
"loss": 2.2829,
"step": 549
},
{
"epoch": 2.7236679058240396,
"grad_norm": 2.420584201812744,
"learning_rate": 6.641123882503193e-05,
"loss": 2.3131,
"step": 550
},
{
"epoch": 2.7286245353159853,
"grad_norm": 2.242386817932129,
"learning_rate": 6.61558109833972e-05,
"loss": 1.9557,
"step": 551
},
{
"epoch": 2.7335811648079305,
"grad_norm": 2.050896167755127,
"learning_rate": 6.590038314176246e-05,
"loss": 1.9561,
"step": 552
},
{
"epoch": 2.738537794299876,
"grad_norm": 2.038100004196167,
"learning_rate": 6.564495530012772e-05,
"loss": 2.1168,
"step": 553
},
{
"epoch": 2.7434944237918213,
"grad_norm": 2.1917381286621094,
"learning_rate": 6.538952745849299e-05,
"loss": 1.873,
"step": 554
},
{
"epoch": 2.748451053283767,
"grad_norm": 2.4844274520874023,
"learning_rate": 6.513409961685824e-05,
"loss": 2.3741,
"step": 555
},
{
"epoch": 2.7534076827757126,
"grad_norm": 2.1887197494506836,
"learning_rate": 6.48786717752235e-05,
"loss": 1.7504,
"step": 556
},
{
"epoch": 2.758364312267658,
"grad_norm": 2.391392230987549,
"learning_rate": 6.462324393358876e-05,
"loss": 2.3918,
"step": 557
},
{
"epoch": 2.7633209417596034,
"grad_norm": 2.299013376235962,
"learning_rate": 6.436781609195403e-05,
"loss": 2.0697,
"step": 558
},
{
"epoch": 2.768277571251549,
"grad_norm": 2.294445276260376,
"learning_rate": 6.411238825031929e-05,
"loss": 2.017,
"step": 559
},
{
"epoch": 2.7732342007434942,
"grad_norm": 2.0729377269744873,
"learning_rate": 6.385696040868455e-05,
"loss": 2.2024,
"step": 560
},
{
"epoch": 2.77819083023544,
"grad_norm": 2.1257901191711426,
"learning_rate": 6.360153256704982e-05,
"loss": 1.9048,
"step": 561
},
{
"epoch": 2.7831474597273855,
"grad_norm": 2.4315128326416016,
"learning_rate": 6.334610472541508e-05,
"loss": 2.8421,
"step": 562
},
{
"epoch": 2.7881040892193307,
"grad_norm": 2.669069766998291,
"learning_rate": 6.309067688378033e-05,
"loss": 2.061,
"step": 563
},
{
"epoch": 2.7930607187112764,
"grad_norm": 2.463329792022705,
"learning_rate": 6.283524904214559e-05,
"loss": 2.0992,
"step": 564
},
{
"epoch": 2.7980173482032216,
"grad_norm": 2.218747615814209,
"learning_rate": 6.257982120051086e-05,
"loss": 2.0431,
"step": 565
},
{
"epoch": 2.802973977695167,
"grad_norm": 2.394122838973999,
"learning_rate": 6.232439335887612e-05,
"loss": 2.1749,
"step": 566
},
{
"epoch": 2.807930607187113,
"grad_norm": 2.188235282897949,
"learning_rate": 6.206896551724138e-05,
"loss": 2.2733,
"step": 567
},
{
"epoch": 2.8128872366790585,
"grad_norm": 2.446723222732544,
"learning_rate": 6.181353767560665e-05,
"loss": 2.2211,
"step": 568
},
{
"epoch": 2.8178438661710037,
"grad_norm": 2.2640678882598877,
"learning_rate": 6.155810983397191e-05,
"loss": 2.1568,
"step": 569
},
{
"epoch": 2.8228004956629493,
"grad_norm": 2.8429107666015625,
"learning_rate": 6.130268199233716e-05,
"loss": 2.8168,
"step": 570
},
{
"epoch": 2.8277571251548945,
"grad_norm": 2.4229469299316406,
"learning_rate": 6.104725415070242e-05,
"loss": 2.1375,
"step": 571
},
{
"epoch": 2.83271375464684,
"grad_norm": 2.336423397064209,
"learning_rate": 6.0791826309067686e-05,
"loss": 2.7039,
"step": 572
},
{
"epoch": 2.837670384138786,
"grad_norm": 2.691897392272949,
"learning_rate": 6.053639846743295e-05,
"loss": 2.4094,
"step": 573
},
{
"epoch": 2.842627013630731,
"grad_norm": 2.258892059326172,
"learning_rate": 6.028097062579821e-05,
"loss": 1.7147,
"step": 574
},
{
"epoch": 2.8475836431226766,
"grad_norm": 2.352938175201416,
"learning_rate": 6.0025542784163477e-05,
"loss": 2.0789,
"step": 575
},
{
"epoch": 2.852540272614622,
"grad_norm": 2.318082094192505,
"learning_rate": 5.977011494252874e-05,
"loss": 1.8802,
"step": 576
},
{
"epoch": 2.8574969021065675,
"grad_norm": 2.9496710300445557,
"learning_rate": 5.9514687100893996e-05,
"loss": 2.376,
"step": 577
},
{
"epoch": 2.862453531598513,
"grad_norm": 2.5339314937591553,
"learning_rate": 5.925925925925926e-05,
"loss": 2.2208,
"step": 578
},
{
"epoch": 2.8674101610904588,
"grad_norm": 2.220191717147827,
"learning_rate": 5.900383141762452e-05,
"loss": 1.9416,
"step": 579
},
{
"epoch": 2.872366790582404,
"grad_norm": 2.244831085205078,
"learning_rate": 5.8748403575989787e-05,
"loss": 2.287,
"step": 580
},
{
"epoch": 2.8773234200743496,
"grad_norm": 2.2947471141815186,
"learning_rate": 5.849297573435505e-05,
"loss": 2.233,
"step": 581
},
{
"epoch": 2.882280049566295,
"grad_norm": 2.1228411197662354,
"learning_rate": 5.823754789272031e-05,
"loss": 2.012,
"step": 582
},
{
"epoch": 2.8872366790582404,
"grad_norm": 2.3730921745300293,
"learning_rate": 5.798212005108558e-05,
"loss": 2.3333,
"step": 583
},
{
"epoch": 2.892193308550186,
"grad_norm": 2.1657650470733643,
"learning_rate": 5.7726692209450826e-05,
"loss": 2.3758,
"step": 584
},
{
"epoch": 2.8971499380421313,
"grad_norm": 2.4342710971832275,
"learning_rate": 5.747126436781609e-05,
"loss": 2.5381,
"step": 585
},
{
"epoch": 2.902106567534077,
"grad_norm": 2.218479633331299,
"learning_rate": 5.721583652618135e-05,
"loss": 2.3628,
"step": 586
},
{
"epoch": 2.907063197026022,
"grad_norm": 2.1390647888183594,
"learning_rate": 5.6960408684546617e-05,
"loss": 2.304,
"step": 587
},
{
"epoch": 2.9120198265179678,
"grad_norm": 2.6552858352661133,
"learning_rate": 5.670498084291188e-05,
"loss": 2.2981,
"step": 588
},
{
"epoch": 2.9169764560099134,
"grad_norm": 2.6417832374572754,
"learning_rate": 5.644955300127714e-05,
"loss": 2.4692,
"step": 589
},
{
"epoch": 2.9219330855018586,
"grad_norm": 2.1957082748413086,
"learning_rate": 5.6194125159642407e-05,
"loss": 2.0055,
"step": 590
},
{
"epoch": 2.9268897149938042,
"grad_norm": 2.549053192138672,
"learning_rate": 5.593869731800766e-05,
"loss": 1.9994,
"step": 591
},
{
"epoch": 2.9318463444857494,
"grad_norm": 2.4547526836395264,
"learning_rate": 5.5683269476372927e-05,
"loss": 2.0306,
"step": 592
},
{
"epoch": 2.936802973977695,
"grad_norm": 2.595532178878784,
"learning_rate": 5.542784163473819e-05,
"loss": 2.3909,
"step": 593
},
{
"epoch": 2.9417596034696407,
"grad_norm": 2.1407456398010254,
"learning_rate": 5.517241379310345e-05,
"loss": 2.1741,
"step": 594
},
{
"epoch": 2.9467162329615864,
"grad_norm": 2.4364256858825684,
"learning_rate": 5.491698595146872e-05,
"loss": 2.4011,
"step": 595
},
{
"epoch": 2.9516728624535316,
"grad_norm": 2.644935369491577,
"learning_rate": 5.466155810983398e-05,
"loss": 2.2418,
"step": 596
},
{
"epoch": 2.956629491945477,
"grad_norm": 2.2009565830230713,
"learning_rate": 5.440613026819924e-05,
"loss": 2.3574,
"step": 597
},
{
"epoch": 2.9615861214374224,
"grad_norm": 2.40665340423584,
"learning_rate": 5.415070242656451e-05,
"loss": 2.2198,
"step": 598
},
{
"epoch": 2.966542750929368,
"grad_norm": 1.834892749786377,
"learning_rate": 5.3895274584929756e-05,
"loss": 2.0522,
"step": 599
},
{
"epoch": 2.9714993804213137,
"grad_norm": 2.602015256881714,
"learning_rate": 5.363984674329502e-05,
"loss": 2.2518,
"step": 600
},
{
"epoch": 2.976456009913259,
"grad_norm": 2.3021697998046875,
"learning_rate": 5.338441890166028e-05,
"loss": 2.1467,
"step": 601
},
{
"epoch": 2.9814126394052045,
"grad_norm": 2.2826194763183594,
"learning_rate": 5.3128991060025547e-05,
"loss": 2.3371,
"step": 602
},
{
"epoch": 2.9863692688971497,
"grad_norm": 1.934054970741272,
"learning_rate": 5.287356321839081e-05,
"loss": 1.9699,
"step": 603
},
{
"epoch": 2.9913258983890954,
"grad_norm": 2.2678050994873047,
"learning_rate": 5.261813537675607e-05,
"loss": 2.2934,
"step": 604
},
{
"epoch": 2.996282527881041,
"grad_norm": 2.5755562782287598,
"learning_rate": 5.236270753512134e-05,
"loss": 2.4522,
"step": 605
},
{
"epoch": 3.0,
"grad_norm": 3.2130398750305176,
"learning_rate": 5.2107279693486586e-05,
"loss": 2.4297,
"step": 606
},
{
"epoch": 3.0,
"eval_loss": 2.5098254680633545,
"eval_runtime": 22.3298,
"eval_samples_per_second": 18.092,
"eval_steps_per_second": 2.284,
"step": 606
},
{
"epoch": 3.0049566294919456,
"grad_norm": 1.9976105690002441,
"learning_rate": 5.185185185185185e-05,
"loss": 1.9214,
"step": 607
},
{
"epoch": 3.009913258983891,
"grad_norm": 2.0963776111602783,
"learning_rate": 5.159642401021711e-05,
"loss": 2.1928,
"step": 608
},
{
"epoch": 3.0148698884758365,
"grad_norm": 1.9764257669448853,
"learning_rate": 5.1340996168582377e-05,
"loss": 1.8546,
"step": 609
},
{
"epoch": 3.019826517967782,
"grad_norm": 2.3067879676818848,
"learning_rate": 5.108556832694764e-05,
"loss": 2.5585,
"step": 610
},
{
"epoch": 3.0247831474597273,
"grad_norm": 2.3767294883728027,
"learning_rate": 5.08301404853129e-05,
"loss": 2.2411,
"step": 611
},
{
"epoch": 3.029739776951673,
"grad_norm": 1.899346947669983,
"learning_rate": 5.057471264367817e-05,
"loss": 1.8045,
"step": 612
},
{
"epoch": 3.034696406443618,
"grad_norm": 2.2742886543273926,
"learning_rate": 5.031928480204342e-05,
"loss": 1.9178,
"step": 613
},
{
"epoch": 3.039653035935564,
"grad_norm": 2.208033561706543,
"learning_rate": 5.0063856960408687e-05,
"loss": 2.2826,
"step": 614
},
{
"epoch": 3.0446096654275094,
"grad_norm": 2.00700306892395,
"learning_rate": 4.980842911877395e-05,
"loss": 1.7436,
"step": 615
},
{
"epoch": 3.0495662949194546,
"grad_norm": 2.484027147293091,
"learning_rate": 4.955300127713921e-05,
"loss": 2.3736,
"step": 616
},
{
"epoch": 3.0545229244114003,
"grad_norm": 2.4913110733032227,
"learning_rate": 4.929757343550448e-05,
"loss": 2.2775,
"step": 617
},
{
"epoch": 3.059479553903346,
"grad_norm": 1.8059406280517578,
"learning_rate": 4.904214559386973e-05,
"loss": 1.5242,
"step": 618
},
{
"epoch": 3.064436183395291,
"grad_norm": 2.3238720893859863,
"learning_rate": 4.8786717752234997e-05,
"loss": 2.1255,
"step": 619
},
{
"epoch": 3.0693928128872368,
"grad_norm": 1.9283521175384521,
"learning_rate": 4.853128991060026e-05,
"loss": 1.5993,
"step": 620
},
{
"epoch": 3.074349442379182,
"grad_norm": 2.2595303058624268,
"learning_rate": 4.827586206896552e-05,
"loss": 1.999,
"step": 621
},
{
"epoch": 3.0793060718711276,
"grad_norm": 2.251521110534668,
"learning_rate": 4.802043422733078e-05,
"loss": 1.8928,
"step": 622
},
{
"epoch": 3.0842627013630732,
"grad_norm": 2.7896904945373535,
"learning_rate": 4.776500638569604e-05,
"loss": 2.0757,
"step": 623
},
{
"epoch": 3.0892193308550184,
"grad_norm": 2.178480863571167,
"learning_rate": 4.7509578544061307e-05,
"loss": 2.0942,
"step": 624
},
{
"epoch": 3.094175960346964,
"grad_norm": 2.765531539916992,
"learning_rate": 4.725415070242657e-05,
"loss": 1.9807,
"step": 625
},
{
"epoch": 3.0991325898389097,
"grad_norm": 1.9775290489196777,
"learning_rate": 4.6998722860791827e-05,
"loss": 1.9953,
"step": 626
},
{
"epoch": 3.104089219330855,
"grad_norm": 2.0385220050811768,
"learning_rate": 4.674329501915709e-05,
"loss": 1.6417,
"step": 627
},
{
"epoch": 3.1090458488228006,
"grad_norm": 2.535924196243286,
"learning_rate": 4.648786717752235e-05,
"loss": 2.3085,
"step": 628
},
{
"epoch": 3.114002478314746,
"grad_norm": 2.8627915382385254,
"learning_rate": 4.623243933588761e-05,
"loss": 2.1261,
"step": 629
},
{
"epoch": 3.1189591078066914,
"grad_norm": 2.845181941986084,
"learning_rate": 4.597701149425287e-05,
"loss": 2.1133,
"step": 630
},
{
"epoch": 3.123915737298637,
"grad_norm": 2.202937602996826,
"learning_rate": 4.5721583652618137e-05,
"loss": 1.967,
"step": 631
},
{
"epoch": 3.1288723667905822,
"grad_norm": 2.412930727005005,
"learning_rate": 4.54661558109834e-05,
"loss": 1.8034,
"step": 632
},
{
"epoch": 3.133828996282528,
"grad_norm": 2.56500244140625,
"learning_rate": 4.5210727969348656e-05,
"loss": 2.2214,
"step": 633
},
{
"epoch": 3.1387856257744735,
"grad_norm": 2.520071506500244,
"learning_rate": 4.495530012771392e-05,
"loss": 1.9551,
"step": 634
},
{
"epoch": 3.1437422552664187,
"grad_norm": 2.391106605529785,
"learning_rate": 4.469987228607918e-05,
"loss": 1.8267,
"step": 635
},
{
"epoch": 3.1486988847583643,
"grad_norm": 2.86560320854187,
"learning_rate": 4.4444444444444447e-05,
"loss": 2.0847,
"step": 636
},
{
"epoch": 3.15365551425031,
"grad_norm": 2.5105133056640625,
"learning_rate": 4.418901660280971e-05,
"loss": 1.9333,
"step": 637
},
{
"epoch": 3.158612143742255,
"grad_norm": 2.3541910648345947,
"learning_rate": 4.393358876117497e-05,
"loss": 2.0839,
"step": 638
},
{
"epoch": 3.163568773234201,
"grad_norm": 2.4766783714294434,
"learning_rate": 4.367816091954024e-05,
"loss": 1.9689,
"step": 639
},
{
"epoch": 3.168525402726146,
"grad_norm": 2.1712841987609863,
"learning_rate": 4.342273307790549e-05,
"loss": 1.7512,
"step": 640
},
{
"epoch": 3.1734820322180917,
"grad_norm": 2.2210240364074707,
"learning_rate": 4.3167305236270757e-05,
"loss": 1.8299,
"step": 641
},
{
"epoch": 3.1784386617100373,
"grad_norm": 2.5499587059020996,
"learning_rate": 4.291187739463602e-05,
"loss": 2.051,
"step": 642
},
{
"epoch": 3.1833952912019825,
"grad_norm": 2.5971527099609375,
"learning_rate": 4.2656449553001277e-05,
"loss": 2.0613,
"step": 643
},
{
"epoch": 3.188351920693928,
"grad_norm": 2.212960720062256,
"learning_rate": 4.240102171136654e-05,
"loss": 2.0225,
"step": 644
},
{
"epoch": 3.193308550185874,
"grad_norm": 2.652787923812866,
"learning_rate": 4.21455938697318e-05,
"loss": 2.1012,
"step": 645
},
{
"epoch": 3.198265179677819,
"grad_norm": 2.414275884628296,
"learning_rate": 4.189016602809707e-05,
"loss": 2.159,
"step": 646
},
{
"epoch": 3.2032218091697646,
"grad_norm": 2.223020076751709,
"learning_rate": 4.163473818646232e-05,
"loss": 1.6145,
"step": 647
},
{
"epoch": 3.20817843866171,
"grad_norm": 2.7731528282165527,
"learning_rate": 4.1379310344827587e-05,
"loss": 2.3619,
"step": 648
},
{
"epoch": 3.2131350681536555,
"grad_norm": 2.2961618900299072,
"learning_rate": 4.112388250319285e-05,
"loss": 1.9229,
"step": 649
},
{
"epoch": 3.218091697645601,
"grad_norm": 2.880171298980713,
"learning_rate": 4.086845466155811e-05,
"loss": 2.4915,
"step": 650
},
{
"epoch": 3.2230483271375463,
"grad_norm": 2.2541770935058594,
"learning_rate": 4.061302681992337e-05,
"loss": 2.224,
"step": 651
},
{
"epoch": 3.228004956629492,
"grad_norm": 2.63120436668396,
"learning_rate": 4.035759897828863e-05,
"loss": 1.9003,
"step": 652
},
{
"epoch": 3.2329615861214376,
"grad_norm": 2.031409740447998,
"learning_rate": 4.0102171136653897e-05,
"loss": 1.9379,
"step": 653
},
{
"epoch": 3.2379182156133828,
"grad_norm": 2.8178703784942627,
"learning_rate": 3.984674329501916e-05,
"loss": 1.8691,
"step": 654
},
{
"epoch": 3.2428748451053284,
"grad_norm": 2.5390496253967285,
"learning_rate": 3.959131545338442e-05,
"loss": 1.9412,
"step": 655
},
{
"epoch": 3.247831474597274,
"grad_norm": 2.23886775970459,
"learning_rate": 3.933588761174969e-05,
"loss": 1.8117,
"step": 656
},
{
"epoch": 3.2527881040892193,
"grad_norm": 2.419747829437256,
"learning_rate": 3.908045977011495e-05,
"loss": 1.8492,
"step": 657
},
{
"epoch": 3.257744733581165,
"grad_norm": 2.5116543769836426,
"learning_rate": 3.8825031928480207e-05,
"loss": 1.9955,
"step": 658
},
{
"epoch": 3.26270136307311,
"grad_norm": 2.7984719276428223,
"learning_rate": 3.856960408684547e-05,
"loss": 2.0657,
"step": 659
},
{
"epoch": 3.2676579925650557,
"grad_norm": 2.3362345695495605,
"learning_rate": 3.831417624521073e-05,
"loss": 1.9131,
"step": 660
},
{
"epoch": 3.2726146220570014,
"grad_norm": 3.0645365715026855,
"learning_rate": 3.805874840357599e-05,
"loss": 2.3735,
"step": 661
},
{
"epoch": 3.2775712515489466,
"grad_norm": 2.3989381790161133,
"learning_rate": 3.780332056194125e-05,
"loss": 1.4717,
"step": 662
},
{
"epoch": 3.282527881040892,
"grad_norm": 2.7305102348327637,
"learning_rate": 3.7547892720306517e-05,
"loss": 2.3454,
"step": 663
},
{
"epoch": 3.287484510532838,
"grad_norm": 2.355215311050415,
"learning_rate": 3.729246487867178e-05,
"loss": 1.9257,
"step": 664
},
{
"epoch": 3.292441140024783,
"grad_norm": 2.841524600982666,
"learning_rate": 3.7037037037037037e-05,
"loss": 2.1013,
"step": 665
},
{
"epoch": 3.2973977695167287,
"grad_norm": 1.97605562210083,
"learning_rate": 3.67816091954023e-05,
"loss": 1.415,
"step": 666
},
{
"epoch": 3.3023543990086743,
"grad_norm": 2.803922653198242,
"learning_rate": 3.652618135376756e-05,
"loss": 2.3917,
"step": 667
},
{
"epoch": 3.3073110285006195,
"grad_norm": 2.375274658203125,
"learning_rate": 3.627075351213283e-05,
"loss": 2.3449,
"step": 668
},
{
"epoch": 3.312267657992565,
"grad_norm": 2.461966037750244,
"learning_rate": 3.601532567049808e-05,
"loss": 2.0543,
"step": 669
},
{
"epoch": 3.3172242874845104,
"grad_norm": 2.7819225788116455,
"learning_rate": 3.5759897828863347e-05,
"loss": 2.0613,
"step": 670
},
{
"epoch": 3.322180916976456,
"grad_norm": 2.084023952484131,
"learning_rate": 3.550446998722861e-05,
"loss": 1.5794,
"step": 671
},
{
"epoch": 3.3271375464684017,
"grad_norm": 2.4474074840545654,
"learning_rate": 3.5249042145593867e-05,
"loss": 1.973,
"step": 672
},
{
"epoch": 3.332094175960347,
"grad_norm": 2.4582390785217285,
"learning_rate": 3.499361430395913e-05,
"loss": 1.7705,
"step": 673
},
{
"epoch": 3.3370508054522925,
"grad_norm": 2.56362247467041,
"learning_rate": 3.473818646232439e-05,
"loss": 1.9695,
"step": 674
},
{
"epoch": 3.3420074349442377,
"grad_norm": 2.6630728244781494,
"learning_rate": 3.4482758620689657e-05,
"loss": 2.0741,
"step": 675
},
{
"epoch": 3.3469640644361833,
"grad_norm": 2.5984740257263184,
"learning_rate": 3.422733077905492e-05,
"loss": 2.029,
"step": 676
},
{
"epoch": 3.351920693928129,
"grad_norm": 2.3361611366271973,
"learning_rate": 3.397190293742018e-05,
"loss": 1.8688,
"step": 677
},
{
"epoch": 3.356877323420074,
"grad_norm": 3.3585948944091797,
"learning_rate": 3.371647509578545e-05,
"loss": 2.6338,
"step": 678
},
{
"epoch": 3.36183395291202,
"grad_norm": 2.697134494781494,
"learning_rate": 3.34610472541507e-05,
"loss": 1.847,
"step": 679
},
{
"epoch": 3.3667905824039654,
"grad_norm": 2.71582293510437,
"learning_rate": 3.3205619412515967e-05,
"loss": 1.8604,
"step": 680
},
{
"epoch": 3.3717472118959106,
"grad_norm": 2.484410285949707,
"learning_rate": 3.295019157088123e-05,
"loss": 2.3045,
"step": 681
},
{
"epoch": 3.3767038413878563,
"grad_norm": 2.4801011085510254,
"learning_rate": 3.269476372924649e-05,
"loss": 1.9622,
"step": 682
},
{
"epoch": 3.381660470879802,
"grad_norm": 2.462303638458252,
"learning_rate": 3.243933588761175e-05,
"loss": 2.1321,
"step": 683
},
{
"epoch": 3.386617100371747,
"grad_norm": 2.5208513736724854,
"learning_rate": 3.218390804597701e-05,
"loss": 2.031,
"step": 684
},
{
"epoch": 3.3915737298636928,
"grad_norm": 2.2665512561798096,
"learning_rate": 3.192848020434228e-05,
"loss": 1.9891,
"step": 685
},
{
"epoch": 3.396530359355638,
"grad_norm": 2.9241855144500732,
"learning_rate": 3.167305236270754e-05,
"loss": 2.3861,
"step": 686
},
{
"epoch": 3.4014869888475836,
"grad_norm": 2.353585720062256,
"learning_rate": 3.1417624521072797e-05,
"loss": 1.7977,
"step": 687
},
{
"epoch": 3.4064436183395292,
"grad_norm": 2.5655272006988525,
"learning_rate": 3.116219667943806e-05,
"loss": 1.782,
"step": 688
},
{
"epoch": 3.4114002478314744,
"grad_norm": 2.2319204807281494,
"learning_rate": 3.090676883780332e-05,
"loss": 1.4128,
"step": 689
},
{
"epoch": 3.41635687732342,
"grad_norm": 2.703676462173462,
"learning_rate": 3.065134099616858e-05,
"loss": 2.0543,
"step": 690
},
{
"epoch": 3.4213135068153657,
"grad_norm": 2.8589353561401367,
"learning_rate": 3.0395913154533843e-05,
"loss": 2.11,
"step": 691
},
{
"epoch": 3.426270136307311,
"grad_norm": 2.237912893295288,
"learning_rate": 3.0140485312899107e-05,
"loss": 1.8919,
"step": 692
},
{
"epoch": 3.4312267657992566,
"grad_norm": 2.673888683319092,
"learning_rate": 2.988505747126437e-05,
"loss": 2.1326,
"step": 693
},
{
"epoch": 3.436183395291202,
"grad_norm": 2.9751524925231934,
"learning_rate": 2.962962962962963e-05,
"loss": 2.3678,
"step": 694
},
{
"epoch": 3.4411400247831474,
"grad_norm": 3.3065667152404785,
"learning_rate": 2.9374201787994893e-05,
"loss": 2.5252,
"step": 695
},
{
"epoch": 3.446096654275093,
"grad_norm": 2.837353229522705,
"learning_rate": 2.9118773946360157e-05,
"loss": 2.3186,
"step": 696
},
{
"epoch": 3.4510532837670382,
"grad_norm": 2.4829537868499756,
"learning_rate": 2.8863346104725413e-05,
"loss": 2.0368,
"step": 697
},
{
"epoch": 3.456009913258984,
"grad_norm": 3.0476629734039307,
"learning_rate": 2.8607918263090677e-05,
"loss": 2.124,
"step": 698
},
{
"epoch": 3.4609665427509295,
"grad_norm": 2.930732488632202,
"learning_rate": 2.835249042145594e-05,
"loss": 2.2542,
"step": 699
},
{
"epoch": 3.4659231722428747,
"grad_norm": 2.7399027347564697,
"learning_rate": 2.8097062579821203e-05,
"loss": 2.0625,
"step": 700
},
{
"epoch": 3.4708798017348204,
"grad_norm": 2.625471591949463,
"learning_rate": 2.7841634738186463e-05,
"loss": 2.2255,
"step": 701
},
{
"epoch": 3.4758364312267656,
"grad_norm": 2.4757065773010254,
"learning_rate": 2.7586206896551727e-05,
"loss": 2.0139,
"step": 702
},
{
"epoch": 3.480793060718711,
"grad_norm": 2.4212238788604736,
"learning_rate": 2.733077905491699e-05,
"loss": 1.9171,
"step": 703
},
{
"epoch": 3.485749690210657,
"grad_norm": 2.9213318824768066,
"learning_rate": 2.7075351213282253e-05,
"loss": 2.3692,
"step": 704
},
{
"epoch": 3.4907063197026025,
"grad_norm": 2.563901424407959,
"learning_rate": 2.681992337164751e-05,
"loss": 2.2226,
"step": 705
},
{
"epoch": 3.4956629491945477,
"grad_norm": 2.412309169769287,
"learning_rate": 2.6564495530012773e-05,
"loss": 1.7855,
"step": 706
},
{
"epoch": 3.5006195786864933,
"grad_norm": 2.7574050426483154,
"learning_rate": 2.6309067688378037e-05,
"loss": 1.8671,
"step": 707
},
{
"epoch": 3.5055762081784385,
"grad_norm": 2.588981866836548,
"learning_rate": 2.6053639846743293e-05,
"loss": 1.847,
"step": 708
},
{
"epoch": 3.510532837670384,
"grad_norm": 2.5374417304992676,
"learning_rate": 2.5798212005108557e-05,
"loss": 1.8264,
"step": 709
},
{
"epoch": 3.51548946716233,
"grad_norm": 2.9206414222717285,
"learning_rate": 2.554278416347382e-05,
"loss": 2.1438,
"step": 710
},
{
"epoch": 3.520446096654275,
"grad_norm": 3.5458905696868896,
"learning_rate": 2.5287356321839083e-05,
"loss": 2.1679,
"step": 711
},
{
"epoch": 3.5254027261462206,
"grad_norm": 2.4408674240112305,
"learning_rate": 2.5031928480204343e-05,
"loss": 1.8783,
"step": 712
},
{
"epoch": 3.530359355638166,
"grad_norm": 2.9399070739746094,
"learning_rate": 2.4776500638569607e-05,
"loss": 1.7827,
"step": 713
},
{
"epoch": 3.5353159851301115,
"grad_norm": 2.5893685817718506,
"learning_rate": 2.4521072796934867e-05,
"loss": 1.9307,
"step": 714
},
{
"epoch": 3.540272614622057,
"grad_norm": 2.5607779026031494,
"learning_rate": 2.426564495530013e-05,
"loss": 2.1557,
"step": 715
},
{
"epoch": 3.5452292441140028,
"grad_norm": 2.6460790634155273,
"learning_rate": 2.401021711366539e-05,
"loss": 1.8946,
"step": 716
},
{
"epoch": 3.550185873605948,
"grad_norm": 2.498994827270508,
"learning_rate": 2.3754789272030653e-05,
"loss": 1.987,
"step": 717
},
{
"epoch": 3.5551425030978936,
"grad_norm": 2.8072335720062256,
"learning_rate": 2.3499361430395913e-05,
"loss": 2.3501,
"step": 718
},
{
"epoch": 3.560099132589839,
"grad_norm": 2.413820743560791,
"learning_rate": 2.3243933588761177e-05,
"loss": 2.3033,
"step": 719
},
{
"epoch": 3.5650557620817844,
"grad_norm": 1.9895007610321045,
"learning_rate": 2.2988505747126437e-05,
"loss": 1.5816,
"step": 720
},
{
"epoch": 3.57001239157373,
"grad_norm": 2.67324161529541,
"learning_rate": 2.27330779054917e-05,
"loss": 2.3242,
"step": 721
},
{
"epoch": 3.5749690210656753,
"grad_norm": 2.9094364643096924,
"learning_rate": 2.247765006385696e-05,
"loss": 2.133,
"step": 722
},
{
"epoch": 3.579925650557621,
"grad_norm": 2.4571454524993896,
"learning_rate": 2.2222222222222223e-05,
"loss": 2.089,
"step": 723
},
{
"epoch": 3.584882280049566,
"grad_norm": 2.40213680267334,
"learning_rate": 2.1966794380587487e-05,
"loss": 2.013,
"step": 724
},
{
"epoch": 3.5898389095415117,
"grad_norm": 2.4887630939483643,
"learning_rate": 2.1711366538952747e-05,
"loss": 2.0719,
"step": 725
},
{
"epoch": 3.5947955390334574,
"grad_norm": 3.0302541255950928,
"learning_rate": 2.145593869731801e-05,
"loss": 2.2166,
"step": 726
},
{
"epoch": 3.5997521685254026,
"grad_norm": 2.511434555053711,
"learning_rate": 2.120051085568327e-05,
"loss": 2.312,
"step": 727
},
{
"epoch": 3.6047087980173482,
"grad_norm": 2.49587082862854,
"learning_rate": 2.0945083014048533e-05,
"loss": 2.1587,
"step": 728
},
{
"epoch": 3.6096654275092934,
"grad_norm": 2.305344820022583,
"learning_rate": 2.0689655172413793e-05,
"loss": 1.9972,
"step": 729
},
{
"epoch": 3.614622057001239,
"grad_norm": 2.829852342605591,
"learning_rate": 2.0434227330779057e-05,
"loss": 2.1588,
"step": 730
},
{
"epoch": 3.6195786864931847,
"grad_norm": 2.3796768188476562,
"learning_rate": 2.0178799489144317e-05,
"loss": 1.7094,
"step": 731
},
{
"epoch": 3.6245353159851303,
"grad_norm": 2.1699445247650146,
"learning_rate": 1.992337164750958e-05,
"loss": 1.8184,
"step": 732
},
{
"epoch": 3.6294919454770755,
"grad_norm": 2.304624319076538,
"learning_rate": 1.9667943805874843e-05,
"loss": 2.1114,
"step": 733
},
{
"epoch": 3.634448574969021,
"grad_norm": 2.507122278213501,
"learning_rate": 1.9412515964240103e-05,
"loss": 2.0594,
"step": 734
},
{
"epoch": 3.6394052044609664,
"grad_norm": 2.8564951419830322,
"learning_rate": 1.9157088122605367e-05,
"loss": 2.1098,
"step": 735
},
{
"epoch": 3.644361833952912,
"grad_norm": 2.4493966102600098,
"learning_rate": 1.8901660280970627e-05,
"loss": 2.1243,
"step": 736
},
{
"epoch": 3.6493184634448577,
"grad_norm": 2.634030818939209,
"learning_rate": 1.864623243933589e-05,
"loss": 2.1568,
"step": 737
},
{
"epoch": 3.654275092936803,
"grad_norm": 2.6991872787475586,
"learning_rate": 1.839080459770115e-05,
"loss": 2.094,
"step": 738
},
{
"epoch": 3.6592317224287485,
"grad_norm": 2.587801694869995,
"learning_rate": 1.8135376756066413e-05,
"loss": 2.1519,
"step": 739
},
{
"epoch": 3.6641883519206937,
"grad_norm": 2.746302366256714,
"learning_rate": 1.7879948914431673e-05,
"loss": 2.0224,
"step": 740
},
{
"epoch": 3.6691449814126393,
"grad_norm": 2.5982632637023926,
"learning_rate": 1.7624521072796933e-05,
"loss": 2.229,
"step": 741
},
{
"epoch": 3.674101610904585,
"grad_norm": 2.684934616088867,
"learning_rate": 1.7369093231162197e-05,
"loss": 2.1958,
"step": 742
},
{
"epoch": 3.6790582403965306,
"grad_norm": 2.1835129261016846,
"learning_rate": 1.711366538952746e-05,
"loss": 2.0661,
"step": 743
},
{
"epoch": 3.684014869888476,
"grad_norm": 2.6260294914245605,
"learning_rate": 1.6858237547892723e-05,
"loss": 2.3006,
"step": 744
},
{
"epoch": 3.6889714993804215,
"grad_norm": 2.598024368286133,
"learning_rate": 1.6602809706257983e-05,
"loss": 2.184,
"step": 745
},
{
"epoch": 3.6939281288723667,
"grad_norm": 2.5640017986297607,
"learning_rate": 1.6347381864623247e-05,
"loss": 1.9889,
"step": 746
},
{
"epoch": 3.6988847583643123,
"grad_norm": 2.520355224609375,
"learning_rate": 1.6091954022988507e-05,
"loss": 1.8721,
"step": 747
},
{
"epoch": 3.703841387856258,
"grad_norm": 2.578373432159424,
"learning_rate": 1.583652618135377e-05,
"loss": 1.6672,
"step": 748
},
{
"epoch": 3.708798017348203,
"grad_norm": 2.8206024169921875,
"learning_rate": 1.558109833971903e-05,
"loss": 2.3436,
"step": 749
},
{
"epoch": 3.7137546468401488,
"grad_norm": 2.4661734104156494,
"learning_rate": 1.532567049808429e-05,
"loss": 1.8102,
"step": 750
},
{
"epoch": 3.718711276332094,
"grad_norm": 2.458994150161743,
"learning_rate": 1.5070242656449553e-05,
"loss": 2.2049,
"step": 751
},
{
"epoch": 3.7236679058240396,
"grad_norm": 2.699479579925537,
"learning_rate": 1.4814814814814815e-05,
"loss": 1.8601,
"step": 752
},
{
"epoch": 3.7286245353159853,
"grad_norm": 2.7305147647857666,
"learning_rate": 1.4559386973180078e-05,
"loss": 2.0493,
"step": 753
},
{
"epoch": 3.7335811648079305,
"grad_norm": 2.724635601043701,
"learning_rate": 1.4303959131545338e-05,
"loss": 1.9252,
"step": 754
},
{
"epoch": 3.738537794299876,
"grad_norm": 2.5254998207092285,
"learning_rate": 1.4048531289910602e-05,
"loss": 1.7042,
"step": 755
},
{
"epoch": 3.7434944237918213,
"grad_norm": 2.5693461894989014,
"learning_rate": 1.3793103448275863e-05,
"loss": 1.9372,
"step": 756
},
{
"epoch": 3.748451053283767,
"grad_norm": 3.0173473358154297,
"learning_rate": 1.3537675606641127e-05,
"loss": 2.0502,
"step": 757
},
{
"epoch": 3.7534076827757126,
"grad_norm": 2.4707775115966797,
"learning_rate": 1.3282247765006387e-05,
"loss": 1.7202,
"step": 758
},
{
"epoch": 3.758364312267658,
"grad_norm": 2.898653268814087,
"learning_rate": 1.3026819923371647e-05,
"loss": 1.9074,
"step": 759
},
{
"epoch": 3.7633209417596034,
"grad_norm": 2.3936469554901123,
"learning_rate": 1.277139208173691e-05,
"loss": 1.7491,
"step": 760
},
{
"epoch": 3.768277571251549,
"grad_norm": 2.245955228805542,
"learning_rate": 1.2515964240102172e-05,
"loss": 1.8061,
"step": 761
},
{
"epoch": 3.7732342007434942,
"grad_norm": 2.400726079940796,
"learning_rate": 1.2260536398467433e-05,
"loss": 1.7594,
"step": 762
},
{
"epoch": 3.77819083023544,
"grad_norm": 2.5081756114959717,
"learning_rate": 1.2005108556832695e-05,
"loss": 1.7719,
"step": 763
},
{
"epoch": 3.7831474597273855,
"grad_norm": 2.9115960597991943,
"learning_rate": 1.1749680715197957e-05,
"loss": 2.307,
"step": 764
},
{
"epoch": 3.7881040892193307,
"grad_norm": 2.9008727073669434,
"learning_rate": 1.1494252873563218e-05,
"loss": 2.3572,
"step": 765
},
{
"epoch": 3.7930607187112764,
"grad_norm": 2.6959750652313232,
"learning_rate": 1.123882503192848e-05,
"loss": 2.3963,
"step": 766
},
{
"epoch": 3.7980173482032216,
"grad_norm": 2.554203748703003,
"learning_rate": 1.0983397190293743e-05,
"loss": 2.0953,
"step": 767
},
{
"epoch": 3.802973977695167,
"grad_norm": 3.1628901958465576,
"learning_rate": 1.0727969348659005e-05,
"loss": 2.322,
"step": 768
},
{
"epoch": 3.807930607187113,
"grad_norm": 2.8941919803619385,
"learning_rate": 1.0472541507024267e-05,
"loss": 2.4856,
"step": 769
},
{
"epoch": 3.8128872366790585,
"grad_norm": 2.6858787536621094,
"learning_rate": 1.0217113665389528e-05,
"loss": 1.7071,
"step": 770
},
{
"epoch": 3.8178438661710037,
"grad_norm": 2.8813092708587646,
"learning_rate": 9.96168582375479e-06,
"loss": 2.1721,
"step": 771
},
{
"epoch": 3.8228004956629493,
"grad_norm": 2.395799398422241,
"learning_rate": 9.706257982120052e-06,
"loss": 2.0502,
"step": 772
},
{
"epoch": 3.8277571251548945,
"grad_norm": 2.8856003284454346,
"learning_rate": 9.450830140485313e-06,
"loss": 2.2581,
"step": 773
},
{
"epoch": 3.83271375464684,
"grad_norm": 2.4333102703094482,
"learning_rate": 9.195402298850575e-06,
"loss": 2.0938,
"step": 774
},
{
"epoch": 3.837670384138786,
"grad_norm": 2.266862392425537,
"learning_rate": 8.939974457215837e-06,
"loss": 2.3068,
"step": 775
},
{
"epoch": 3.842627013630731,
"grad_norm": 2.494243860244751,
"learning_rate": 8.684546615581098e-06,
"loss": 1.687,
"step": 776
},
{
"epoch": 3.8475836431226766,
"grad_norm": 2.6965909004211426,
"learning_rate": 8.429118773946362e-06,
"loss": 2.5372,
"step": 777
},
{
"epoch": 3.852540272614622,
"grad_norm": 2.7889437675476074,
"learning_rate": 8.173690932311623e-06,
"loss": 2.1596,
"step": 778
},
{
"epoch": 3.8574969021065675,
"grad_norm": 2.788628101348877,
"learning_rate": 7.918263090676885e-06,
"loss": 2.2257,
"step": 779
},
{
"epoch": 3.862453531598513,
"grad_norm": 2.956270217895508,
"learning_rate": 7.662835249042145e-06,
"loss": 2.1526,
"step": 780
},
{
"epoch": 3.8674101610904588,
"grad_norm": 2.884690523147583,
"learning_rate": 7.4074074074074075e-06,
"loss": 2.1096,
"step": 781
},
{
"epoch": 3.872366790582404,
"grad_norm": 2.3916282653808594,
"learning_rate": 7.151979565772669e-06,
"loss": 2.0638,
"step": 782
},
{
"epoch": 3.8773234200743496,
"grad_norm": 2.6462628841400146,
"learning_rate": 6.896551724137932e-06,
"loss": 2.0776,
"step": 783
},
{
"epoch": 3.882280049566295,
"grad_norm": 2.674168348312378,
"learning_rate": 6.641123882503193e-06,
"loss": 1.9045,
"step": 784
},
{
"epoch": 3.8872366790582404,
"grad_norm": 2.6898820400238037,
"learning_rate": 6.385696040868455e-06,
"loss": 2.2157,
"step": 785
},
{
"epoch": 3.892193308550186,
"grad_norm": 2.8253626823425293,
"learning_rate": 6.130268199233717e-06,
"loss": 2.042,
"step": 786
},
{
"epoch": 3.8971499380421313,
"grad_norm": 2.905292510986328,
"learning_rate": 5.874840357598978e-06,
"loss": 2.3422,
"step": 787
},
{
"epoch": 3.902106567534077,
"grad_norm": 2.387803316116333,
"learning_rate": 5.61941251596424e-06,
"loss": 1.5696,
"step": 788
},
{
"epoch": 3.907063197026022,
"grad_norm": 2.655133008956909,
"learning_rate": 5.3639846743295025e-06,
"loss": 1.8796,
"step": 789
},
{
"epoch": 3.9120198265179678,
"grad_norm": 2.6278626918792725,
"learning_rate": 5.108556832694764e-06,
"loss": 1.8694,
"step": 790
},
{
"epoch": 3.9169764560099134,
"grad_norm": 2.4667484760284424,
"learning_rate": 4.853128991060026e-06,
"loss": 2.1705,
"step": 791
},
{
"epoch": 3.9219330855018586,
"grad_norm": 2.774759531021118,
"learning_rate": 4.5977011494252875e-06,
"loss": 2.1393,
"step": 792
},
{
"epoch": 3.9268897149938042,
"grad_norm": 2.8241868019104004,
"learning_rate": 4.342273307790549e-06,
"loss": 2.3277,
"step": 793
},
{
"epoch": 3.9318463444857494,
"grad_norm": 3.131408929824829,
"learning_rate": 4.086845466155812e-06,
"loss": 1.9978,
"step": 794
},
{
"epoch": 3.936802973977695,
"grad_norm": 2.5323071479797363,
"learning_rate": 3.8314176245210725e-06,
"loss": 1.7299,
"step": 795
},
{
"epoch": 3.9417596034696407,
"grad_norm": 2.7869760990142822,
"learning_rate": 3.5759897828863346e-06,
"loss": 1.9863,
"step": 796
},
{
"epoch": 3.9467162329615864,
"grad_norm": 2.5885322093963623,
"learning_rate": 3.3205619412515967e-06,
"loss": 1.9325,
"step": 797
},
{
"epoch": 3.9516728624535316,
"grad_norm": 2.5993456840515137,
"learning_rate": 3.0651340996168583e-06,
"loss": 2.6309,
"step": 798
},
{
"epoch": 3.956629491945477,
"grad_norm": 2.5578904151916504,
"learning_rate": 2.80970625798212e-06,
"loss": 1.8496,
"step": 799
},
{
"epoch": 3.9615861214374224,
"grad_norm": 2.696652889251709,
"learning_rate": 2.554278416347382e-06,
"loss": 1.9756,
"step": 800
},
{
"epoch": 3.966542750929368,
"grad_norm": 2.8636531829833984,
"learning_rate": 2.2988505747126437e-06,
"loss": 2.3127,
"step": 801
},
{
"epoch": 3.9714993804213137,
"grad_norm": 2.5958805084228516,
"learning_rate": 2.043422733077906e-06,
"loss": 1.5032,
"step": 802
},
{
"epoch": 3.976456009913259,
"grad_norm": 2.6378211975097656,
"learning_rate": 1.7879948914431673e-06,
"loss": 1.8637,
"step": 803
},
{
"epoch": 3.9814126394052045,
"grad_norm": 3.125774383544922,
"learning_rate": 1.5325670498084292e-06,
"loss": 2.3156,
"step": 804
},
{
"epoch": 3.9863692688971497,
"grad_norm": 2.5696659088134766,
"learning_rate": 1.277139208173691e-06,
"loss": 2.1169,
"step": 805
},
{
"epoch": 3.9913258983890954,
"grad_norm": 2.8002192974090576,
"learning_rate": 1.021711366538953e-06,
"loss": 2.2148,
"step": 806
},
{
"epoch": 3.996282527881041,
"grad_norm": 2.850720167160034,
"learning_rate": 7.662835249042146e-07,
"loss": 1.8883,
"step": 807
},
{
"epoch": 4.0,
"grad_norm": 3.3131906986236572,
"learning_rate": 5.108556832694765e-07,
"loss": 1.8331,
"step": 808
},
{
"epoch": 4.0,
"eval_loss": 2.5112364292144775,
"eval_runtime": 22.6481,
"eval_samples_per_second": 17.838,
"eval_steps_per_second": 2.252,
"step": 808
}
],
"logging_steps": 1,
"max_steps": 808,
"num_input_tokens_seen": 0,
"num_train_epochs": 4,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 6015167842222080.0,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}