{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.0,
  "global_step": 17365,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 1e-05,
      "loss": 10.6173,
      "step": 10
    },
    {
      "epoch": 0.0,
      "learning_rate": 2e-05,
      "loss": 10.5994,
      "step": 20
    },
    {
      "epoch": 0.01,
      "learning_rate": 3e-05,
      "loss": 10.5936,
      "step": 30
    },
    {
      "epoch": 0.01,
      "learning_rate": 4e-05,
      "loss": 10.5525,
      "step": 40
    },
    {
      "epoch": 0.01,
      "learning_rate": 5e-05,
      "loss": 10.5214,
      "step": 50
    },
    {
      "epoch": 0.01,
      "learning_rate": 6e-05,
      "loss": 10.3845,
      "step": 60
    },
    {
      "epoch": 0.02,
      "learning_rate": 7.000000000000001e-05,
      "loss": 9.9854,
      "step": 70
    },
    {
      "epoch": 0.02,
      "learning_rate": 8e-05,
      "loss": 9.1209,
      "step": 80
    },
    {
      "epoch": 0.02,
      "learning_rate": 8.999999999999999e-05,
      "loss": 8.4798,
      "step": 90
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.0001,
      "loss": 7.8074,
      "step": 100
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.00011,
      "loss": 7.5012,
      "step": 110
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.00012,
      "loss": 7.3823,
      "step": 120
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.00013000000000000002,
      "loss": 7.0647,
      "step": 130
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.00014000000000000001,
      "loss": 7.2592,
      "step": 140
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.00015,
      "loss": 7.1762,
      "step": 150
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00016,
      "loss": 7.0139,
      "step": 160
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00017,
      "loss": 6.9767,
      "step": 170
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00017999999999999998,
      "loss": 7.0242,
      "step": 180
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00019,
      "loss": 6.9285,
      "step": 190
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.0002,
      "loss": 6.9331,
      "step": 200
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00021,
      "loss": 6.8345,
      "step": 210
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00022,
      "loss": 6.6742,
      "step": 220
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00023,
      "loss": 6.8299,
      "step": 230
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.00024,
      "loss": 6.7196,
      "step": 240
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.00025,
      "loss": 6.5603,
      "step": 250
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.00026000000000000003,
      "loss": 6.5966,
      "step": 260
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.00027,
      "loss": 6.6431,
      "step": 270
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.00028000000000000003,
      "loss": 6.3873,
      "step": 280
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00029,
      "loss": 6.3411,
      "step": 290
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.0003,
      "loss": 6.1473,
      "step": 300
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00031,
      "loss": 6.2374,
      "step": 310
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00032,
      "loss": 6.2448,
      "step": 320
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00033,
      "loss": 6.1181,
      "step": 330
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00034,
      "loss": 6.0314,
      "step": 340
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00035,
      "loss": 5.8719,
      "step": 350
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00035999999999999997,
      "loss": 5.8483,
      "step": 360
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00037,
      "loss": 5.8423,
      "step": 370
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00038,
      "loss": 5.8773,
      "step": 380
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00039000000000000005,
      "loss": 5.8504,
      "step": 390
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.0004,
      "loss": 5.4737,
      "step": 400
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00041,
      "loss": 5.6913,
      "step": 410
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00042,
      "loss": 5.7444,
      "step": 420
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00043,
      "loss": 5.6371,
      "step": 430
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00044,
      "loss": 5.6059,
      "step": 440
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00045000000000000004,
      "loss": 5.3563,
      "step": 450
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00046,
      "loss": 5.2794,
      "step": 460
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00047,
      "loss": 5.3608,
      "step": 470
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00048,
      "loss": 5.5027,
      "step": 480
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00049,
      "loss": 5.3745,
      "step": 490
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0005,
      "loss": 5.313,
      "step": 500
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0004998834770449779,
      "loss": 5.1712,
      "step": 510
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0004997669540899557,
      "loss": 5.219,
      "step": 520
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0004996504311349336,
      "loss": 5.3264,
      "step": 530
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0004995339081799114,
      "loss": 5.0162,
      "step": 540
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0004994173852248894,
      "loss": 4.986,
      "step": 550
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0004993008622698671,
      "loss": 4.8857,
      "step": 560
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0004991843393148451,
      "loss": 4.8639,
      "step": 570
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0004990678163598229,
      "loss": 5.2248,
      "step": 580
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.0004989512934048008,
      "loss": 4.8982,
      "step": 590
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.0004988347704497786,
      "loss": 5.0063,
      "step": 600
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.0004987182474947565,
      "loss": 4.7881,
      "step": 610
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.0004986017245397343,
      "loss": 4.824,
      "step": 620
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0004984852015847123,
      "loss": 4.5973,
      "step": 630
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00049836867862969,
      "loss": 4.5633,
      "step": 640
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.000498252155674668,
      "loss": 4.7742,
      "step": 650
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0004981356327196458,
      "loss": 4.5603,
      "step": 660
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0004980191097646236,
      "loss": 4.5855,
      "step": 670
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.0004979025868096015,
      "loss": 4.5635,
      "step": 680
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.0004977860638545793,
      "loss": 4.4144,
      "step": 690
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.0004976695408995572,
      "loss": 4.6402,
      "step": 700
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.000497553017944535,
      "loss": 4.4075,
      "step": 710
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.0004974364949895129,
      "loss": 4.7223,
      "step": 720
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.0004973199720344908,
      "loss": 4.383,
      "step": 730
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.0004972034490794687,
      "loss": 4.392,
      "step": 740
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.0004970869261244465,
      "loss": 4.1944,
      "step": 750
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0004969704031694244,
      "loss": 4.3497,
      "step": 760
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0004968538802144022,
      "loss": 4.0244,
      "step": 770
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0004967373572593801,
      "loss": 4.2946,
      "step": 780
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0004966208343043579,
      "loss": 4.0822,
      "step": 790
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0004965043113493358,
      "loss": 4.0192,
      "step": 800
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.0004963877883943137,
      "loss": 4.1678,
      "step": 810
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.0004962712654392916,
      "loss": 3.9266,
      "step": 820
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.0004961547424842694,
      "loss": 3.9863,
      "step": 830
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.0004960382195292473,
      "loss": 3.769,
      "step": 840
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.0004959216965742251,
      "loss": 3.8906,
      "step": 850
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.000495805173619203,
      "loss": 3.9499,
      "step": 860
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.0004956886506641808,
      "loss": 3.8681,
      "step": 870
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.0004955721277091587,
      "loss": 3.8131,
      "step": 880
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0004954556047541366,
      "loss": 4.111,
      "step": 890
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0004953390817991144,
      "loss": 3.5835,
      "step": 900
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0004952225588440923,
      "loss": 3.8712,
      "step": 910
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0004951060358890702,
      "loss": 3.5934,
      "step": 920
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.000494989512934048,
      "loss": 3.6938,
      "step": 930
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.0004948729899790259,
      "loss": 3.7079,
      "step": 940
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.0004947564670240037,
      "loss": 3.6767,
      "step": 950
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.0004946399440689816,
      "loss": 3.731,
      "step": 960
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.0004945234211139595,
      "loss": 3.5824,
      "step": 970
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.0004944068981589373,
      "loss": 3.3612,
      "step": 980
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.0004942903752039151,
      "loss": 3.3219,
      "step": 990
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.0004941738522488931,
      "loss": 3.5787,
      "step": 1000
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.0004940573292938709,
      "loss": 3.4477,
      "step": 1010
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.0004939408063388488,
      "loss": 3.4272,
      "step": 1020
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.0004938242833838266,
      "loss": 3.5109,
      "step": 1030
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.0004937077604288045,
      "loss": 3.0641,
      "step": 1040
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.0004935912374737823,
      "loss": 3.3225,
      "step": 1050
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.0004934747145187602,
      "loss": 3.2164,
      "step": 1060
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.000493358191563738,
      "loss": 3.2713,
      "step": 1070
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.000493241668608716,
      "loss": 3.2336,
      "step": 1080
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.0004931251456536937,
      "loss": 3.2406,
      "step": 1090
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.0004930086226986717,
      "loss": 3.2814,
      "step": 1100
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.0004928920997436495,
      "loss": 3.237,
      "step": 1110
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.0004927755767886274,
      "loss": 3.1105,
      "step": 1120
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.0004926590538336052,
      "loss": 3.121,
      "step": 1130
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.0004925425308785831,
      "loss": 2.8725,
      "step": 1140
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.0004924260079235609,
      "loss": 3.2262,
      "step": 1150
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.0004923094849685389,
      "loss": 3.0991,
      "step": 1160
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.0004921929620135166,
      "loss": 2.8996,
      "step": 1170
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.0004920764390584946,
      "loss": 3.2996,
      "step": 1180
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.0004919599161034724,
      "loss": 3.0485,
      "step": 1190
    },
    {
      "epoch": 0.28,
      "learning_rate": 0.0004918433931484503,
      "loss": 2.9699,
      "step": 1200
    },
    {
      "epoch": 0.28,
      "learning_rate": 0.0004917268701934281,
      "loss": 2.8221,
      "step": 1210
    },
    {
      "epoch": 0.28,
      "learning_rate": 0.000491610347238406,
      "loss": 2.9691,
      "step": 1220
    },
    {
      "epoch": 0.28,
      "learning_rate": 0.0004914938242833838,
      "loss": 2.8891,
      "step": 1230
    },
    {
      "epoch": 0.29,
      "learning_rate": 0.0004913773013283617,
      "loss": 2.8668,
      "step": 1240
    },
    {
      "epoch": 0.29,
      "learning_rate": 0.0004912607783733395,
      "loss": 2.8655,
      "step": 1250
    },
    {
      "epoch": 0.29,
      "learning_rate": 0.0004911442554183175,
      "loss": 2.8323,
      "step": 1260
    },
    {
      "epoch": 0.29,
      "learning_rate": 0.0004910277324632953,
      "loss": 2.775,
      "step": 1270
    },
    {
      "epoch": 0.29,
      "learning_rate": 0.0004909112095082732,
      "loss": 2.7235,
      "step": 1280
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.000490794686553251,
      "loss": 2.7463,
      "step": 1290
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.0004906781635982289,
      "loss": 2.8081,
      "step": 1300
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.0004905616406432067,
      "loss": 2.7157,
      "step": 1310
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.0004904451176881846,
      "loss": 2.7714,
      "step": 1320
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.0004903285947331624,
      "loss": 2.9056,
      "step": 1330
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.0004902120717781404,
      "loss": 2.7927,
      "step": 1340
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.0004900955488231182,
      "loss": 2.718,
      "step": 1350
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.0004899790258680961,
      "loss": 2.7283,
      "step": 1360
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.0004898625029130739,
      "loss": 2.6543,
      "step": 1370
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.0004897459799580518,
      "loss": 2.5825,
      "step": 1380
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.0004896294570030296,
      "loss": 2.6792,
      "step": 1390
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.0004895129340480074,
      "loss": 2.4749,
      "step": 1400
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.0004893964110929853,
      "loss": 2.7346,
      "step": 1410
    },
    {
      "epoch": 0.33,
      "learning_rate": 0.0004892798881379633,
      "loss": 2.4702,
      "step": 1420
    },
    {
      "epoch": 0.33,
      "learning_rate": 0.000489163365182941,
      "loss": 2.4849,
      "step": 1430
    },
    {
      "epoch": 0.33,
      "learning_rate": 0.000489046842227919,
      "loss": 2.5751,
      "step": 1440
    },
    {
      "epoch": 0.33,
      "learning_rate": 0.0004889303192728968,
      "loss": 2.7505,
      "step": 1450
    },
    {
      "epoch": 0.34,
      "learning_rate": 0.0004888137963178746,
      "loss": 2.6167,
      "step": 1460
    },
    {
      "epoch": 0.34,
      "learning_rate": 0.0004886972733628525,
      "loss": 2.5749,
      "step": 1470
    },
    {
      "epoch": 0.34,
      "learning_rate": 0.0004885807504078303,
      "loss": 2.4734,
      "step": 1480
    },
    {
      "epoch": 0.34,
      "learning_rate": 0.0004884642274528082,
      "loss": 2.5399,
      "step": 1490
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.000488347704497786,
      "loss": 2.7156,
      "step": 1500
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.0004882311815427639,
      "loss": 2.6661,
      "step": 1510
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.0004881146585877418,
      "loss": 2.4842,
      "step": 1520
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.00048799813563271964,
      "loss": 2.5076,
      "step": 1530
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.00048788161267769754,
      "loss": 2.4286,
      "step": 1540
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.00048776508972267534,
      "loss": 2.3796,
      "step": 1550
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.00048764856676765324,
      "loss": 2.249,
      "step": 1560
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.0004875320438126311,
      "loss": 2.4257,
      "step": 1570
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.000487415520857609,
      "loss": 2.4112,
      "step": 1580
    },
    {
      "epoch": 0.37,
      "learning_rate": 0.0004872989979025868,
      "loss": 2.2194,
      "step": 1590
    },
    {
      "epoch": 0.37,
      "learning_rate": 0.0004871824749475647,
      "loss": 2.3593,
      "step": 1600
    },
    {
      "epoch": 0.37,
      "learning_rate": 0.00048706595199254254,
      "loss": 2.3137,
      "step": 1610
    },
    {
      "epoch": 0.37,
      "learning_rate": 0.0004869494290375204,
      "loss": 2.3152,
      "step": 1620
    },
    {
      "epoch": 0.38,
      "learning_rate": 0.00048683290608249823,
      "loss": 2.293,
      "step": 1630
    },
    {
      "epoch": 0.38,
      "learning_rate": 0.00048671638312747614,
      "loss": 2.3859,
      "step": 1640
    },
    {
      "epoch": 0.38,
      "learning_rate": 0.000486599860172454,
      "loss": 2.5195,
      "step": 1650
    },
    {
      "epoch": 0.38,
      "learning_rate": 0.00048648333721743183,
      "loss": 2.2668,
      "step": 1660
    },
    {
      "epoch": 0.38,
      "learning_rate": 0.0004863668142624097,
      "loss": 2.3753,
      "step": 1670
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.0004862502913073876,
      "loss": 2.4052,
      "step": 1680
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.00048613376835236543,
      "loss": 2.2952,
      "step": 1690
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.0004860172453973433,
      "loss": 2.2161,
      "step": 1700
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.00048590072244232113,
      "loss": 2.4161,
      "step": 1710
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.00048578419948729903,
      "loss": 2.2893,
      "step": 1720
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.0004856676765322769,
      "loss": 2.2338,
      "step": 1730
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.00048555115357725473,
      "loss": 1.9338,
      "step": 1740
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.0004854346306222326,
      "loss": 2.1428,
      "step": 1750
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.0004853181076672105,
      "loss": 2.0794,
      "step": 1760
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.0004852015847121883,
      "loss": 2.1585,
      "step": 1770
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.0004850850617571662,
      "loss": 2.0462,
      "step": 1780
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.000484968538802144,
      "loss": 2.1014,
      "step": 1790
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.0004848520158471219,
      "loss": 2.268,
      "step": 1800
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.0004847354928920997,
      "loss": 2.11,
      "step": 1810
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.0004846189699370776,
      "loss": 2.275,
      "step": 1820
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.00048450244698205547,
      "loss": 1.9513,
      "step": 1830
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.0004843859240270334,
      "loss": 2.1157,
      "step": 1840
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.00048426940107201117,
      "loss": 2.243,
      "step": 1850
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.00048415287811698907,
      "loss": 2.0249,
      "step": 1860
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.0004840363551619669,
      "loss": 2.2214,
      "step": 1870
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.0004839198322069448,
      "loss": 2.1383,
      "step": 1880
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.0004838033092519226,
      "loss": 2.0717,
      "step": 1890
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.0004836867862969005,
      "loss": 2.0318,
      "step": 1900
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.00048357026334187837,
      "loss": 2.2616,
      "step": 1910
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.00048345374038685627,
      "loss": 2.2362,
      "step": 1920
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.00048333721743183406,
      "loss": 1.9737,
      "step": 1930
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.00048322069447681197,
      "loss": 1.8598,
      "step": 1940
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.0004831041715217898,
      "loss": 2.0784,
      "step": 1950
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.00048298764856676766,
      "loss": 2.1202,
      "step": 1960
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.0004828711256117455,
      "loss": 2.1851,
      "step": 1970
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.00048275460265672336,
      "loss": 1.999,
      "step": 1980
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.00048263807970170126,
      "loss": 2.1535,
      "step": 1990
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.00048252155674667906,
      "loss": 1.927,
      "step": 2000
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.00048240503379165696,
      "loss": 1.8046,
      "step": 2010
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.0004822885108366348,
      "loss": 2.2255,
      "step": 2020
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.0004821719878816127,
      "loss": 1.8354,
      "step": 2030
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.0004820554649265905,
      "loss": 1.9391,
      "step": 2040
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.0004819389419715684,
      "loss": 2.16,
      "step": 2050
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.00048182241901654626,
      "loss": 2.0378,
      "step": 2060
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.00048170589606152416,
      "loss": 1.7791,
      "step": 2070
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.00048158937310650195,
      "loss": 1.9443,
      "step": 2080
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.00048147285015147985,
      "loss": 2.1102,
      "step": 2090
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.0004813563271964577,
      "loss": 1.9356,
      "step": 2100
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.0004812398042414356,
      "loss": 2.0797,
      "step": 2110
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.0004811232812864134,
      "loss": 1.9049,
      "step": 2120
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.0004810067583313913,
      "loss": 2.0014,
      "step": 2130
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.00048089023537636915,
      "loss": 1.8359,
      "step": 2140
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.00048077371242134705,
      "loss": 1.8638,
      "step": 2150
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.00048065718946632485,
      "loss": 1.851,
      "step": 2160
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.00048054066651130275,
      "loss": 1.813,
      "step": 2170
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.0004804241435562806,
      "loss": 1.8793,
      "step": 2180
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.00048030762060125845,
      "loss": 1.8115,
      "step": 2190
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.0004801910976462363,
      "loss": 1.89,
      "step": 2200
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.0004800745746912142,
      "loss": 1.6868,
      "step": 2210
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.00047995805173619205,
      "loss": 1.9705,
      "step": 2220
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.0004798415287811699,
      "loss": 1.7701,
      "step": 2230
    },
    {
      "epoch": 0.52,
      "learning_rate": 0.00047972500582614774,
      "loss": 1.8274,
      "step": 2240
    },
    {
      "epoch": 0.52,
      "learning_rate": 0.00047960848287112565,
      "loss": 1.9784,
      "step": 2250
    },
    {
      "epoch": 0.52,
      "learning_rate": 0.0004794919599161035,
      "loss": 1.8581,
      "step": 2260
    },
    {
      "epoch": 0.52,
      "learning_rate": 0.00047937543696108134,
      "loss": 1.7436,
      "step": 2270
    },
    {
      "epoch": 0.53,
      "learning_rate": 0.0004792589140060592,
      "loss": 1.8933,
      "step": 2280
    },
    {
      "epoch": 0.53,
      "learning_rate": 0.0004791423910510371,
      "loss": 1.8863,
      "step": 2290
    },
    {
      "epoch": 0.53,
      "learning_rate": 0.00047902586809601494,
      "loss": 1.6754,
      "step": 2300
    },
    {
      "epoch": 0.53,
      "learning_rate": 0.0004789093451409928,
      "loss": 1.8452,
      "step": 2310
    },
    {
      "epoch": 0.53,
      "learning_rate": 0.00047879282218597064,
      "loss": 1.7726,
      "step": 2320
    },
    {
      "epoch": 0.54,
      "learning_rate": 0.00047867629923094854,
      "loss": 1.6902,
      "step": 2330
    },
    {
      "epoch": 0.54,
      "learning_rate": 0.0004785597762759264,
      "loss": 1.7203,
      "step": 2340
    },
    {
      "epoch": 0.54,
      "learning_rate": 0.00047844325332090424,
      "loss": 1.9117,
      "step": 2350
    },
    {
      "epoch": 0.54,
      "learning_rate": 0.0004783267303658821,
      "loss": 1.7419,
      "step": 2360
    },
    {
      "epoch": 0.55,
      "learning_rate": 0.00047821020741086,
      "loss": 1.6398,
      "step": 2370
    },
    {
      "epoch": 0.55,
      "learning_rate": 0.0004780936844558378,
      "loss": 1.8232,
      "step": 2380
    },
    {
      "epoch": 0.55,
      "learning_rate": 0.0004779771615008157,
      "loss": 1.811,
      "step": 2390
    },
    {
      "epoch": 0.55,
      "learning_rate": 0.00047786063854579353,
      "loss": 1.7618,
      "step": 2400
    },
    {
      "epoch": 0.56,
      "learning_rate": 0.00047774411559077144,
      "loss": 1.8015,
      "step": 2410
    },
    {
      "epoch": 0.56,
      "learning_rate": 0.00047762759263574923,
      "loss": 1.6506,
      "step": 2420
    },
    {
      "epoch": 0.56,
      "learning_rate": 0.0004775110696807271,
      "loss": 1.584,
      "step": 2430
    },
    {
      "epoch": 0.56,
      "learning_rate": 0.000477394546725705,
      "loss": 1.7166,
      "step": 2440
    },
    {
      "epoch": 0.56,
      "learning_rate": 0.00047727802377068283,
      "loss": 1.6907,
      "step": 2450
    },
    {
      "epoch": 0.57,
      "learning_rate": 0.0004771615008156607,
      "loss": 1.6075,
      "step": 2460
    },
    {
      "epoch": 0.57,
      "learning_rate": 0.0004770449778606385,
      "loss": 1.6789,
      "step": 2470
    },
    {
      "epoch": 0.57,
      "learning_rate": 0.00047692845490561643,
      "loss": 1.6016,
      "step": 2480
    },
    {
      "epoch": 0.57,
      "learning_rate": 0.0004768119319505943,
      "loss": 1.737,
      "step": 2490
    },
    {
      "epoch": 0.58,
      "learning_rate": 0.0004766954089955721,
      "loss": 1.8943,
      "step": 2500
    },
    {
      "epoch": 0.58,
      "learning_rate": 0.00047657888604055,
      "loss": 1.8478,
      "step": 2510
    },
    {
      "epoch": 0.58,
      "learning_rate": 0.0004764623630855279,
      "loss": 1.6695,
      "step": 2520
    },
    {
      "epoch": 0.58,
      "learning_rate": 0.00047634584013050567,
      "loss": 1.6879,
      "step": 2530
    },
    {
      "epoch": 0.59,
      "learning_rate": 0.0004762293171754836,
      "loss": 1.6278,
      "step": 2540
    },
    {
      "epoch": 0.59,
      "learning_rate": 0.0004761127942204614,
      "loss": 1.7355,
      "step": 2550
    },
    {
      "epoch": 0.59,
      "learning_rate": 0.0004759962712654393,
      "loss": 1.8658,
      "step": 2560
    },
    {
      "epoch": 0.59,
      "learning_rate": 0.0004758797483104171,
      "loss": 1.6834,
      "step": 2570
    },
    {
      "epoch": 0.59,
      "learning_rate": 0.000475763225355395,
      "loss": 1.6484,
      "step": 2580
    },
    {
      "epoch": 0.6,
      "learning_rate": 0.00047564670240037287,
      "loss": 1.6275,
      "step": 2590
    },
    {
      "epoch": 0.6,
      "learning_rate": 0.00047553017944535077,
      "loss": 1.6433,
      "step": 2600
    },
    {
      "epoch": 0.6,
      "learning_rate": 0.00047541365649032857,
      "loss": 1.6679,
      "step": 2610
    },
    {
      "epoch": 0.6,
      "learning_rate": 0.00047529713353530647,
      "loss": 1.615,
      "step": 2620
    },
    {
      "epoch": 0.61,
      "learning_rate": 0.0004751806105802843,
      "loss": 1.6623,
      "step": 2630
    },
    {
      "epoch": 0.61,
      "learning_rate": 0.0004750640876252622,
      "loss": 1.5745,
      "step": 2640
    },
    {
      "epoch": 0.61,
      "learning_rate": 0.00047494756467024,
      "loss": 1.3781,
      "step": 2650
    },
    {
      "epoch": 0.61,
      "learning_rate": 0.0004748310417152179,
      "loss": 1.6752,
      "step": 2660
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.00047471451876019577,
      "loss": 1.7025,
      "step": 2670
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.00047459799580517367,
      "loss": 1.6855,
      "step": 2680
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.00047448147285015146,
      "loss": 1.6679,
      "step": 2690
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.00047436494989512936,
      "loss": 1.6746,
      "step": 2700
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.0004742484269401072,
      "loss": 1.5759,
      "step": 2710
    },
    {
      "epoch": 0.63,
      "learning_rate": 0.00047413190398508506,
      "loss": 1.3709,
      "step": 2720
    },
    {
      "epoch": 0.63,
      "learning_rate": 0.0004740153810300629,
      "loss": 1.7639,
      "step": 2730
    },
    {
      "epoch": 0.63,
      "learning_rate": 0.0004738988580750408,
      "loss": 1.3738,
      "step": 2740
    },
    {
      "epoch": 0.63,
      "learning_rate": 0.00047378233512001866,
      "loss": 1.4433,
      "step": 2750
    },
    {
      "epoch": 0.64,
      "learning_rate": 0.0004736658121649965,
      "loss": 1.4435,
      "step": 2760
    },
    {
      "epoch": 0.64,
      "learning_rate": 0.00047354928920997436,
      "loss": 1.4654,
      "step": 2770
    },
    {
      "epoch": 0.64,
      "learning_rate": 0.00047343276625495226,
      "loss": 1.5323,
      "step": 2780
    },
    {
      "epoch": 0.64,
      "learning_rate": 0.0004733162432999301,
      "loss": 1.7076,
      "step": 2790
    },
    {
      "epoch": 0.64,
      "learning_rate": 0.00047319972034490796,
      "loss": 1.6969,
      "step": 2800
    },
    {
      "epoch": 0.65,
      "learning_rate": 0.0004730831973898858,
      "loss": 1.7028,
      "step": 2810
    },
    {
      "epoch": 0.65,
      "learning_rate": 0.0004729666744348637,
      "loss": 1.5403,
      "step": 2820
    },
    {
      "epoch": 0.65,
      "learning_rate": 0.00047285015147984156,
      "loss": 1.5397,
      "step": 2830
    },
    {
      "epoch": 0.65,
      "learning_rate": 0.0004727336285248194,
      "loss": 1.5315,
      "step": 2840
    },
    {
      "epoch": 0.66,
      "learning_rate": 0.00047261710556979725,
      "loss": 1.4734,
      "step": 2850
    },
    {
      "epoch": 0.66,
      "learning_rate": 0.00047250058261477516,
      "loss": 1.7396,
      "step": 2860
    },
    {
      "epoch": 0.66,
      "learning_rate": 0.000472384059659753,
      "loss": 1.476,
      "step": 2870
    },
    {
      "epoch": 0.66,
      "learning_rate": 0.00047226753670473085,
      "loss": 1.5518,
      "step": 2880
    },
    {
      "epoch": 0.67,
      "learning_rate": 0.0004721510137497087,
      "loss": 1.3857,
      "step": 2890
    },
    {
      "epoch": 0.67,
      "learning_rate": 0.0004720344907946866,
      "loss": 1.6634,
      "step": 2900
    },
    {
      "epoch": 0.67,
      "learning_rate": 0.0004719179678396644,
      "loss": 1.6851,
      "step": 2910
    },
    {
      "epoch": 0.67,
      "learning_rate": 0.00047180144488464225,
      "loss": 1.5682,
      "step": 2920
    },
    {
      "epoch": 0.67,
      "learning_rate": 0.00047168492192962015,
      "loss": 1.4285,
      "step": 2930
    },
    {
      "epoch": 0.68,
      "learning_rate": 0.000471568398974598,
      "loss": 1.53,
      "step": 2940
    },
    {
      "epoch": 0.68,
      "learning_rate": 0.00047145187601957585,
      "loss": 1.6139,
      "step": 2950
    },
    {
      "epoch": 0.68,
      "learning_rate": 0.0004713353530645537,
      "loss": 1.7024,
      "step": 2960
    },
    {
      "epoch": 0.68,
      "learning_rate": 0.0004712188301095316,
      "loss": 1.5183,
      "step": 2970
    },
    {
      "epoch": 0.69,
      "learning_rate": 0.00047110230715450944,
      "loss": 1.6133,
      "step": 2980
    },
    {
      "epoch": 0.69,
      "learning_rate": 0.0004709857841994873,
      "loss": 1.7082,
      "step": 2990
    },
    {
      "epoch": 0.69,
      "learning_rate": 0.00047086926124446514,
      "loss": 1.4825,
      "step": 3000
    },
    {
      "epoch": 0.69,
      "learning_rate": 0.00047075273828944304,
      "loss": 1.3294,
      "step": 3010
    },
    {
      "epoch": 0.7,
      "learning_rate": 0.0004706362153344209,
      "loss": 1.5075,
      "step": 3020
    },
    {
      "epoch": 0.7,
      "learning_rate": 0.00047051969237939874,
      "loss": 1.5766,
      "step": 3030
    },
    {
      "epoch": 0.7,
      "learning_rate": 0.0004704031694243766,
      "loss": 1.5618,
      "step": 3040
    },
    {
      "epoch": 0.7,
      "learning_rate": 0.0004702866464693545,
      "loss": 1.6336,
      "step": 3050
    },
    {
      "epoch": 0.7,
      "learning_rate": 0.00047017012351433234,
      "loss": 1.4952,
      "step": 3060
    },
    {
      "epoch": 0.71,
      "learning_rate": 0.0004700536005593102,
      "loss": 1.4659,
      "step": 3070
    },
    {
      "epoch": 0.71,
      "learning_rate": 0.00046993707760428804,
      "loss": 1.6056,
      "step": 3080
    },
    {
      "epoch": 0.71,
      "learning_rate": 0.00046982055464926594,
      "loss": 1.5604,
      "step": 3090
    },
    {
      "epoch": 0.71,
      "learning_rate": 0.00046970403169424373,
      "loss": 1.5006,
      "step": 3100
    },
    {
      "epoch": 0.72,
      "learning_rate": 0.00046958750873922164,
      "loss": 1.431,
      "step": 3110
    },
    {
      "epoch": 0.72,
      "learning_rate": 0.0004694709857841995,
      "loss": 1.3415,
      "step": 3120
    },
    {
      "epoch": 0.72,
      "learning_rate": 0.0004693544628291774,
      "loss": 1.5172,
      "step": 3130
    },
    {
      "epoch": 0.72,
      "learning_rate": 0.0004692379398741552,
      "loss": 1.3327,
      "step": 3140
    },
    {
      "epoch": 0.73,
      "learning_rate": 0.0004691214169191331,
      "loss": 1.4461,
      "step": 3150
    },
    {
      "epoch": 0.73,
      "learning_rate": 0.00046900489396411093,
      "loss": 1.3816,
      "step": 3160
    },
    {
      "epoch": 0.73,
      "learning_rate": 0.00046888837100908884,
      "loss": 1.5017,
      "step": 3170
    },
    {
      "epoch": 0.73,
      "learning_rate": 0.00046877184805406663,
      "loss": 1.3864,
      "step": 3180
    },
    {
      "epoch": 0.73,
      "learning_rate": 0.00046865532509904453,
      "loss": 1.5376,
      "step": 3190
    },
    {
      "epoch": 0.74,
      "learning_rate": 0.0004685388021440224,
      "loss": 1.3746,
      "step": 3200
    },
    {
      "epoch": 0.74,
      "learning_rate": 0.0004684222791890003,
      "loss": 1.2224,
      "step": 3210
    },
    {
      "epoch": 0.74,
      "learning_rate": 0.0004683057562339781,
      "loss": 1.3624,
      "step": 3220
    },
    {
      "epoch": 0.74,
      "learning_rate": 0.000468189233278956,
      "loss": 1.5236,
      "step": 3230
    },
    {
      "epoch": 0.75,
      "learning_rate": 0.00046807271032393383,
      "loss": 1.3457,
      "step": 3240
    },
    {
      "epoch": 0.75,
      "learning_rate": 0.00046795618736891173,
      "loss": 1.5382,
      "step": 3250
    },
    {
      "epoch": 0.75,
      "learning_rate": 0.0004678396644138895,
      "loss": 1.4564,
      "step": 3260
    },
    {
      "epoch": 0.75,
      "learning_rate": 0.00046772314145886743,
      "loss": 1.3994,
      "step": 3270
    },
    {
      "epoch": 0.76,
      "learning_rate": 0.0004676066185038453,
      "loss": 1.3252,
      "step": 3280
    },
    {
      "epoch": 0.76,
      "learning_rate": 0.0004674900955488231,
      "loss": 1.464,
      "step": 3290
    },
    {
      "epoch": 0.76,
      "learning_rate": 0.00046737357259380097,
      "loss": 1.2866,
      "step": 3300
    },
    {
      "epoch": 0.76,
      "learning_rate": 0.0004672570496387789,
      "loss": 1.566,
      "step": 3310
    },
    {
      "epoch": 0.76,
      "learning_rate": 0.0004671405266837567,
      "loss": 1.5683,
      "step": 3320
    },
    {
      "epoch": 0.77,
      "learning_rate": 0.00046702400372873457,
      "loss": 1.2691,
      "step": 3330
    },
    {
      "epoch": 0.77,
      "learning_rate": 0.0004669074807737124,
      "loss": 1.3731,
      "step": 3340
    },
    {
      "epoch": 0.77,
      "learning_rate": 0.0004667909578186903,
      "loss": 1.3997,
      "step": 3350
    },
    {
      "epoch": 0.77,
      "learning_rate": 0.00046667443486366817,
      "loss": 1.5178,
      "step": 3360
    },
    {
      "epoch": 0.78,
      "learning_rate": 0.000466557911908646,
      "loss": 1.4758,
      "step": 3370
    },
    {
      "epoch": 0.78,
      "learning_rate": 0.00046644138895362387,
      "loss": 1.2601,
      "step": 3380
    },
    {
      "epoch": 0.78,
      "learning_rate": 0.00046632486599860177,
      "loss": 1.5102,
      "step": 3390
    },
    {
      "epoch": 0.78,
      "learning_rate": 0.0004662083430435796,
      "loss": 1.5268,
      "step": 3400
    },
    {
      "epoch": 0.79,
      "learning_rate": 0.0004660918200885574,
      "loss": 1.4082,
      "step": 3410
    },
    {
      "epoch": 0.79,
      "learning_rate": 0.0004659752971335353,
      "loss": 1.3945,
      "step": 3420
    },
    {
      "epoch": 0.79,
      "learning_rate": 0.00046585877417851316,
      "loss": 1.4644,
      "step": 3430
    },
    {
      "epoch": 0.79,
      "learning_rate": 0.00046574225122349107,
      "loss": 1.3928,
      "step": 3440
    },
    {
      "epoch": 0.79,
      "learning_rate": 0.00046562572826846886,
      "loss": 1.3034,
      "step": 3450
    },
    {
      "epoch": 0.8,
      "learning_rate": 0.00046550920531344676,
      "loss": 1.3036,
      "step": 3460
    },
    {
      "epoch": 0.8,
      "learning_rate": 0.0004653926823584246,
      "loss": 1.5186,
      "step": 3470
    },
    {
      "epoch": 0.8,
      "learning_rate": 0.00046527615940340246,
      "loss": 1.3525,
      "step": 3480
    },
    {
      "epoch": 0.8,
      "learning_rate": 0.0004651596364483803,
      "loss": 1.4504,
      "step": 3490
    },
    {
      "epoch": 0.81,
      "learning_rate": 0.0004650431134933582,
      "loss": 1.2229,
      "step": 3500
    },
    {
      "epoch": 0.81,
      "learning_rate": 0.00046492659053833606,
      "loss": 1.2556,
      "step": 3510
    },
    {
      "epoch": 0.81,
      "learning_rate": 0.0004648100675833139,
      "loss": 1.2992,
      "step": 3520
    },
    {
      "epoch": 0.81,
      "learning_rate": 0.00046469354462829176,
      "loss": 1.256,
      "step": 3530
    },
    {
      "epoch": 0.82,
      "learning_rate": 0.00046457702167326966,
      "loss": 1.4052,
      "step": 3540
    },
    {
      "epoch": 0.82,
      "learning_rate": 0.0004644604987182475,
      "loss": 1.4992,
      "step": 3550
    },
    {
      "epoch": 0.82,
      "learning_rate": 0.00046434397576322536,
      "loss": 1.2673,
      "step": 3560
    },
    {
      "epoch": 0.82,
      "learning_rate": 0.0004642274528082032,
      "loss": 1.3817,
      "step": 3570
    },
    {
      "epoch": 0.82,
      "learning_rate": 0.0004641109298531811,
      "loss": 1.159,
      "step": 3580
    },
    {
      "epoch": 0.83,
      "learning_rate": 0.00046399440689815896,
      "loss": 1.3467,
      "step": 3590
    },
    {
      "epoch": 0.83,
      "learning_rate": 0.0004638778839431368,
      "loss": 1.3512,
      "step": 3600
    },
    {
      "epoch": 0.83,
      "learning_rate": 0.00046376136098811465,
      "loss": 1.31,
      "step": 3610
    },
    {
      "epoch": 0.83,
      "learning_rate": 0.00046364483803309255,
      "loss": 1.2474,
      "step": 3620
    },
    {
      "epoch": 0.84,
      "learning_rate": 0.0004635283150780704,
      "loss": 1.3939,
      "step": 3630
    },
    {
      "epoch": 0.84,
      "learning_rate": 0.00046341179212304825,
      "loss": 1.3823,
      "step": 3640
    },
    {
      "epoch": 0.84,
      "learning_rate": 0.0004632952691680261,
      "loss": 1.3648,
      "step": 3650
    },
    {
      "epoch": 0.84,
      "learning_rate": 0.000463178746213004,
      "loss": 1.1649,
      "step": 3660
    },
    {
      "epoch": 0.85,
      "learning_rate": 0.0004630622232579818,
      "loss": 1.2892,
      "step": 3670
    },
    {
      "epoch": 0.85,
      "learning_rate": 0.0004629457003029597,
      "loss": 1.3038,
      "step": 3680
    },
    {
      "epoch": 0.85,
      "learning_rate": 0.00046282917734793755,
      "loss": 1.2314,
      "step": 3690
    },
    {
      "epoch": 0.85,
      "learning_rate": 0.00046271265439291545,
      "loss": 1.1314,
      "step": 3700
    },
    {
      "epoch": 0.85,
      "learning_rate": 0.00046259613143789324,
      "loss": 1.3848,
      "step": 3710
    },
    {
      "epoch": 0.86,
      "learning_rate": 0.00046247960848287115,
      "loss": 1.3334,
      "step": 3720
    },
    {
      "epoch": 0.86,
      "learning_rate": 0.000462363085527849,
      "loss": 1.2365,
      "step": 3730
    },
    {
      "epoch": 0.86,
      "learning_rate": 0.0004622465625728269,
      "loss": 1.2673,
      "step": 3740
    },
    {
      "epoch": 0.86,
      "learning_rate": 0.0004621300396178047,
      "loss": 1.2443,
      "step": 3750
    },
    {
      "epoch": 0.87,
      "learning_rate": 0.0004620135166627826,
      "loss": 1.2275,
      "step": 3760
    },
    {
      "epoch": 0.87,
      "learning_rate": 0.00046189699370776044,
      "loss": 1.3776,
      "step": 3770
    },
    {
      "epoch": 0.87,
      "learning_rate": 0.00046178047075273835,
      "loss": 1.4364,
      "step": 3780
    },
    {
      "epoch": 0.87,
      "learning_rate": 0.00046166394779771614,
      "loss": 1.3173,
      "step": 3790
    },
    {
      "epoch": 0.88,
      "learning_rate": 0.00046154742484269404,
      "loss": 1.3461,
      "step": 3800
    },
    {
      "epoch": 0.88,
      "learning_rate": 0.0004614309018876719,
      "loss": 1.3649,
      "step": 3810
    },
    {
      "epoch": 0.88,
      "learning_rate": 0.00046131437893264974,
      "loss": 1.3683,
      "step": 3820
    },
    {
      "epoch": 0.88,
      "learning_rate": 0.0004611978559776276,
      "loss": 1.3727,
      "step": 3830
    },
    {
      "epoch": 0.88,
      "learning_rate": 0.0004610813330226055,
      "loss": 1.1828,
      "step": 3840
    },
    {
      "epoch": 0.89,
      "learning_rate": 0.00046096481006758334,
      "loss": 1.19,
      "step": 3850
    },
    {
      "epoch": 0.89,
      "learning_rate": 0.0004608482871125612,
      "loss": 1.3646,
      "step": 3860
    },
    {
      "epoch": 0.89,
      "learning_rate": 0.00046073176415753904,
      "loss": 1.1841,
      "step": 3870
    },
    {
      "epoch": 0.89,
      "learning_rate": 0.0004606152412025169,
      "loss": 1.1462,
      "step": 3880
    },
    {
      "epoch": 0.9,
      "learning_rate": 0.0004604987182474948,
      "loss": 1.1347,
      "step": 3890
    },
    {
      "epoch": 0.9,
      "learning_rate": 0.0004603821952924726,
      "loss": 1.17,
      "step": 3900
    },
    {
      "epoch": 0.9,
      "learning_rate": 0.0004602656723374505,
      "loss": 1.2542,
      "step": 3910
    },
    {
      "epoch": 0.9,
      "learning_rate": 0.00046014914938242833,
      "loss": 1.4318,
      "step": 3920
    },
    {
      "epoch": 0.91,
      "learning_rate": 0.00046003262642740623,
      "loss": 1.3082,
      "step": 3930
    },
    {
      "epoch": 0.91,
      "learning_rate": 0.00045991610347238403,
      "loss": 1.2716,
      "step": 3940
    },
    {
      "epoch": 0.91,
      "learning_rate": 0.00045979958051736193,
      "loss": 1.2412,
      "step": 3950
    },
    {
      "epoch": 0.91,
      "learning_rate": 0.0004596830575623398,
      "loss": 1.3664,
      "step": 3960
    },
    {
      "epoch": 0.91,
      "learning_rate": 0.0004595665346073177,
      "loss": 1.2538,
      "step": 3970
    },
    {
      "epoch": 0.92,
      "learning_rate": 0.0004594500116522955,
      "loss": 1.2536,
      "step": 3980
    },
    {
      "epoch": 0.92,
      "learning_rate": 0.0004593334886972734,
      "loss": 1.1971,
      "step": 3990
    },
    {
      "epoch": 0.92,
      "learning_rate": 0.0004592169657422512,
      "loss": 1.2953,
      "step": 4000
    },
    {
      "epoch": 0.92,
      "learning_rate": 0.0004591004427872291,
      "loss": 1.158,
      "step": 4010
    },
    {
      "epoch": 0.93,
      "learning_rate": 0.0004589839198322069,
      "loss": 1.5582,
      "step": 4020
    },
    {
      "epoch": 0.93,
      "learning_rate": 0.0004588673968771848,
      "loss": 1.1798,
      "step": 4030
    },
    {
      "epoch": 0.93,
      "learning_rate": 0.0004587508739221627,
      "loss": 1.184,
      "step": 4040
    },
    {
      "epoch": 0.93,
      "learning_rate": 0.0004586343509671405,
      "loss": 1.3451,
      "step": 4050
    },
    {
      "epoch": 0.94,
      "learning_rate": 0.00045851782801211837,
      "loss": 1.2911,
      "step": 4060
    },
    {
      "epoch": 0.94,
      "learning_rate": 0.0004584013050570963,
      "loss": 1.3576,
      "step": 4070
    },
    {
      "epoch": 0.94,
      "learning_rate": 0.0004582847821020741,
      "loss": 1.3262,
      "step": 4080
    },
    {
      "epoch": 0.94,
      "learning_rate": 0.00045816825914705197,
      "loss": 1.2768,
      "step": 4090
    },
    {
      "epoch": 0.94,
      "learning_rate": 0.0004580517361920298,
      "loss": 1.1113,
      "step": 4100
    },
    {
      "epoch": 0.95,
      "learning_rate": 0.0004579352132370077,
      "loss": 1.3998,
      "step": 4110
    },
    {
      "epoch": 0.95,
      "learning_rate": 0.00045781869028198557,
      "loss": 1.4512,
      "step": 4120
    },
    {
      "epoch": 0.95,
      "learning_rate": 0.0004577021673269634,
      "loss": 1.2316,
      "step": 4130
    },
    {
      "epoch": 0.95,
      "learning_rate": 0.00045758564437194127,
      "loss": 1.1764,
      "step": 4140
    },
    {
      "epoch": 0.96,
      "learning_rate": 0.00045746912141691917,
      "loss": 1.0234,
      "step": 4150
    },
    {
      "epoch": 0.96,
      "learning_rate": 0.000457352598461897,
      "loss": 1.2533,
      "step": 4160
    },
    {
      "epoch": 0.96,
      "learning_rate": 0.00045723607550687487,
      "loss": 1.1746,
      "step": 4170
    },
    {
      "epoch": 0.96,
      "learning_rate": 0.0004571195525518527,
      "loss": 1.057,
      "step": 4180
    },
    {
      "epoch": 0.97,
      "learning_rate": 0.0004570030295968306,
      "loss": 1.2712,
      "step": 4190
    },
    {
      "epoch": 0.97,
      "learning_rate": 0.0004568865066418084,
      "loss": 1.1473,
      "step": 4200
    },
    {
      "epoch": 0.97,
      "learning_rate": 0.0004567699836867863,
      "loss": 1.3898,
      "step": 4210
    },
    {
      "epoch": 0.97,
      "learning_rate": 0.00045665346073176416,
      "loss": 1.3249,
      "step": 4220
    },
    {
      "epoch": 0.97,
      "learning_rate": 0.00045653693777674206,
      "loss": 1.2951,
      "step": 4230
    },
    {
      "epoch": 0.98,
      "learning_rate": 0.00045642041482171986,
      "loss": 1.1313,
      "step": 4240
    },
    {
      "epoch": 0.98,
      "learning_rate": 0.00045630389186669776,
      "loss": 1.234,
      "step": 4250
    },
    {
      "epoch": 0.98,
      "learning_rate": 0.0004561873689116756,
      "loss": 1.2596,
      "step": 4260
    },
    {
      "epoch": 0.98,
      "learning_rate": 0.0004560708459566535,
      "loss": 1.2363,
      "step": 4270
    },
    {
      "epoch": 0.99,
      "learning_rate": 0.0004559543230016313,
      "loss": 1.0825,
      "step": 4280
    },
    {
      "epoch": 0.99,
      "learning_rate": 0.0004558378000466092,
      "loss": 1.1746,
      "step": 4290
    },
    {
      "epoch": 0.99,
      "learning_rate": 0.00045572127709158706,
      "loss": 1.3537,
      "step": 4300
    },
    {
      "epoch": 0.99,
      "learning_rate": 0.00045560475413656496,
      "loss": 1.1291,
      "step": 4310
    },
    {
      "epoch": 1.0,
      "learning_rate": 0.00045548823118154275,
      "loss": 1.165,
      "step": 4320
    },
    {
      "epoch": 1.0,
      "learning_rate": 0.00045537170822652066,
      "loss": 1.2263,
      "step": 4330
    },
    {
      "epoch": 1.0,
      "learning_rate": 0.0004552551852714985,
      "loss": 1.1819,
      "step": 4340
    },
    {
      "epoch": 1.0,
      "eval_Bleu_1": 0.004562230879638888,
      "eval_Bleu_2": 1.832697466618859e-11,
      "eval_Bleu_3": 3.2617476367192263e-14,
      "eval_Bleu_4": 1.4785080931925696e-15,
      "eval_ROUGE_L": 0.007574926108851014,
      "eval_cer": 0.6058397695082681,
      "eval_em": 0.4438259109311741,
      "eval_f1": 0.6230237395969866,
      "eval_loss": 1.157799243927002,
      "eval_runtime": 355.2702,
      "eval_samples_per_second": 16.686,
      "eval_steps_per_second": 3.338,
      "eval_wer": 0.7307975977241598,
      "step": 4341
    },
    {
      "epoch": 1.0,
      "learning_rate": 0.0004551386623164764,
      "loss": 1.0483,
      "step": 4350
    },
    {
      "epoch": 1.0,
      "learning_rate": 0.0004550221393614542,
      "loss": 0.9632,
      "step": 4360
    },
    {
      "epoch": 1.01,
      "learning_rate": 0.00045490561640643205,
      "loss": 1.0083,
      "step": 4370
    },
    {
      "epoch": 1.01,
      "learning_rate": 0.00045478909345140995,
      "loss": 0.861,
      "step": 4380
    },
    {
      "epoch": 1.01,
      "learning_rate": 0.00045467257049638775,
      "loss": 0.9485,
      "step": 4390
    },
    {
      "epoch": 1.01,
      "learning_rate": 0.00045455604754136565,
      "loss": 0.899,
      "step": 4400
    },
    {
      "epoch": 1.02,
      "learning_rate": 0.0004544395245863435,
      "loss": 0.9807,
      "step": 4410
    },
    {
      "epoch": 1.02,
      "learning_rate": 0.0004543230016313214,
      "loss": 0.9033,
      "step": 4420
    },
    {
      "epoch": 1.02,
      "learning_rate": 0.0004542064786762992,
      "loss": 0.8492,
      "step": 4430
    },
    {
      "epoch": 1.02,
      "learning_rate": 0.0004540899557212771,
      "loss": 0.9586,
      "step": 4440
    },
    {
      "epoch": 1.03,
      "learning_rate": 0.00045397343276625495,
      "loss": 0.9832,
      "step": 4450
    },
    {
      "epoch": 1.03,
      "learning_rate": 0.00045385690981123285,
      "loss": 0.8696,
      "step": 4460
    },
    {
      "epoch": 1.03,
      "learning_rate": 0.00045374038685621064,
      "loss": 0.8504,
      "step": 4470
    },
    {
      "epoch": 1.03,
      "learning_rate": 0.00045362386390118855,
      "loss": 0.8982,
      "step": 4480
    },
    {
      "epoch": 1.03,
      "learning_rate": 0.0004535073409461664,
      "loss": 0.9312,
      "step": 4490
    },
    {
      "epoch": 1.04,
      "learning_rate": 0.0004533908179911443,
      "loss": 0.8821,
      "step": 4500
    },
    {
      "epoch": 1.04,
      "learning_rate": 0.0004532742950361221,
      "loss": 0.9766,
      "step": 4510
    },
    {
      "epoch": 1.04,
      "learning_rate": 0.0004531577720811,
      "loss": 0.8721,
      "step": 4520
    },
    {
      "epoch": 1.04,
      "learning_rate": 0.00045304124912607784,
      "loss": 0.9112,
      "step": 4530
    },
    {
      "epoch": 1.05,
      "learning_rate": 0.00045292472617105574,
      "loss": 0.9845,
      "step": 4540
    },
    {
      "epoch": 1.05,
      "learning_rate": 0.00045280820321603354,
      "loss": 0.8542,
      "step": 4550
    },
    {
      "epoch": 1.05,
      "learning_rate": 0.00045269168026101144,
      "loss": 0.7962,
      "step": 4560
    },
    {
      "epoch": 1.05,
      "learning_rate": 0.0004525751573059893,
      "loss": 1.0538,
      "step": 4570
    },
    {
      "epoch": 1.05,
      "learning_rate": 0.00045245863435096714,
      "loss": 0.8934,
      "step": 4580
    },
    {
      "epoch": 1.06,
      "learning_rate": 0.000452342111395945,
      "loss": 0.7979,
      "step": 4590
    },
    {
      "epoch": 1.06,
      "learning_rate": 0.0004522255884409229,
      "loss": 0.8273,
      "step": 4600
    },
    {
      "epoch": 1.06,
      "learning_rate": 0.00045210906548590074,
      "loss": 0.9187,
      "step": 4610
    },
    {
      "epoch": 1.06,
      "learning_rate": 0.0004519925425308786,
      "loss": 0.8956,
      "step": 4620
    },
    {
      "epoch": 1.07,
      "learning_rate": 0.00045187601957585643,
      "loss": 0.8595,
      "step": 4630
    },
    {
      "epoch": 1.07,
      "learning_rate": 0.00045175949662083434,
      "loss": 0.9169,
      "step": 4640
    },
    {
      "epoch": 1.07,
      "learning_rate": 0.0004516429736658122,
      "loss": 1.0031,
      "step": 4650
    },
    {
      "epoch": 1.07,
      "learning_rate": 0.00045152645071079003,
      "loss": 0.9034,
      "step": 4660
    },
    {
      "epoch": 1.08,
      "learning_rate": 0.0004514099277557679,
      "loss": 0.9478,
      "step": 4670
    },
    {
      "epoch": 1.08,
      "learning_rate": 0.0004512934048007458,
      "loss": 1.0032,
      "step": 4680
    },
    {
      "epoch": 1.08,
      "learning_rate": 0.00045117688184572363,
      "loss": 0.8712,
      "step": 4690
    },
    {
      "epoch": 1.08,
      "learning_rate": 0.0004510603588907015,
      "loss": 1.0547,
      "step": 4700
    },
    {
      "epoch": 1.08,
      "learning_rate": 0.00045094383593567933,
      "loss": 0.8324,
      "step": 4710
    },
    {
      "epoch": 1.09,
      "learning_rate": 0.00045082731298065723,
      "loss": 0.8245,
      "step": 4720
    },
    {
      "epoch": 1.09,
      "learning_rate": 0.0004507107900256351,
      "loss": 0.9643,
      "step": 4730
    },
    {
      "epoch": 1.09,
      "learning_rate": 0.00045059426707061293,
      "loss": 0.9543,
      "step": 4740
    },
    {
      "epoch": 1.09,
      "learning_rate": 0.0004504777441155908,
      "loss": 0.8372,
      "step": 4750
    },
    {
      "epoch": 1.1,
      "learning_rate": 0.0004503612211605687,
      "loss": 0.8607,
      "step": 4760
    },
    {
      "epoch": 1.1,
      "learning_rate": 0.0004502446982055465,
      "loss": 0.9938,
      "step": 4770
    },
    {
      "epoch": 1.1,
      "learning_rate": 0.0004501281752505244,
      "loss": 0.847,
      "step": 4780
    },
    {
      "epoch": 1.1,
      "learning_rate": 0.0004500116522955022,
      "loss": 1.0042,
      "step": 4790
    },
    {
      "epoch": 1.11,
      "learning_rate": 0.00044989512934048013,
      "loss": 0.798,
      "step": 4800
    },
    {
      "epoch": 1.11,
      "learning_rate": 0.0004497786063854579,
      "loss": 1.0318,
      "step": 4810
    },
    {
      "epoch": 1.11,
      "learning_rate": 0.0004496620834304358,
      "loss": 0.9688,
      "step": 4820
    },
    {
      "epoch": 1.11,
| "learning_rate": 0.00044954556047541367, | |
| "loss": 0.9583, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "learning_rate": 0.0004494290375203916, | |
| "loss": 0.8226, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "learning_rate": 0.00044931251456536937, | |
| "loss": 0.9248, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "learning_rate": 0.0004491959916103472, | |
| "loss": 0.8285, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "learning_rate": 0.0004490794686553251, | |
| "loss": 0.7634, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "learning_rate": 0.00044896294570030297, | |
| "loss": 0.8457, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "learning_rate": 0.0004488464227452808, | |
| "loss": 1.01, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "learning_rate": 0.00044872989979025867, | |
| "loss": 0.8146, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "learning_rate": 0.00044861337683523657, | |
| "loss": 0.895, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "learning_rate": 0.00044849685388021436, | |
| "loss": 0.8456, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 1.14, | |
| "learning_rate": 0.00044838033092519226, | |
| "loss": 0.8982, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 1.14, | |
| "learning_rate": 0.0004482638079701701, | |
| "loss": 1.0002, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 1.14, | |
| "learning_rate": 0.000448147285015148, | |
| "loss": 0.9766, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 1.14, | |
| "learning_rate": 0.0004480307620601258, | |
| "loss": 0.8912, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 1.14, | |
| "learning_rate": 0.0004479142391051037, | |
| "loss": 0.9753, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "learning_rate": 0.00044779771615008156, | |
| "loss": 0.9715, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "learning_rate": 0.00044768119319505946, | |
| "loss": 0.9285, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "learning_rate": 0.00044756467024003726, | |
| "loss": 0.9086, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "learning_rate": 0.00044744814728501516, | |
| "loss": 0.9184, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "learning_rate": 0.000447331624329993, | |
| "loss": 0.7753, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "learning_rate": 0.0004472151013749709, | |
| "loss": 0.8555, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "learning_rate": 0.0004470985784199487, | |
| "loss": 0.8246, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "learning_rate": 0.0004469820554649266, | |
| "loss": 0.9383, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "learning_rate": 0.00044686553250990446, | |
| "loss": 0.9073, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "learning_rate": 0.00044674900955488236, | |
| "loss": 0.8755, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "learning_rate": 0.00044663248659986015, | |
| "loss": 0.9361, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "learning_rate": 0.00044651596364483806, | |
| "loss": 0.8497, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "learning_rate": 0.0004463994406898159, | |
| "loss": 1.0263, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "learning_rate": 0.00044628291773479375, | |
| "loss": 1.0664, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "learning_rate": 0.0004461663947797716, | |
| "loss": 0.9687, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "learning_rate": 0.0004460498718247495, | |
| "loss": 0.9282, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "learning_rate": 0.00044593334886972735, | |
| "loss": 0.9985, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "learning_rate": 0.0004458168259147052, | |
| "loss": 0.9896, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "learning_rate": 0.00044570030295968305, | |
| "loss": 0.9017, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "learning_rate": 0.00044558378000466095, | |
| "loss": 0.8536, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "learning_rate": 0.0004454672570496388, | |
| "loss": 0.6861, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "learning_rate": 0.00044535073409461665, | |
| "loss": 0.9811, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "learning_rate": 0.0004452342111395945, | |
| "loss": 0.8626, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "learning_rate": 0.0004451176881845724, | |
| "loss": 0.8912, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "learning_rate": 0.00044500116522955025, | |
| "loss": 0.9315, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "learning_rate": 0.0004448846422745281, | |
| "loss": 0.9026, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "learning_rate": 0.00044476811931950594, | |
| "loss": 0.9576, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "learning_rate": 0.00044465159636448385, | |
| "loss": 0.9743, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "learning_rate": 0.0004445350734094617, | |
| "loss": 0.8514, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "learning_rate": 0.00044441855045443954, | |
| "loss": 0.7854, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "learning_rate": 0.0004443020274994174, | |
| "loss": 0.9221, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "learning_rate": 0.0004441855045443953, | |
| "loss": 0.9368, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "learning_rate": 0.0004440689815893731, | |
| "loss": 0.8692, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "learning_rate": 0.000443952458634351, | |
| "loss": 0.94, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "learning_rate": 0.00044383593567932884, | |
| "loss": 0.888, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "learning_rate": 0.0004437194127243067, | |
| "loss": 0.8532, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "learning_rate": 0.00044360288976928454, | |
| "loss": 0.8541, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "learning_rate": 0.0004434863668142624, | |
| "loss": 0.7617, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "learning_rate": 0.0004433698438592403, | |
| "loss": 0.9921, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "learning_rate": 0.00044325332090421814, | |
| "loss": 0.9467, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "learning_rate": 0.000443136797949196, | |
| "loss": 1.0556, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "learning_rate": 0.00044302027499417383, | |
| "loss": 0.8402, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "learning_rate": 0.00044290375203915173, | |
| "loss": 0.9111, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "learning_rate": 0.0004427872290841296, | |
| "loss": 0.9139, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "learning_rate": 0.00044267070612910743, | |
| "loss": 0.8888, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "learning_rate": 0.0004425541831740853, | |
| "loss": 0.8642, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "learning_rate": 0.0004424376602190632, | |
| "loss": 1.0252, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 0.00044232113726404103, | |
| "loss": 0.8512, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 0.0004422046143090189, | |
| "loss": 0.8371, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 0.00044208809135399673, | |
| "loss": 0.9973, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 0.00044197156839897463, | |
| "loss": 0.9172, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 0.0004418550454439524, | |
| "loss": 0.9916, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "learning_rate": 0.0004417385224889303, | |
| "loss": 1.0238, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "learning_rate": 0.0004416219995339082, | |
| "loss": 0.8475, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "learning_rate": 0.0004415054765788861, | |
| "loss": 0.9013, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "learning_rate": 0.00044138895362386387, | |
| "loss": 0.9205, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "learning_rate": 0.0004412724306688418, | |
| "loss": 0.7697, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "learning_rate": 0.0004411559077138196, | |
| "loss": 0.9124, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "learning_rate": 0.0004410393847587975, | |
| "loss": 1.0096, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "learning_rate": 0.0004409228618037753, | |
| "loss": 1.0114, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "learning_rate": 0.0004408063388487532, | |
| "loss": 0.8859, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "learning_rate": 0.00044068981589373107, | |
| "loss": 1.1045, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "learning_rate": 0.000440573292938709, | |
| "loss": 0.8819, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "learning_rate": 0.00044045676998368677, | |
| "loss": 0.9543, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "learning_rate": 0.00044034024702866467, | |
| "loss": 0.8881, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "learning_rate": 0.0004402237240736425, | |
| "loss": 0.8167, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "learning_rate": 0.0004401072011186204, | |
| "loss": 0.8464, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "learning_rate": 0.0004399906781635982, | |
| "loss": 0.8874, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "learning_rate": 0.0004398741552085761, | |
| "loss": 0.9212, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "learning_rate": 0.00043975763225355397, | |
| "loss": 0.868, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "learning_rate": 0.0004396411092985318, | |
| "loss": 0.9033, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "learning_rate": 0.00043952458634350966, | |
| "loss": 0.9567, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "learning_rate": 0.00043940806338848757, | |
| "loss": 0.9298, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 0.0004392915404334654, | |
| "loss": 0.8746, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 0.00043917501747844326, | |
| "loss": 0.9638, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 0.0004390584945234211, | |
| "loss": 0.8256, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 0.000438941971568399, | |
| "loss": 0.9559, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 0.00043882544861337686, | |
| "loss": 0.866, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "learning_rate": 0.0004387089256583547, | |
| "loss": 0.83, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "learning_rate": 0.00043859240270333256, | |
| "loss": 0.8454, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "learning_rate": 0.00043847587974831046, | |
| "loss": 0.8315, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "learning_rate": 0.0004383593567932883, | |
| "loss": 0.9442, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 0.0004382428338382661, | |
| "loss": 0.808, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 0.000438126310883244, | |
| "loss": 1.0284, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 0.00043800978792822185, | |
| "loss": 0.882, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 0.00043789326497319976, | |
| "loss": 0.9005, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "learning_rate": 0.00043777674201817755, | |
| "loss": 0.8253, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "learning_rate": 0.00043766021906315545, | |
| "loss": 0.995, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "learning_rate": 0.0004375436961081333, | |
| "loss": 0.8564, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "learning_rate": 0.00043742717315311115, | |
| "loss": 1.0516, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "learning_rate": 0.000437310650198089, | |
| "loss": 0.8854, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "learning_rate": 0.0004371941272430669, | |
| "loss": 0.9493, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "learning_rate": 0.00043707760428804475, | |
| "loss": 0.9142, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "learning_rate": 0.0004369610813330226, | |
| "loss": 0.8572, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "learning_rate": 0.00043684455837800045, | |
| "loss": 0.7193, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "learning_rate": 0.00043672803542297835, | |
| "loss": 0.9045, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "learning_rate": 0.0004366115124679562, | |
| "loss": 0.9262, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "learning_rate": 0.00043649498951293405, | |
| "loss": 0.7985, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "learning_rate": 0.0004363784665579119, | |
| "loss": 0.7903, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "learning_rate": 0.0004362619436028898, | |
| "loss": 0.8705, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "learning_rate": 0.00043614542064786765, | |
| "loss": 0.9021, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "learning_rate": 0.0004360288976928455, | |
| "loss": 0.7522, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "learning_rate": 0.00043591237473782334, | |
| "loss": 0.9521, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "learning_rate": 0.00043579585178280124, | |
| "loss": 0.9453, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "learning_rate": 0.0004356793288277791, | |
| "loss": 0.81, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "learning_rate": 0.00043556280587275694, | |
| "loss": 0.9281, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "learning_rate": 0.0004354462829177348, | |
| "loss": 0.8549, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "learning_rate": 0.0004353297599627127, | |
| "loss": 0.9825, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 0.0004352132370076905, | |
| "loss": 0.945, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 0.0004350967140526684, | |
| "loss": 0.6956, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 0.00043498019109764624, | |
| "loss": 0.9248, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 0.00043486366814262414, | |
| "loss": 0.8503, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "learning_rate": 0.00043474714518760193, | |
| "loss": 0.9704, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "learning_rate": 0.00043463062223257984, | |
| "loss": 0.9158, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "learning_rate": 0.0004345140992775577, | |
| "loss": 0.8862, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "learning_rate": 0.0004343975763225356, | |
| "loss": 0.7859, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "learning_rate": 0.0004342810533675134, | |
| "loss": 0.8887, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "learning_rate": 0.0004341645304124913, | |
| "loss": 0.7453, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "learning_rate": 0.00043404800745746913, | |
| "loss": 0.8689, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "learning_rate": 0.00043393148450244704, | |
| "loss": 0.7721, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "learning_rate": 0.00043381496154742483, | |
| "loss": 0.843, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "learning_rate": 0.00043369843859240273, | |
| "loss": 0.9345, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "learning_rate": 0.0004335819156373806, | |
| "loss": 0.8555, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "learning_rate": 0.00043346539268235843, | |
| "loss": 0.8983, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "learning_rate": 0.0004333488697273363, | |
| "loss": 0.8739, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "learning_rate": 0.0004332323467723142, | |
| "loss": 0.9557, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "learning_rate": 0.00043311582381729203, | |
| "loss": 0.9242, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "learning_rate": 0.0004329993008622699, | |
| "loss": 0.778, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "learning_rate": 0.0004328827779072477, | |
| "loss": 0.8425, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "learning_rate": 0.00043276625495222563, | |
| "loss": 0.8424, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "learning_rate": 0.0004326497319972035, | |
| "loss": 0.9171, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "learning_rate": 0.00043253320904218127, | |
| "loss": 0.9715, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "learning_rate": 0.0004324166860871592, | |
| "loss": 0.7575, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "learning_rate": 0.000432300163132137, | |
| "loss": 0.8489, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "learning_rate": 0.0004321836401771149, | |
| "loss": 0.8243, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "learning_rate": 0.0004320671172220927, | |
| "loss": 0.8604, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "learning_rate": 0.0004319505942670706, | |
| "loss": 0.9974, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "learning_rate": 0.00043183407131204847, | |
| "loss": 0.9065, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "learning_rate": 0.00043171754835702637, | |
| "loss": 0.8044, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "learning_rate": 0.00043160102540200417, | |
| "loss": 0.9111, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "learning_rate": 0.00043148450244698207, | |
| "loss": 0.9364, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "learning_rate": 0.0004313679794919599, | |
| "loss": 0.8497, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "learning_rate": 0.00043125145653693777, | |
| "loss": 0.8877, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "learning_rate": 0.0004311349335819156, | |
| "loss": 0.8173, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "learning_rate": 0.0004310184106268935, | |
| "loss": 0.946, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "learning_rate": 0.00043090188767187136, | |
| "loss": 0.9376, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "learning_rate": 0.0004307853647168492, | |
| "loss": 0.85, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "learning_rate": 0.00043066884176182706, | |
| "loss": 0.8629, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "learning_rate": 0.00043055231880680496, | |
| "loss": 0.8033, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "learning_rate": 0.0004304357958517828, | |
| "loss": 1.0201, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "learning_rate": 0.00043031927289676066, | |
| "loss": 0.8825, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "learning_rate": 0.0004302027499417385, | |
| "loss": 0.8825, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "learning_rate": 0.0004300862269867164, | |
| "loss": 0.7078, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "learning_rate": 0.00042996970403169426, | |
| "loss": 0.8709, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "learning_rate": 0.0004298531810766721, | |
| "loss": 0.8811, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "learning_rate": 0.00042973665812164996, | |
| "loss": 0.7586, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "learning_rate": 0.00042962013516662786, | |
| "loss": 0.925, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "learning_rate": 0.0004295036122116057, | |
| "loss": 0.8418, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "learning_rate": 0.00042938708925658356, | |
| "loss": 0.806, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "learning_rate": 0.0004292705663015614, | |
| "loss": 0.864, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 0.0004291540433465393, | |
| "loss": 0.8085, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 0.0004290375203915171, | |
| "loss": 1.0106, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 0.000428920997436495, | |
| "loss": 0.9069, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 0.00042880447448147285, | |
| "loss": 0.8129, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 0.00042868795152645076, | |
| "loss": 0.846, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "learning_rate": 0.00042857142857142855, | |
| "loss": 0.8213, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "learning_rate": 0.00042845490561640645, | |
| "loss": 0.821, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "learning_rate": 0.0004283383826613843, | |
| "loss": 0.8194, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "learning_rate": 0.0004282218597063622, | |
| "loss": 0.7394, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "learning_rate": 0.00042810533675134, | |
| "loss": 0.9139, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "learning_rate": 0.0004279888137963179, | |
| "loss": 0.8334, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "learning_rate": 0.00042787229084129575, | |
| "loss": 0.7381, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "learning_rate": 0.00042775576788627365, | |
| "loss": 0.7279, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "learning_rate": 0.00042763924493125144, | |
| "loss": 0.9201, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "learning_rate": 0.00042752272197622935, | |
| "loss": 0.8286, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "learning_rate": 0.0004274061990212072, | |
| "loss": 0.8908, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "learning_rate": 0.0004272896760661851, | |
| "loss": 0.8951, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "learning_rate": 0.0004271731531111629, | |
| "loss": 0.8557, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "learning_rate": 0.0004270566301561408, | |
| "loss": 0.8737, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "learning_rate": 0.00042694010720111864, | |
| "loss": 0.8117, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "learning_rate": 0.00042682358424609644, | |
| "loss": 0.8245, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "learning_rate": 0.00042670706129107434, | |
| "loss": 0.8187, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "learning_rate": 0.0004265905383360522, | |
| "loss": 0.7613, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "learning_rate": 0.0004264740153810301, | |
| "loss": 0.9373, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "learning_rate": 0.0004263574924260079, | |
| "loss": 0.9169, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "learning_rate": 0.0004262409694709858, | |
| "loss": 0.831, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 0.00042612444651596364, | |
| "loss": 0.8466, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 0.00042600792356094154, | |
| "loss": 0.8031, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 0.00042589140060591933, | |
| "loss": 1.0256, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 0.00042577487765089724, | |
| "loss": 0.8324, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 0.0004256583546958751, | |
| "loss": 0.8445, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "learning_rate": 0.000425541831740853, | |
| "loss": 0.8516, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "learning_rate": 0.0004254253087858308, | |
| "loss": 0.6436, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "learning_rate": 0.0004253087858308087, | |
| "loss": 0.8925, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "learning_rate": 0.00042519226287578653, | |
| "loss": 0.8912, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "learning_rate": 0.00042507573992076443, | |
| "loss": 0.959, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "learning_rate": 0.00042495921696574223, | |
| "loss": 0.9026, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "learning_rate": 0.00042484269401072013, | |
| "loss": 0.8484, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "learning_rate": 0.000424726171055698, | |
| "loss": 0.9602, | |
| "step": 6960 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "learning_rate": 0.00042460964810067583, | |
| "loss": 0.7942, | |
| "step": 6970 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "learning_rate": 0.0004244931251456537, | |
| "loss": 0.9194, | |
| "step": 6980 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "learning_rate": 0.0004243766021906316, | |
| "loss": 0.8724, | |
| "step": 6990 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "learning_rate": 0.00042426007923560943, | |
| "loss": 0.8077, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "learning_rate": 0.0004241435562805873, | |
| "loss": 0.8402, | |
| "step": 7010 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "learning_rate": 0.0004240270333255651, | |
| "loss": 0.8272, | |
| "step": 7020 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "learning_rate": 0.000423910510370543, | |
| "loss": 0.8768, | |
| "step": 7030 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "learning_rate": 0.0004237939874155209, | |
| "loss": 0.8291, | |
| "step": 7040 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "learning_rate": 0.0004236774644604987, | |
| "loss": 0.8288, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "learning_rate": 0.00042356094150547657, | |
| "loss": 0.8869, | |
| "step": 7060 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "learning_rate": 0.0004234444185504545, | |
| "loss": 0.9815, | |
| "step": 7070 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "learning_rate": 0.0004233278955954323, | |
| "loss": 0.8464, | |
| "step": 7080 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "learning_rate": 0.00042321137264041017, | |
| "loss": 0.8099, | |
| "step": 7090 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 0.000423094849685388, | |
| "loss": 0.8248, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 0.0004229783267303659, | |
| "loss": 0.8718, | |
| "step": 7110 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 0.00042286180377534377, | |
| "loss": 0.674, | |
| "step": 7120 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 0.0004227452808203216, | |
| "loss": 0.8456, | |
| "step": 7130 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 0.00042262875786529947, | |
| "loss": 0.8607, | |
| "step": 7140 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "learning_rate": 0.00042251223491027737, | |
| "loss": 0.7995, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "learning_rate": 0.00042239571195525516, | |
| "loss": 0.8364, | |
| "step": 7160 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "learning_rate": 0.00042227918900023307, | |
| "loss": 0.8575, | |
| "step": 7170 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "learning_rate": 0.0004221626660452109, | |
| "loss": 0.7937, | |
| "step": 7180 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "learning_rate": 0.0004220461430901888, | |
| "loss": 0.7883, | |
| "step": 7190 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "learning_rate": 0.0004219296201351666, | |
| "loss": 0.8459, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "learning_rate": 0.0004218130971801445, | |
| "loss": 0.7338, | |
| "step": 7210 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "learning_rate": 0.00042169657422512236, | |
| "loss": 0.8326, | |
| "step": 7220 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "learning_rate": 0.00042158005127010027, | |
| "loss": 0.7968, | |
| "step": 7230 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "learning_rate": 0.00042146352831507806, | |
| "loss": 0.8392, | |
| "step": 7240 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "learning_rate": 0.0004213470053600559, | |
| "loss": 0.7962, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "learning_rate": 0.0004212304824050338, | |
| "loss": 0.8887, | |
| "step": 7260 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "learning_rate": 0.00042111395945001166, | |
| "loss": 0.8364, | |
| "step": 7270 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "learning_rate": 0.0004209974364949895, | |
| "loss": 0.8338, | |
| "step": 7280 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "learning_rate": 0.00042088091353996736, | |
| "loss": 0.825, | |
| "step": 7290 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "learning_rate": 0.00042076439058494526, | |
| "loss": 0.7979, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "learning_rate": 0.0004206478676299231, | |
| "loss": 0.8145, | |
| "step": 7310 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "learning_rate": 0.00042053134467490096, | |
| "loss": 0.9179, | |
| "step": 7320 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "learning_rate": 0.0004204148217198788, | |
| "loss": 0.9284, | |
| "step": 7330 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "learning_rate": 0.0004202982987648567, | |
| "loss": 0.7933, | |
| "step": 7340 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "learning_rate": 0.0004201817758098345, | |
| "loss": 0.8116, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "learning_rate": 0.0004200652528548124, | |
| "loss": 0.9992, | |
| "step": 7360 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "learning_rate": 0.00041994872989979025, | |
| "loss": 0.7485, | |
| "step": 7370 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "learning_rate": 0.00041983220694476815, | |
| "loss": 0.8439, | |
| "step": 7380 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "learning_rate": 0.00041971568398974595, | |
| "loss": 0.8971, | |
| "step": 7390 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "learning_rate": 0.00041959916103472385, | |
| "loss": 0.8182, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "learning_rate": 0.0004194826380797017, | |
| "loss": 0.9844, | |
| "step": 7410 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "learning_rate": 0.0004193661151246796, | |
| "loss": 0.7211, | |
| "step": 7420 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "learning_rate": 0.0004192495921696574, | |
| "loss": 0.9946, | |
| "step": 7430 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "learning_rate": 0.0004191330692146353, | |
| "loss": 0.7788, | |
| "step": 7440 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "learning_rate": 0.00041901654625961315, | |
| "loss": 0.7851, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "learning_rate": 0.00041890002330459105, | |
| "loss": 0.8839, | |
| "step": 7460 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "learning_rate": 0.00041878350034956884, | |
| "loss": 0.7923, | |
| "step": 7470 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "learning_rate": 0.00041866697739454675, | |
| "loss": 0.9646, | |
| "step": 7480 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "learning_rate": 0.0004185504544395246, | |
| "loss": 0.8403, | |
| "step": 7490 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "learning_rate": 0.00041843393148450244, | |
| "loss": 0.8778, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "learning_rate": 0.0004183174085294803, | |
| "loss": 0.8131, | |
| "step": 7510 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "learning_rate": 0.0004182008855744582, | |
| "loss": 0.8474, | |
| "step": 7520 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "learning_rate": 0.00041808436261943604, | |
| "loss": 0.742, | |
| "step": 7530 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "learning_rate": 0.0004179678396644139, | |
| "loss": 0.8989, | |
| "step": 7540 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "learning_rate": 0.00041785131670939174, | |
| "loss": 0.9878, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "learning_rate": 0.00041773479375436964, | |
| "loss": 0.8157, | |
| "step": 7560 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "learning_rate": 0.0004176182707993475, | |
| "loss": 0.7535, | |
| "step": 7570 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "learning_rate": 0.00041750174784432534, | |
| "loss": 0.7385, | |
| "step": 7580 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "learning_rate": 0.0004173852248893032, | |
| "loss": 0.7585, | |
| "step": 7590 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "learning_rate": 0.0004172687019342811, | |
| "loss": 0.7945, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "learning_rate": 0.00041715217897925894, | |
| "loss": 0.9057, | |
| "step": 7610 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "learning_rate": 0.0004170356560242368, | |
| "loss": 0.9158, | |
| "step": 7620 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "learning_rate": 0.00041691913306921463, | |
| "loss": 0.7643, | |
| "step": 7630 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "learning_rate": 0.00041680261011419254, | |
| "loss": 0.9082, | |
| "step": 7640 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "learning_rate": 0.0004166860871591704, | |
| "loss": 0.8074, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "learning_rate": 0.00041656956420414823, | |
| "loss": 0.8786, | |
| "step": 7660 | |
| }, | |
| { | |
| "epoch": 1.77, | |
| "learning_rate": 0.0004164530412491261, | |
| "loss": 0.8093, | |
| "step": 7670 | |
| }, | |
| { | |
| "epoch": 1.77, | |
| "learning_rate": 0.000416336518294104, | |
| "loss": 0.8411, | |
| "step": 7680 | |
| }, | |
| { | |
| "epoch": 1.77, | |
| "learning_rate": 0.0004162199953390818, | |
| "loss": 0.8258, | |
| "step": 7690 | |
| }, | |
| { | |
| "epoch": 1.77, | |
| "learning_rate": 0.0004161034723840597, | |
| "loss": 0.8163, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "learning_rate": 0.00041598694942903753, | |
| "loss": 0.9144, | |
| "step": 7710 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "learning_rate": 0.00041587042647401543, | |
| "loss": 0.7808, | |
| "step": 7720 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "learning_rate": 0.0004157539035189932, | |
| "loss": 0.7541, | |
| "step": 7730 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "learning_rate": 0.0004156373805639711, | |
| "loss": 0.7985, | |
| "step": 7740 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "learning_rate": 0.000415520857608949, | |
| "loss": 0.7638, | |
| "step": 7750 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "learning_rate": 0.0004154043346539268, | |
| "loss": 0.7434, | |
| "step": 7760 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "learning_rate": 0.0004152878116989047, | |
| "loss": 0.7973, | |
| "step": 7770 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "learning_rate": 0.0004151712887438825, | |
| "loss": 0.7924, | |
| "step": 7780 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "learning_rate": 0.0004150547657888604, | |
| "loss": 0.8787, | |
| "step": 7790 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "learning_rate": 0.0004149382428338383, | |
| "loss": 0.8441, | |
| "step": 7800 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "learning_rate": 0.0004148217198788161, | |
| "loss": 0.8471, | |
| "step": 7810 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "learning_rate": 0.00041470519692379397, | |
| "loss": 0.8283, | |
| "step": 7820 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "learning_rate": 0.0004145886739687719, | |
| "loss": 0.7841, | |
| "step": 7830 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "learning_rate": 0.0004144721510137497, | |
| "loss": 0.8118, | |
| "step": 7840 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "learning_rate": 0.00041435562805872757, | |
| "loss": 0.9017, | |
| "step": 7850 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "learning_rate": 0.0004142391051037054, | |
| "loss": 0.7363, | |
| "step": 7860 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "learning_rate": 0.0004141225821486833, | |
| "loss": 0.7727, | |
| "step": 7870 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "learning_rate": 0.0004140060591936611, | |
| "loss": 0.7529, | |
| "step": 7880 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "learning_rate": 0.000413889536238639, | |
| "loss": 0.8297, | |
| "step": 7890 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "learning_rate": 0.00041377301328361687, | |
| "loss": 0.8453, | |
| "step": 7900 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "learning_rate": 0.00041365649032859477, | |
| "loss": 0.7607, | |
| "step": 7910 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "learning_rate": 0.00041353996737357256, | |
| "loss": 0.787, | |
| "step": 7920 | |
| }, | |
| { | |
| "epoch": 1.83, | |
| "learning_rate": 0.00041342344441855047, | |
| "loss": 0.8017, | |
| "step": 7930 | |
| }, | |
| { | |
| "epoch": 1.83, | |
| "learning_rate": 0.0004133069214635283, | |
| "loss": 0.6971, | |
| "step": 7940 | |
| }, | |
| { | |
| "epoch": 1.83, | |
| "learning_rate": 0.0004131903985085062, | |
| "loss": 0.8197, | |
| "step": 7950 | |
| }, | |
| { | |
| "epoch": 1.83, | |
| "learning_rate": 0.000413073875553484, | |
| "loss": 0.7113, | |
| "step": 7960 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "learning_rate": 0.0004129573525984619, | |
| "loss": 0.871, | |
| "step": 7970 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "learning_rate": 0.00041284082964343976, | |
| "loss": 0.7696, | |
| "step": 7980 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "learning_rate": 0.00041272430668841766, | |
| "loss": 0.7699, | |
| "step": 7990 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "learning_rate": 0.00041260778373339546, | |
| "loss": 0.9623, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "learning_rate": 0.00041249126077837336, | |
| "loss": 0.7063, | |
| "step": 8010 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "learning_rate": 0.0004123747378233512, | |
| "loss": 0.811, | |
| "step": 8020 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "learning_rate": 0.0004122582148683291, | |
| "loss": 0.8056, | |
| "step": 8030 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "learning_rate": 0.0004121416919133069, | |
| "loss": 0.808, | |
| "step": 8040 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "learning_rate": 0.0004120251689582848, | |
| "loss": 0.841, | |
| "step": 8050 | |
| }, | |
| { | |
| "epoch": 1.86, | |
| "learning_rate": 0.00041190864600326266, | |
| "loss": 0.7707, | |
| "step": 8060 | |
| }, | |
| { | |
| "epoch": 1.86, | |
| "learning_rate": 0.0004117921230482405, | |
| "loss": 0.7437, | |
| "step": 8070 | |
| }, | |
| { | |
| "epoch": 1.86, | |
| "learning_rate": 0.00041167560009321835, | |
| "loss": 0.8966, | |
| "step": 8080 | |
| }, | |
| { | |
| "epoch": 1.86, | |
| "learning_rate": 0.00041155907713819626, | |
| "loss": 0.7847, | |
| "step": 8090 | |
| }, | |
| { | |
| "epoch": 1.87, | |
| "learning_rate": 0.0004114425541831741, | |
| "loss": 0.7915, | |
| "step": 8100 | |
| }, | |
| { | |
| "epoch": 1.87, | |
| "learning_rate": 0.00041132603122815195, | |
| "loss": 0.8354, | |
| "step": 8110 | |
| }, | |
| { | |
| "epoch": 1.87, | |
| "learning_rate": 0.0004112095082731298, | |
| "loss": 0.7693, | |
| "step": 8120 | |
| }, | |
| { | |
| "epoch": 1.87, | |
| "learning_rate": 0.0004110929853181077, | |
| "loss": 0.7187, | |
| "step": 8130 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "learning_rate": 0.00041097646236308555, | |
| "loss": 0.7765, | |
| "step": 8140 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "learning_rate": 0.0004108599394080634, | |
| "loss": 0.8497, | |
| "step": 8150 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "learning_rate": 0.00041074341645304125, | |
| "loss": 0.7102, | |
| "step": 8160 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "learning_rate": 0.00041062689349801915, | |
| "loss": 0.746, | |
| "step": 8170 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "learning_rate": 0.000410510370542997, | |
| "loss": 0.8032, | |
| "step": 8180 | |
| }, | |
| { | |
| "epoch": 1.89, | |
| "learning_rate": 0.00041039384758797485, | |
| "loss": 0.8194, | |
| "step": 8190 | |
| }, | |
| { | |
| "epoch": 1.89, | |
| "learning_rate": 0.0004102773246329527, | |
| "loss": 0.8415, | |
| "step": 8200 | |
| }, | |
| { | |
| "epoch": 1.89, | |
| "learning_rate": 0.0004101608016779306, | |
| "loss": 0.7877, | |
| "step": 8210 | |
| }, | |
| { | |
| "epoch": 1.89, | |
| "learning_rate": 0.00041004427872290845, | |
| "loss": 0.8101, | |
| "step": 8220 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "learning_rate": 0.00040992775576788624, | |
| "loss": 0.6995, | |
| "step": 8230 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "learning_rate": 0.00040981123281286414, | |
| "loss": 0.8027, | |
| "step": 8240 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "learning_rate": 0.000409694709857842, | |
| "loss": 0.8378, | |
| "step": 8250 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "learning_rate": 0.00040957818690281984, | |
| "loss": 0.7723, | |
| "step": 8260 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "learning_rate": 0.0004094616639477977, | |
| "loss": 0.7575, | |
| "step": 8270 | |
| }, | |
| { | |
| "epoch": 1.91, | |
| "learning_rate": 0.0004093451409927756, | |
| "loss": 0.7697, | |
| "step": 8280 | |
| }, | |
| { | |
| "epoch": 1.91, | |
| "learning_rate": 0.00040922861803775344, | |
| "loss": 0.8557, | |
| "step": 8290 | |
| }, | |
| { | |
| "epoch": 1.91, | |
| "learning_rate": 0.0004091120950827313, | |
| "loss": 0.8575, | |
| "step": 8300 | |
| }, | |
| { | |
| "epoch": 1.91, | |
| "learning_rate": 0.00040899557212770914, | |
| "loss": 0.8342, | |
| "step": 8310 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "learning_rate": 0.00040887904917268704, | |
| "loss": 0.7827, | |
| "step": 8320 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "learning_rate": 0.0004087625262176649, | |
| "loss": 0.822, | |
| "step": 8330 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "learning_rate": 0.00040864600326264274, | |
| "loss": 0.7081, | |
| "step": 8340 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "learning_rate": 0.0004085294803076206, | |
| "loss": 0.8076, | |
| "step": 8350 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "learning_rate": 0.0004084129573525985, | |
| "loss": 0.7991, | |
| "step": 8360 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "learning_rate": 0.00040829643439757634, | |
| "loss": 0.7308, | |
| "step": 8370 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "learning_rate": 0.0004081799114425542, | |
| "loss": 0.8929, | |
| "step": 8380 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "learning_rate": 0.00040806338848753203, | |
| "loss": 0.7839, | |
| "step": 8390 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "learning_rate": 0.00040794686553250994, | |
| "loss": 0.8352, | |
| "step": 8400 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "learning_rate": 0.0004078303425774878, | |
| "loss": 0.8239, | |
| "step": 8410 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "learning_rate": 0.00040771381962246563, | |
| "loss": 0.9054, | |
| "step": 8420 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "learning_rate": 0.0004075972966674435, | |
| "loss": 1.0012, | |
| "step": 8430 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "learning_rate": 0.0004074807737124214, | |
| "loss": 0.7988, | |
| "step": 8440 | |
| }, | |
| { | |
| "epoch": 1.95, | |
| "learning_rate": 0.0004073642507573992, | |
| "loss": 0.7011, | |
| "step": 8450 | |
| }, | |
| { | |
| "epoch": 1.95, | |
| "learning_rate": 0.0004072477278023771, | |
| "loss": 0.7976, | |
| "step": 8460 | |
| }, | |
| { | |
| "epoch": 1.95, | |
| "learning_rate": 0.00040713120484735493, | |
| "loss": 0.7668, | |
| "step": 8470 | |
| }, | |
| { | |
| "epoch": 1.95, | |
| "learning_rate": 0.00040701468189233283, | |
| "loss": 0.887, | |
| "step": 8480 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "learning_rate": 0.0004068981589373106, | |
| "loss": 0.7291, | |
| "step": 8490 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "learning_rate": 0.00040678163598228853, | |
| "loss": 0.7659, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "learning_rate": 0.0004066651130272664, | |
| "loss": 0.7434, | |
| "step": 8510 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "learning_rate": 0.0004065485900722443, | |
| "loss": 0.779, | |
| "step": 8520 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "learning_rate": 0.0004064320671172221, | |
| "loss": 0.717, | |
| "step": 8530 | |
| }, | |
| { | |
| "epoch": 1.97, | |
| "learning_rate": 0.0004063155441622, | |
| "loss": 0.6918, | |
| "step": 8540 | |
| }, | |
| { | |
| "epoch": 1.97, | |
| "learning_rate": 0.0004061990212071778, | |
| "loss": 0.8615, | |
| "step": 8550 | |
| }, | |
| { | |
| "epoch": 1.97, | |
| "learning_rate": 0.0004060824982521557, | |
| "loss": 0.8585, | |
| "step": 8560 | |
| }, | |
| { | |
| "epoch": 1.97, | |
| "learning_rate": 0.0004059659752971335, | |
| "loss": 0.8218, | |
| "step": 8570 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "learning_rate": 0.0004058494523421114, | |
| "loss": 0.8093, | |
| "step": 8580 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "learning_rate": 0.00040573292938708927, | |
| "loss": 0.8097, | |
| "step": 8590 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "learning_rate": 0.0004056164064320672, | |
| "loss": 0.8217, | |
| "step": 8600 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "learning_rate": 0.00040549988347704497, | |
| "loss": 0.915, | |
| "step": 8610 | |
| }, | |
| { | |
| "epoch": 1.99, | |
| "learning_rate": 0.00040538336052202287, | |
| "loss": 0.7121, | |
| "step": 8620 | |
| }, | |
| { | |
| "epoch": 1.99, | |
| "learning_rate": 0.0004052668375670007, | |
| "loss": 0.7469, | |
| "step": 8630 | |
| }, | |
| { | |
| "epoch": 1.99, | |
| "learning_rate": 0.00040515031461197857, | |
| "loss": 0.7618, | |
| "step": 8640 | |
| }, | |
| { | |
| "epoch": 1.99, | |
| "learning_rate": 0.0004050337916569564, | |
| "loss": 0.7916, | |
| "step": 8650 | |
| }, | |
| { | |
| "epoch": 1.99, | |
| "learning_rate": 0.0004049172687019343, | |
| "loss": 0.7881, | |
| "step": 8660 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "learning_rate": 0.00040480074574691217, | |
| "loss": 0.932, | |
| "step": 8670 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "learning_rate": 0.00040468422279189, | |
| "loss": 0.8939, | |
| "step": 8680 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "eval_Bleu_1": 0.0061907631622195695, | |
| "eval_Bleu_2": 2.2410988520716052e-11, | |
| "eval_Bleu_3": 3.913134630236392e-14, | |
| "eval_Bleu_4": 1.783965909910145e-15, | |
| "eval_ROUGE_L": 0.009656478129255157, | |
| "eval_cer": 0.5242234220274556, | |
| "eval_em": 0.509952766531714, | |
| "eval_f1": 0.6806222594586169, | |
| "eval_loss": 0.9325827360153198, | |
| "eval_runtime": 333.3233, | |
| "eval_samples_per_second": 17.785, | |
| "eval_steps_per_second": 3.558, | |
| "eval_wer": 0.6251185333473818, | |
| "step": 8682 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "learning_rate": 0.00040456769983686786, | |
| "loss": 0.5991, | |
| "step": 8690 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "learning_rate": 0.0004044511768818457, | |
| "loss": 0.5123, | |
| "step": 8700 | |
| }, | |
| { | |
| "epoch": 2.01, | |
| "learning_rate": 0.0004043346539268236, | |
| "loss": 0.4804, | |
| "step": 8710 | |
| }, | |
| { | |
| "epoch": 2.01, | |
| "learning_rate": 0.0004042181309718014, | |
| "loss": 0.5649, | |
| "step": 8720 | |
| }, | |
| { | |
| "epoch": 2.01, | |
| "learning_rate": 0.0004041016080167793, | |
| "loss": 0.5594, | |
| "step": 8730 | |
| }, | |
| { | |
| "epoch": 2.01, | |
| "learning_rate": 0.00040398508506175716, | |
| "loss": 0.6121, | |
| "step": 8740 | |
| }, | |
| { | |
| "epoch": 2.02, | |
| "learning_rate": 0.00040386856210673506, | |
| "loss": 0.4305, | |
| "step": 8750 | |
| }, | |
| { | |
| "epoch": 2.02, | |
| "learning_rate": 0.00040375203915171286, | |
| "loss": 0.5781, | |
| "step": 8760 | |
| }, | |
| { | |
| "epoch": 2.02, | |
| "learning_rate": 0.00040363551619669076, | |
| "loss": 0.5346, | |
| "step": 8770 | |
| }, | |
| { | |
| "epoch": 2.02, | |
| "learning_rate": 0.0004035189932416686, | |
| "loss": 0.5311, | |
| "step": 8780 | |
| }, | |
| { | |
| "epoch": 2.02, | |
| "learning_rate": 0.00040340247028664646, | |
| "loss": 0.5012, | |
| "step": 8790 | |
| }, | |
| { | |
| "epoch": 2.03, | |
| "learning_rate": 0.0004032859473316243, | |
| "loss": 0.5934, | |
| "step": 8800 | |
| }, | |
| { | |
| "epoch": 2.03, | |
| "learning_rate": 0.0004031694243766022, | |
| "loss": 0.5671, | |
| "step": 8810 | |
| }, | |
| { | |
| "epoch": 2.03, | |
| "learning_rate": 0.00040305290142158006, | |
| "loss": 0.518, | |
| "step": 8820 | |
| }, | |
| { | |
| "epoch": 2.03, | |
| "learning_rate": 0.0004029363784665579, | |
| "loss": 0.552, | |
| "step": 8830 | |
| }, | |
| { | |
| "epoch": 2.04, | |
| "learning_rate": 0.00040281985551153575, | |
| "loss": 0.5729, | |
| "step": 8840 | |
| }, | |
| { | |
| "epoch": 2.04, | |
| "learning_rate": 0.00040270333255651365, | |
| "loss": 0.4851, | |
| "step": 8850 | |
| }, | |
| { | |
| "epoch": 2.04, | |
| "learning_rate": 0.0004025868096014915, | |
| "loss": 0.4946, | |
| "step": 8860 | |
| }, | |
| { | |
| "epoch": 2.04, | |
| "learning_rate": 0.00040247028664646935, | |
| "loss": 0.6723, | |
| "step": 8870 | |
| }, | |
| { | |
| "epoch": 2.05, | |
| "learning_rate": 0.0004023537636914472, | |
| "loss": 0.4706, | |
| "step": 8880 | |
| }, | |
| { | |
| "epoch": 2.05, | |
| "learning_rate": 0.0004022372407364251, | |
| "loss": 0.6072, | |
| "step": 8890 | |
| }, | |
| { | |
| "epoch": 2.05, | |
| "learning_rate": 0.00040212071778140295, | |
| "loss": 0.5382, | |
| "step": 8900 | |
| }, | |
| { | |
| "epoch": 2.05, | |
| "learning_rate": 0.0004020041948263808, | |
| "loss": 0.5631, | |
| "step": 8910 | |
| }, | |
| { | |
| "epoch": 2.05, | |
| "learning_rate": 0.00040188767187135865, | |
| "loss": 0.5934, | |
| "step": 8920 | |
| }, | |
| { | |
| "epoch": 2.06, | |
| "learning_rate": 0.00040177114891633655, | |
| "loss": 0.5487, | |
| "step": 8930 | |
| }, | |
| { | |
| "epoch": 2.06, | |
| "learning_rate": 0.0004016546259613144, | |
| "loss": 0.5286, | |
| "step": 8940 | |
| }, | |
| { | |
| "epoch": 2.06, | |
| "learning_rate": 0.00040153810300629225, | |
| "loss": 0.5221, | |
| "step": 8950 | |
| }, | |
| { | |
| "epoch": 2.06, | |
| "learning_rate": 0.0004014215800512701, | |
| "loss": 0.527, | |
| "step": 8960 | |
| }, | |
| { | |
| "epoch": 2.07, | |
| "learning_rate": 0.000401305057096248, | |
| "loss": 0.4899, | |
| "step": 8970 | |
| }, | |
| { | |
| "epoch": 2.07, | |
| "learning_rate": 0.0004011885341412258, | |
| "loss": 0.5208, | |
| "step": 8980 | |
| }, | |
| { | |
| "epoch": 2.07, | |
| "learning_rate": 0.0004010720111862037, | |
| "loss": 0.5884, | |
| "step": 8990 | |
| }, | |
| { | |
| "epoch": 2.07, | |
| "learning_rate": 0.00040095548823118154, | |
| "loss": 0.5225, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 2.08, | |
| "learning_rate": 0.00040083896527615945, | |
| "loss": 0.5612, | |
| "step": 9010 | |
| }, | |
| { | |
| "epoch": 2.08, | |
| "learning_rate": 0.00040072244232113724, | |
| "loss": 0.6135, | |
| "step": 9020 | |
| }, | |
| { | |
| "epoch": 2.08, | |
| "learning_rate": 0.00040060591936611514, | |
| "loss": 0.4754, | |
| "step": 9030 | |
| }, | |
| { | |
| "epoch": 2.08, | |
| "learning_rate": 0.000400489396411093, | |
| "loss": 0.4943, | |
| "step": 9040 | |
| }, | |
| { | |
| "epoch": 2.08, | |
| "learning_rate": 0.0004003728734560709, | |
| "loss": 0.5197, | |
| "step": 9050 | |
| }, | |
| { | |
| "epoch": 2.09, | |
| "learning_rate": 0.0004002563505010487, | |
| "loss": 0.5642, | |
| "step": 9060 | |
| }, | |
| { | |
| "epoch": 2.09, | |
| "learning_rate": 0.0004001398275460266, | |
| "loss": 0.5568, | |
| "step": 9070 | |
| }, | |
| { | |
| "epoch": 2.09, | |
| "learning_rate": 0.00040002330459100444, | |
| "loss": 0.5518, | |
| "step": 9080 | |
| }, | |
| { | |
| "epoch": 2.09, | |
| "learning_rate": 0.00039990678163598234, | |
| "loss": 0.6001, | |
| "step": 9090 | |
| }, | |
| { | |
| "epoch": 2.1, | |
| "learning_rate": 0.00039979025868096014, | |
| "loss": 0.5839, | |
| "step": 9100 | |
| }, | |
| { | |
| "epoch": 2.1, | |
| "learning_rate": 0.00039967373572593804, | |
| "loss": 0.6053, | |
| "step": 9110 | |
| }, | |
| { | |
| "epoch": 2.1, | |
| "learning_rate": 0.0003995572127709159, | |
| "loss": 0.5571, | |
| "step": 9120 | |
| }, | |
| { | |
| "epoch": 2.1, | |
| "learning_rate": 0.0003994406898158938, | |
| "loss": 0.551, | |
| "step": 9130 | |
| }, | |
| { | |
| "epoch": 2.11, | |
| "learning_rate": 0.0003993241668608716, | |
| "loss": 0.5591, | |
| "step": 9140 | |
| }, | |
| { | |
| "epoch": 2.11, | |
| "learning_rate": 0.0003992076439058495, | |
| "loss": 0.5571, | |
| "step": 9150 | |
| }, | |
| { | |
| "epoch": 2.11, | |
| "learning_rate": 0.00039909112095082733, | |
| "loss": 0.55, | |
| "step": 9160 | |
| }, | |
| { | |
| "epoch": 2.11, | |
| "learning_rate": 0.00039897459799580513, | |
| "loss": 0.6671, | |
| "step": 9170 | |
| }, | |
| { | |
| "epoch": 2.11, | |
| "learning_rate": 0.00039885807504078303, | |
| "loss": 0.6238, | |
| "step": 9180 | |
| }, | |
| { | |
| "epoch": 2.12, | |
| "learning_rate": 0.0003987415520857609, | |
| "loss": 0.4604, | |
| "step": 9190 | |
| }, | |
| { | |
| "epoch": 2.12, | |
| "learning_rate": 0.0003986250291307388, | |
| "loss": 0.576, | |
| "step": 9200 | |
| }, | |
| { | |
| "epoch": 2.12, | |
| "learning_rate": 0.0003985085061757166, | |
| "loss": 0.553, | |
| "step": 9210 | |
| }, | |
| { | |
| "epoch": 2.12, | |
| "learning_rate": 0.0003983919832206945, | |
| "loss": 0.5679, | |
| "step": 9220 | |
| }, | |
| { | |
| "epoch": 2.13, | |
| "learning_rate": 0.0003982754602656723, | |
| "loss": 0.6115, | |
| "step": 9230 | |
| }, | |
| { | |
| "epoch": 2.13, | |
| "learning_rate": 0.00039815893731065023, | |
| "loss": 0.4596, | |
| "step": 9240 | |
| }, | |
| { | |
| "epoch": 2.13, | |
| "learning_rate": 0.000398042414355628, | |
| "loss": 0.4852, | |
| "step": 9250 | |
| }, | |
| { | |
| "epoch": 2.13, | |
| "learning_rate": 0.0003979258914006059, | |
| "loss": 0.5255, | |
| "step": 9260 | |
| }, | |
| { | |
| "epoch": 2.14, | |
| "learning_rate": 0.0003978093684455838, | |
| "loss": 0.518, | |
| "step": 9270 | |
| }, | |
| { | |
| "epoch": 2.14, | |
| "learning_rate": 0.0003976928454905617, | |
| "loss": 0.5075, | |
| "step": 9280 | |
| }, | |
| { | |
| "epoch": 2.14, | |
| "learning_rate": 0.00039757632253553947, | |
| "loss": 0.6621, | |
| "step": 9290 | |
| }, | |
| { | |
| "epoch": 2.14, | |
| "learning_rate": 0.0003974597995805174, | |
| "loss": 0.5596, | |
| "step": 9300 | |
| }, | |
| { | |
| "epoch": 2.14, | |
| "learning_rate": 0.0003973432766254952, | |
| "loss": 0.4777, | |
| "step": 9310 | |
| }, | |
| { | |
| "epoch": 2.15, | |
| "learning_rate": 0.0003972267536704731, | |
| "loss": 0.5863, | |
| "step": 9320 | |
| }, | |
| { | |
| "epoch": 2.15, | |
| "learning_rate": 0.0003971102307154509, | |
| "loss": 0.6219, | |
| "step": 9330 | |
| }, | |
| { | |
| "epoch": 2.15, | |
| "learning_rate": 0.0003969937077604288, | |
| "loss": 0.5444, | |
| "step": 9340 | |
| }, | |
| { | |
| "epoch": 2.15, | |
| "learning_rate": 0.00039687718480540667, | |
| "loss": 0.4376, | |
| "step": 9350 | |
| }, | |
| { | |
| "epoch": 2.16, | |
| "learning_rate": 0.0003967606618503845, | |
| "loss": 0.6091, | |
| "step": 9360 | |
| }, | |
| { | |
| "epoch": 2.16, | |
| "learning_rate": 0.00039664413889536237, | |
| "loss": 0.4934, | |
| "step": 9370 | |
| }, | |
| { | |
| "epoch": 2.16, | |
| "learning_rate": 0.00039652761594034027, | |
| "loss": 0.5768, | |
| "step": 9380 | |
| }, | |
| { | |
| "epoch": 2.16, | |
| "learning_rate": 0.0003964110929853181, | |
| "loss": 0.5099, | |
| "step": 9390 | |
| }, | |
| { | |
| "epoch": 2.17, | |
| "learning_rate": 0.00039629457003029597, | |
| "loss": 0.577, | |
| "step": 9400 | |
| }, | |
| { | |
| "epoch": 2.17, | |
| "learning_rate": 0.0003961780470752738, | |
| "loss": 0.5887, | |
| "step": 9410 | |
| }, | |
| { | |
| "epoch": 2.17, | |
| "learning_rate": 0.0003960615241202517, | |
| "loss": 0.5794, | |
| "step": 9420 | |
| }, | |
| { | |
| "epoch": 2.17, | |
| "learning_rate": 0.00039594500116522957, | |
| "loss": 0.5668, | |
| "step": 9430 | |
| }, | |
| { | |
| "epoch": 2.17, | |
| "learning_rate": 0.0003958284782102074, | |
| "loss": 0.4765, | |
| "step": 9440 | |
| }, | |
| { | |
| "epoch": 2.18, | |
| "learning_rate": 0.00039571195525518526, | |
| "loss": 0.5262, | |
| "step": 9450 | |
| }, | |
| { | |
| "epoch": 2.18, | |
| "learning_rate": 0.00039559543230016317, | |
| "loss": 0.6219, | |
| "step": 9460 | |
| }, | |
| { | |
| "epoch": 2.18, | |
| "learning_rate": 0.000395478909345141, | |
| "loss": 0.5818, | |
| "step": 9470 | |
| }, | |
| { | |
| "epoch": 2.18, | |
| "learning_rate": 0.00039536238639011886, | |
| "loss": 0.5223, | |
| "step": 9480 | |
| }, | |
| { | |
| "epoch": 2.19, | |
| "learning_rate": 0.0003952458634350967, | |
| "loss": 0.5095, | |
| "step": 9490 | |
| }, | |
| { | |
| "epoch": 2.19, | |
| "learning_rate": 0.0003951293404800746, | |
| "loss": 0.5033, | |
| "step": 9500 | |
| }, | |
| { | |
| "epoch": 2.19, | |
| "learning_rate": 0.00039501281752505246, | |
| "loss": 0.5708, | |
| "step": 9510 | |
| }, | |
| { | |
| "epoch": 2.19, | |
| "learning_rate": 0.0003948962945700303, | |
| "loss": 0.5583, | |
| "step": 9520 | |
| }, | |
| { | |
| "epoch": 2.2, | |
| "learning_rate": 0.00039477977161500816, | |
| "loss": 0.5332, | |
| "step": 9530 | |
| }, | |
| { | |
| "epoch": 2.2, | |
| "learning_rate": 0.00039466324865998606, | |
| "loss": 0.6822, | |
| "step": 9540 | |
| }, | |
| { | |
| "epoch": 2.2, | |
| "learning_rate": 0.00039454672570496385, | |
| "loss": 0.4858, | |
| "step": 9550 | |
| }, | |
| { | |
| "epoch": 2.2, | |
| "learning_rate": 0.00039443020274994176, | |
| "loss": 0.5302, | |
| "step": 9560 | |
| }, | |
| { | |
| "epoch": 2.2, | |
| "learning_rate": 0.0003943136797949196, | |
| "loss": 0.5078, | |
| "step": 9570 | |
| }, | |
| { | |
| "epoch": 2.21, | |
| "learning_rate": 0.0003941971568398975, | |
| "loss": 0.5107, | |
| "step": 9580 | |
| }, | |
| { | |
| "epoch": 2.21, | |
| "learning_rate": 0.0003940806338848753, | |
| "loss": 0.5105, | |
| "step": 9590 | |
| }, | |
| { | |
| "epoch": 2.21, | |
| "learning_rate": 0.0003939641109298532, | |
| "loss": 0.6579, | |
| "step": 9600 | |
| }, | |
| { | |
| "epoch": 2.21, | |
| "learning_rate": 0.00039384758797483105, | |
| "loss": 0.5097, | |
| "step": 9610 | |
| }, | |
| { | |
| "epoch": 2.22, | |
| "learning_rate": 0.00039373106501980896, | |
| "loss": 0.575, | |
| "step": 9620 | |
| }, | |
| { | |
| "epoch": 2.22, | |
| "learning_rate": 0.00039361454206478675, | |
| "loss": 0.6123, | |
| "step": 9630 | |
| }, | |
| { | |
| "epoch": 2.22, | |
| "learning_rate": 0.00039349801910976465, | |
| "loss": 0.5684, | |
| "step": 9640 | |
| }, | |
| { | |
| "epoch": 2.22, | |
| "learning_rate": 0.0003933814961547425, | |
| "loss": 0.4895, | |
| "step": 9650 | |
| }, | |
| { | |
| "epoch": 2.23, | |
| "learning_rate": 0.0003932649731997204, | |
| "loss": 0.5427, | |
| "step": 9660 | |
| }, | |
| { | |
| "epoch": 2.23, | |
| "learning_rate": 0.0003931484502446982, | |
| "loss": 0.577, | |
| "step": 9670 | |
| }, | |
| { | |
| "epoch": 2.23, | |
| "learning_rate": 0.00039303192728967605, | |
| "loss": 0.6597, | |
| "step": 9680 | |
| }, | |
| { | |
| "epoch": 2.23, | |
| "learning_rate": 0.00039291540433465395, | |
| "loss": 0.5126, | |
| "step": 9690 | |
| }, | |
| { | |
| "epoch": 2.23, | |
| "learning_rate": 0.0003927988813796318, | |
| "loss": 0.5427, | |
| "step": 9700 | |
| }, | |
| { | |
| "epoch": 2.24, | |
| "learning_rate": 0.00039268235842460965, | |
| "loss": 0.5729, | |
| "step": 9710 | |
| }, | |
| { | |
| "epoch": 2.24, | |
| "learning_rate": 0.0003925658354695875, | |
| "loss": 0.5166, | |
| "step": 9720 | |
| }, | |
| { | |
| "epoch": 2.24, | |
| "learning_rate": 0.0003924493125145654, | |
| "loss": 0.4773, | |
| "step": 9730 | |
| }, | |
| { | |
| "epoch": 2.24, | |
| "learning_rate": 0.0003923327895595432, | |
| "loss": 0.6146, | |
| "step": 9740 | |
| }, | |
| { | |
| "epoch": 2.25, | |
| "learning_rate": 0.0003922162666045211, | |
| "loss": 0.5842, | |
| "step": 9750 | |
| }, | |
| { | |
| "epoch": 2.25, | |
| "learning_rate": 0.00039209974364949894, | |
| "loss": 0.5618, | |
| "step": 9760 | |
| }, | |
| { | |
| "epoch": 2.25, | |
| "learning_rate": 0.00039198322069447684, | |
| "loss": 0.5513, | |
| "step": 9770 | |
| }, | |
| { | |
| "epoch": 2.25, | |
| "learning_rate": 0.00039186669773945464, | |
| "loss": 0.4993, | |
| "step": 9780 | |
| }, | |
| { | |
| "epoch": 2.26, | |
| "learning_rate": 0.00039175017478443254, | |
| "loss": 0.632, | |
| "step": 9790 | |
| }, | |
| { | |
| "epoch": 2.26, | |
| "learning_rate": 0.0003916336518294104, | |
| "loss": 0.5667, | |
| "step": 9800 | |
| }, | |
| { | |
| "epoch": 2.26, | |
| "learning_rate": 0.0003915171288743883, | |
| "loss": 0.5593, | |
| "step": 9810 | |
| }, | |
| { | |
| "epoch": 2.26, | |
| "learning_rate": 0.0003914006059193661, | |
| "loss": 0.5384, | |
| "step": 9820 | |
| }, | |
| { | |
| "epoch": 2.26, | |
| "learning_rate": 0.000391284082964344, | |
| "loss": 0.4804, | |
| "step": 9830 | |
| }, | |
| { | |
| "epoch": 2.27, | |
| "learning_rate": 0.00039116756000932184, | |
| "loss": 0.5944, | |
| "step": 9840 | |
| }, | |
| { | |
| "epoch": 2.27, | |
| "learning_rate": 0.00039105103705429974, | |
| "loss": 0.5851, | |
| "step": 9850 | |
| }, | |
| { | |
| "epoch": 2.27, | |
| "learning_rate": 0.00039093451409927753, | |
| "loss": 0.4737, | |
| "step": 9860 | |
| }, | |
| { | |
| "epoch": 2.27, | |
| "learning_rate": 0.00039081799114425544, | |
| "loss": 0.645, | |
| "step": 9870 | |
| }, | |
| { | |
| "epoch": 2.28, | |
| "learning_rate": 0.0003907014681892333, | |
| "loss": 0.6055, | |
| "step": 9880 | |
| }, | |
| { | |
| "epoch": 2.28, | |
| "learning_rate": 0.0003905849452342112, | |
| "loss": 0.4633, | |
| "step": 9890 | |
| }, | |
| { | |
| "epoch": 2.28, | |
| "learning_rate": 0.000390468422279189, | |
| "loss": 0.6038, | |
| "step": 9900 | |
| }, | |
| { | |
| "epoch": 2.28, | |
| "learning_rate": 0.0003903518993241669, | |
| "loss": 0.5083, | |
| "step": 9910 | |
| }, | |
| { | |
| "epoch": 2.29, | |
| "learning_rate": 0.00039023537636914473, | |
| "loss": 0.588, | |
| "step": 9920 | |
| }, | |
| { | |
| "epoch": 2.29, | |
| "learning_rate": 0.0003901188534141226, | |
| "loss": 0.6723, | |
| "step": 9930 | |
| }, | |
| { | |
| "epoch": 2.29, | |
| "learning_rate": 0.00039000233045910043, | |
| "loss": 0.513, | |
| "step": 9940 | |
| }, | |
| { | |
| "epoch": 2.29, | |
| "learning_rate": 0.00038988580750407833, | |
| "loss": 0.5506, | |
| "step": 9950 | |
| }, | |
| { | |
| "epoch": 2.29, | |
| "learning_rate": 0.0003897692845490562, | |
| "loss": 0.595, | |
| "step": 9960 | |
| }, | |
| { | |
| "epoch": 2.3, | |
| "learning_rate": 0.00038965276159403403, | |
| "loss": 0.548, | |
| "step": 9970 | |
| }, | |
| { | |
| "epoch": 2.3, | |
| "learning_rate": 0.0003895362386390119, | |
| "loss": 0.4758, | |
| "step": 9980 | |
| }, | |
| { | |
| "epoch": 2.3, | |
| "learning_rate": 0.0003894197156839898, | |
| "loss": 0.5779, | |
| "step": 9990 | |
| }, | |
| { | |
| "epoch": 2.3, | |
| "learning_rate": 0.00038930319272896763, | |
| "loss": 0.5395, | |
| "step": 10000 | |
| }, | |
| { | |
| "epoch": 2.31, | |
| "learning_rate": 0.0003891866697739455, | |
| "loss": 0.6145, | |
| "step": 10010 | |
| }, | |
| { | |
| "epoch": 2.31, | |
| "learning_rate": 0.0003890701468189233, | |
| "loss": 0.598, | |
| "step": 10020 | |
| }, | |
| { | |
| "epoch": 2.31, | |
| "learning_rate": 0.00038895362386390123, | |
| "loss": 0.6676, | |
| "step": 10030 | |
| }, | |
| { | |
| "epoch": 2.31, | |
| "learning_rate": 0.0003888371009088791, | |
| "loss": 0.5585, | |
| "step": 10040 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "learning_rate": 0.0003887205779538569, | |
| "loss": 0.5398, | |
| "step": 10050 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "learning_rate": 0.00038860405499883477, | |
| "loss": 0.5792, | |
| "step": 10060 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "learning_rate": 0.0003884875320438127, | |
| "loss": 0.5499, | |
| "step": 10070 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "learning_rate": 0.00038837100908879047, | |
| "loss": 0.5817, | |
| "step": 10080 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "learning_rate": 0.00038825448613376837, | |
| "loss": 0.6104, | |
| "step": 10090 | |
| }, | |
| { | |
| "epoch": 2.33, | |
| "learning_rate": 0.0003881379631787462, | |
| "loss": 0.6221, | |
| "step": 10100 | |
| }, | |
| { | |
| "epoch": 2.33, | |
| "learning_rate": 0.0003880214402237241, | |
| "loss": 0.5982, | |
| "step": 10110 | |
| }, | |
| { | |
| "epoch": 2.33, | |
| "learning_rate": 0.0003879049172687019, | |
| "loss": 0.5207, | |
| "step": 10120 | |
| }, | |
| { | |
| "epoch": 2.33, | |
| "learning_rate": 0.0003877883943136798, | |
| "loss": 0.5528, | |
| "step": 10130 | |
| }, | |
| { | |
| "epoch": 2.34, | |
| "learning_rate": 0.00038767187135865767, | |
| "loss": 0.5491, | |
| "step": 10140 | |
| }, | |
| { | |
| "epoch": 2.34, | |
| "learning_rate": 0.00038755534840363557, | |
| "loss": 0.6077, | |
| "step": 10150 | |
| }, | |
| { | |
| "epoch": 2.34, | |
| "learning_rate": 0.00038743882544861336, | |
| "loss": 0.4959, | |
| "step": 10160 | |
| }, | |
| { | |
| "epoch": 2.34, | |
| "learning_rate": 0.0003873223024935912, | |
| "loss": 0.5356, | |
| "step": 10170 | |
| }, | |
| { | |
| "epoch": 2.34, | |
| "learning_rate": 0.0003872057795385691, | |
| "loss": 0.5207, | |
| "step": 10180 | |
| }, | |
| { | |
| "epoch": 2.35, | |
| "learning_rate": 0.00038708925658354696, | |
| "loss": 0.4693, | |
| "step": 10190 | |
| }, | |
| { | |
| "epoch": 2.35, | |
| "learning_rate": 0.0003869727336285248, | |
| "loss": 0.5714, | |
| "step": 10200 | |
| }, | |
| { | |
| "epoch": 2.35, | |
| "learning_rate": 0.00038685621067350266, | |
| "loss": 0.5049, | |
| "step": 10210 | |
| }, | |
| { | |
| "epoch": 2.35, | |
| "learning_rate": 0.00038673968771848056, | |
| "loss": 0.6123, | |
| "step": 10220 | |
| }, | |
| { | |
| "epoch": 2.36, | |
| "learning_rate": 0.0003866231647634584, | |
| "loss": 0.4514, | |
| "step": 10230 | |
| }, | |
| { | |
| "epoch": 2.36, | |
| "learning_rate": 0.00038650664180843626, | |
| "loss": 0.5208, | |
| "step": 10240 | |
| }, | |
| { | |
| "epoch": 2.36, | |
| "learning_rate": 0.0003863901188534141, | |
| "loss": 0.6155, | |
| "step": 10250 | |
| }, | |
| { | |
| "epoch": 2.36, | |
| "learning_rate": 0.000386273595898392, | |
| "loss": 0.4555, | |
| "step": 10260 | |
| }, | |
| { | |
| "epoch": 2.37, | |
| "learning_rate": 0.0003861570729433698, | |
| "loss": 0.475, | |
| "step": 10270 | |
| }, | |
| { | |
| "epoch": 2.37, | |
| "learning_rate": 0.0003860405499883477, | |
| "loss": 0.5104, | |
| "step": 10280 | |
| }, | |
| { | |
| "epoch": 2.37, | |
| "learning_rate": 0.00038592402703332556, | |
| "loss": 0.5335, | |
| "step": 10290 | |
| }, | |
| { | |
| "epoch": 2.37, | |
| "learning_rate": 0.00038580750407830346, | |
| "loss": 0.5669, | |
| "step": 10300 | |
| }, | |
| { | |
| "epoch": 2.37, | |
| "learning_rate": 0.00038569098112328125, | |
| "loss": 0.488, | |
| "step": 10310 | |
| }, | |
| { | |
| "epoch": 2.38, | |
| "learning_rate": 0.00038557445816825916, | |
| "loss": 0.5095, | |
| "step": 10320 | |
| }, | |
| { | |
| "epoch": 2.38, | |
| "learning_rate": 0.000385457935213237, | |
| "loss": 0.5865, | |
| "step": 10330 | |
| }, | |
| { | |
| "epoch": 2.38, | |
| "learning_rate": 0.0003853414122582149, | |
| "loss": 0.5239, | |
| "step": 10340 | |
| }, | |
| { | |
| "epoch": 2.38, | |
| "learning_rate": 0.0003852248893031927, | |
| "loss": 0.6548, | |
| "step": 10350 | |
| }, | |
| { | |
| "epoch": 2.39, | |
| "learning_rate": 0.0003851083663481706, | |
| "loss": 0.5929, | |
| "step": 10360 | |
| }, | |
| { | |
| "epoch": 2.39, | |
| "learning_rate": 0.00038499184339314845, | |
| "loss": 0.5913, | |
| "step": 10370 | |
| }, | |
| { | |
| "epoch": 2.39, | |
| "learning_rate": 0.00038487532043812635, | |
| "loss": 0.5078, | |
| "step": 10380 | |
| }, | |
| { | |
| "epoch": 2.39, | |
| "learning_rate": 0.00038475879748310415, | |
| "loss": 0.5233, | |
| "step": 10390 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "learning_rate": 0.00038464227452808205, | |
| "loss": 0.4629, | |
| "step": 10400 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "learning_rate": 0.0003845257515730599, | |
| "loss": 0.5157, | |
| "step": 10410 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "learning_rate": 0.0003844092286180378, | |
| "loss": 0.5218, | |
| "step": 10420 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "learning_rate": 0.0003842927056630156, | |
| "loss": 0.5072, | |
| "step": 10430 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "learning_rate": 0.0003841761827079935, | |
| "loss": 0.6127, | |
| "step": 10440 | |
| }, | |
| { | |
| "epoch": 2.41, | |
| "learning_rate": 0.00038405965975297135, | |
| "loss": 0.5584, | |
| "step": 10450 | |
| }, | |
| { | |
| "epoch": 2.41, | |
| "learning_rate": 0.0003839431367979492, | |
| "loss": 0.5054, | |
| "step": 10460 | |
| }, | |
| { | |
| "epoch": 2.41, | |
| "learning_rate": 0.00038382661384292704, | |
| "loss": 0.5534, | |
| "step": 10470 | |
| }, | |
| { | |
| "epoch": 2.41, | |
| "learning_rate": 0.00038371009088790495, | |
| "loss": 0.5365, | |
| "step": 10480 | |
| }, | |
| { | |
| "epoch": 2.42, | |
| "learning_rate": 0.0003835935679328828, | |
| "loss": 0.5946, | |
| "step": 10490 | |
| }, | |
| { | |
| "epoch": 2.42, | |
| "learning_rate": 0.00038347704497786064, | |
| "loss": 0.6109, | |
| "step": 10500 | |
| }, | |
| { | |
| "epoch": 2.42, | |
| "learning_rate": 0.0003833605220228385, | |
| "loss": 0.6428, | |
| "step": 10510 | |
| }, | |
| { | |
| "epoch": 2.42, | |
| "learning_rate": 0.0003832439990678164, | |
| "loss": 0.5929, | |
| "step": 10520 | |
| }, | |
| { | |
| "epoch": 2.43, | |
| "learning_rate": 0.00038312747611279424, | |
| "loss": 0.5548, | |
| "step": 10530 | |
| }, | |
| { | |
| "epoch": 2.43, | |
| "learning_rate": 0.0003830109531577721, | |
| "loss": 0.5551, | |
| "step": 10540 | |
| }, | |
| { | |
| "epoch": 2.43, | |
| "learning_rate": 0.00038289443020274994, | |
| "loss": 0.6649, | |
| "step": 10550 | |
| }, | |
| { | |
| "epoch": 2.43, | |
| "learning_rate": 0.00038277790724772784, | |
| "loss": 0.5881, | |
| "step": 10560 | |
| }, | |
| { | |
| "epoch": 2.43, | |
| "learning_rate": 0.0003826613842927057, | |
| "loss": 0.6012, | |
| "step": 10570 | |
| }, | |
| { | |
| "epoch": 2.44, | |
| "learning_rate": 0.00038254486133768354, | |
| "loss": 0.5753, | |
| "step": 10580 | |
| }, | |
| { | |
| "epoch": 2.44, | |
| "learning_rate": 0.0003824283383826614, | |
| "loss": 0.5004, | |
| "step": 10590 | |
| }, | |
| { | |
| "epoch": 2.44, | |
| "learning_rate": 0.0003823118154276393, | |
| "loss": 0.6546, | |
| "step": 10600 | |
| }, | |
| { | |
| "epoch": 2.44, | |
| "learning_rate": 0.00038219529247261714, | |
| "loss": 0.6297, | |
| "step": 10610 | |
| }, | |
| { | |
| "epoch": 2.45, | |
| "learning_rate": 0.00038207876951759493, | |
| "loss": 0.5892, | |
| "step": 10620 | |
| }, | |
| { | |
| "epoch": 2.45, | |
| "learning_rate": 0.00038196224656257284, | |
| "loss": 0.5858, | |
| "step": 10630 | |
| }, | |
| { | |
| "epoch": 2.45, | |
| "learning_rate": 0.0003818457236075507, | |
| "loss": 0.5571, | |
| "step": 10640 | |
| }, | |
| { | |
| "epoch": 2.45, | |
| "learning_rate": 0.00038172920065252853, | |
| "loss": 0.6245, | |
| "step": 10650 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "learning_rate": 0.0003816126776975064, | |
| "loss": 0.5944, | |
| "step": 10660 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "learning_rate": 0.0003814961547424843, | |
| "loss": 0.5134, | |
| "step": 10670 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "learning_rate": 0.00038137963178746213, | |
| "loss": 0.6271, | |
| "step": 10680 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "learning_rate": 0.00038126310883244, | |
| "loss": 0.5594, | |
| "step": 10690 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "learning_rate": 0.00038114658587741783, | |
| "loss": 0.6394, | |
| "step": 10700 | |
| }, | |
| { | |
| "epoch": 2.47, | |
| "learning_rate": 0.00038103006292239573, | |
| "loss": 0.5259, | |
| "step": 10710 | |
| }, | |
| { | |
| "epoch": 2.47, | |
| "learning_rate": 0.0003809135399673736, | |
| "loss": 0.5958, | |
| "step": 10720 | |
| }, | |
| { | |
| "epoch": 2.47, | |
| "learning_rate": 0.00038079701701235143, | |
| "loss": 0.6885, | |
| "step": 10730 | |
| }, | |
| { | |
| "epoch": 2.47, | |
| "learning_rate": 0.0003806804940573293, | |
| "loss": 0.5735, | |
| "step": 10740 | |
| }, | |
| { | |
| "epoch": 2.48, | |
| "learning_rate": 0.0003805639711023072, | |
| "loss": 0.5948, | |
| "step": 10750 | |
| }, | |
| { | |
| "epoch": 2.48, | |
| "learning_rate": 0.000380447448147285, | |
| "loss": 0.4641, | |
| "step": 10760 | |
| }, | |
| { | |
| "epoch": 2.48, | |
| "learning_rate": 0.0003803309251922629, | |
| "loss": 0.5771, | |
| "step": 10770 | |
| }, | |
| { | |
| "epoch": 2.48, | |
| "learning_rate": 0.0003802144022372407, | |
| "loss": 0.5432, | |
| "step": 10780 | |
| }, | |
| { | |
| "epoch": 2.49, | |
| "learning_rate": 0.0003800978792822186, | |
| "loss": 0.5765, | |
| "step": 10790 | |
| }, | |
| { | |
| "epoch": 2.49, | |
| "learning_rate": 0.0003799813563271965, | |
| "loss": 0.6005, | |
| "step": 10800 | |
| }, | |
| { | |
| "epoch": 2.49, | |
| "learning_rate": 0.0003798648333721743, | |
| "loss": 0.4986, | |
| "step": 10810 | |
| }, | |
| { | |
| "epoch": 2.49, | |
| "learning_rate": 0.00037974831041715217, | |
| "loss": 0.5309, | |
| "step": 10820 | |
| }, | |
| { | |
| "epoch": 2.49, | |
| "learning_rate": 0.0003796317874621301, | |
| "loss": 0.5259, | |
| "step": 10830 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "learning_rate": 0.00037951526450710787, | |
| "loss": 0.4959, | |
| "step": 10840 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "learning_rate": 0.00037939874155208577, | |
| "loss": 0.5123, | |
| "step": 10850 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "learning_rate": 0.0003792822185970636, | |
| "loss": 0.5946, | |
| "step": 10860 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "learning_rate": 0.0003791656956420415, | |
| "loss": 0.5913, | |
| "step": 10870 | |
| }, | |
| { | |
| "epoch": 2.51, | |
| "learning_rate": 0.0003790491726870193, | |
| "loss": 0.5857, | |
| "step": 10880 | |
| }, | |
| { | |
| "epoch": 2.51, | |
| "learning_rate": 0.0003789326497319972, | |
| "loss": 0.5496, | |
| "step": 10890 | |
| }, | |
| { | |
| "epoch": 2.51, | |
| "learning_rate": 0.00037881612677697507, | |
| "loss": 0.5672, | |
| "step": 10900 | |
| }, | |
| { | |
| "epoch": 2.51, | |
| "learning_rate": 0.00037869960382195297, | |
| "loss": 0.5571, | |
| "step": 10910 | |
| }, | |
| { | |
| "epoch": 2.52, | |
| "learning_rate": 0.00037858308086693076, | |
| "loss": 0.5366, | |
| "step": 10920 | |
| }, | |
| { | |
| "epoch": 2.52, | |
| "learning_rate": 0.00037846655791190867, | |
| "loss": 0.5317, | |
| "step": 10930 | |
| }, | |
| { | |
| "epoch": 2.52, | |
| "learning_rate": 0.0003783500349568865, | |
| "loss": 0.506, | |
| "step": 10940 | |
| }, | |
| { | |
| "epoch": 2.52, | |
| "learning_rate": 0.0003782335120018644, | |
| "loss": 0.6632, | |
| "step": 10950 | |
| }, | |
| { | |
| "epoch": 2.52, | |
| "learning_rate": 0.0003781169890468422, | |
| "loss": 0.548, | |
| "step": 10960 | |
| }, | |
| { | |
| "epoch": 2.53, | |
| "learning_rate": 0.0003780004660918201, | |
| "loss": 0.4903, | |
| "step": 10970 | |
| }, | |
| { | |
| "epoch": 2.53, | |
| "learning_rate": 0.00037788394313679796, | |
| "loss": 0.4934, | |
| "step": 10980 | |
| }, | |
| { | |
| "epoch": 2.53, | |
| "learning_rate": 0.00037776742018177586, | |
| "loss": 0.5766, | |
| "step": 10990 | |
| }, | |
| { | |
| "epoch": 2.53, | |
| "learning_rate": 0.00037765089722675366, | |
| "loss": 0.5807, | |
| "step": 11000 | |
| }, | |
| { | |
| "epoch": 2.54, | |
| "learning_rate": 0.00037753437427173156, | |
| "loss": 0.537, | |
| "step": 11010 | |
| }, | |
| { | |
| "epoch": 2.54, | |
| "learning_rate": 0.0003774178513167094, | |
| "loss": 0.4981, | |
| "step": 11020 | |
| }, | |
| { | |
| "epoch": 2.54, | |
| "learning_rate": 0.00037730132836168726, | |
| "loss": 0.6566, | |
| "step": 11030 | |
| }, | |
| { | |
| "epoch": 2.54, | |
| "learning_rate": 0.0003771848054066651, | |
| "loss": 0.5681, | |
| "step": 11040 | |
| }, | |
| { | |
| "epoch": 2.55, | |
| "learning_rate": 0.000377068282451643, | |
| "loss": 0.5734, | |
| "step": 11050 | |
| }, | |
| { | |
| "epoch": 2.55, | |
| "learning_rate": 0.00037695175949662086, | |
| "loss": 0.4979, | |
| "step": 11060 | |
| }, | |
| { | |
| "epoch": 2.55, | |
| "learning_rate": 0.0003768352365415987, | |
| "loss": 0.5895, | |
| "step": 11070 | |
| }, | |
| { | |
| "epoch": 2.55, | |
| "learning_rate": 0.00037671871358657655, | |
| "loss": 0.6714, | |
| "step": 11080 | |
| }, | |
| { | |
| "epoch": 2.55, | |
| "learning_rate": 0.00037660219063155446, | |
| "loss": 0.66, | |
| "step": 11090 | |
| }, | |
| { | |
| "epoch": 2.56, | |
| "learning_rate": 0.0003764856676765323, | |
| "loss": 0.5396, | |
| "step": 11100 | |
| }, | |
| { | |
| "epoch": 2.56, | |
| "learning_rate": 0.0003763691447215101, | |
| "loss": 0.5783, | |
| "step": 11110 | |
| }, | |
| { | |
| "epoch": 2.56, | |
| "learning_rate": 0.000376252621766488, | |
| "loss": 0.5514, | |
| "step": 11120 | |
| }, | |
| { | |
| "epoch": 2.56, | |
| "learning_rate": 0.00037613609881146585, | |
| "loss": 0.7365, | |
| "step": 11130 | |
| }, | |
| { | |
| "epoch": 2.57, | |
| "learning_rate": 0.00037601957585644375, | |
| "loss": 0.602, | |
| "step": 11140 | |
| }, | |
| { | |
| "epoch": 2.57, | |
| "learning_rate": 0.00037590305290142155, | |
| "loss": 0.7115, | |
| "step": 11150 | |
| }, | |
| { | |
| "epoch": 2.57, | |
| "learning_rate": 0.00037578652994639945, | |
| "loss": 0.5712, | |
| "step": 11160 | |
| }, | |
| { | |
| "epoch": 2.57, | |
| "learning_rate": 0.0003756700069913773, | |
| "loss": 0.6067, | |
| "step": 11170 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "learning_rate": 0.0003755534840363552, | |
| "loss": 0.5876, | |
| "step": 11180 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "learning_rate": 0.000375436961081333, | |
| "loss": 0.5376, | |
| "step": 11190 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "learning_rate": 0.0003753204381263109, | |
| "loss": 0.686, | |
| "step": 11200 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "learning_rate": 0.00037520391517128875, | |
| "loss": 0.511, | |
| "step": 11210 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "learning_rate": 0.0003750873922162666, | |
| "loss": 0.5709, | |
| "step": 11220 | |
| }, | |
| { | |
| "epoch": 2.59, | |
| "learning_rate": 0.00037497086926124444, | |
| "loss": 0.5382, | |
| "step": 11230 | |
| }, | |
| { | |
| "epoch": 2.59, | |
| "learning_rate": 0.00037485434630622235, | |
| "loss": 0.6335, | |
| "step": 11240 | |
| }, | |
| { | |
| "epoch": 2.59, | |
| "learning_rate": 0.0003747378233512002, | |
| "loss": 0.6076, | |
| "step": 11250 | |
| }, | |
| { | |
| "epoch": 2.59, | |
| "learning_rate": 0.00037462130039617804, | |
| "loss": 0.741, | |
| "step": 11260 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "learning_rate": 0.0003745047774411559, | |
| "loss": 0.6341, | |
| "step": 11270 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "learning_rate": 0.0003743882544861338, | |
| "loss": 0.5657, | |
| "step": 11280 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "learning_rate": 0.00037427173153111164, | |
| "loss": 0.6267, | |
| "step": 11290 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "learning_rate": 0.0003741552085760895, | |
| "loss": 0.6451, | |
| "step": 11300 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "learning_rate": 0.00037403868562106734, | |
| "loss": 0.5806, | |
| "step": 11310 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "learning_rate": 0.00037392216266604524, | |
| "loss": 0.6063, | |
| "step": 11320 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "learning_rate": 0.0003738056397110231, | |
| "loss": 0.6284, | |
| "step": 11330 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "learning_rate": 0.00037368911675600094, | |
| "loss": 0.4528, | |
| "step": 11340 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "learning_rate": 0.0003735725938009788, | |
| "loss": 0.6778, | |
| "step": 11350 | |
| }, | |
| { | |
| "epoch": 2.62, | |
| "learning_rate": 0.0003734560708459567, | |
| "loss": 0.6654, | |
| "step": 11360 | |
| }, | |
| { | |
| "epoch": 2.62, | |
| "learning_rate": 0.0003733395478909345, | |
| "loss": 0.5415, | |
| "step": 11370 | |
| }, | |
| { | |
| "epoch": 2.62, | |
| "learning_rate": 0.0003732230249359124, | |
| "loss": 0.537, | |
| "step": 11380 | |
| }, | |
| { | |
| "epoch": 2.62, | |
| "learning_rate": 0.00037310650198089023, | |
| "loss": 0.5016, | |
| "step": 11390 | |
| }, | |
| { | |
| "epoch": 2.63, | |
| "learning_rate": 0.00037298997902586814, | |
| "loss": 0.5729, | |
| "step": 11400 | |
| }, | |
| { | |
| "epoch": 2.63, | |
| "learning_rate": 0.00037287345607084593, | |
| "loss": 0.5655, | |
| "step": 11410 | |
| }, | |
| { | |
| "epoch": 2.63, | |
| "learning_rate": 0.00037275693311582383, | |
| "loss": 0.7096, | |
| "step": 11420 | |
| }, | |
| { | |
| "epoch": 2.63, | |
| "learning_rate": 0.0003726404101608017, | |
| "loss": 0.6308, | |
| "step": 11430 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "learning_rate": 0.0003725238872057796, | |
| "loss": 0.505, | |
| "step": 11440 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "learning_rate": 0.0003724073642507574, | |
| "loss": 0.6018, | |
| "step": 11450 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "learning_rate": 0.0003722908412957353, | |
| "loss": 0.5669, | |
| "step": 11460 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "learning_rate": 0.00037217431834071313, | |
| "loss": 0.6607, | |
| "step": 11470 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "learning_rate": 0.00037205779538569103, | |
| "loss": 0.4848, | |
| "step": 11480 | |
| }, | |
| { | |
| "epoch": 2.65, | |
| "learning_rate": 0.0003719412724306688, | |
| "loss": 0.6305, | |
| "step": 11490 | |
| }, | |
| { | |
| "epoch": 2.65, | |
| "learning_rate": 0.00037182474947564673, | |
| "loss": 0.6011, | |
| "step": 11500 | |
| }, | |
| { | |
| "epoch": 2.65, | |
| "learning_rate": 0.0003717082265206246, | |
| "loss": 0.5572, | |
| "step": 11510 | |
| }, | |
| { | |
| "epoch": 2.65, | |
| "learning_rate": 0.0003715917035656025, | |
| "loss": 0.5913, | |
| "step": 11520 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "learning_rate": 0.0003714751806105803, | |
| "loss": 0.6331, | |
| "step": 11530 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "learning_rate": 0.0003713586576555582, | |
| "loss": 0.5433, | |
| "step": 11540 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "learning_rate": 0.000371242134700536, | |
| "loss": 0.5089, | |
| "step": 11550 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "learning_rate": 0.0003711256117455139, | |
| "loss": 0.4861, | |
| "step": 11560 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "learning_rate": 0.0003710090887904917, | |
| "loss": 0.602, | |
| "step": 11570 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "learning_rate": 0.0003708925658354696, | |
| "loss": 0.4407, | |
| "step": 11580 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "learning_rate": 0.00037077604288044747, | |
| "loss": 0.6049, | |
| "step": 11590 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "learning_rate": 0.00037065951992542527, | |
| "loss": 0.6089, | |
| "step": 11600 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "learning_rate": 0.00037054299697040317, | |
| "loss": 0.4972, | |
| "step": 11610 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "learning_rate": 0.000370426474015381, | |
| "loss": 0.5839, | |
| "step": 11620 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "learning_rate": 0.0003703099510603589, | |
| "loss": 0.5832, | |
| "step": 11630 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "learning_rate": 0.0003701934281053367, | |
| "loss": 0.5513, | |
| "step": 11640 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "learning_rate": 0.0003700769051503146, | |
| "loss": 0.5815, | |
| "step": 11650 | |
| }, | |
| { | |
| "epoch": 2.69, | |
| "learning_rate": 0.00036996038219529247, | |
| "loss": 0.6414, | |
| "step": 11660 | |
| }, | |
| { | |
| "epoch": 2.69, | |
| "learning_rate": 0.00036984385924027037, | |
| "loss": 0.4812, | |
| "step": 11670 | |
| }, | |
| { | |
| "epoch": 2.69, | |
| "learning_rate": 0.00036972733628524816, | |
| "loss": 0.5898, | |
| "step": 11680 | |
| }, | |
| { | |
| "epoch": 2.69, | |
| "learning_rate": 0.00036961081333022606, | |
| "loss": 0.6138, | |
| "step": 11690 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "learning_rate": 0.0003694942903752039, | |
| "loss": 0.5094, | |
| "step": 11700 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "learning_rate": 0.0003693777674201818, | |
| "loss": 0.6374, | |
| "step": 11710 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "learning_rate": 0.0003692612444651596, | |
| "loss": 0.5514, | |
| "step": 11720 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "learning_rate": 0.0003691447215101375, | |
| "loss": 0.4668, | |
| "step": 11730 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "learning_rate": 0.00036902819855511536, | |
| "loss": 0.5207, | |
| "step": 11740 | |
| }, | |
| { | |
| "epoch": 2.71, | |
| "learning_rate": 0.0003689116756000932, | |
| "loss": 0.5768, | |
| "step": 11750 | |
| }, | |
| { | |
| "epoch": 2.71, | |
| "learning_rate": 0.00036879515264507106, | |
| "loss": 0.5295, | |
| "step": 11760 | |
| }, | |
| { | |
| "epoch": 2.71, | |
| "learning_rate": 0.00036867862969004896, | |
| "loss": 0.5756, | |
| "step": 11770 | |
| }, | |
| { | |
| "epoch": 2.71, | |
| "learning_rate": 0.0003685621067350268, | |
| "loss": 0.5905, | |
| "step": 11780 | |
| }, | |
| { | |
| "epoch": 2.72, | |
| "learning_rate": 0.00036844558378000466, | |
| "loss": 0.5398, | |
| "step": 11790 | |
| }, | |
| { | |
| "epoch": 2.72, | |
| "learning_rate": 0.0003683290608249825, | |
| "loss": 0.5105, | |
| "step": 11800 | |
| }, | |
| { | |
| "epoch": 2.72, | |
| "learning_rate": 0.0003682125378699604, | |
| "loss": 0.449, | |
| "step": 11810 | |
| }, | |
| { | |
| "epoch": 2.72, | |
| "learning_rate": 0.00036809601491493826, | |
| "loss": 0.6313, | |
| "step": 11820 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "learning_rate": 0.0003679794919599161, | |
| "loss": 0.5288, | |
| "step": 11830 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "learning_rate": 0.00036786296900489395, | |
| "loss": 0.5334, | |
| "step": 11840 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "learning_rate": 0.00036774644604987186, | |
| "loss": 0.5581, | |
| "step": 11850 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "learning_rate": 0.0003676299230948497, | |
| "loss": 0.5738, | |
| "step": 11860 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "learning_rate": 0.00036751340013982755, | |
| "loss": 0.5783, | |
| "step": 11870 | |
| }, | |
| { | |
| "epoch": 2.74, | |
| "learning_rate": 0.0003673968771848054, | |
| "loss": 0.5406, | |
| "step": 11880 | |
| }, | |
| { | |
| "epoch": 2.74, | |
| "learning_rate": 0.0003672803542297833, | |
| "loss": 0.6214, | |
| "step": 11890 | |
| }, | |
| { | |
| "epoch": 2.74, | |
| "learning_rate": 0.00036716383127476115, | |
| "loss": 0.5412, | |
| "step": 11900 | |
| }, | |
| { | |
| "epoch": 2.74, | |
| "learning_rate": 0.000367047308319739, | |
| "loss": 0.6317, | |
| "step": 11910 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "learning_rate": 0.00036693078536471685, | |
| "loss": 0.617, | |
| "step": 11920 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "learning_rate": 0.00036681426240969475, | |
| "loss": 0.5979, | |
| "step": 11930 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "learning_rate": 0.00036669773945467255, | |
| "loss": 0.5684, | |
| "step": 11940 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "learning_rate": 0.00036658121649965045, | |
| "loss": 0.632, | |
| "step": 11950 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "learning_rate": 0.0003664646935446283, | |
| "loss": 0.544, | |
| "step": 11960 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "learning_rate": 0.0003663481705896062, | |
| "loss": 0.591, | |
| "step": 11970 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "learning_rate": 0.000366231647634584, | |
| "loss": 0.5811, | |
| "step": 11980 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "learning_rate": 0.0003661151246795619, | |
| "loss": 0.5066, | |
| "step": 11990 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "learning_rate": 0.00036599860172453974, | |
| "loss": 0.5227, | |
| "step": 12000 | |
| }, | |
| { | |
| "epoch": 2.77, | |
| "learning_rate": 0.00036588207876951765, | |
| "loss": 0.6378, | |
| "step": 12010 | |
| }, | |
| { | |
| "epoch": 2.77, | |
| "learning_rate": 0.00036576555581449544, | |
| "loss": 0.5862, | |
| "step": 12020 | |
| }, | |
| { | |
| "epoch": 2.77, | |
| "learning_rate": 0.00036564903285947334, | |
| "loss": 0.6071, | |
| "step": 12030 | |
| }, | |
| { | |
| "epoch": 2.77, | |
| "learning_rate": 0.0003655325099044512, | |
| "loss": 0.5584, | |
| "step": 12040 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "learning_rate": 0.0003654159869494291, | |
| "loss": 0.6494, | |
| "step": 12050 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "learning_rate": 0.0003652994639944069, | |
| "loss": 0.5216, | |
| "step": 12060 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "learning_rate": 0.0003651829410393848, | |
| "loss": 0.5979, | |
| "step": 12070 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "learning_rate": 0.00036506641808436264, | |
| "loss": 0.5455, | |
| "step": 12080 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "learning_rate": 0.0003649498951293405, | |
| "loss": 0.6383, | |
| "step": 12090 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "learning_rate": 0.00036483337217431834, | |
| "loss": 0.4883, | |
| "step": 12100 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "learning_rate": 0.0003647168492192962, | |
| "loss": 0.6385, | |
| "step": 12110 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "learning_rate": 0.0003646003262642741, | |
| "loss": 0.6161, | |
| "step": 12120 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "learning_rate": 0.0003644838033092519, | |
| "loss": 0.6531, | |
| "step": 12130 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "learning_rate": 0.0003643672803542298, | |
| "loss": 0.7073, | |
| "step": 12140 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "learning_rate": 0.00036425075739920763, | |
| "loss": 0.6292, | |
| "step": 12150 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "learning_rate": 0.00036413423444418553, | |
| "loss": 0.532, | |
| "step": 12160 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "learning_rate": 0.00036401771148916333, | |
| "loss": 0.5598, | |
| "step": 12170 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "learning_rate": 0.00036390118853414123, | |
| "loss": 0.5761, | |
| "step": 12180 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "learning_rate": 0.0003637846655791191, | |
| "loss": 0.6883, | |
| "step": 12190 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "learning_rate": 0.000363668142624097, | |
| "loss": 0.531, | |
| "step": 12200 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "learning_rate": 0.0003635516196690748, | |
| "loss": 0.5152, | |
| "step": 12210 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "learning_rate": 0.0003634350967140527, | |
| "loss": 0.4439, | |
| "step": 12220 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "learning_rate": 0.00036331857375903053, | |
| "loss": 0.6116, | |
| "step": 12230 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "learning_rate": 0.00036320205080400843, | |
| "loss": 0.575, | |
| "step": 12240 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "learning_rate": 0.0003630855278489862, | |
| "loss": 0.6519, | |
| "step": 12250 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "learning_rate": 0.00036296900489396413, | |
| "loss": 0.7286, | |
| "step": 12260 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "learning_rate": 0.000362852481938942, | |
| "loss": 0.6153, | |
| "step": 12270 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "learning_rate": 0.0003627359589839199, | |
| "loss": 0.6239, | |
| "step": 12280 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "learning_rate": 0.00036261943602889767, | |
| "loss": 0.5667, | |
| "step": 12290 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "learning_rate": 0.0003625029130738756, | |
| "loss": 0.5172, | |
| "step": 12300 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "learning_rate": 0.0003623863901188534, | |
| "loss": 0.536, | |
| "step": 12310 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "learning_rate": 0.00036226986716383127, | |
| "loss": 0.4811, | |
| "step": 12320 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "learning_rate": 0.0003621533442088091, | |
| "loss": 0.5181, | |
| "step": 12330 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "learning_rate": 0.000362036821253787, | |
| "loss": 0.6049, | |
| "step": 12340 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "learning_rate": 0.00036192029829876487, | |
| "loss": 0.6295, | |
| "step": 12350 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "learning_rate": 0.0003618037753437427, | |
| "loss": 0.5561, | |
| "step": 12360 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "learning_rate": 0.00036168725238872057, | |
| "loss": 0.5883, | |
| "step": 12370 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "learning_rate": 0.00036157072943369847, | |
| "loss": 0.5432, | |
| "step": 12380 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "learning_rate": 0.0003614542064786763, | |
| "loss": 0.4879, | |
| "step": 12390 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "learning_rate": 0.00036133768352365417, | |
| "loss": 0.5342, | |
| "step": 12400 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "learning_rate": 0.000361221160568632, | |
| "loss": 0.6041, | |
| "step": 12410 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "learning_rate": 0.0003611046376136099, | |
| "loss": 0.5651, | |
| "step": 12420 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "learning_rate": 0.00036098811465858777, | |
| "loss": 0.5147, | |
| "step": 12430 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "learning_rate": 0.0003608715917035656, | |
| "loss": 0.5625, | |
| "step": 12440 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "learning_rate": 0.00036075506874854346, | |
| "loss": 0.5852, | |
| "step": 12450 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "learning_rate": 0.00036063854579352137, | |
| "loss": 0.5484, | |
| "step": 12460 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "learning_rate": 0.0003605220228384992, | |
| "loss": 0.5484, | |
| "step": 12470 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "learning_rate": 0.00036040549988347706, | |
| "loss": 0.7071, | |
| "step": 12480 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "learning_rate": 0.0003602889769284549, | |
| "loss": 0.5853, | |
| "step": 12490 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "learning_rate": 0.0003601724539734328, | |
| "loss": 0.5038, | |
| "step": 12500 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "learning_rate": 0.0003600559310184106, | |
| "loss": 0.5228, | |
| "step": 12510 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "learning_rate": 0.0003599394080633885, | |
| "loss": 0.6229, | |
| "step": 12520 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "learning_rate": 0.00035982288510836636, | |
| "loss": 0.6315, | |
| "step": 12530 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "learning_rate": 0.00035970636215334426, | |
| "loss": 0.5097, | |
| "step": 12540 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "learning_rate": 0.00035958983919832206, | |
| "loss": 0.5466, | |
| "step": 12550 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "learning_rate": 0.0003594733162432999, | |
| "loss": 0.5656, | |
| "step": 12560 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "learning_rate": 0.0003593567932882778, | |
| "loss": 0.6161, | |
| "step": 12570 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "learning_rate": 0.00035924027033325565, | |
| "loss": 0.5725, | |
| "step": 12580 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "learning_rate": 0.0003591237473782335, | |
| "loss": 0.5583, | |
| "step": 12590 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "learning_rate": 0.00035900722442321135, | |
| "loss": 0.5669, | |
| "step": 12600 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "learning_rate": 0.00035889070146818925, | |
| "loss": 0.5418, | |
| "step": 12610 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "learning_rate": 0.0003587741785131671, | |
| "loss": 0.5785, | |
| "step": 12620 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "learning_rate": 0.00035865765555814495, | |
| "loss": 0.5889, | |
| "step": 12630 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "learning_rate": 0.0003585411326031228, | |
| "loss": 0.5348, | |
| "step": 12640 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "learning_rate": 0.0003584246096481007, | |
| "loss": 0.5271, | |
| "step": 12650 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "learning_rate": 0.0003583080866930785, | |
| "loss": 0.5257, | |
| "step": 12660 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "learning_rate": 0.0003581915637380564, | |
| "loss": 0.571, | |
| "step": 12670 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "learning_rate": 0.00035807504078303425, | |
| "loss": 0.553, | |
| "step": 12680 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "learning_rate": 0.00035795851782801215, | |
| "loss": 0.5103, | |
| "step": 12690 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "learning_rate": 0.00035784199487298994, | |
| "loss": 0.5788, | |
| "step": 12700 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "learning_rate": 0.00035772547191796785, | |
| "loss": 0.5475, | |
| "step": 12710 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "learning_rate": 0.0003576089489629457, | |
| "loss": 0.5787, | |
| "step": 12720 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "learning_rate": 0.0003574924260079236, | |
| "loss": 0.6733, | |
| "step": 12730 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "learning_rate": 0.0003573759030529014, | |
| "loss": 0.5088, | |
| "step": 12740 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "learning_rate": 0.0003572593800978793, | |
| "loss": 0.5526, | |
| "step": 12750 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "learning_rate": 0.00035714285714285714, | |
| "loss": 0.5531, | |
| "step": 12760 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "learning_rate": 0.00035702633418783505, | |
| "loss": 0.5685, | |
| "step": 12770 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "learning_rate": 0.00035690981123281284, | |
| "loss": 0.5849, | |
| "step": 12780 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "learning_rate": 0.00035679328827779074, | |
| "loss": 0.598, | |
| "step": 12790 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "learning_rate": 0.0003566767653227686, | |
| "loss": 0.6349, | |
| "step": 12800 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "learning_rate": 0.0003565602423677465, | |
| "loss": 0.5224, | |
| "step": 12810 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "learning_rate": 0.0003564437194127243, | |
| "loss": 0.5802, | |
| "step": 12820 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "learning_rate": 0.0003563271964577022, | |
| "loss": 0.6304, | |
| "step": 12830 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "learning_rate": 0.00035621067350268004, | |
| "loss": 0.6912, | |
| "step": 12840 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "learning_rate": 0.0003560941505476579, | |
| "loss": 0.5772, | |
| "step": 12850 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "learning_rate": 0.00035597762759263573, | |
| "loss": 0.5425, | |
| "step": 12860 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "learning_rate": 0.00035586110463761364, | |
| "loss": 0.6595, | |
| "step": 12870 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "learning_rate": 0.0003557445816825915, | |
| "loss": 0.5792, | |
| "step": 12880 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "learning_rate": 0.00035562805872756933, | |
| "loss": 0.5013, | |
| "step": 12890 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "learning_rate": 0.0003555115357725472, | |
| "loss": 0.7614, | |
| "step": 12900 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "learning_rate": 0.0003553950128175251, | |
| "loss": 0.5491, | |
| "step": 12910 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "learning_rate": 0.00035527848986250293, | |
| "loss": 0.5925, | |
| "step": 12920 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "learning_rate": 0.0003551619669074808, | |
| "loss": 0.4564, | |
| "step": 12930 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "learning_rate": 0.00035504544395245863, | |
| "loss": 0.5478, | |
| "step": 12940 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "learning_rate": 0.00035492892099743653, | |
| "loss": 0.567, | |
| "step": 12950 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "learning_rate": 0.0003548123980424144, | |
| "loss": 0.6399, | |
| "step": 12960 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "learning_rate": 0.00035469587508739223, | |
| "loss": 0.5897, | |
| "step": 12970 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "learning_rate": 0.0003545793521323701, | |
| "loss": 0.5591, | |
| "step": 12980 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "learning_rate": 0.000354462829177348, | |
| "loss": 0.597, | |
| "step": 12990 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "learning_rate": 0.00035434630622232583, | |
| "loss": 0.5599, | |
| "step": 13000 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "learning_rate": 0.0003542297832673037, | |
| "loss": 0.7182, | |
| "step": 13010 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "learning_rate": 0.0003541132603122815, | |
| "loss": 0.6147, | |
| "step": 13020 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "eval_Bleu_1": 0.004867795110078279, | |
| "eval_Bleu_2": 2.0016032894805002e-11, | |
| "eval_Bleu_3": 3.641044386384566e-14, | |
| "eval_Bleu_4": 1.688390378855272e-15, | |
| "eval_ROUGE_L": 0.0076157806660195025, | |
| "eval_cer": 0.4872366010539993, | |
| "eval_em": 0.5393049932523617, | |
| "eval_f1": 0.7065047406756789, | |
| "eval_loss": 0.8688169717788696, | |
| "eval_runtime": 329.7388, | |
| "eval_samples_per_second": 17.978, | |
| "eval_steps_per_second": 3.597, | |
| "eval_wer": 0.5752291644716047, | |
| "step": 13023 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "learning_rate": 0.00035399673735725943, | |
| "loss": 0.4246, | |
| "step": 13030 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "learning_rate": 0.0003538802144022372, | |
| "loss": 0.3049, | |
| "step": 13040 | |
| }, | |
| { | |
| "epoch": 3.01, | |
| "learning_rate": 0.00035376369144721507, | |
| "loss": 0.3106, | |
| "step": 13050 | |
| }, | |
| { | |
| "epoch": 3.01, | |
| "learning_rate": 0.000353647168492193, | |
| "loss": 0.3329, | |
| "step": 13060 | |
| }, | |
| { | |
| "epoch": 3.01, | |
| "learning_rate": 0.0003535306455371708, | |
| "loss": 0.481, | |
| "step": 13070 | |
| }, | |
| { | |
| "epoch": 3.01, | |
| "learning_rate": 0.00035341412258214867, | |
| "loss": 0.3728, | |
| "step": 13080 | |
| }, | |
| { | |
| "epoch": 3.02, | |
| "learning_rate": 0.0003532975996271265, | |
| "loss": 0.3044, | |
| "step": 13090 | |
| }, | |
| { | |
| "epoch": 3.02, | |
| "learning_rate": 0.0003531810766721044, | |
| "loss": 0.4434, | |
| "step": 13100 | |
| }, | |
| { | |
| "epoch": 3.02, | |
| "learning_rate": 0.00035306455371708227, | |
| "loss": 0.397, | |
| "step": 13110 | |
| }, | |
| { | |
| "epoch": 3.02, | |
| "learning_rate": 0.0003529480307620601, | |
| "loss": 0.4271, | |
| "step": 13120 | |
| }, | |
| { | |
| "epoch": 3.02, | |
| "learning_rate": 0.00035283150780703797, | |
| "loss": 0.3274, | |
| "step": 13130 | |
| }, | |
| { | |
| "epoch": 3.03, | |
| "learning_rate": 0.00035271498485201587, | |
| "loss": 0.3705, | |
| "step": 13140 | |
| }, | |
| { | |
| "epoch": 3.03, | |
| "learning_rate": 0.0003525984618969937, | |
| "loss": 0.3008, | |
| "step": 13150 | |
| }, | |
| { | |
| "epoch": 3.03, | |
| "learning_rate": 0.00035248193894197157, | |
| "loss": 0.3671, | |
| "step": 13160 | |
| }, | |
| { | |
| "epoch": 3.03, | |
| "learning_rate": 0.0003523654159869494, | |
| "loss": 0.3638, | |
| "step": 13170 | |
| }, | |
| { | |
| "epoch": 3.04, | |
| "learning_rate": 0.0003522488930319273, | |
| "loss": 0.2577, | |
| "step": 13180 | |
| }, | |
| { | |
| "epoch": 3.04, | |
| "learning_rate": 0.00035213237007690517, | |
| "loss": 0.3867, | |
| "step": 13190 | |
| }, | |
| { | |
| "epoch": 3.04, | |
| "learning_rate": 0.000352015847121883, | |
| "loss": 0.411, | |
| "step": 13200 | |
| }, | |
| { | |
| "epoch": 3.04, | |
| "learning_rate": 0.00035189932416686086, | |
| "loss": 0.3793, | |
| "step": 13210 | |
| }, | |
| { | |
| "epoch": 3.05, | |
| "learning_rate": 0.00035178280121183876, | |
| "loss": 0.3405, | |
| "step": 13220 | |
| }, | |
| { | |
| "epoch": 3.05, | |
| "learning_rate": 0.00035166627825681656, | |
| "loss": 0.3649, | |
| "step": 13230 | |
| }, | |
| { | |
| "epoch": 3.05, | |
| "learning_rate": 0.00035154975530179446, | |
| "loss": 0.3443, | |
| "step": 13240 | |
| }, | |
| { | |
| "epoch": 3.05, | |
| "learning_rate": 0.0003514332323467723, | |
| "loss": 0.467, | |
| "step": 13250 | |
| }, | |
| { | |
| "epoch": 3.05, | |
| "learning_rate": 0.0003513167093917502, | |
| "loss": 0.3444, | |
| "step": 13260 | |
| }, | |
| { | |
| "epoch": 3.06, | |
| "learning_rate": 0.000351200186436728, | |
| "loss": 0.3961, | |
| "step": 13270 | |
| }, | |
| { | |
| "epoch": 3.06, | |
| "learning_rate": 0.0003510836634817059, | |
| "loss": 0.3478, | |
| "step": 13280 | |
| }, | |
| { | |
| "epoch": 3.06, | |
| "learning_rate": 0.00035096714052668376, | |
| "loss": 0.3768, | |
| "step": 13290 | |
| }, | |
| { | |
| "epoch": 3.06, | |
| "learning_rate": 0.00035085061757166166, | |
| "loss": 0.3578, | |
| "step": 13300 | |
| }, | |
| { | |
| "epoch": 3.07, | |
| "learning_rate": 0.00035073409461663945, | |
| "loss": 0.3998, | |
| "step": 13310 | |
| }, | |
| { | |
| "epoch": 3.07, | |
| "learning_rate": 0.00035061757166161736, | |
| "loss": 0.4058, | |
| "step": 13320 | |
| }, | |
| { | |
| "epoch": 3.07, | |
| "learning_rate": 0.0003505010487065952, | |
| "loss": 0.3761, | |
| "step": 13330 | |
| }, | |
| { | |
| "epoch": 3.07, | |
| "learning_rate": 0.0003503845257515731, | |
| "loss": 0.3347, | |
| "step": 13340 | |
| }, | |
| { | |
| "epoch": 3.08, | |
| "learning_rate": 0.0003502680027965509, | |
| "loss": 0.3976, | |
| "step": 13350 | |
| }, | |
| { | |
| "epoch": 3.08, | |
| "learning_rate": 0.0003501514798415288, | |
| "loss": 0.3189, | |
| "step": 13360 | |
| }, | |
| { | |
| "epoch": 3.08, | |
| "learning_rate": 0.00035003495688650665, | |
| "loss": 0.302, | |
| "step": 13370 | |
| }, | |
| { | |
| "epoch": 3.08, | |
| "learning_rate": 0.00034991843393148456, | |
| "loss": 0.3786, | |
| "step": 13380 | |
| }, | |
| { | |
| "epoch": 3.08, | |
| "learning_rate": 0.00034980191097646235, | |
| "loss": 0.4283, | |
| "step": 13390 | |
| }, | |
| { | |
| "epoch": 3.09, | |
| "learning_rate": 0.00034968538802144025, | |
| "loss": 0.4397, | |
| "step": 13400 | |
| }, | |
| { | |
| "epoch": 3.09, | |
| "learning_rate": 0.0003495688650664181, | |
| "loss": 0.3887, | |
| "step": 13410 | |
| }, | |
| { | |
| "epoch": 3.09, | |
| "learning_rate": 0.00034945234211139595, | |
| "loss": 0.3179, | |
| "step": 13420 | |
| }, | |
| { | |
| "epoch": 3.09, | |
| "learning_rate": 0.0003493358191563738, | |
| "loss": 0.3351, | |
| "step": 13430 | |
| }, | |
| { | |
| "epoch": 3.1, | |
| "learning_rate": 0.0003492192962013517, | |
| "loss": 0.3515, | |
| "step": 13440 | |
| }, | |
| { | |
| "epoch": 3.1, | |
| "learning_rate": 0.00034910277324632955, | |
| "loss": 0.3272, | |
| "step": 13450 | |
| }, | |
| { | |
| "epoch": 3.1, | |
| "learning_rate": 0.0003489862502913074, | |
| "loss": 0.3358, | |
| "step": 13460 | |
| }, | |
| { | |
| "epoch": 3.1, | |
| "learning_rate": 0.00034886972733628525, | |
| "loss": 0.3854, | |
| "step": 13470 | |
| }, | |
| { | |
| "epoch": 3.11, | |
| "learning_rate": 0.00034875320438126315, | |
| "loss": 0.3919, | |
| "step": 13480 | |
| }, | |
| { | |
| "epoch": 3.11, | |
| "learning_rate": 0.000348636681426241, | |
| "loss": 0.2705, | |
| "step": 13490 | |
| }, | |
| { | |
| "epoch": 3.11, | |
| "learning_rate": 0.00034852015847121884, | |
| "loss": 0.3102, | |
| "step": 13500 | |
| }, | |
| { | |
| "epoch": 3.11, | |
| "learning_rate": 0.0003484036355161967, | |
| "loss": 0.5056, | |
| "step": 13510 | |
| }, | |
| { | |
| "epoch": 3.11, | |
| "learning_rate": 0.0003482871125611746, | |
| "loss": 0.3482, | |
| "step": 13520 | |
| }, | |
| { | |
| "epoch": 3.12, | |
| "learning_rate": 0.00034817058960615244, | |
| "loss": 0.3364, | |
| "step": 13530 | |
| }, | |
| { | |
| "epoch": 3.12, | |
| "learning_rate": 0.00034805406665113024, | |
| "loss": 0.328, | |
| "step": 13540 | |
| }, | |
| { | |
| "epoch": 3.12, | |
| "learning_rate": 0.00034793754369610814, | |
| "loss": 0.3484, | |
| "step": 13550 | |
| }, | |
| { | |
| "epoch": 3.12, | |
| "learning_rate": 0.000347821020741086, | |
| "loss": 0.3948, | |
| "step": 13560 | |
| }, | |
| { | |
| "epoch": 3.13, | |
| "learning_rate": 0.0003477044977860639, | |
| "loss": 0.3593, | |
| "step": 13570 | |
| }, | |
| { | |
| "epoch": 3.13, | |
| "learning_rate": 0.0003475879748310417, | |
| "loss": 0.3694, | |
| "step": 13580 | |
| }, | |
| { | |
| "epoch": 3.13, | |
| "learning_rate": 0.0003474714518760196, | |
| "loss": 0.3409, | |
| "step": 13590 | |
| }, | |
| { | |
| "epoch": 3.13, | |
| "learning_rate": 0.00034735492892099744, | |
| "loss": 0.4645, | |
| "step": 13600 | |
| }, | |
| { | |
| "epoch": 3.14, | |
| "learning_rate": 0.0003472384059659753, | |
| "loss": 0.3569, | |
| "step": 13610 | |
| }, | |
| { | |
| "epoch": 3.14, | |
| "learning_rate": 0.00034712188301095313, | |
| "loss": 0.3359, | |
| "step": 13620 | |
| }, | |
| { | |
| "epoch": 3.14, | |
| "learning_rate": 0.00034700536005593104, | |
| "loss": 0.3948, | |
| "step": 13630 | |
| }, | |
| { | |
| "epoch": 3.14, | |
| "learning_rate": 0.0003468888371009089, | |
| "loss": 0.3631, | |
| "step": 13640 | |
| }, | |
| { | |
| "epoch": 3.14, | |
| "learning_rate": 0.00034677231414588673, | |
| "loss": 0.3676, | |
| "step": 13650 | |
| }, | |
| { | |
| "epoch": 3.15, | |
| "learning_rate": 0.0003466557911908646, | |
| "loss": 0.3725, | |
| "step": 13660 | |
| }, | |
| { | |
| "epoch": 3.15, | |
| "learning_rate": 0.0003465392682358425, | |
| "loss": 0.3383, | |
| "step": 13670 | |
| }, | |
| { | |
| "epoch": 3.15, | |
| "learning_rate": 0.00034642274528082033, | |
| "loss": 0.4721, | |
| "step": 13680 | |
| }, | |
| { | |
| "epoch": 3.15, | |
| "learning_rate": 0.0003463062223257982, | |
| "loss": 0.3461, | |
| "step": 13690 | |
| }, | |
| { | |
| "epoch": 3.16, | |
| "learning_rate": 0.00034618969937077603, | |
| "loss": 0.3888, | |
| "step": 13700 | |
| }, | |
| { | |
| "epoch": 3.16, | |
| "learning_rate": 0.00034607317641575393, | |
| "loss": 0.3821, | |
| "step": 13710 | |
| }, | |
| { | |
| "epoch": 3.16, | |
| "learning_rate": 0.0003459566534607318, | |
| "loss": 0.3944, | |
| "step": 13720 | |
| }, | |
| { | |
| "epoch": 3.16, | |
| "learning_rate": 0.00034584013050570963, | |
| "loss": 0.41, | |
| "step": 13730 | |
| }, | |
| { | |
| "epoch": 3.16, | |
| "learning_rate": 0.0003457236075506875, | |
| "loss": 0.5004, | |
| "step": 13740 | |
| }, | |
| { | |
| "epoch": 3.17, | |
| "learning_rate": 0.0003456070845956654, | |
| "loss": 0.3916, | |
| "step": 13750 | |
| }, | |
| { | |
| "epoch": 3.17, | |
| "learning_rate": 0.0003454905616406432, | |
| "loss": 0.4201, | |
| "step": 13760 | |
| }, | |
| { | |
| "epoch": 3.17, | |
| "learning_rate": 0.0003453740386856211, | |
| "loss": 0.3571, | |
| "step": 13770 | |
| }, | |
| { | |
| "epoch": 3.17, | |
| "learning_rate": 0.0003452575157305989, | |
| "loss": 0.3769, | |
| "step": 13780 | |
| }, | |
| { | |
| "epoch": 3.18, | |
| "learning_rate": 0.0003451409927755768, | |
| "loss": 0.4201, | |
| "step": 13790 | |
| }, | |
| { | |
| "epoch": 3.18, | |
| "learning_rate": 0.0003450244698205546, | |
| "loss": 0.4101, | |
| "step": 13800 | |
| }, | |
| { | |
| "epoch": 3.18, | |
| "learning_rate": 0.0003449079468655325, | |
| "loss": 0.3391, | |
| "step": 13810 | |
| }, | |
| { | |
| "epoch": 3.18, | |
| "learning_rate": 0.00034479142391051037, | |
| "loss": 0.4024, | |
| "step": 13820 | |
| }, | |
| { | |
| "epoch": 3.19, | |
| "learning_rate": 0.0003446749009554883, | |
| "loss": 0.3296, | |
| "step": 13830 | |
| }, | |
| { | |
| "epoch": 3.19, | |
| "learning_rate": 0.00034455837800046607, | |
| "loss": 0.3834, | |
| "step": 13840 | |
| }, | |
| { | |
| "epoch": 3.19, | |
| "learning_rate": 0.00034444185504544397, | |
| "loss": 0.3787, | |
| "step": 13850 | |
| }, | |
| { | |
| "epoch": 3.19, | |
| "learning_rate": 0.0003443253320904218, | |
| "loss": 0.481, | |
| "step": 13860 | |
| }, | |
| { | |
| "epoch": 3.19, | |
| "learning_rate": 0.0003442088091353997, | |
| "loss": 0.3975, | |
| "step": 13870 | |
| }, | |
| { | |
| "epoch": 3.2, | |
| "learning_rate": 0.0003440922861803775, | |
| "loss": 0.368, | |
| "step": 13880 | |
| }, | |
| { | |
| "epoch": 3.2, | |
| "learning_rate": 0.0003439757632253554, | |
| "loss": 0.3682, | |
| "step": 13890 | |
| }, | |
| { | |
| "epoch": 3.2, | |
| "learning_rate": 0.00034385924027033327, | |
| "loss": 0.3515, | |
| "step": 13900 | |
| }, | |
| { | |
| "epoch": 3.2, | |
| "learning_rate": 0.00034374271731531117, | |
| "loss": 0.3828, | |
| "step": 13910 | |
| }, | |
| { | |
| "epoch": 3.21, | |
| "learning_rate": 0.00034362619436028896, | |
| "loss": 0.315, | |
| "step": 13920 | |
| }, | |
| { | |
| "epoch": 3.21, | |
| "learning_rate": 0.00034350967140526687, | |
| "loss": 0.385, | |
| "step": 13930 | |
| }, | |
| { | |
| "epoch": 3.21, | |
| "learning_rate": 0.0003433931484502447, | |
| "loss": 0.2604, | |
| "step": 13940 | |
| }, | |
| { | |
| "epoch": 3.21, | |
| "learning_rate": 0.00034327662549522256, | |
| "loss": 0.319, | |
| "step": 13950 | |
| }, | |
| { | |
| "epoch": 3.22, | |
| "learning_rate": 0.0003431601025402004, | |
| "loss": 0.3606, | |
| "step": 13960 | |
| }, | |
| { | |
| "epoch": 3.22, | |
| "learning_rate": 0.0003430435795851783, | |
| "loss": 0.3567, | |
| "step": 13970 | |
| }, | |
| { | |
| "epoch": 3.22, | |
| "learning_rate": 0.00034292705663015616, | |
| "loss": 0.4613, | |
| "step": 13980 | |
| }, | |
| { | |
| "epoch": 3.22, | |
| "learning_rate": 0.00034281053367513396, | |
| "loss": 0.385, | |
| "step": 13990 | |
| }, | |
| { | |
| "epoch": 3.22, | |
| "learning_rate": 0.00034269401072011186, | |
| "loss": 0.3596, | |
| "step": 14000 | |
| }, | |
| { | |
| "epoch": 3.23, | |
| "learning_rate": 0.0003425774877650897, | |
| "loss": 0.3266, | |
| "step": 14010 | |
| }, | |
| { | |
| "epoch": 3.23, | |
| "learning_rate": 0.0003424609648100676, | |
| "loss": 0.3339, | |
| "step": 14020 | |
| }, | |
| { | |
| "epoch": 3.23, | |
| "learning_rate": 0.0003423444418550454, | |
| "loss": 0.4464, | |
| "step": 14030 | |
| }, | |
| { | |
| "epoch": 3.23, | |
| "learning_rate": 0.0003422279189000233, | |
| "loss": 0.3633, | |
| "step": 14040 | |
| }, | |
| { | |
| "epoch": 3.24, | |
| "learning_rate": 0.00034211139594500116, | |
| "loss": 0.4304, | |
| "step": 14050 | |
| }, | |
| { | |
| "epoch": 3.24, | |
| "learning_rate": 0.00034199487298997906, | |
| "loss": 0.3784, | |
| "step": 14060 | |
| }, | |
| { | |
| "epoch": 3.24, | |
| "learning_rate": 0.00034187835003495685, | |
| "loss": 0.2993, | |
| "step": 14070 | |
| }, | |
| { | |
| "epoch": 3.24, | |
| "learning_rate": 0.00034176182707993476, | |
| "loss": 0.3642, | |
| "step": 14080 | |
| }, | |
| { | |
| "epoch": 3.25, | |
| "learning_rate": 0.0003416453041249126, | |
| "loss": 0.3952, | |
| "step": 14090 | |
| }, | |
| { | |
| "epoch": 3.25, | |
| "learning_rate": 0.0003415287811698905, | |
| "loss": 0.3625, | |
| "step": 14100 | |
| }, | |
| { | |
| "epoch": 3.25, | |
| "learning_rate": 0.0003414122582148683, | |
| "loss": 0.4257, | |
| "step": 14110 | |
| }, | |
| { | |
| "epoch": 3.25, | |
| "learning_rate": 0.0003412957352598462, | |
| "loss": 0.362, | |
| "step": 14120 | |
| }, | |
| { | |
| "epoch": 3.25, | |
| "learning_rate": 0.00034117921230482405, | |
| "loss": 0.3809, | |
| "step": 14130 | |
| }, | |
| { | |
| "epoch": 3.26, | |
| "learning_rate": 0.0003410626893498019, | |
| "loss": 0.4442, | |
| "step": 14140 | |
| }, | |
| { | |
| "epoch": 3.26, | |
| "learning_rate": 0.00034094616639477975, | |
| "loss": 0.3999, | |
| "step": 14150 | |
| }, | |
| { | |
| "epoch": 3.26, | |
| "learning_rate": 0.00034082964343975765, | |
| "loss": 0.3906, | |
| "step": 14160 | |
| }, | |
| { | |
| "epoch": 3.26, | |
| "learning_rate": 0.0003407131204847355, | |
| "loss": 0.3515, | |
| "step": 14170 | |
| }, | |
| { | |
| "epoch": 3.27, | |
| "learning_rate": 0.00034059659752971335, | |
| "loss": 0.3556, | |
| "step": 14180 | |
| }, | |
| { | |
| "epoch": 3.27, | |
| "learning_rate": 0.0003404800745746912, | |
| "loss": 0.3242, | |
| "step": 14190 | |
| }, | |
| { | |
| "epoch": 3.27, | |
| "learning_rate": 0.0003403635516196691, | |
| "loss": 0.4274, | |
| "step": 14200 | |
| }, | |
| { | |
| "epoch": 3.27, | |
| "learning_rate": 0.00034024702866464695, | |
| "loss": 0.4594, | |
| "step": 14210 | |
| }, | |
| { | |
| "epoch": 3.28, | |
| "learning_rate": 0.0003401305057096248, | |
| "loss": 0.4227, | |
| "step": 14220 | |
| }, | |
| { | |
| "epoch": 3.28, | |
| "learning_rate": 0.00034001398275460264, | |
| "loss": 0.358, | |
| "step": 14230 | |
| }, | |
| { | |
| "epoch": 3.28, | |
| "learning_rate": 0.00033989745979958055, | |
| "loss": 0.3896, | |
| "step": 14240 | |
| }, | |
| { | |
| "epoch": 3.28, | |
| "learning_rate": 0.0003397809368445584, | |
| "loss": 0.3688, | |
| "step": 14250 | |
| }, | |
| { | |
| "epoch": 3.28, | |
| "learning_rate": 0.00033966441388953624, | |
| "loss": 0.3999, | |
| "step": 14260 | |
| }, | |
| { | |
| "epoch": 3.29, | |
| "learning_rate": 0.0003395478909345141, | |
| "loss": 0.4164, | |
| "step": 14270 | |
| }, | |
| { | |
| "epoch": 3.29, | |
| "learning_rate": 0.000339431367979492, | |
| "loss": 0.43, | |
| "step": 14280 | |
| }, | |
| { | |
| "epoch": 3.29, | |
| "learning_rate": 0.00033931484502446984, | |
| "loss": 0.3852, | |
| "step": 14290 | |
| }, | |
| { | |
| "epoch": 3.29, | |
| "learning_rate": 0.0003391983220694477, | |
| "loss": 0.3611, | |
| "step": 14300 | |
| }, | |
| { | |
| "epoch": 3.3, | |
| "learning_rate": 0.00033908179911442554, | |
| "loss": 0.4676, | |
| "step": 14310 | |
| }, | |
| { | |
| "epoch": 3.3, | |
| "learning_rate": 0.00033896527615940344, | |
| "loss": 0.3762, | |
| "step": 14320 | |
| }, | |
| { | |
| "epoch": 3.3, | |
| "learning_rate": 0.00033884875320438124, | |
| "loss": 0.4117, | |
| "step": 14330 | |
| }, | |
| { | |
| "epoch": 3.3, | |
| "learning_rate": 0.00033873223024935914, | |
| "loss": 0.3538, | |
| "step": 14340 | |
| }, | |
| { | |
| "epoch": 3.31, | |
| "learning_rate": 0.000338615707294337, | |
| "loss": 0.4657, | |
| "step": 14350 | |
| }, | |
| { | |
| "epoch": 3.31, | |
| "learning_rate": 0.0003384991843393149, | |
| "loss": 0.352, | |
| "step": 14360 | |
| }, | |
| { | |
| "epoch": 3.31, | |
| "learning_rate": 0.0003383826613842927, | |
| "loss": 0.3408, | |
| "step": 14370 | |
| }, | |
| { | |
| "epoch": 3.31, | |
| "learning_rate": 0.0003382661384292706, | |
| "loss": 0.3885, | |
| "step": 14380 | |
| }, | |
| { | |
| "epoch": 3.31, | |
| "learning_rate": 0.00033814961547424843, | |
| "loss": 0.3607, | |
| "step": 14390 | |
| }, | |
| { | |
| "epoch": 3.32, | |
| "learning_rate": 0.00033803309251922634, | |
| "loss": 0.4017, | |
| "step": 14400 | |
| }, | |
| { | |
| "epoch": 3.32, | |
| "learning_rate": 0.00033791656956420413, | |
| "loss": 0.3497, | |
| "step": 14410 | |
| }, | |
| { | |
| "epoch": 3.32, | |
| "learning_rate": 0.00033780004660918203, | |
| "loss": 0.3996, | |
| "step": 14420 | |
| }, | |
| { | |
| "epoch": 3.32, | |
| "learning_rate": 0.0003376835236541599, | |
| "loss": 0.4579, | |
| "step": 14430 | |
| }, | |
| { | |
| "epoch": 3.33, | |
| "learning_rate": 0.0003375670006991378, | |
| "loss": 0.374, | |
| "step": 14440 | |
| }, | |
| { | |
| "epoch": 3.33, | |
| "learning_rate": 0.0003374504777441156, | |
| "loss": 0.3747, | |
| "step": 14450 | |
| }, | |
| { | |
| "epoch": 3.33, | |
| "learning_rate": 0.0003373339547890935, | |
| "loss": 0.4304, | |
| "step": 14460 | |
| }, | |
| { | |
| "epoch": 3.33, | |
| "learning_rate": 0.00033721743183407133, | |
| "loss": 0.4045, | |
| "step": 14470 | |
| }, | |
| { | |
| "epoch": 3.34, | |
| "learning_rate": 0.00033710090887904923, | |
| "loss": 0.3762, | |
| "step": 14480 | |
| }, | |
| { | |
| "epoch": 3.34, | |
| "learning_rate": 0.000336984385924027, | |
| "loss": 0.4466, | |
| "step": 14490 | |
| }, | |
| { | |
| "epoch": 3.34, | |
| "learning_rate": 0.0003368678629690049, | |
| "loss": 0.4148, | |
| "step": 14500 | |
| }, | |
| { | |
| "epoch": 3.34, | |
| "learning_rate": 0.0003367513400139828, | |
| "loss": 0.3531, | |
| "step": 14510 | |
| }, | |
| { | |
| "epoch": 3.34, | |
| "learning_rate": 0.00033663481705896057, | |
| "loss": 0.3318, | |
| "step": 14520 | |
| }, | |
| { | |
| "epoch": 3.35, | |
| "learning_rate": 0.0003365182941039385, | |
| "loss": 0.3931, | |
| "step": 14530 | |
| }, | |
| { | |
| "epoch": 3.35, | |
| "learning_rate": 0.0003364017711489163, | |
| "loss": 0.2862, | |
| "step": 14540 | |
| }, | |
| { | |
| "epoch": 3.35, | |
| "learning_rate": 0.0003362852481938942, | |
| "loss": 0.4828, | |
| "step": 14550 | |
| }, | |
| { | |
| "epoch": 3.35, | |
| "learning_rate": 0.000336168725238872, | |
| "loss": 0.3579, | |
| "step": 14560 | |
| }, | |
| { | |
| "epoch": 3.36, | |
| "learning_rate": 0.0003360522022838499, | |
| "loss": 0.346, | |
| "step": 14570 | |
| }, | |
| { | |
| "epoch": 3.36, | |
| "learning_rate": 0.00033593567932882777, | |
| "loss": 0.4179, | |
| "step": 14580 | |
| }, | |
| { | |
| "epoch": 3.36, | |
| "learning_rate": 0.0003358191563738057, | |
| "loss": 0.4223, | |
| "step": 14590 | |
| }, | |
| { | |
| "epoch": 3.36, | |
| "learning_rate": 0.00033570263341878347, | |
| "loss": 0.4155, | |
| "step": 14600 | |
| }, | |
| { | |
| "epoch": 3.37, | |
| "learning_rate": 0.00033558611046376137, | |
| "loss": 0.3151, | |
| "step": 14610 | |
| }, | |
| { | |
| "epoch": 3.37, | |
| "learning_rate": 0.0003354695875087392, | |
| "loss": 0.3775, | |
| "step": 14620 | |
| }, | |
| { | |
| "epoch": 3.37, | |
| "learning_rate": 0.0003353530645537171, | |
| "loss": 0.4089, | |
| "step": 14630 | |
| }, | |
| { | |
| "epoch": 3.37, | |
| "learning_rate": 0.0003352365415986949, | |
| "loss": 0.3432, | |
| "step": 14640 | |
| }, | |
| { | |
| "epoch": 3.37, | |
| "learning_rate": 0.0003351200186436728, | |
| "loss": 0.3481, | |
| "step": 14650 | |
| }, | |
| { | |
| "epoch": 3.38, | |
| "learning_rate": 0.00033500349568865067, | |
| "loss": 0.332, | |
| "step": 14660 | |
| }, | |
| { | |
| "epoch": 3.38, | |
| "learning_rate": 0.00033488697273362857, | |
| "loss": 0.3366, | |
| "step": 14670 | |
| }, | |
| { | |
| "epoch": 3.38, | |
| "learning_rate": 0.00033477044977860636, | |
| "loss": 0.5104, | |
| "step": 14680 | |
| }, | |
| { | |
| "epoch": 3.38, | |
| "learning_rate": 0.00033465392682358427, | |
| "loss": 0.4099, | |
| "step": 14690 | |
| }, | |
| { | |
| "epoch": 3.39, | |
| "learning_rate": 0.0003345374038685621, | |
| "loss": 0.3269, | |
| "step": 14700 | |
| }, | |
| { | |
| "epoch": 3.39, | |
| "learning_rate": 0.00033442088091353996, | |
| "loss": 0.3343, | |
| "step": 14710 | |
| }, | |
| { | |
| "epoch": 3.39, | |
| "learning_rate": 0.0003343043579585178, | |
| "loss": 0.4143, | |
| "step": 14720 | |
| }, | |
| { | |
| "epoch": 3.39, | |
| "learning_rate": 0.0003341878350034957, | |
| "loss": 0.3872, | |
| "step": 14730 | |
| }, | |
| { | |
| "epoch": 3.4, | |
| "learning_rate": 0.00033407131204847356, | |
| "loss": 0.3662, | |
| "step": 14740 | |
| }, | |
| { | |
| "epoch": 3.4, | |
| "learning_rate": 0.0003339547890934514, | |
| "loss": 0.3666, | |
| "step": 14750 | |
| }, | |
| { | |
| "epoch": 3.4, | |
| "learning_rate": 0.00033383826613842926, | |
| "loss": 0.4467, | |
| "step": 14760 | |
| }, | |
| { | |
| "epoch": 3.4, | |
| "learning_rate": 0.00033372174318340716, | |
| "loss": 0.4086, | |
| "step": 14770 | |
| }, | |
| { | |
| "epoch": 3.4, | |
| "learning_rate": 0.000333605220228385, | |
| "loss": 0.4165, | |
| "step": 14780 | |
| }, | |
| { | |
| "epoch": 3.41, | |
| "learning_rate": 0.00033348869727336286, | |
| "loss": 0.3612, | |
| "step": 14790 | |
| }, | |
| { | |
| "epoch": 3.41, | |
| "learning_rate": 0.0003333721743183407, | |
| "loss": 0.3583, | |
| "step": 14800 | |
| }, | |
| { | |
| "epoch": 3.41, | |
| "learning_rate": 0.0003332556513633186, | |
| "loss": 0.3849, | |
| "step": 14810 | |
| }, | |
| { | |
| "epoch": 3.41, | |
| "learning_rate": 0.00033313912840829646, | |
| "loss": 0.4263, | |
| "step": 14820 | |
| }, | |
| { | |
| "epoch": 3.42, | |
| "learning_rate": 0.0003330226054532743, | |
| "loss": 0.2838, | |
| "step": 14830 | |
| }, | |
| { | |
| "epoch": 3.42, | |
| "learning_rate": 0.00033290608249825215, | |
| "loss": 0.2699, | |
| "step": 14840 | |
| }, | |
| { | |
| "epoch": 3.42, | |
| "learning_rate": 0.00033278955954323006, | |
| "loss": 0.3844, | |
| "step": 14850 | |
| }, | |
| { | |
| "epoch": 3.42, | |
| "learning_rate": 0.0003326730365882079, | |
| "loss": 0.3721, | |
| "step": 14860 | |
| }, | |
| { | |
| "epoch": 3.43, | |
| "learning_rate": 0.00033255651363318575, | |
| "loss": 0.4008, | |
| "step": 14870 | |
| }, | |
| { | |
| "epoch": 3.43, | |
| "learning_rate": 0.0003324399906781636, | |
| "loss": 0.4276, | |
| "step": 14880 | |
| }, | |
| { | |
| "epoch": 3.43, | |
| "learning_rate": 0.0003323234677231415, | |
| "loss": 0.3757, | |
| "step": 14890 | |
| }, | |
| { | |
| "epoch": 3.43, | |
| "learning_rate": 0.0003322069447681193, | |
| "loss": 0.4514, | |
| "step": 14900 | |
| }, | |
| { | |
| "epoch": 3.43, | |
| "learning_rate": 0.0003320904218130972, | |
| "loss": 0.3552, | |
| "step": 14910 | |
| }, | |
| { | |
| "epoch": 3.44, | |
| "learning_rate": 0.00033197389885807505, | |
| "loss": 0.4417, | |
| "step": 14920 | |
| }, | |
| { | |
| "epoch": 3.44, | |
| "learning_rate": 0.00033185737590305295, | |
| "loss": 0.3657, | |
| "step": 14930 | |
| }, | |
| { | |
| "epoch": 3.44, | |
| "learning_rate": 0.00033174085294803075, | |
| "loss": 0.3678, | |
| "step": 14940 | |
| }, | |
| { | |
| "epoch": 3.44, | |
| "learning_rate": 0.00033162432999300865, | |
| "loss": 0.3587, | |
| "step": 14950 | |
| }, | |
| { | |
| "epoch": 3.45, | |
| "learning_rate": 0.0003315078070379865, | |
| "loss": 0.3822, | |
| "step": 14960 | |
| }, | |
| { | |
| "epoch": 3.45, | |
| "learning_rate": 0.0003313912840829644, | |
| "loss": 0.4112, | |
| "step": 14970 | |
| }, | |
| { | |
| "epoch": 3.45, | |
| "learning_rate": 0.0003312747611279422, | |
| "loss": 0.4425, | |
| "step": 14980 | |
| }, | |
| { | |
| "epoch": 3.45, | |
| "learning_rate": 0.00033115823817292004, | |
| "loss": 0.4285, | |
| "step": 14990 | |
| }, | |
| { | |
| "epoch": 3.46, | |
| "learning_rate": 0.00033104171521789794, | |
| "loss": 0.4583, | |
| "step": 15000 | |
| }, | |
| { | |
| "epoch": 3.46, | |
| "learning_rate": 0.0003309251922628758, | |
| "loss": 0.497, | |
| "step": 15010 | |
| }, | |
| { | |
| "epoch": 3.46, | |
| "learning_rate": 0.00033080866930785364, | |
| "loss": 0.3621, | |
| "step": 15020 | |
| }, | |
| { | |
| "epoch": 3.46, | |
| "learning_rate": 0.0003306921463528315, | |
| "loss": 0.3378, | |
| "step": 15030 | |
| }, | |
| { | |
| "epoch": 3.46, | |
| "learning_rate": 0.0003305756233978094, | |
| "loss": 0.3557, | |
| "step": 15040 | |
| }, | |
| { | |
| "epoch": 3.47, | |
| "learning_rate": 0.0003304591004427872, | |
| "loss": 0.3905, | |
| "step": 15050 | |
| }, | |
| { | |
| "epoch": 3.47, | |
| "learning_rate": 0.0003303425774877651, | |
| "loss": 0.417, | |
| "step": 15060 | |
| }, | |
| { | |
| "epoch": 3.47, | |
| "learning_rate": 0.00033022605453274294, | |
| "loss": 0.4327, | |
| "step": 15070 | |
| }, | |
| { | |
| "epoch": 3.47, | |
| "learning_rate": 0.00033010953157772084, | |
| "loss": 0.4692, | |
| "step": 15080 | |
| }, | |
| { | |
| "epoch": 3.48, | |
| "learning_rate": 0.00032999300862269863, | |
| "loss": 0.3902, | |
| "step": 15090 | |
| }, | |
| { | |
| "epoch": 3.48, | |
| "learning_rate": 0.00032987648566767654, | |
| "loss": 0.3913, | |
| "step": 15100 | |
| }, | |
| { | |
| "epoch": 3.48, | |
| "learning_rate": 0.0003297599627126544, | |
| "loss": 0.3976, | |
| "step": 15110 | |
| }, | |
| { | |
| "epoch": 3.48, | |
| "learning_rate": 0.0003296434397576323, | |
| "loss": 0.3878, | |
| "step": 15120 | |
| }, | |
| { | |
| "epoch": 3.49, | |
| "learning_rate": 0.0003295269168026101, | |
| "loss": 0.5335, | |
| "step": 15130 | |
| }, | |
| { | |
| "epoch": 3.49, | |
| "learning_rate": 0.000329410393847588, | |
| "loss": 0.3805, | |
| "step": 15140 | |
| }, | |
| { | |
| "epoch": 3.49, | |
| "learning_rate": 0.00032929387089256583, | |
| "loss": 0.4133, | |
| "step": 15150 | |
| }, | |
| { | |
| "epoch": 3.49, | |
| "learning_rate": 0.00032917734793754374, | |
| "loss": 0.3966, | |
| "step": 15160 | |
| }, | |
| { | |
| "epoch": 3.49, | |
| "learning_rate": 0.00032906082498252153, | |
| "loss": 0.3819, | |
| "step": 15170 | |
| }, | |
| { | |
| "epoch": 3.5, | |
| "learning_rate": 0.00032894430202749943, | |
| "loss": 0.3684, | |
| "step": 15180 | |
| }, | |
| { | |
| "epoch": 3.5, | |
| "learning_rate": 0.0003288277790724773, | |
| "loss": 0.3883, | |
| "step": 15190 | |
| }, | |
| { | |
| "epoch": 3.5, | |
| "learning_rate": 0.0003287112561174552, | |
| "loss": 0.3613, | |
| "step": 15200 | |
| }, | |
| { | |
| "epoch": 3.5, | |
| "learning_rate": 0.000328594733162433, | |
| "loss": 0.4092, | |
| "step": 15210 | |
| }, | |
| { | |
| "epoch": 3.51, | |
| "learning_rate": 0.0003284782102074109, | |
| "loss": 0.3864, | |
| "step": 15220 | |
| }, | |
| { | |
| "epoch": 3.51, | |
| "learning_rate": 0.00032836168725238873, | |
| "loss": 0.3888, | |
| "step": 15230 | |
| }, | |
| { | |
| "epoch": 3.51, | |
| "learning_rate": 0.0003282451642973666, | |
| "loss": 0.4062, | |
| "step": 15240 | |
| }, | |
| { | |
| "epoch": 3.51, | |
| "learning_rate": 0.0003281286413423444, | |
| "loss": 0.4343, | |
| "step": 15250 | |
| }, | |
| { | |
| "epoch": 3.52, | |
| "learning_rate": 0.00032801211838732233, | |
| "loss": 0.4947, | |
| "step": 15260 | |
| }, | |
| { | |
| "epoch": 3.52, | |
| "learning_rate": 0.0003278955954323002, | |
| "loss": 0.3297, | |
| "step": 15270 | |
| }, | |
| { | |
| "epoch": 3.52, | |
| "learning_rate": 0.000327779072477278, | |
| "loss": 0.3375, | |
| "step": 15280 | |
| }, | |
| { | |
| "epoch": 3.52, | |
| "learning_rate": 0.0003276625495222559, | |
| "loss": 0.3941, | |
| "step": 15290 | |
| }, | |
| { | |
| "epoch": 3.52, | |
| "learning_rate": 0.0003275460265672338, | |
| "loss": 0.4277, | |
| "step": 15300 | |
| }, | |
| { | |
| "epoch": 3.53, | |
| "learning_rate": 0.0003274295036122116, | |
| "loss": 0.426, | |
| "step": 15310 | |
| }, | |
| { | |
| "epoch": 3.53, | |
| "learning_rate": 0.00032731298065718947, | |
| "loss": 0.3812, | |
| "step": 15320 | |
| }, | |
| { | |
| "epoch": 3.53, | |
| "learning_rate": 0.0003271964577021673, | |
| "loss": 0.4415, | |
| "step": 15330 | |
| }, | |
| { | |
| "epoch": 3.53, | |
| "learning_rate": 0.0003270799347471452, | |
| "loss": 0.3589, | |
| "step": 15340 | |
| }, | |
| { | |
| "epoch": 3.54, | |
| "learning_rate": 0.00032696341179212307, | |
| "loss": 0.4216, | |
| "step": 15350 | |
| }, | |
| { | |
| "epoch": 3.54, | |
| "learning_rate": 0.0003268468888371009, | |
| "loss": 0.4259, | |
| "step": 15360 | |
| }, | |
| { | |
| "epoch": 3.54, | |
| "learning_rate": 0.00032673036588207877, | |
| "loss": 0.4129, | |
| "step": 15370 | |
| }, | |
| { | |
| "epoch": 3.54, | |
| "learning_rate": 0.00032661384292705667, | |
| "loss": 0.4317, | |
| "step": 15380 | |
| }, | |
| { | |
| "epoch": 3.55, | |
| "learning_rate": 0.0003264973199720345, | |
| "loss": 0.4966, | |
| "step": 15390 | |
| }, | |
| { | |
| "epoch": 3.55, | |
| "learning_rate": 0.00032638079701701237, | |
| "loss": 0.4386, | |
| "step": 15400 | |
| }, | |
| { | |
| "epoch": 3.55, | |
| "learning_rate": 0.0003262642740619902, | |
| "loss": 0.3636, | |
| "step": 15410 | |
| }, | |
| { | |
| "epoch": 3.55, | |
| "learning_rate": 0.0003261477511069681, | |
| "loss": 0.3286, | |
| "step": 15420 | |
| }, | |
| { | |
| "epoch": 3.55, | |
| "learning_rate": 0.0003260312281519459, | |
| "loss": 0.3619, | |
| "step": 15430 | |
| }, | |
| { | |
| "epoch": 3.56, | |
| "learning_rate": 0.0003259147051969238, | |
| "loss": 0.4448, | |
| "step": 15440 | |
| }, | |
| { | |
| "epoch": 3.56, | |
| "learning_rate": 0.00032579818224190166, | |
| "loss": 0.4407, | |
| "step": 15450 | |
| }, | |
| { | |
| "epoch": 3.56, | |
| "learning_rate": 0.0003256816592868795, | |
| "loss": 0.3583, | |
| "step": 15460 | |
| }, | |
| { | |
| "epoch": 3.56, | |
| "learning_rate": 0.00032556513633185736, | |
| "loss": 0.4122, | |
| "step": 15470 | |
| }, | |
| { | |
| "epoch": 3.57, | |
| "learning_rate": 0.0003254486133768352, | |
| "loss": 0.4129, | |
| "step": 15480 | |
| }, | |
| { | |
| "epoch": 3.57, | |
| "learning_rate": 0.0003253320904218131, | |
| "loss": 0.3352, | |
| "step": 15490 | |
| }, | |
| { | |
| "epoch": 3.57, | |
| "learning_rate": 0.00032521556746679096, | |
| "loss": 0.4393, | |
| "step": 15500 | |
| }, | |
| { | |
| "epoch": 3.57, | |
| "learning_rate": 0.0003250990445117688, | |
| "loss": 0.3783, | |
| "step": 15510 | |
| }, | |
| { | |
| "epoch": 3.58, | |
| "learning_rate": 0.00032498252155674666, | |
| "loss": 0.4093, | |
| "step": 15520 | |
| }, | |
| { | |
| "epoch": 3.58, | |
| "learning_rate": 0.00032486599860172456, | |
| "loss": 0.4444, | |
| "step": 15530 | |
| }, | |
| { | |
| "epoch": 3.58, | |
| "learning_rate": 0.0003247494756467024, | |
| "loss": 0.3857, | |
| "step": 15540 | |
| }, | |
| { | |
| "epoch": 3.58, | |
| "learning_rate": 0.00032463295269168026, | |
| "loss": 0.4728, | |
| "step": 15550 | |
| }, | |
| { | |
| "epoch": 3.58, | |
| "learning_rate": 0.0003245164297366581, | |
| "loss": 0.3692, | |
| "step": 15560 | |
| }, | |
| { | |
| "epoch": 3.59, | |
| "learning_rate": 0.000324399906781636, | |
| "loss": 0.3148, | |
| "step": 15570 | |
| }, | |
| { | |
| "epoch": 3.59, | |
| "learning_rate": 0.00032428338382661386, | |
| "loss": 0.3775, | |
| "step": 15580 | |
| }, | |
| { | |
| "epoch": 3.59, | |
| "learning_rate": 0.0003241668608715917, | |
| "loss": 0.4214, | |
| "step": 15590 | |
| }, | |
| { | |
| "epoch": 3.59, | |
| "learning_rate": 0.00032405033791656955, | |
| "loss": 0.3387, | |
| "step": 15600 | |
| }, | |
| { | |
| "epoch": 3.6, | |
| "learning_rate": 0.00032393381496154745, | |
| "loss": 0.4134, | |
| "step": 15610 | |
| }, | |
| { | |
| "epoch": 3.6, | |
| "learning_rate": 0.00032381729200652525, | |
| "loss": 0.3061, | |
| "step": 15620 | |
| }, | |
| { | |
| "epoch": 3.6, | |
| "learning_rate": 0.00032370076905150315, | |
| "loss": 0.4205, | |
| "step": 15630 | |
| }, | |
| { | |
| "epoch": 3.6, | |
| "learning_rate": 0.000323584246096481, | |
| "loss": 0.4003, | |
| "step": 15640 | |
| }, | |
| { | |
| "epoch": 3.6, | |
| "learning_rate": 0.0003234677231414589, | |
| "loss": 0.4012, | |
| "step": 15650 | |
| }, | |
| { | |
| "epoch": 3.61, | |
| "learning_rate": 0.0003233512001864367, | |
| "loss": 0.4849, | |
| "step": 15660 | |
| }, | |
| { | |
| "epoch": 3.61, | |
| "learning_rate": 0.0003232346772314146, | |
| "loss": 0.3454, | |
| "step": 15670 | |
| }, | |
| { | |
| "epoch": 3.61, | |
| "learning_rate": 0.00032311815427639245, | |
| "loss": 0.3914, | |
| "step": 15680 | |
| }, | |
| { | |
| "epoch": 3.61, | |
| "learning_rate": 0.00032300163132137035, | |
| "loss": 0.4868, | |
| "step": 15690 | |
| }, | |
| { | |
| "epoch": 3.62, | |
| "learning_rate": 0.00032288510836634814, | |
| "loss": 0.4, | |
| "step": 15700 | |
| }, | |
| { | |
| "epoch": 3.62, | |
| "learning_rate": 0.00032276858541132605, | |
| "loss": 0.4648, | |
| "step": 15710 | |
| }, | |
| { | |
| "epoch": 3.62, | |
| "learning_rate": 0.0003226520624563039, | |
| "loss": 0.3908, | |
| "step": 15720 | |
| }, | |
| { | |
| "epoch": 3.62, | |
| "learning_rate": 0.0003225355395012818, | |
| "loss": 0.51, | |
| "step": 15730 | |
| }, | |
| { | |
| "epoch": 3.63, | |
| "learning_rate": 0.0003224190165462596, | |
| "loss": 0.3576, | |
| "step": 15740 | |
| }, | |
| { | |
| "epoch": 3.63, | |
| "learning_rate": 0.0003223024935912375, | |
| "loss": 0.3894, | |
| "step": 15750 | |
| }, | |
| { | |
| "epoch": 3.63, | |
| "learning_rate": 0.00032218597063621534, | |
| "loss": 0.4271, | |
| "step": 15760 | |
| }, | |
| { | |
| "epoch": 3.63, | |
| "learning_rate": 0.00032206944768119325, | |
| "loss": 0.389, | |
| "step": 15770 | |
| }, | |
| { | |
| "epoch": 3.63, | |
| "learning_rate": 0.00032195292472617104, | |
| "loss": 0.4048, | |
| "step": 15780 | |
| }, | |
| { | |
| "epoch": 3.64, | |
| "learning_rate": 0.00032183640177114894, | |
| "loss": 0.3575, | |
| "step": 15790 | |
| }, | |
| { | |
| "epoch": 3.64, | |
| "learning_rate": 0.0003217198788161268, | |
| "loss": 0.4004, | |
| "step": 15800 | |
| }, | |
| { | |
| "epoch": 3.64, | |
| "learning_rate": 0.00032160335586110464, | |
| "loss": 0.3556, | |
| "step": 15810 | |
| }, | |
| { | |
| "epoch": 3.64, | |
| "learning_rate": 0.0003214868329060825, | |
| "loss": 0.3672, | |
| "step": 15820 | |
| }, | |
| { | |
| "epoch": 3.65, | |
| "learning_rate": 0.0003213703099510604, | |
| "loss": 0.3894, | |
| "step": 15830 | |
| }, | |
| { | |
| "epoch": 3.65, | |
| "learning_rate": 0.00032125378699603824, | |
| "loss": 0.3964, | |
| "step": 15840 | |
| }, | |
| { | |
| "epoch": 3.65, | |
| "learning_rate": 0.0003211372640410161, | |
| "loss": 0.4496, | |
| "step": 15850 | |
| }, | |
| { | |
| "epoch": 3.65, | |
| "learning_rate": 0.00032102074108599394, | |
| "loss": 0.3854, | |
| "step": 15860 | |
| }, | |
| { | |
| "epoch": 3.66, | |
| "learning_rate": 0.00032090421813097184, | |
| "loss": 0.428, | |
| "step": 15870 | |
| }, | |
| { | |
| "epoch": 3.66, | |
| "learning_rate": 0.0003207876951759497, | |
| "loss": 0.3453, | |
| "step": 15880 | |
| }, | |
| { | |
| "epoch": 3.66, | |
| "learning_rate": 0.00032067117222092753, | |
| "loss": 0.3847, | |
| "step": 15890 | |
| }, | |
| { | |
| "epoch": 3.66, | |
| "learning_rate": 0.0003205546492659054, | |
| "loss": 0.3499, | |
| "step": 15900 | |
| }, | |
| { | |
| "epoch": 3.66, | |
| "learning_rate": 0.0003204381263108833, | |
| "loss": 0.3786, | |
| "step": 15910 | |
| }, | |
| { | |
| "epoch": 3.67, | |
| "learning_rate": 0.00032032160335586113, | |
| "loss": 0.4293, | |
| "step": 15920 | |
| }, | |
| { | |
| "epoch": 3.67, | |
| "learning_rate": 0.00032020508040083893, | |
| "loss": 0.3711, | |
| "step": 15930 | |
| }, | |
| { | |
| "epoch": 3.67, | |
| "learning_rate": 0.00032008855744581683, | |
| "loss": 0.3793, | |
| "step": 15940 | |
| }, | |
| { | |
| "epoch": 3.67, | |
| "learning_rate": 0.0003199720344907947, | |
| "loss": 0.4998, | |
| "step": 15950 | |
| }, | |
| { | |
| "epoch": 3.68, | |
| "learning_rate": 0.0003198555115357726, | |
| "loss": 0.402, | |
| "step": 15960 | |
| }, | |
| { | |
| "epoch": 3.68, | |
| "learning_rate": 0.0003197389885807504, | |
| "loss": 0.3963, | |
| "step": 15970 | |
| }, | |
| { | |
| "epoch": 3.68, | |
| "learning_rate": 0.0003196224656257283, | |
| "loss": 0.336, | |
| "step": 15980 | |
| }, | |
| { | |
| "epoch": 3.68, | |
| "learning_rate": 0.00031950594267070613, | |
| "loss": 0.3906, | |
| "step": 15990 | |
| }, | |
| { | |
| "epoch": 3.69, | |
| "learning_rate": 0.000319389419715684, | |
| "loss": 0.4359, | |
| "step": 16000 | |
| }, | |
| { | |
| "epoch": 3.69, | |
| "learning_rate": 0.0003192728967606618, | |
| "loss": 0.4425, | |
| "step": 16010 | |
| }, | |
| { | |
| "epoch": 3.69, | |
| "learning_rate": 0.0003191563738056397, | |
| "loss": 0.3933, | |
| "step": 16020 | |
| }, | |
| { | |
| "epoch": 3.69, | |
| "learning_rate": 0.0003190398508506176, | |
| "loss": 0.3802, | |
| "step": 16030 | |
| }, | |
| { | |
| "epoch": 3.69, | |
| "learning_rate": 0.0003189233278955954, | |
| "loss": 0.4211, | |
| "step": 16040 | |
| }, | |
| { | |
| "epoch": 3.7, | |
| "learning_rate": 0.00031880680494057327, | |
| "loss": 0.4331, | |
| "step": 16050 | |
| }, | |
| { | |
| "epoch": 3.7, | |
| "learning_rate": 0.0003186902819855512, | |
| "loss": 0.4106, | |
| "step": 16060 | |
| }, | |
| { | |
| "epoch": 3.7, | |
| "learning_rate": 0.000318573759030529, | |
| "loss": 0.326, | |
| "step": 16070 | |
| }, | |
| { | |
| "epoch": 3.7, | |
| "learning_rate": 0.00031845723607550687, | |
| "loss": 0.444, | |
| "step": 16080 | |
| }, | |
| { | |
| "epoch": 3.71, | |
| "learning_rate": 0.0003183407131204847, | |
| "loss": 0.3604, | |
| "step": 16090 | |
| }, | |
| { | |
| "epoch": 3.71, | |
| "learning_rate": 0.0003182241901654626, | |
| "loss": 0.3256, | |
| "step": 16100 | |
| }, | |
| { | |
| "epoch": 3.71, | |
| "learning_rate": 0.00031810766721044047, | |
| "loss": 0.4363, | |
| "step": 16110 | |
| }, | |
| { | |
| "epoch": 3.71, | |
| "learning_rate": 0.0003179911442554183, | |
| "loss": 0.4663, | |
| "step": 16120 | |
| }, | |
| { | |
| "epoch": 3.72, | |
| "learning_rate": 0.00031787462130039617, | |
| "loss": 0.4167, | |
| "step": 16130 | |
| }, | |
| { | |
| "epoch": 3.72, | |
| "learning_rate": 0.00031775809834537407, | |
| "loss": 0.371, | |
| "step": 16140 | |
| }, | |
| { | |
| "epoch": 3.72, | |
| "learning_rate": 0.0003176415753903519, | |
| "loss": 0.4051, | |
| "step": 16150 | |
| }, | |
| { | |
| "epoch": 3.72, | |
| "learning_rate": 0.00031752505243532977, | |
| "loss": 0.4683, | |
| "step": 16160 | |
| }, | |
| { | |
| "epoch": 3.72, | |
| "learning_rate": 0.0003174085294803076, | |
| "loss": 0.4194, | |
| "step": 16170 | |
| }, | |
| { | |
| "epoch": 3.73, | |
| "learning_rate": 0.0003172920065252855, | |
| "loss": 0.3889, | |
| "step": 16180 | |
| }, | |
| { | |
| "epoch": 3.73, | |
| "learning_rate": 0.0003171754835702633, | |
| "loss": 0.3615, | |
| "step": 16190 | |
| }, | |
| { | |
| "epoch": 3.73, | |
| "learning_rate": 0.0003170589606152412, | |
| "loss": 0.4925, | |
| "step": 16200 | |
| }, | |
| { | |
| "epoch": 3.73, | |
| "learning_rate": 0.00031694243766021906, | |
| "loss": 0.3992, | |
| "step": 16210 | |
| }, | |
| { | |
| "epoch": 3.74, | |
| "learning_rate": 0.00031682591470519697, | |
| "loss": 0.459, | |
| "step": 16220 | |
| }, | |
| { | |
| "epoch": 3.74, | |
| "learning_rate": 0.00031670939175017476, | |
| "loss": 0.3699, | |
| "step": 16230 | |
| }, | |
| { | |
| "epoch": 3.74, | |
| "learning_rate": 0.00031659286879515266, | |
| "loss": 0.4306, | |
| "step": 16240 | |
| }, | |
| { | |
| "epoch": 3.74, | |
| "learning_rate": 0.0003164763458401305, | |
| "loss": 0.3863, | |
| "step": 16250 | |
| }, | |
| { | |
| "epoch": 3.75, | |
| "learning_rate": 0.0003163598228851084, | |
| "loss": 0.4378, | |
| "step": 16260 | |
| }, | |
| { | |
| "epoch": 3.75, | |
| "learning_rate": 0.0003162432999300862, | |
| "loss": 0.486, | |
| "step": 16270 | |
| }, | |
| { | |
| "epoch": 3.75, | |
| "learning_rate": 0.0003161267769750641, | |
| "loss": 0.3251, | |
| "step": 16280 | |
| }, | |
| { | |
| "epoch": 3.75, | |
| "learning_rate": 0.00031601025402004196, | |
| "loss": 0.3267, | |
| "step": 16290 | |
| }, | |
| { | |
| "epoch": 3.75, | |
| "learning_rate": 0.00031589373106501986, | |
| "loss": 0.3773, | |
| "step": 16300 | |
| }, | |
| { | |
| "epoch": 3.76, | |
| "learning_rate": 0.00031577720810999765, | |
| "loss": 0.3589, | |
| "step": 16310 | |
| }, | |
| { | |
| "epoch": 3.76, | |
| "learning_rate": 0.00031566068515497556, | |
| "loss": 0.4671, | |
| "step": 16320 | |
| }, | |
| { | |
| "epoch": 3.76, | |
| "learning_rate": 0.0003155441621999534, | |
| "loss": 0.4616, | |
| "step": 16330 | |
| }, | |
| { | |
| "epoch": 3.76, | |
| "learning_rate": 0.00031542763924493125, | |
| "loss": 0.3728, | |
| "step": 16340 | |
| }, | |
| { | |
| "epoch": 3.77, | |
| "learning_rate": 0.0003153111162899091, | |
| "loss": 0.4246, | |
| "step": 16350 | |
| }, | |
| { | |
| "epoch": 3.77, | |
| "learning_rate": 0.000315194593334887, | |
| "loss": 0.4186, | |
| "step": 16360 | |
| }, | |
| { | |
| "epoch": 3.77, | |
| "learning_rate": 0.00031507807037986485, | |
| "loss": 0.4021, | |
| "step": 16370 | |
| }, | |
| { | |
| "epoch": 3.77, | |
| "learning_rate": 0.0003149615474248427, | |
| "loss": 0.3895, | |
| "step": 16380 | |
| }, | |
| { | |
| "epoch": 3.78, | |
| "learning_rate": 0.00031484502446982055, | |
| "loss": 0.4319, | |
| "step": 16390 | |
| }, | |
| { | |
| "epoch": 3.78, | |
| "learning_rate": 0.00031472850151479845, | |
| "loss": 0.3498, | |
| "step": 16400 | |
| }, | |
| { | |
| "epoch": 3.78, | |
| "learning_rate": 0.0003146119785597763, | |
| "loss": 0.3789, | |
| "step": 16410 | |
| }, | |
| { | |
| "epoch": 3.78, | |
| "learning_rate": 0.0003144954556047541, | |
| "loss": 0.3468, | |
| "step": 16420 | |
| }, | |
| { | |
| "epoch": 3.78, | |
| "learning_rate": 0.000314378932649732, | |
| "loss": 0.4274, | |
| "step": 16430 | |
| }, | |
| { | |
| "epoch": 3.79, | |
| "learning_rate": 0.00031426240969470985, | |
| "loss": 0.4027, | |
| "step": 16440 | |
| }, | |
| { | |
| "epoch": 3.79, | |
| "learning_rate": 0.00031414588673968775, | |
| "loss": 0.444, | |
| "step": 16450 | |
| }, | |
| { | |
| "epoch": 3.79, | |
| "learning_rate": 0.00031402936378466554, | |
| "loss": 0.41, | |
| "step": 16460 | |
| }, | |
| { | |
| "epoch": 3.79, | |
| "learning_rate": 0.00031391284082964345, | |
| "loss": 0.4299, | |
| "step": 16470 | |
| }, | |
| { | |
| "epoch": 3.8, | |
| "learning_rate": 0.0003137963178746213, | |
| "loss": 0.4295, | |
| "step": 16480 | |
| }, | |
| { | |
| "epoch": 3.8, | |
| "learning_rate": 0.0003136797949195992, | |
| "loss": 0.4359, | |
| "step": 16490 | |
| }, | |
| { | |
| "epoch": 3.8, | |
| "learning_rate": 0.000313563271964577, | |
| "loss": 0.4861, | |
| "step": 16500 | |
| }, | |
| { | |
| "epoch": 3.8, | |
| "learning_rate": 0.0003134467490095549, | |
| "loss": 0.4234, | |
| "step": 16510 | |
| }, | |
| { | |
| "epoch": 3.81, | |
| "learning_rate": 0.00031333022605453274, | |
| "loss": 0.4391, | |
| "step": 16520 | |
| }, | |
| { | |
| "epoch": 3.81, | |
| "learning_rate": 0.0003132137030995106, | |
| "loss": 0.3835, | |
| "step": 16530 | |
| }, | |
| { | |
| "epoch": 3.81, | |
| "learning_rate": 0.00031309718014448844, | |
| "loss": 0.4576, | |
| "step": 16540 | |
| }, | |
| { | |
| "epoch": 3.81, | |
| "learning_rate": 0.00031298065718946634, | |
| "loss": 0.3758, | |
| "step": 16550 | |
| }, | |
| { | |
| "epoch": 3.81, | |
| "learning_rate": 0.0003128641342344442, | |
| "loss": 0.3515, | |
| "step": 16560 | |
| }, | |
| { | |
| "epoch": 3.82, | |
| "learning_rate": 0.00031274761127942204, | |
| "loss": 0.4279, | |
| "step": 16570 | |
| }, | |
| { | |
| "epoch": 3.82, | |
| "learning_rate": 0.0003126310883243999, | |
| "loss": 0.3725, | |
| "step": 16580 | |
| }, | |
| { | |
| "epoch": 3.82, | |
| "learning_rate": 0.0003125145653693778, | |
| "loss": 0.4366, | |
| "step": 16590 | |
| }, | |
| { | |
| "epoch": 3.82, | |
| "learning_rate": 0.00031239804241435564, | |
| "loss": 0.3677, | |
| "step": 16600 | |
| }, | |
| { | |
| "epoch": 3.83, | |
| "learning_rate": 0.0003122815194593335, | |
| "loss": 0.4184, | |
| "step": 16610 | |
| }, | |
| { | |
| "epoch": 3.83, | |
| "learning_rate": 0.00031216499650431133, | |
| "loss": 0.3896, | |
| "step": 16620 | |
| }, | |
| { | |
| "epoch": 3.83, | |
| "learning_rate": 0.00031204847354928924, | |
| "loss": 0.4163, | |
| "step": 16630 | |
| }, | |
| { | |
| "epoch": 3.83, | |
| "learning_rate": 0.0003119319505942671, | |
| "loss": 0.3889, | |
| "step": 16640 | |
| }, | |
| { | |
| "epoch": 3.84, | |
| "learning_rate": 0.00031181542763924493, | |
| "loss": 0.455, | |
| "step": 16650 | |
| }, | |
| { | |
| "epoch": 3.84, | |
| "learning_rate": 0.0003116989046842228, | |
| "loss": 0.3758, | |
| "step": 16660 | |
| }, | |
| { | |
| "epoch": 3.84, | |
| "learning_rate": 0.0003115823817292007, | |
| "loss": 0.3656, | |
| "step": 16670 | |
| }, | |
| { | |
| "epoch": 3.84, | |
| "learning_rate": 0.00031146585877417853, | |
| "loss": 0.3781, | |
| "step": 16680 | |
| }, | |
| { | |
| "epoch": 3.84, | |
| "learning_rate": 0.0003113493358191564, | |
| "loss": 0.3993, | |
| "step": 16690 | |
| }, | |
| { | |
| "epoch": 3.85, | |
| "learning_rate": 0.00031123281286413423, | |
| "loss": 0.4871, | |
| "step": 16700 | |
| }, | |
| { | |
| "epoch": 3.85, | |
| "learning_rate": 0.00031111628990911213, | |
| "loss": 0.4029, | |
| "step": 16710 | |
| }, | |
| { | |
| "epoch": 3.85, | |
| "learning_rate": 0.0003109997669540899, | |
| "loss": 0.3955, | |
| "step": 16720 | |
| }, | |
| { | |
| "epoch": 3.85, | |
| "learning_rate": 0.00031088324399906783, | |
| "loss": 0.4014, | |
| "step": 16730 | |
| }, | |
| { | |
| "epoch": 3.86, | |
| "learning_rate": 0.0003107667210440457, | |
| "loss": 0.4119, | |
| "step": 16740 | |
| }, | |
| { | |
| "epoch": 3.86, | |
| "learning_rate": 0.0003106501980890236, | |
| "loss": 0.4613, | |
| "step": 16750 | |
| }, | |
| { | |
| "epoch": 3.86, | |
| "learning_rate": 0.0003105336751340014, | |
| "loss": 0.3741, | |
| "step": 16760 | |
| }, | |
| { | |
| "epoch": 3.86, | |
| "learning_rate": 0.0003104171521789793, | |
| "loss": 0.4436, | |
| "step": 16770 | |
| }, | |
| { | |
| "epoch": 3.87, | |
| "learning_rate": 0.0003103006292239571, | |
| "loss": 0.4412, | |
| "step": 16780 | |
| }, | |
| { | |
| "epoch": 3.87, | |
| "learning_rate": 0.00031018410626893503, | |
| "loss": 0.4534, | |
| "step": 16790 | |
| }, | |
| { | |
| "epoch": 3.87, | |
| "learning_rate": 0.0003100675833139128, | |
| "loss": 0.4121, | |
| "step": 16800 | |
| }, | |
| { | |
| "epoch": 3.87, | |
| "learning_rate": 0.0003099510603588907, | |
| "loss": 0.4208, | |
| "step": 16810 | |
| }, | |
| { | |
| "epoch": 3.87, | |
| "learning_rate": 0.00030983453740386857, | |
| "loss": 0.4304, | |
| "step": 16820 | |
| }, | |
| { | |
| "epoch": 3.88, | |
| "learning_rate": 0.0003097180144488465, | |
| "loss": 0.3814, | |
| "step": 16830 | |
| }, | |
| { | |
| "epoch": 3.88, | |
| "learning_rate": 0.00030960149149382427, | |
| "loss": 0.4537, | |
| "step": 16840 | |
| }, | |
| { | |
| "epoch": 3.88, | |
| "learning_rate": 0.00030948496853880217, | |
| "loss": 0.3598, | |
| "step": 16850 | |
| }, | |
| { | |
| "epoch": 3.88, | |
| "learning_rate": 0.00030936844558378, | |
| "loss": 0.4689, | |
| "step": 16860 | |
| }, | |
| { | |
| "epoch": 3.89, | |
| "learning_rate": 0.0003092519226287579, | |
| "loss": 0.4239, | |
| "step": 16870 | |
| }, | |
| { | |
| "epoch": 3.89, | |
| "learning_rate": 0.0003091353996737357, | |
| "loss": 0.4838, | |
| "step": 16880 | |
| }, | |
| { | |
| "epoch": 3.89, | |
| "learning_rate": 0.0003090188767187136, | |
| "loss": 0.3399, | |
| "step": 16890 | |
| }, | |
| { | |
| "epoch": 3.89, | |
| "learning_rate": 0.00030890235376369147, | |
| "loss": 0.4228, | |
| "step": 16900 | |
| }, | |
| { | |
| "epoch": 3.9, | |
| "learning_rate": 0.00030878583080866926, | |
| "loss": 0.408, | |
| "step": 16910 | |
| }, | |
| { | |
| "epoch": 3.9, | |
| "learning_rate": 0.00030866930785364717, | |
| "loss": 0.3553, | |
| "step": 16920 | |
| }, | |
| { | |
| "epoch": 3.9, | |
| "learning_rate": 0.000308552784898625, | |
| "loss": 0.4471, | |
| "step": 16930 | |
| }, | |
| { | |
| "epoch": 3.9, | |
| "learning_rate": 0.0003084362619436029, | |
| "loss": 0.3707, | |
| "step": 16940 | |
| }, | |
| { | |
| "epoch": 3.9, | |
| "learning_rate": 0.0003083197389885807, | |
| "loss": 0.4226, | |
| "step": 16950 | |
| }, | |
| { | |
| "epoch": 3.91, | |
| "learning_rate": 0.0003082032160335586, | |
| "loss": 0.3847, | |
| "step": 16960 | |
| }, | |
| { | |
| "epoch": 3.91, | |
| "learning_rate": 0.00030808669307853646, | |
| "loss": 0.3488, | |
| "step": 16970 | |
| }, | |
| { | |
| "epoch": 3.91, | |
| "learning_rate": 0.00030797017012351436, | |
| "loss": 0.3581, | |
| "step": 16980 | |
| }, | |
| { | |
| "epoch": 3.91, | |
| "learning_rate": 0.00030785364716849216, | |
| "loss": 0.4423, | |
| "step": 16990 | |
| }, | |
| { | |
| "epoch": 3.92, | |
| "learning_rate": 0.00030773712421347006, | |
| "loss": 0.3793, | |
| "step": 17000 | |
| }, | |
| { | |
| "epoch": 3.92, | |
| "learning_rate": 0.0003076206012584479, | |
| "loss": 0.4583, | |
| "step": 17010 | |
| }, | |
| { | |
| "epoch": 3.92, | |
| "learning_rate": 0.0003075040783034258, | |
| "loss": 0.4894, | |
| "step": 17020 | |
| }, | |
| { | |
| "epoch": 3.92, | |
| "learning_rate": 0.0003073875553484036, | |
| "loss": 0.4853, | |
| "step": 17030 | |
| }, | |
| { | |
| "epoch": 3.93, | |
| "learning_rate": 0.0003072710323933815, | |
| "loss": 0.3569, | |
| "step": 17040 | |
| }, | |
| { | |
| "epoch": 3.93, | |
| "learning_rate": 0.00030715450943835936, | |
| "loss": 0.4572, | |
| "step": 17050 | |
| }, | |
| { | |
| "epoch": 3.93, | |
| "learning_rate": 0.00030703798648333726, | |
| "loss": 0.444, | |
| "step": 17060 | |
| }, | |
| { | |
| "epoch": 3.93, | |
| "learning_rate": 0.00030692146352831505, | |
| "loss": 0.4176, | |
| "step": 17070 | |
| }, | |
| { | |
| "epoch": 3.93, | |
| "learning_rate": 0.00030680494057329296, | |
| "loss": 0.4235, | |
| "step": 17080 | |
| }, | |
| { | |
| "epoch": 3.94, | |
| "learning_rate": 0.0003066884176182708, | |
| "loss": 0.4773, | |
| "step": 17090 | |
| }, | |
| { | |
| "epoch": 3.94, | |
| "learning_rate": 0.00030657189466324865, | |
| "loss": 0.416, | |
| "step": 17100 | |
| }, | |
| { | |
| "epoch": 3.94, | |
| "learning_rate": 0.0003064553717082265, | |
| "loss": 0.4587, | |
| "step": 17110 | |
| }, | |
| { | |
| "epoch": 3.94, | |
| "learning_rate": 0.0003063388487532044, | |
| "loss": 0.4066, | |
| "step": 17120 | |
| }, | |
| { | |
| "epoch": 3.95, | |
| "learning_rate": 0.00030622232579818225, | |
| "loss": 0.3455, | |
| "step": 17130 | |
| }, | |
| { | |
| "epoch": 3.95, | |
| "learning_rate": 0.0003061058028431601, | |
| "loss": 0.462, | |
| "step": 17140 | |
| }, | |
| { | |
| "epoch": 3.95, | |
| "learning_rate": 0.00030598927988813795, | |
| "loss": 0.4757, | |
| "step": 17150 | |
| }, | |
| { | |
| "epoch": 3.95, | |
| "learning_rate": 0.00030587275693311585, | |
| "loss": 0.3779, | |
| "step": 17160 | |
| }, | |
| { | |
| "epoch": 3.96, | |
| "learning_rate": 0.0003057562339780937, | |
| "loss": 0.3778, | |
| "step": 17170 | |
| }, | |
| { | |
| "epoch": 3.96, | |
| "learning_rate": 0.00030563971102307155, | |
| "loss": 0.4172, | |
| "step": 17180 | |
| }, | |
| { | |
| "epoch": 3.96, | |
| "learning_rate": 0.0003055231880680494, | |
| "loss": 0.5275, | |
| "step": 17190 | |
| }, | |
| { | |
| "epoch": 3.96, | |
| "learning_rate": 0.0003054066651130273, | |
| "loss": 0.4051, | |
| "step": 17200 | |
| }, | |
| { | |
| "epoch": 3.96, | |
| "learning_rate": 0.00030529014215800515, | |
| "loss": 0.3594, | |
| "step": 17210 | |
| }, | |
| { | |
| "epoch": 3.97, | |
| "learning_rate": 0.000305173619202983, | |
| "loss": 0.4589, | |
| "step": 17220 | |
| }, | |
| { | |
| "epoch": 3.97, | |
| "learning_rate": 0.00030505709624796084, | |
| "loss": 0.4427, | |
| "step": 17230 | |
| }, | |
| { | |
| "epoch": 3.97, | |
| "learning_rate": 0.00030494057329293875, | |
| "loss": 0.4177, | |
| "step": 17240 | |
| }, | |
| { | |
| "epoch": 3.97, | |
| "learning_rate": 0.0003048240503379166, | |
| "loss": 0.3853, | |
| "step": 17250 | |
| }, | |
| { | |
| "epoch": 3.98, | |
| "learning_rate": 0.00030470752738289444, | |
| "loss": 0.4029, | |
| "step": 17260 | |
| }, | |
| { | |
| "epoch": 3.98, | |
| "learning_rate": 0.0003045910044278723, | |
| "loss": 0.4892, | |
| "step": 17270 | |
| }, | |
| { | |
| "epoch": 3.98, | |
| "learning_rate": 0.0003044744814728502, | |
| "loss": 0.3959, | |
| "step": 17280 | |
| }, | |
| { | |
| "epoch": 3.98, | |
| "learning_rate": 0.000304357958517828, | |
| "loss": 0.4701, | |
| "step": 17290 | |
| }, | |
| { | |
| "epoch": 3.99, | |
| "learning_rate": 0.0003042414355628059, | |
| "loss": 0.366, | |
| "step": 17300 | |
| }, | |
| { | |
| "epoch": 3.99, | |
| "learning_rate": 0.00030412491260778374, | |
| "loss": 0.3826, | |
| "step": 17310 | |
| }, | |
| { | |
| "epoch": 3.99, | |
| "learning_rate": 0.00030400838965276164, | |
| "loss": 0.378, | |
| "step": 17320 | |
| }, | |
| { | |
| "epoch": 3.99, | |
| "learning_rate": 0.00030389186669773944, | |
| "loss": 0.4033, | |
| "step": 17330 | |
| }, | |
| { | |
| "epoch": 3.99, | |
| "learning_rate": 0.00030377534374271734, | |
| "loss": 0.4646, | |
| "step": 17340 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "learning_rate": 0.0003036588207876952, | |
| "loss": 0.4589, | |
| "step": 17350 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "learning_rate": 0.0003035422978326731, | |
| "loss": 0.5188, | |
| "step": 17360 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "eval_Bleu_1": 0.005368360936607346, | |
| "eval_Bleu_2": 2.1529258556147605e-11, | |
| "eval_Bleu_3": 3.921163253647881e-14, | |
| "eval_Bleu_4": 1.83228507309933e-15, | |
| "eval_ROUGE_L": 0.008549703525608982, | |
| "eval_cer": 0.4694332130837954, | |
| "eval_em": 0.5671390013495277, | |
| "eval_f1": 0.7247699680587338, | |
| "eval_loss": 0.8551194071769714, | |
| "eval_runtime": 321.1768, | |
| "eval_samples_per_second": 18.457, | |
| "eval_steps_per_second": 3.693, | |
| "eval_wer": 0.5537877989674428, | |
| "step": 17365 | |
| } | |
| ], | |
| "max_steps": 43410, | |
| "num_train_epochs": 10, | |
| "total_flos": 1.3589112280001126e+17, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
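
For reference, a minimal sketch (not part of the original checkpoint) of how a `trainer_state.json` like the one above can be inspected programmatically. It assumes the JSON has been saved to a file named `trainer_state.json` and relies only on field names visible in the log itself (`log_history`, `epoch`, `global_step`, `loss`, `eval_loss`, and friends); treat it as an illustration, not part of the Trainer API.

```python
# Minimal sketch: load a Hugging Face Trainer checkpoint state and summarize it.
# Assumption: the JSON above is saved locally as `trainer_state.json`.
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Training entries carry a "loss" key; evaluation entries carry "eval_*" keys.
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

print(f"epochs completed: {state['epoch']} of {state['num_train_epochs']}")
print(f"steps completed:  {state['global_step']} of {state['max_steps']}")

first, last = train_logs[0], train_logs[-1]
print(f"train loss: {first['loss']:.4f} (step {first['step']}) -> "
      f"{last['loss']:.4f} (step {last['step']})")

for e in eval_logs:
    print(f"eval @ step {e['step']}: loss={e['eval_loss']:.4f}, "
          f"f1={e.get('eval_f1', float('nan')):.4f}, "
          f"wer={e.get('eval_wer', float('nan')):.4f}")
```

Run against this checkpoint, the final lines would report 4.0 of 10 epochs, 17365 of 43410 steps, and the epoch-4 evaluation at step 17365 (eval_loss 0.8551, f1 0.7248, wer 0.5538), matching the last entry of `log_history` above.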