{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.997779422649889,
  "eval_steps": 100,
  "global_step": 337,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.014803849000740192,
      "grad_norm": 1.3950074179636944,
      "learning_rate": 2.9411764705882355e-06,
      "loss": 1.2359,
      "step": 5
    },
    {
      "epoch": 0.029607698001480384,
      "grad_norm": 1.0573019567596447,
      "learning_rate": 5.882352941176471e-06,
      "loss": 1.2574,
      "step": 10
    },
    {
      "epoch": 0.04441154700222058,
      "grad_norm": 0.8134201492153869,
      "learning_rate": 8.823529411764707e-06,
      "loss": 1.1927,
      "step": 15
    },
    {
      "epoch": 0.05921539600296077,
      "grad_norm": 0.7457973965678334,
      "learning_rate": 1.1764705882352942e-05,
      "loss": 1.1311,
      "step": 20
    },
    {
      "epoch": 0.07401924500370097,
      "grad_norm": 0.602302125271696,
      "learning_rate": 1.4705882352941179e-05,
      "loss": 1.0275,
      "step": 25
    },
    {
      "epoch": 0.08882309400444116,
      "grad_norm": 0.47180619385627864,
      "learning_rate": 1.7647058823529414e-05,
      "loss": 0.9946,
      "step": 30
    },
    {
      "epoch": 0.10362694300518134,
      "grad_norm": 0.41310193817382196,
      "learning_rate": 1.9999462497359468e-05,
      "loss": 0.9331,
      "step": 35
    },
    {
      "epoch": 0.11843079200592153,
      "grad_norm": 0.3508357190318314,
      "learning_rate": 1.9980655971335944e-05,
      "loss": 0.9348,
      "step": 40
    },
    {
      "epoch": 0.13323464100666174,
      "grad_norm": 0.3593486162056259,
      "learning_rate": 1.993503206718859e-05,
      "loss": 0.919,
      "step": 45
    },
    {
      "epoch": 0.14803849000740193,
      "grad_norm": 0.3434563600895919,
      "learning_rate": 1.986271337340182e-05,
      "loss": 0.8877,
      "step": 50
    },
    {
      "epoch": 0.16284233900814213,
      "grad_norm": 0.3218326592207504,
      "learning_rate": 1.976389420563607e-05,
      "loss": 0.8672,
      "step": 55
    },
    {
      "epoch": 0.17764618800888232,
      "grad_norm": 0.2794782066076086,
      "learning_rate": 1.9638840084614182e-05,
      "loss": 0.8711,
      "step": 60
    },
    {
      "epoch": 0.19245003700962252,
      "grad_norm": 0.3155862961217361,
      "learning_rate": 1.9487887022684336e-05,
      "loss": 0.864,
      "step": 65
    },
    {
      "epoch": 0.20725388601036268,
      "grad_norm": 0.29232837234969183,
      "learning_rate": 1.9311440620976597e-05,
      "loss": 0.871,
      "step": 70
    },
    {
      "epoch": 0.22205773501110287,
      "grad_norm": 0.31299212515860614,
      "learning_rate": 1.9109974979578852e-05,
      "loss": 0.8679,
      "step": 75
    },
    {
      "epoch": 0.23686158401184307,
      "grad_norm": 0.30813339743579415,
      "learning_rate": 1.8884031423660492e-05,
      "loss": 0.8548,
      "step": 80
    },
    {
      "epoch": 0.25166543301258326,
      "grad_norm": 0.2970048857341024,
      "learning_rate": 1.8634217048966638e-05,
      "loss": 0.8143,
      "step": 85
    },
    {
      "epoch": 0.2664692820133235,
      "grad_norm": 0.2595311766061262,
      "learning_rate": 1.836120309059107e-05,
      "loss": 0.806,
      "step": 90
    },
    {
      "epoch": 0.28127313101406365,
      "grad_norm": 0.29791231281719194,
      "learning_rate": 1.8065723119410885e-05,
      "loss": 0.837,
      "step": 95
    },
    {
      "epoch": 0.29607698001480387,
      "grad_norm": 0.28316143699318014,
      "learning_rate": 1.77485710710289e-05,
      "loss": 0.8442,
      "step": 100
    },
    {
      "epoch": 0.29607698001480387,
      "eval_loss": 0.8529201149940491,
      "eval_runtime": 3.2448,
      "eval_samples_per_second": 39.448,
      "eval_steps_per_second": 2.465,
      "step": 100
    },
    {
      "epoch": 0.31088082901554404,
      "grad_norm": 0.2678385675212173,
      "learning_rate": 1.741059911251997e-05,
      "loss": 0.8226,
      "step": 105
    },
    {
      "epoch": 0.32568467801628426,
      "grad_norm": 0.3055591219375268,
      "learning_rate": 1.7052715352713076e-05,
      "loss": 0.8186,
      "step": 110
    },
    {
      "epoch": 0.3404885270170244,
      "grad_norm": 0.2977107859630175,
      "learning_rate": 1.667588140216154e-05,
      "loss": 0.8115,
      "step": 115
    },
    {
      "epoch": 0.35529237601776464,
      "grad_norm": 0.26074783577907235,
      "learning_rate": 1.628110978935756e-05,
      "loss": 0.7988,
      "step": 120
    },
    {
      "epoch": 0.3700962250185048,
      "grad_norm": 0.24990141576473401,
      "learning_rate": 1.586946124013354e-05,
      "loss": 0.8193,
      "step": 125
    },
    {
      "epoch": 0.38490007401924503,
      "grad_norm": 0.24771464829165868,
      "learning_rate": 1.5442041827560274e-05,
      "loss": 0.8141,
      "step": 130
    },
    {
      "epoch": 0.3997039230199852,
      "grad_norm": 0.2594882847309642,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.8107,
      "step": 135
    },
    {
      "epoch": 0.41450777202072536,
      "grad_norm": 0.2834385801937742,
      "learning_rate": 1.4544523495299843e-05,
      "loss": 0.7919,
      "step": 140
    },
    {
      "epoch": 0.4293116210214656,
      "grad_norm": 0.2951291350448052,
      "learning_rate": 1.4076836149416889e-05,
      "loss": 0.7844,
      "step": 145
    },
    {
      "epoch": 0.44411547002220575,
      "grad_norm": 0.3081432942246415,
      "learning_rate": 1.3598194608050011e-05,
      "loss": 0.8105,
      "step": 150
    },
    {
      "epoch": 0.45891931902294597,
      "grad_norm": 0.26025402768326344,
      "learning_rate": 1.3109884950114007e-05,
      "loss": 0.7732,
      "step": 155
    },
    {
      "epoch": 0.47372316802368614,
      "grad_norm": 0.25691343184076304,
      "learning_rate": 1.2613219232128608e-05,
      "loss": 0.78,
      "step": 160
    },
    {
      "epoch": 0.48852701702442636,
      "grad_norm": 0.2905870458048638,
      "learning_rate": 1.2109531962807333e-05,
      "loss": 0.8027,
      "step": 165
    },
    {
      "epoch": 0.5033308660251665,
      "grad_norm": 0.25823010004732944,
      "learning_rate": 1.1600176517318742e-05,
      "loss": 0.7787,
      "step": 170
    },
    {
      "epoch": 0.5181347150259067,
      "grad_norm": 0.24959707699686362,
      "learning_rate": 1.1086521500854746e-05,
      "loss": 0.7649,
      "step": 175
    },
    {
      "epoch": 0.532938564026647,
      "grad_norm": 0.2568465717082724,
      "learning_rate": 1.0569947071276847e-05,
      "loss": 0.7867,
      "step": 180
    },
    {
      "epoch": 0.5477424130273871,
      "grad_norm": 0.25539944675036885,
      "learning_rate": 1.0051841230721065e-05,
      "loss": 0.8048,
      "step": 185
    },
    {
      "epoch": 0.5625462620281273,
      "grad_norm": 0.25082420610560624,
      "learning_rate": 9.533596096125826e-06,
      "loss": 0.7858,
      "step": 190
    },
    {
      "epoch": 0.5773501110288675,
      "grad_norm": 0.3003109255547984,
      "learning_rate": 9.016604158703654e-06,
      "loss": 0.7722,
      "step": 195
    },
    {
      "epoch": 0.5921539600296077,
      "grad_norm": 0.2393473681876409,
      "learning_rate": 8.502254542407186e-06,
      "loss": 0.7738,
      "step": 200
    },
    {
      "epoch": 0.5921539600296077,
      "eval_loss": 0.8121846914291382,
      "eval_runtime": 3.2502,
      "eval_samples_per_second": 39.382,
      "eval_steps_per_second": 2.461,
      "step": 200
    },
    {
      "epoch": 0.6069578090303479,
      "grad_norm": 0.23737158381630177,
      "learning_rate": 7.991929271442817e-06,
      "loss": 0.7891,
      "step": 205
    },
    {
      "epoch": 0.6217616580310881,
      "grad_norm": 0.25608546023616924,
      "learning_rate": 7.48699955686089e-06,
      "loss": 0.7728,
      "step": 210
    },
    {
      "epoch": 0.6365655070318282,
      "grad_norm": 0.25350011842005876,
      "learning_rate": 6.988822112200157e-06,
      "loss": 0.752,
      "step": 215
    },
    {
      "epoch": 0.6513693560325685,
      "grad_norm": 0.32545657590169597,
      "learning_rate": 6.498735508086094e-06,
      "loss": 0.7913,
      "step": 220
    },
    {
      "epoch": 0.6661732050333087,
      "grad_norm": 0.23799122178664417,
      "learning_rate": 6.018056575578075e-06,
      "loss": 0.7833,
      "step": 225
    },
    {
      "epoch": 0.6809770540340488,
      "grad_norm": 0.24439391189843535,
      "learning_rate": 5.548076867929331e-06,
      "loss": 0.772,
      "step": 230
    },
    {
      "epoch": 0.695780903034789,
      "grad_norm": 0.25813071245227237,
      "learning_rate": 5.090059190266779e-06,
      "loss": 0.789,
      "step": 235
    },
    {
      "epoch": 0.7105847520355293,
      "grad_norm": 0.23513022985104692,
      "learning_rate": 4.645234206515171e-06,
      "loss": 0.781,
      "step": 240
    },
    {
      "epoch": 0.7253886010362695,
      "grad_norm": 0.24335351331930313,
      "learning_rate": 4.214797132682597e-06,
      "loss": 0.7585,
      "step": 245
    },
    {
      "epoch": 0.7401924500370096,
      "grad_norm": 0.25144488515437197,
      "learning_rate": 3.799904525392251e-06,
      "loss": 0.759,
      "step": 250
    },
    {
      "epoch": 0.7549962990377498,
      "grad_norm": 0.2553299909901759,
      "learning_rate": 3.401671174289469e-06,
      "loss": 0.7677,
      "step": 255
    },
    {
      "epoch": 0.7698001480384901,
      "grad_norm": 0.2720348781912768,
      "learning_rate": 3.021167106673928e-06,
      "loss": 0.7933,
      "step": 260
    },
    {
      "epoch": 0.7846039970392302,
      "grad_norm": 0.243515575079835,
      "learning_rate": 2.6594147124053983e-06,
      "loss": 0.7827,
      "step": 265
    },
    {
      "epoch": 0.7994078460399704,
      "grad_norm": 0.22351235710926948,
      "learning_rate": 2.317385996808195e-06,
      "loss": 0.7829,
      "step": 270
    },
    {
      "epoch": 0.8142116950407106,
      "grad_norm": 0.26860530049876336,
      "learning_rate": 1.9959999689556407e-06,
      "loss": 0.7886,
      "step": 275
    },
    {
      "epoch": 0.8290155440414507,
      "grad_norm": 0.26582382708757263,
      "learning_rate": 1.6961201723520248e-06,
      "loss": 0.7713,
      "step": 280
    },
    {
      "epoch": 0.843819393042191,
      "grad_norm": 0.2640598065421766,
      "learning_rate": 1.4185523646469822e-06,
      "loss": 0.7691,
      "step": 285
    },
    {
      "epoch": 0.8586232420429312,
      "grad_norm": 0.22463816665917336,
      "learning_rate": 1.1640423526166987e-06,
      "loss": 0.7847,
      "step": 290
    },
    {
      "epoch": 0.8734270910436713,
      "grad_norm": 0.23730897461564165,
      "learning_rate": 9.332739882292752e-07,
      "loss": 0.7785,
      "step": 295
    },
    {
      "epoch": 0.8882309400444115,
      "grad_norm": 0.2722461729619355,
      "learning_rate": 7.268673311786378e-07,
      "loss": 0.7576,
      "step": 300
    },
    {
      "epoch": 0.8882309400444115,
      "eval_loss": 0.802749514579773,
      "eval_runtime": 3.2538,
      "eval_samples_per_second": 39.338,
      "eval_steps_per_second": 2.459,
      "step": 300
    },
    {
      "epoch": 0.9030347890451518,
      "grad_norm": 0.32408993797811003,
      "learning_rate": 5.453769828241872e-07,
      "loss": 0.7771,
      "step": 305
    },
    {
      "epoch": 0.9178386380458919,
      "grad_norm": 0.22290080273838486,
      "learning_rate": 3.8929059601275463e-07,
      "loss": 0.7812,
      "step": 310
    },
    {
      "epoch": 0.9326424870466321,
      "grad_norm": 0.22565671364678455,
      "learning_rate": 2.5902756478688674e-07,
      "loss": 0.7733,
      "step": 315
    },
    {
      "epoch": 0.9474463360473723,
      "grad_norm": 0.25949029649149674,
      "learning_rate": 1.5493789750014032e-07,
      "loss": 0.7747,
      "step": 320
    },
    {
      "epoch": 0.9622501850481125,
      "grad_norm": 0.2389112376152442,
      "learning_rate": 7.730127636723539e-08,
      "loss": 0.7262,
      "step": 325
    },
    {
      "epoch": 0.9770540340488527,
      "grad_norm": 0.24281049666379864,
      "learning_rate": 2.6326305976001054e-08,
      "loss": 0.7744,
      "step": 330
    },
    {
      "epoch": 0.9918578830495929,
      "grad_norm": 0.25524056642719295,
      "learning_rate": 2.149952780321485e-09,
      "loss": 0.782,
      "step": 335
    },
    {
      "epoch": 0.997779422649889,
      "step": 337,
      "total_flos": 76745898196992.0,
      "train_loss": 0.832761281675684,
      "train_runtime": 2027.1781,
      "train_samples_per_second": 10.658,
      "train_steps_per_second": 0.166
    }
  ],
  "logging_steps": 5,
  "max_steps": 337,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": false,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 76745898196992.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}