Linksome committed
Commit b007fb3 · verified · 1 Parent(s): 185eb10

Upload folder using huggingface_hub

Files changed (39)
  1. .gitattributes +2 -0
  2. .ipynb_checkpoints/README-checkpoint.md +63 -0
  3. README.md +63 -0
  4. adapter_config.json +42 -0
  5. adapter_model.safetensors +3 -0
  6. all_results.json +9 -0
  7. chat_template.jinja +4 -0
  8. checkpoint-260/README.md +208 -0
  9. checkpoint-260/adapter_config.json +42 -0
  10. checkpoint-260/adapter_model.safetensors +3 -0
  11. checkpoint-260/chat_template.jinja +4 -0
  12. checkpoint-260/global_step260/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt +3 -0
  13. checkpoint-260/global_step260/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt +3 -0
  14. checkpoint-260/global_step260/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt +3 -0
  15. checkpoint-260/global_step260/bf16_zero_pp_rank_3_mp_rank_00_optim_states.pt +3 -0
  16. checkpoint-260/global_step260/zero_pp_rank_0_mp_rank_00_model_states.pt +3 -0
  17. checkpoint-260/global_step260/zero_pp_rank_1_mp_rank_00_model_states.pt +3 -0
  18. checkpoint-260/global_step260/zero_pp_rank_2_mp_rank_00_model_states.pt +3 -0
  19. checkpoint-260/global_step260/zero_pp_rank_3_mp_rank_00_model_states.pt +3 -0
  20. checkpoint-260/latest +1 -0
  21. checkpoint-260/rng_state_0.pth +3 -0
  22. checkpoint-260/rng_state_1.pth +3 -0
  23. checkpoint-260/rng_state_2.pth +3 -0
  24. checkpoint-260/rng_state_3.pth +3 -0
  25. checkpoint-260/scheduler.pt +3 -0
  26. checkpoint-260/special_tokens_map.json +17 -0
  27. checkpoint-260/tokenizer.json +3 -0
  28. checkpoint-260/tokenizer_config.json +2065 -0
  29. checkpoint-260/trainer_state.json +2634 -0
  30. checkpoint-260/training_args.bin +3 -0
  31. checkpoint-260/zero_to_fp32.py +760 -0
  32. special_tokens_map.json +17 -0
  33. tokenizer.json +3 -0
  34. tokenizer_config.json +2065 -0
  35. train_results.json +9 -0
  36. trainer_log.jsonl +273 -0
  37. trainer_state.json +2644 -0
  38. training_args.bin +3 -0
  39. training_loss.png +0 -0
.gitattributes CHANGED
@@ -33,3 +33,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ checkpoint-260/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
.ipynb_checkpoints/README-checkpoint.md ADDED
@@ -0,0 +1,63 @@
+ ---
+ library_name: peft
+ license: other
+ base_model: meta-llama/Llama-3.1-70B
+ tags:
+ - base_model:adapter:/workspace/meta-llama/Llama-3.1-70B
+ - llama-factory
+ - lora
+ - transformers
+ pipeline_tag: text-generation
+ model-index:
+ - name: IEEE-DLD-llama3_70b_LoRA
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # IEEE-DLD-llama3_70b_LoRA
+
+ This model is a fine-tuned version of [/workspace/meta-llama/Llama-3.1-70B](https://huggingface.co//workspace/meta-llama/Llama-3.1-70B) on the Abbey_College_London_DLD_all_rephrased dataset.
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 0.0001
+ - train_batch_size: 24
+ - eval_batch_size: 8
+ - seed: 42
+ - distributed_type: multi-GPU
+ - num_devices: 4
+ - gradient_accumulation_steps: 4
+ - total_train_batch_size: 384
+ - total_eval_batch_size: 32
+ - optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
+ - lr_scheduler_type: cosine
+ - num_epochs: 5.0
+
+ ### Training results
+
+
+
+ ### Framework versions
+
+ - PEFT 0.17.1
+ - Transformers 4.56.2
+ - Pytorch 2.8.0+cu128
+ - Datasets 4.0.0
+ - Tokenizers 0.22.1
README.md ADDED
@@ -0,0 +1,63 @@
+ ---
+ library_name: peft
+ license: other
+ base_model: meta-llama/Llama-3.1-70B
+ tags:
+ - base_model:adapter:/workspace/meta-llama/Llama-3.1-70B
+ - llama-factory
+ - lora
+ - transformers
+ pipeline_tag: text-generation
+ model-index:
+ - name: IEEE-DLD-llama3_70b_LoRA
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # IEEE-DLD-llama3_70b_LoRA
+
+ This model is a fine-tuned version of [/workspace/meta-llama/Llama-3.1-70B](https://huggingface.co//workspace/meta-llama/Llama-3.1-70B) on the Abbey_College_London_DLD_all_rephrased dataset.
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 0.0001
+ - train_batch_size: 24
+ - eval_batch_size: 8
+ - seed: 42
+ - distributed_type: multi-GPU
+ - num_devices: 4
+ - gradient_accumulation_steps: 4
+ - total_train_batch_size: 384
+ - total_eval_batch_size: 32
+ - optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
+ - lr_scheduler_type: cosine
+ - num_epochs: 5.0
+
+ ### Training results
+
+
+
+ ### Framework versions
+
+ - PEFT 0.17.1
+ - Transformers 4.56.2
+ - Pytorch 2.8.0+cu128
+ - Datasets 4.0.0
+ - Tokenizers 0.22.1
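Since the card's usage sections are still template placeholders, a minimal loading sketch may help. It assumes the adapter is fetched from this repository (the `Linksome/IEEE-DLD-llama3_70b_LoRA` id below is a placeholder) and that prompts follow the Human:/Assistant: format in chat_template.jinja below. Note the effective batch size in the hyperparameters follows from 24 per device × 4 devices × 4 accumulation steps = 384.

```python
# Minimal sketch, not the author's documented workflow: attach the committed
# LoRA adapter to the Llama-3.1-70B base with PEFT and run one generation.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base_id = "meta-llama/Llama-3.1-70B"               # base model from the card metadata
adapter_id = "Linksome/IEEE-DLD-llama3_70b_LoRA"   # placeholder id for this repository

tokenizer = AutoTokenizer.from_pretrained(base_id)
model = AutoModelForCausalLM.from_pretrained(
    base_id, torch_dtype=torch.bfloat16, device_map="auto"
)
model = PeftModel.from_pretrained(model, adapter_id)  # loads adapter_model.safetensors

# Prompt format mirrors the committed chat template.
prompt = "Human: Tell me about the college.<|end_of_text|>\nAssistant:"
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
out = model.generate(**inputs, max_new_tokens=128)
print(tokenizer.decode(out[0][inputs["input_ids"].shape[1]:], skip_special_tokens=True))
```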
adapter_config.json ADDED
@@ -0,0 +1,42 @@
+ {
+   "alpha_pattern": {},
+   "auto_mapping": null,
+   "base_model_name_or_path": "/workspace/meta-llama/Llama-3.1-70B",
+   "bias": "none",
+   "corda_config": null,
+   "eva_config": null,
+   "exclude_modules": null,
+   "fan_in_fan_out": false,
+   "inference_mode": true,
+   "init_lora_weights": true,
+   "layer_replication": null,
+   "layers_pattern": null,
+   "layers_to_transform": null,
+   "loftq_config": {},
+   "lora_alpha": 32,
+   "lora_bias": false,
+   "lora_dropout": 0,
+   "megatron_config": null,
+   "megatron_core": "megatron.core",
+   "modules_to_save": null,
+   "peft_type": "LORA",
+   "qalora_group_size": 16,
+   "r": 16,
+   "rank_pattern": {},
+   "revision": null,
+   "target_modules": [
+     "down_proj",
+     "up_proj",
+     "k_proj",
+     "o_proj",
+     "q_proj",
+     "v_proj",
+     "gate_proj"
+   ],
+   "target_parameters": null,
+   "task_type": "CAUSAL_LM",
+   "trainable_token_indices": null,
+   "use_dora": false,
+   "use_qalora": false,
+   "use_rslora": false
+ }
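For reference, the committed adapter_config.json corresponds to roughly this PEFT construction (a sketch; only the non-default fields above are spelled out):

```python
# Sketch of a LoraConfig that serializes to (approximately) the JSON above.
from peft import LoraConfig

lora_config = LoraConfig(
    r=16,              # LoRA rank ("r" above)
    lora_alpha=32,     # effective update scale = lora_alpha / r = 2.0
    lora_dropout=0.0,
    bias="none",
    task_type="CAUSAL_LM",
    target_modules=[   # every attention and MLP projection in the decoder
        "q_proj", "k_proj", "v_proj", "o_proj",
        "gate_proj", "up_proj", "down_proj",
    ],
)
```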
adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:706c0dc63a0f2c4291b7037a8a2d8bbc3032605bb3567a4bcae96db169a8eb59
+ size 414339864
all_results.json ADDED
@@ -0,0 +1,9 @@
+ {
+   "epoch": 5.0,
+   "num_input_tokens_seen": 100663200,
+   "total_flos": 2377568804143104.0,
+   "train_loss": 0.697018449810835,
+   "train_runtime": 22008.2973,
+   "train_samples_per_second": 4.45,
+   "train_steps_per_second": 0.012
+ }
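A quick consistency check on these numbers (derived values are approximate; everything is computed from the fields above):

```python
# Rough throughput arithmetic from all_results.json; nothing here is measured
# beyond what the file already reports.
runtime_s = 22008.2973
tokens_seen = 100_663_200
samples_per_s = 4.45

print(tokens_seen / runtime_s)      # ~4574 tokens/s across the 4-GPU run
print(samples_per_s * runtime_s)    # ~97,937 samples processed over 5 epochs
print(runtime_s / 3600)             # ~6.1 hours of training
```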
chat_template.jinja ADDED
@@ -0,0 +1,4 @@
+ {% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% else %}{% set loop_messages = messages %}{% endif %}{% if system_message is defined %}{{ 'System: ' + system_message + '<|end_of_text|>' + '
+ ' }}{% endif %}{% for message in loop_messages %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ 'Human: ' + content + '<|end_of_text|>' + '
+ Assistant:' }}{% elif message['role'] == 'assistant' %}{{ content + '<|end_of_text|>' + '
+ ' }}{% endif %}{% endfor %}
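The template renders System/Human/Assistant turns separated by <|end_of_text|>. A short sketch of applying it through the tokenizer helper (the repo id is a placeholder, assuming the tokenizer is loaded from this repository together with chat_template.jinja):

```python
# Sketch: render the committed chat template via the tokenizer helper.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("Linksome/IEEE-DLD-llama3_70b_LoRA")  # placeholder id
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]
text = tok.apply_chat_template(messages, tokenize=False)
print(text)
# Expected shape per the template:
# System: You are a helpful assistant.<|end_of_text|>
# Human: Hello!<|end_of_text|>
# Assistant:
```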
checkpoint-260/README.md ADDED
@@ -0,0 +1,208 @@
+ ---
+ base_model: /workspace/meta-llama/Llama-3.1-70B
+ library_name: peft
+ pipeline_tag: text-generation
+ tags:
+ - base_model:adapter:/workspace/meta-llama/Llama-3.1-70B
+ - llama-factory
+ - lora
+ - transformers
+ ---
+
+ # Model Card for Model ID
+
+ <!-- Provide a quick summary of what the model is/does. -->
+
+
+
+ ## Model Details
+
+ ### Model Description
+
+ <!-- Provide a longer summary of what this model is. -->
+
+
+
+ - **Developed by:** [More Information Needed]
+ - **Funded by [optional]:** [More Information Needed]
+ - **Shared by [optional]:** [More Information Needed]
+ - **Model type:** [More Information Needed]
+ - **Language(s) (NLP):** [More Information Needed]
+ - **License:** [More Information Needed]
+ - **Finetuned from model [optional]:** [More Information Needed]
+
+ ### Model Sources [optional]
+
+ <!-- Provide the basic links for the model. -->
+
+ - **Repository:** [More Information Needed]
+ - **Paper [optional]:** [More Information Needed]
+ - **Demo [optional]:** [More Information Needed]
+
+ ## Uses
+
+ <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. -->
+
+ ### Direct Use
+
+ <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. -->
+
+ [More Information Needed]
+
+ ### Downstream Use [optional]
+
+ <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app -->
+
+ [More Information Needed]
+
+ ### Out-of-Scope Use
+
+ <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. -->
+
+ [More Information Needed]
+
+ ## Bias, Risks, and Limitations
+
+ <!-- This section is meant to convey both technical and sociotechnical limitations. -->
+
+ [More Information Needed]
+
+ ### Recommendations
+
+ <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
+
+ Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.
+
+ ## How to Get Started with the Model
+
+ Use the code below to get started with the model.
+
+ [More Information Needed]
+
+ ## Training Details
+
+ ### Training Data
+
+ <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. -->
+
+ [More Information Needed]
+
+ ### Training Procedure
+
+ <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. -->
+
+ #### Preprocessing [optional]
+
+ [More Information Needed]
+
+
+ #### Training Hyperparameters
+
+ - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision -->
+
+ #### Speeds, Sizes, Times [optional]
+
+ <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. -->
+
+ [More Information Needed]
+
+ ## Evaluation
+
+ <!-- This section describes the evaluation protocols and provides the results. -->
+
+ ### Testing Data, Factors & Metrics
+
+ #### Testing Data
+
+ <!-- This should link to a Dataset Card if possible. -->
+
+ [More Information Needed]
+
+ #### Factors
+
+ <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. -->
+
+ [More Information Needed]
+
+ #### Metrics
+
+ <!-- These are the evaluation metrics being used, ideally with a description of why. -->
+
+ [More Information Needed]
+
+ ### Results
+
+ [More Information Needed]
+
+ #### Summary
+
+
+
+ ## Model Examination [optional]
+
+ <!-- Relevant interpretability work for the model goes here -->
+
+ [More Information Needed]
+
+ ## Environmental Impact
+
+ <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly -->
+
+ Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
+
+ - **Hardware Type:** [More Information Needed]
+ - **Hours used:** [More Information Needed]
+ - **Cloud Provider:** [More Information Needed]
+ - **Compute Region:** [More Information Needed]
+ - **Carbon Emitted:** [More Information Needed]
+
+ ## Technical Specifications [optional]
+
+ ### Model Architecture and Objective
+
+ [More Information Needed]
+
+ ### Compute Infrastructure
+
+ [More Information Needed]
+
+ #### Hardware
+
+ [More Information Needed]
+
+ #### Software
+
+ [More Information Needed]
+
+ ## Citation [optional]
+
+ <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. -->
+
+ **BibTeX:**
+
+ [More Information Needed]
+
+ **APA:**
+
+ [More Information Needed]
+
+ ## Glossary [optional]
+
+ <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. -->
+
+ [More Information Needed]
+
+ ## More Information [optional]
+
+ [More Information Needed]
+
+ ## Model Card Authors [optional]
+
+ [More Information Needed]
+
+ ## Model Card Contact
+
+ [More Information Needed]
+ ### Framework versions
+
+ - PEFT 0.17.1
checkpoint-260/adapter_config.json ADDED
@@ -0,0 +1,42 @@
+ {
+   "alpha_pattern": {},
+   "auto_mapping": null,
+   "base_model_name_or_path": "/workspace/meta-llama/Llama-3.1-70B",
+   "bias": "none",
+   "corda_config": null,
+   "eva_config": null,
+   "exclude_modules": null,
+   "fan_in_fan_out": false,
+   "inference_mode": true,
+   "init_lora_weights": true,
+   "layer_replication": null,
+   "layers_pattern": null,
+   "layers_to_transform": null,
+   "loftq_config": {},
+   "lora_alpha": 32,
+   "lora_bias": false,
+   "lora_dropout": 0,
+   "megatron_config": null,
+   "megatron_core": "megatron.core",
+   "modules_to_save": null,
+   "peft_type": "LORA",
+   "qalora_group_size": 16,
+   "r": 16,
+   "rank_pattern": {},
+   "revision": null,
+   "target_modules": [
+     "down_proj",
+     "up_proj",
+     "k_proj",
+     "o_proj",
+     "q_proj",
+     "v_proj",
+     "gate_proj"
+   ],
+   "target_parameters": null,
+   "task_type": "CAUSAL_LM",
+   "trainable_token_indices": null,
+   "use_dora": false,
+   "use_qalora": false,
+   "use_rslora": false
+ }
checkpoint-260/adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:706c0dc63a0f2c4291b7037a8a2d8bbc3032605bb3567a4bcae96db169a8eb59
+ size 414339864
checkpoint-260/chat_template.jinja ADDED
@@ -0,0 +1,4 @@
+ {% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% else %}{% set loop_messages = messages %}{% endif %}{% if system_message is defined %}{{ 'System: ' + system_message + '<|end_of_text|>' + '
+ ' }}{% endif %}{% for message in loop_messages %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ 'Human: ' + content + '<|end_of_text|>' + '
+ Assistant:' }}{% elif message['role'] == 'assistant' %}{{ content + '<|end_of_text|>' + '
+ ' }}{% endif %}{% endfor %}
checkpoint-260/global_step260/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3bb682347561733dc3841fa2c66162ae4f69b68b34dd268c7229e9191823e0a4
+ size 621285637
checkpoint-260/global_step260/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5c12a74a36a9fb896503bf821368ca48fd52952d795c520cee8ac63e27225d8b
+ size 621285637
checkpoint-260/global_step260/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:de0986cbd0a9c67912418c7aef2ee5cd269c4c430d35cca7b89d0e0d12d5de59
+ size 621285637
checkpoint-260/global_step260/bf16_zero_pp_rank_3_mp_rank_00_optim_states.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:250eac8ac5051a491bf2cb03c7afa9528a37bab1fd14c40bb8ac571eef419172
+ size 621285637
checkpoint-260/global_step260/zero_pp_rank_0_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a64cc086f9a9b0cd01de6c8749cf37d5fa41a3579fbf814045a8c34cf3d6357f
+ size 1108113
checkpoint-260/global_step260/zero_pp_rank_1_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4b4a2f7968faa29e3b61bfea42be0add9a355ff69f3198fd1fb58accc11e3e3f
+ size 1108113
checkpoint-260/global_step260/zero_pp_rank_2_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:193f06aff0322b6a0bc789087a2e21bb8fcc23b75496d889030e8b1a60d077d2
+ size 1108113
checkpoint-260/global_step260/zero_pp_rank_3_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:18995e5a49db029b70eada9778dace024591b0961ea080a9154f5e0148e5693a
+ size 1108113
checkpoint-260/latest ADDED
@@ -0,0 +1 @@
+ global_step260
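The bf16 optimizer shards and model states under global_step260 are DeepSpeed ZeRO partitions, and the `latest` tag above tells the loader which step directory to resolve. A sketch of consolidating them into a single fp32 state dict (the repo's bundled zero_to_fp32.py wraps the same DeepSpeed utility; this assumes a local clone of checkpoint-260):

```python
# Sketch: merge the per-rank ZeRO shards into one fp32 state dict on CPU.
from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint

# Reads checkpoint-260/latest -> global_step260 and gathers the partitions.
state_dict = get_fp32_state_dict_from_zero_checkpoint("checkpoint-260")
print(sum(t.numel() for t in state_dict.values()))  # total consolidated parameters
```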
checkpoint-260/rng_state_0.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e194728aaa10c0651a28dd077b8d2c23c9f33fe417ad98434b9d53b89331c8ad
+ size 15429
checkpoint-260/rng_state_1.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bf96eb5be532d73f38945749361f3edd34b97cc076d4be7e2cde686e5359b8d7
+ size 15429
checkpoint-260/rng_state_2.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c3730e5abab5d2c549528777cb61f23b5504fcdf03bc11ecbc1c43cde06a8681
+ size 15429
checkpoint-260/rng_state_3.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6d3c11250a36c56988f28d2935067da0ae1298b63289c54f8eae528ff166c9fa
+ size 15429
checkpoint-260/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d9655e129d871161f2af0cb4bce0cc09811c5802ee17c1cdee81964107370c5c
+ size 1465
checkpoint-260/special_tokens_map.json ADDED
@@ -0,0 +1,17 @@
+ {
+   "bos_token": {
+     "content": "<|begin_of_text|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|end_of_text|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": "<|end_of_text|>"
+ }
checkpoint-260/tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
+ size 17209920
checkpoint-260/tokenizer_config.json ADDED
@@ -0,0 +1,2065 @@
+ {
+   "added_tokens_decoder": {
+     "128000": {
+       "content": "<|begin_of_text|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128001": {
+       "content": "<|end_of_text|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
[ids "128002" through "128172" repeat the same flags and define <|reserved_special_token_0|>, <|reserved_special_token_1|>, <|finetune_right_pad_id|>, <|reserved_special_token_2|>, <|start_header_id|>, <|end_header_id|>, <|eom_id|>, <|eot_id|>, <|python_tag|>, then <|reserved_special_token_3|> through <|reserved_special_token_164|>]
+     "128173": {
+       "content": "<|reserved_special_token_165|>",
1389
+ "lstrip": false,
1390
+ "normalized": false,
1391
+ "rstrip": false,
1392
+ "single_word": false,
1393
+ "special": true
1394
+ },
1395
+ "128174": {
1396
+ "content": "<|reserved_special_token_166|>",
1397
+ "lstrip": false,
1398
+ "normalized": false,
1399
+ "rstrip": false,
1400
+ "single_word": false,
1401
+ "special": true
1402
+ },
1403
+ "128175": {
1404
+ "content": "<|reserved_special_token_167|>",
1405
+ "lstrip": false,
1406
+ "normalized": false,
1407
+ "rstrip": false,
1408
+ "single_word": false,
1409
+ "special": true
1410
+ },
1411
+ "128176": {
1412
+ "content": "<|reserved_special_token_168|>",
1413
+ "lstrip": false,
1414
+ "normalized": false,
1415
+ "rstrip": false,
1416
+ "single_word": false,
1417
+ "special": true
1418
+ },
1419
+ "128177": {
1420
+ "content": "<|reserved_special_token_169|>",
1421
+ "lstrip": false,
1422
+ "normalized": false,
1423
+ "rstrip": false,
1424
+ "single_word": false,
1425
+ "special": true
1426
+ },
1427
+ "128178": {
1428
+ "content": "<|reserved_special_token_170|>",
1429
+ "lstrip": false,
1430
+ "normalized": false,
1431
+ "rstrip": false,
1432
+ "single_word": false,
1433
+ "special": true
1434
+ },
1435
+ "128179": {
1436
+ "content": "<|reserved_special_token_171|>",
1437
+ "lstrip": false,
1438
+ "normalized": false,
1439
+ "rstrip": false,
1440
+ "single_word": false,
1441
+ "special": true
1442
+ },
1443
+ "128180": {
1444
+ "content": "<|reserved_special_token_172|>",
1445
+ "lstrip": false,
1446
+ "normalized": false,
1447
+ "rstrip": false,
1448
+ "single_word": false,
1449
+ "special": true
1450
+ },
1451
+ "128181": {
1452
+ "content": "<|reserved_special_token_173|>",
1453
+ "lstrip": false,
1454
+ "normalized": false,
1455
+ "rstrip": false,
1456
+ "single_word": false,
1457
+ "special": true
1458
+ },
1459
+ "128182": {
1460
+ "content": "<|reserved_special_token_174|>",
1461
+ "lstrip": false,
1462
+ "normalized": false,
1463
+ "rstrip": false,
1464
+ "single_word": false,
1465
+ "special": true
1466
+ },
1467
+ "128183": {
1468
+ "content": "<|reserved_special_token_175|>",
1469
+ "lstrip": false,
1470
+ "normalized": false,
1471
+ "rstrip": false,
1472
+ "single_word": false,
1473
+ "special": true
1474
+ },
1475
+ "128184": {
1476
+ "content": "<|reserved_special_token_176|>",
1477
+ "lstrip": false,
1478
+ "normalized": false,
1479
+ "rstrip": false,
1480
+ "single_word": false,
1481
+ "special": true
1482
+ },
1483
+ "128185": {
1484
+ "content": "<|reserved_special_token_177|>",
1485
+ "lstrip": false,
1486
+ "normalized": false,
1487
+ "rstrip": false,
1488
+ "single_word": false,
1489
+ "special": true
1490
+ },
1491
+ "128186": {
1492
+ "content": "<|reserved_special_token_178|>",
1493
+ "lstrip": false,
1494
+ "normalized": false,
1495
+ "rstrip": false,
1496
+ "single_word": false,
1497
+ "special": true
1498
+ },
1499
+ "128187": {
1500
+ "content": "<|reserved_special_token_179|>",
1501
+ "lstrip": false,
1502
+ "normalized": false,
1503
+ "rstrip": false,
1504
+ "single_word": false,
1505
+ "special": true
1506
+ },
1507
+ "128188": {
1508
+ "content": "<|reserved_special_token_180|>",
1509
+ "lstrip": false,
1510
+ "normalized": false,
1511
+ "rstrip": false,
1512
+ "single_word": false,
1513
+ "special": true
1514
+ },
1515
+ "128189": {
1516
+ "content": "<|reserved_special_token_181|>",
1517
+ "lstrip": false,
1518
+ "normalized": false,
1519
+ "rstrip": false,
1520
+ "single_word": false,
1521
+ "special": true
1522
+ },
1523
+ "128190": {
1524
+ "content": "<|reserved_special_token_182|>",
1525
+ "lstrip": false,
1526
+ "normalized": false,
1527
+ "rstrip": false,
1528
+ "single_word": false,
1529
+ "special": true
1530
+ },
1531
+ "128191": {
1532
+ "content": "<|reserved_special_token_183|>",
1533
+ "lstrip": false,
1534
+ "normalized": false,
1535
+ "rstrip": false,
1536
+ "single_word": false,
1537
+ "special": true
1538
+ },
1539
+ "128192": {
1540
+ "content": "<|reserved_special_token_184|>",
1541
+ "lstrip": false,
1542
+ "normalized": false,
1543
+ "rstrip": false,
1544
+ "single_word": false,
1545
+ "special": true
1546
+ },
1547
+ "128193": {
1548
+ "content": "<|reserved_special_token_185|>",
1549
+ "lstrip": false,
1550
+ "normalized": false,
1551
+ "rstrip": false,
1552
+ "single_word": false,
1553
+ "special": true
1554
+ },
1555
+ "128194": {
1556
+ "content": "<|reserved_special_token_186|>",
1557
+ "lstrip": false,
1558
+ "normalized": false,
1559
+ "rstrip": false,
1560
+ "single_word": false,
1561
+ "special": true
1562
+ },
1563
+ "128195": {
1564
+ "content": "<|reserved_special_token_187|>",
1565
+ "lstrip": false,
1566
+ "normalized": false,
1567
+ "rstrip": false,
1568
+ "single_word": false,
1569
+ "special": true
1570
+ },
1571
+ "128196": {
1572
+ "content": "<|reserved_special_token_188|>",
1573
+ "lstrip": false,
1574
+ "normalized": false,
1575
+ "rstrip": false,
1576
+ "single_word": false,
1577
+ "special": true
1578
+ },
1579
+ "128197": {
1580
+ "content": "<|reserved_special_token_189|>",
1581
+ "lstrip": false,
1582
+ "normalized": false,
1583
+ "rstrip": false,
1584
+ "single_word": false,
1585
+ "special": true
1586
+ },
1587
+ "128198": {
1588
+ "content": "<|reserved_special_token_190|>",
1589
+ "lstrip": false,
1590
+ "normalized": false,
1591
+ "rstrip": false,
1592
+ "single_word": false,
1593
+ "special": true
1594
+ },
1595
+ "128199": {
1596
+ "content": "<|reserved_special_token_191|>",
1597
+ "lstrip": false,
1598
+ "normalized": false,
1599
+ "rstrip": false,
1600
+ "single_word": false,
1601
+ "special": true
1602
+ },
1603
+ "128200": {
1604
+ "content": "<|reserved_special_token_192|>",
1605
+ "lstrip": false,
1606
+ "normalized": false,
1607
+ "rstrip": false,
1608
+ "single_word": false,
1609
+ "special": true
1610
+ },
1611
+ "128201": {
1612
+ "content": "<|reserved_special_token_193|>",
1613
+ "lstrip": false,
1614
+ "normalized": false,
1615
+ "rstrip": false,
1616
+ "single_word": false,
1617
+ "special": true
1618
+ },
1619
+ "128202": {
1620
+ "content": "<|reserved_special_token_194|>",
1621
+ "lstrip": false,
1622
+ "normalized": false,
1623
+ "rstrip": false,
1624
+ "single_word": false,
1625
+ "special": true
1626
+ },
1627
+ "128203": {
1628
+ "content": "<|reserved_special_token_195|>",
1629
+ "lstrip": false,
1630
+ "normalized": false,
1631
+ "rstrip": false,
1632
+ "single_word": false,
1633
+ "special": true
1634
+ },
1635
+ "128204": {
1636
+ "content": "<|reserved_special_token_196|>",
1637
+ "lstrip": false,
1638
+ "normalized": false,
1639
+ "rstrip": false,
1640
+ "single_word": false,
1641
+ "special": true
1642
+ },
1643
+ "128205": {
1644
+ "content": "<|reserved_special_token_197|>",
1645
+ "lstrip": false,
1646
+ "normalized": false,
1647
+ "rstrip": false,
1648
+ "single_word": false,
1649
+ "special": true
1650
+ },
1651
+ "128206": {
1652
+ "content": "<|reserved_special_token_198|>",
1653
+ "lstrip": false,
1654
+ "normalized": false,
1655
+ "rstrip": false,
1656
+ "single_word": false,
1657
+ "special": true
1658
+ },
1659
+ "128207": {
1660
+ "content": "<|reserved_special_token_199|>",
1661
+ "lstrip": false,
1662
+ "normalized": false,
1663
+ "rstrip": false,
1664
+ "single_word": false,
1665
+ "special": true
1666
+ },
1667
+ "128208": {
1668
+ "content": "<|reserved_special_token_200|>",
1669
+ "lstrip": false,
1670
+ "normalized": false,
1671
+ "rstrip": false,
1672
+ "single_word": false,
1673
+ "special": true
1674
+ },
1675
+ "128209": {
1676
+ "content": "<|reserved_special_token_201|>",
1677
+ "lstrip": false,
1678
+ "normalized": false,
1679
+ "rstrip": false,
1680
+ "single_word": false,
1681
+ "special": true
1682
+ },
1683
+ "128210": {
1684
+ "content": "<|reserved_special_token_202|>",
1685
+ "lstrip": false,
1686
+ "normalized": false,
1687
+ "rstrip": false,
1688
+ "single_word": false,
1689
+ "special": true
1690
+ },
1691
+ "128211": {
1692
+ "content": "<|reserved_special_token_203|>",
1693
+ "lstrip": false,
1694
+ "normalized": false,
1695
+ "rstrip": false,
1696
+ "single_word": false,
1697
+ "special": true
1698
+ },
1699
+ "128212": {
1700
+ "content": "<|reserved_special_token_204|>",
1701
+ "lstrip": false,
1702
+ "normalized": false,
1703
+ "rstrip": false,
1704
+ "single_word": false,
1705
+ "special": true
1706
+ },
1707
+ "128213": {
1708
+ "content": "<|reserved_special_token_205|>",
1709
+ "lstrip": false,
1710
+ "normalized": false,
1711
+ "rstrip": false,
1712
+ "single_word": false,
1713
+ "special": true
1714
+ },
1715
+ "128214": {
1716
+ "content": "<|reserved_special_token_206|>",
1717
+ "lstrip": false,
1718
+ "normalized": false,
1719
+ "rstrip": false,
1720
+ "single_word": false,
1721
+ "special": true
1722
+ },
1723
+ "128215": {
1724
+ "content": "<|reserved_special_token_207|>",
1725
+ "lstrip": false,
1726
+ "normalized": false,
1727
+ "rstrip": false,
1728
+ "single_word": false,
1729
+ "special": true
1730
+ },
1731
+ "128216": {
1732
+ "content": "<|reserved_special_token_208|>",
1733
+ "lstrip": false,
1734
+ "normalized": false,
1735
+ "rstrip": false,
1736
+ "single_word": false,
1737
+ "special": true
1738
+ },
1739
+ "128217": {
1740
+ "content": "<|reserved_special_token_209|>",
1741
+ "lstrip": false,
1742
+ "normalized": false,
1743
+ "rstrip": false,
1744
+ "single_word": false,
1745
+ "special": true
1746
+ },
1747
+ "128218": {
1748
+ "content": "<|reserved_special_token_210|>",
1749
+ "lstrip": false,
1750
+ "normalized": false,
1751
+ "rstrip": false,
1752
+ "single_word": false,
1753
+ "special": true
1754
+ },
1755
+ "128219": {
1756
+ "content": "<|reserved_special_token_211|>",
1757
+ "lstrip": false,
1758
+ "normalized": false,
1759
+ "rstrip": false,
1760
+ "single_word": false,
1761
+ "special": true
1762
+ },
1763
+ "128220": {
1764
+ "content": "<|reserved_special_token_212|>",
1765
+ "lstrip": false,
1766
+ "normalized": false,
1767
+ "rstrip": false,
1768
+ "single_word": false,
1769
+ "special": true
1770
+ },
1771
+ "128221": {
1772
+ "content": "<|reserved_special_token_213|>",
1773
+ "lstrip": false,
1774
+ "normalized": false,
1775
+ "rstrip": false,
1776
+ "single_word": false,
1777
+ "special": true
1778
+ },
1779
+ "128222": {
1780
+ "content": "<|reserved_special_token_214|>",
1781
+ "lstrip": false,
1782
+ "normalized": false,
1783
+ "rstrip": false,
1784
+ "single_word": false,
1785
+ "special": true
1786
+ },
1787
+ "128223": {
1788
+ "content": "<|reserved_special_token_215|>",
1789
+ "lstrip": false,
1790
+ "normalized": false,
1791
+ "rstrip": false,
1792
+ "single_word": false,
1793
+ "special": true
1794
+ },
1795
+ "128224": {
1796
+ "content": "<|reserved_special_token_216|>",
1797
+ "lstrip": false,
1798
+ "normalized": false,
1799
+ "rstrip": false,
1800
+ "single_word": false,
1801
+ "special": true
1802
+ },
1803
+ "128225": {
1804
+ "content": "<|reserved_special_token_217|>",
1805
+ "lstrip": false,
1806
+ "normalized": false,
1807
+ "rstrip": false,
1808
+ "single_word": false,
1809
+ "special": true
1810
+ },
1811
+ "128226": {
1812
+ "content": "<|reserved_special_token_218|>",
1813
+ "lstrip": false,
1814
+ "normalized": false,
1815
+ "rstrip": false,
1816
+ "single_word": false,
1817
+ "special": true
1818
+ },
1819
+ "128227": {
1820
+ "content": "<|reserved_special_token_219|>",
1821
+ "lstrip": false,
1822
+ "normalized": false,
1823
+ "rstrip": false,
1824
+ "single_word": false,
1825
+ "special": true
1826
+ },
1827
+ "128228": {
1828
+ "content": "<|reserved_special_token_220|>",
1829
+ "lstrip": false,
1830
+ "normalized": false,
1831
+ "rstrip": false,
1832
+ "single_word": false,
1833
+ "special": true
1834
+ },
1835
+ "128229": {
1836
+ "content": "<|reserved_special_token_221|>",
1837
+ "lstrip": false,
1838
+ "normalized": false,
1839
+ "rstrip": false,
1840
+ "single_word": false,
1841
+ "special": true
1842
+ },
1843
+ "128230": {
1844
+ "content": "<|reserved_special_token_222|>",
1845
+ "lstrip": false,
1846
+ "normalized": false,
1847
+ "rstrip": false,
1848
+ "single_word": false,
1849
+ "special": true
1850
+ },
1851
+ "128231": {
1852
+ "content": "<|reserved_special_token_223|>",
1853
+ "lstrip": false,
1854
+ "normalized": false,
1855
+ "rstrip": false,
1856
+ "single_word": false,
1857
+ "special": true
1858
+ },
1859
+ "128232": {
1860
+ "content": "<|reserved_special_token_224|>",
1861
+ "lstrip": false,
1862
+ "normalized": false,
1863
+ "rstrip": false,
1864
+ "single_word": false,
1865
+ "special": true
1866
+ },
1867
+ "128233": {
1868
+ "content": "<|reserved_special_token_225|>",
1869
+ "lstrip": false,
1870
+ "normalized": false,
1871
+ "rstrip": false,
1872
+ "single_word": false,
1873
+ "special": true
1874
+ },
1875
+ "128234": {
1876
+ "content": "<|reserved_special_token_226|>",
1877
+ "lstrip": false,
1878
+ "normalized": false,
1879
+ "rstrip": false,
1880
+ "single_word": false,
1881
+ "special": true
1882
+ },
1883
+ "128235": {
1884
+ "content": "<|reserved_special_token_227|>",
1885
+ "lstrip": false,
1886
+ "normalized": false,
1887
+ "rstrip": false,
1888
+ "single_word": false,
1889
+ "special": true
1890
+ },
1891
+ "128236": {
1892
+ "content": "<|reserved_special_token_228|>",
1893
+ "lstrip": false,
1894
+ "normalized": false,
1895
+ "rstrip": false,
1896
+ "single_word": false,
1897
+ "special": true
1898
+ },
1899
+ "128237": {
1900
+ "content": "<|reserved_special_token_229|>",
1901
+ "lstrip": false,
1902
+ "normalized": false,
1903
+ "rstrip": false,
1904
+ "single_word": false,
1905
+ "special": true
1906
+ },
1907
+ "128238": {
1908
+ "content": "<|reserved_special_token_230|>",
1909
+ "lstrip": false,
1910
+ "normalized": false,
1911
+ "rstrip": false,
1912
+ "single_word": false,
1913
+ "special": true
1914
+ },
1915
+ "128239": {
1916
+ "content": "<|reserved_special_token_231|>",
1917
+ "lstrip": false,
1918
+ "normalized": false,
1919
+ "rstrip": false,
1920
+ "single_word": false,
1921
+ "special": true
1922
+ },
1923
+ "128240": {
1924
+ "content": "<|reserved_special_token_232|>",
1925
+ "lstrip": false,
1926
+ "normalized": false,
1927
+ "rstrip": false,
1928
+ "single_word": false,
1929
+ "special": true
1930
+ },
1931
+ "128241": {
1932
+ "content": "<|reserved_special_token_233|>",
1933
+ "lstrip": false,
1934
+ "normalized": false,
1935
+ "rstrip": false,
1936
+ "single_word": false,
1937
+ "special": true
1938
+ },
1939
+ "128242": {
1940
+ "content": "<|reserved_special_token_234|>",
1941
+ "lstrip": false,
1942
+ "normalized": false,
1943
+ "rstrip": false,
1944
+ "single_word": false,
1945
+ "special": true
1946
+ },
1947
+ "128243": {
1948
+ "content": "<|reserved_special_token_235|>",
1949
+ "lstrip": false,
1950
+ "normalized": false,
1951
+ "rstrip": false,
1952
+ "single_word": false,
1953
+ "special": true
1954
+ },
1955
+ "128244": {
1956
+ "content": "<|reserved_special_token_236|>",
1957
+ "lstrip": false,
1958
+ "normalized": false,
1959
+ "rstrip": false,
1960
+ "single_word": false,
1961
+ "special": true
1962
+ },
1963
+ "128245": {
1964
+ "content": "<|reserved_special_token_237|>",
1965
+ "lstrip": false,
1966
+ "normalized": false,
1967
+ "rstrip": false,
1968
+ "single_word": false,
1969
+ "special": true
1970
+ },
1971
+ "128246": {
1972
+ "content": "<|reserved_special_token_238|>",
1973
+ "lstrip": false,
1974
+ "normalized": false,
1975
+ "rstrip": false,
1976
+ "single_word": false,
1977
+ "special": true
1978
+ },
1979
+ "128247": {
1980
+ "content": "<|reserved_special_token_239|>",
1981
+ "lstrip": false,
1982
+ "normalized": false,
1983
+ "rstrip": false,
1984
+ "single_word": false,
1985
+ "special": true
1986
+ },
1987
+ "128248": {
1988
+ "content": "<|reserved_special_token_240|>",
1989
+ "lstrip": false,
1990
+ "normalized": false,
1991
+ "rstrip": false,
1992
+ "single_word": false,
1993
+ "special": true
1994
+ },
1995
+ "128249": {
1996
+ "content": "<|reserved_special_token_241|>",
1997
+ "lstrip": false,
1998
+ "normalized": false,
1999
+ "rstrip": false,
2000
+ "single_word": false,
2001
+ "special": true
2002
+ },
2003
+ "128250": {
2004
+ "content": "<|reserved_special_token_242|>",
2005
+ "lstrip": false,
2006
+ "normalized": false,
2007
+ "rstrip": false,
2008
+ "single_word": false,
2009
+ "special": true
2010
+ },
2011
+ "128251": {
2012
+ "content": "<|reserved_special_token_243|>",
2013
+ "lstrip": false,
2014
+ "normalized": false,
2015
+ "rstrip": false,
2016
+ "single_word": false,
2017
+ "special": true
2018
+ },
2019
+ "128252": {
2020
+ "content": "<|reserved_special_token_244|>",
2021
+ "lstrip": false,
2022
+ "normalized": false,
2023
+ "rstrip": false,
2024
+ "single_word": false,
2025
+ "special": true
2026
+ },
2027
+ "128253": {
2028
+ "content": "<|reserved_special_token_245|>",
2029
+ "lstrip": false,
2030
+ "normalized": false,
2031
+ "rstrip": false,
2032
+ "single_word": false,
2033
+ "special": true
2034
+ },
2035
+ "128254": {
2036
+ "content": "<|reserved_special_token_246|>",
2037
+ "lstrip": false,
2038
+ "normalized": false,
2039
+ "rstrip": false,
2040
+ "single_word": false,
2041
+ "special": true
2042
+ },
2043
+ "128255": {
2044
+ "content": "<|reserved_special_token_247|>",
2045
+ "lstrip": false,
2046
+ "normalized": false,
2047
+ "rstrip": false,
2048
+ "single_word": false,
2049
+ "special": true
2050
+ }
2051
+ },
2052
+ "bos_token": "<|begin_of_text|>",
2053
+ "clean_up_tokenization_spaces": true,
2054
+ "eos_token": "<|end_of_text|>",
2055
+ "extra_special_tokens": {},
2056
+ "model_input_names": [
2057
+ "input_ids",
2058
+ "attention_mask"
2059
+ ],
2060
+ "model_max_length": 131072,
2061
+ "pad_token": "<|end_of_text|>",
2062
+ "padding_side": "right",
2063
+ "split_special_tokens": false,
2064
+ "tokenizer_class": "PreTrainedTokenizerFast"
2065
+ }
checkpoint-260/trainer_state.json ADDED
@@ -0,0 +1,2634 @@
+ {
+ "best_global_step": null,
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 5.0,
+ "eval_steps": 500,
+ "global_step": 260,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.01951219512195122,
+ "grad_norm": 0.15858030701789594,
+ "learning_rate": 0.0001,
+ "loss": 1.9481,
+ "num_input_tokens_seen": 392832,
+ "step": 1,
+ "train_runtime": 90.5356,
+ "train_tokens_per_second": 4338.977
+ },
+ {
+ "epoch": 0.03902439024390244,
+ "grad_norm": 0.20614269950817926,
+ "learning_rate": 9.999635004278054e-05,
+ "loss": 1.9426,
+ "num_input_tokens_seen": 785664,
+ "step": 2,
+ "train_runtime": 176.3121,
+ "train_tokens_per_second": 4456.097
+ },
+ {
+ "epoch": 0.05853658536585366,
+ "grad_norm": 0.11931287845406353,
+ "learning_rate": 9.998540070400966e-05,
+ "loss": 1.8808,
+ "num_input_tokens_seen": 1178496,
+ "step": 3,
+ "train_runtime": 261.8343,
+ "train_tokens_per_second": 4500.924
+ },
+ {
+ "epoch": 0.07804878048780488,
+ "grad_norm": 0.1061877717922271,
+ "learning_rate": 9.996715358227206e-05,
+ "loss": 1.8827,
+ "num_input_tokens_seen": 1571328,
+ "step": 4,
+ "train_runtime": 347.2723,
+ "train_tokens_per_second": 4524.772
+ },
+ {
+ "epoch": 0.0975609756097561,
+ "grad_norm": 0.2303512070226615,
+ "learning_rate": 9.994161134161634e-05,
+ "loss": 1.8196,
+ "num_input_tokens_seen": 1964160,
+ "step": 5,
+ "train_runtime": 432.7972,
+ "train_tokens_per_second": 4538.292
+ },
+ {
+ "epoch": 0.11707317073170732,
+ "grad_norm": 0.10282606368495031,
+ "learning_rate": 9.990877771116589e-05,
+ "loss": 1.7928,
+ "num_input_tokens_seen": 2356992,
+ "step": 6,
+ "train_runtime": 518.285,
+ "train_tokens_per_second": 4547.676
+ },
+ {
+ "epoch": 0.13658536585365855,
+ "grad_norm": 0.09334879403165187,
+ "learning_rate": 9.986865748457457e-05,
+ "loss": 1.7726,
+ "num_input_tokens_seen": 2749824,
+ "step": 7,
+ "train_runtime": 603.8579,
+ "train_tokens_per_second": 4553.76
+ },
+ {
+ "epoch": 0.15609756097560976,
+ "grad_norm": 0.08922352642855319,
+ "learning_rate": 9.982125651932681e-05,
+ "loss": 1.7569,
+ "num_input_tokens_seen": 3142656,
+ "step": 8,
+ "train_runtime": 689.4943,
+ "train_tokens_per_second": 4557.915
+ },
+ {
+ "epoch": 0.17560975609756097,
+ "grad_norm": 0.08017329983520603,
+ "learning_rate": 9.976658173588244e-05,
+ "loss": 1.7263,
+ "num_input_tokens_seen": 3535488,
+ "step": 9,
+ "train_runtime": 775.2129,
+ "train_tokens_per_second": 4560.667
+ },
+ {
+ "epoch": 0.1951219512195122,
+ "grad_norm": 0.09081767257053708,
+ "learning_rate": 9.970464111666626e-05,
+ "loss": 1.7278,
+ "num_input_tokens_seen": 3928320,
+ "step": 10,
+ "train_runtime": 860.7493,
+ "train_tokens_per_second": 4563.837
+ },
+ {
+ "epoch": 0.2146341463414634,
+ "grad_norm": 0.09473566656122094,
+ "learning_rate": 9.96354437049027e-05,
+ "loss": 1.719,
+ "num_input_tokens_seen": 4321152,
+ "step": 11,
+ "train_runtime": 946.3458,
+ "train_tokens_per_second": 4566.145
+ },
+ {
+ "epoch": 0.23414634146341465,
+ "grad_norm": 0.09855985009583386,
+ "learning_rate": 9.955899960329547e-05,
+ "loss": 1.6873,
+ "num_input_tokens_seen": 4713984,
+ "step": 12,
+ "train_runtime": 1031.9306,
+ "train_tokens_per_second": 4568.121
+ },
+ {
+ "epoch": 0.25365853658536586,
+ "grad_norm": 0.08976785941545291,
+ "learning_rate": 9.947531997255256e-05,
+ "loss": 1.6583,
+ "num_input_tokens_seen": 5106816,
+ "step": 13,
+ "train_runtime": 1117.6812,
+ "train_tokens_per_second": 4569.117
+ },
+ {
+ "epoch": 0.2731707317073171,
+ "grad_norm": 0.13069965458083008,
+ "learning_rate": 9.938441702975689e-05,
+ "loss": 1.6484,
+ "num_input_tokens_seen": 5499648,
+ "step": 14,
+ "train_runtime": 1203.3824,
+ "train_tokens_per_second": 4570.158
+ },
+ {
+ "epoch": 0.2926829268292683,
+ "grad_norm": 0.09614160184332628,
+ "learning_rate": 9.928630404658255e-05,
+ "loss": 1.6392,
+ "num_input_tokens_seen": 5892480,
+ "step": 15,
+ "train_runtime": 1288.9765,
+ "train_tokens_per_second": 4571.441
+ },
+ {
+ "epoch": 0.3121951219512195,
+ "grad_norm": 0.11451137531983133,
+ "learning_rate": 9.918099534735718e-05,
+ "loss": 1.6079,
+ "num_input_tokens_seen": 6285312,
+ "step": 16,
+ "train_runtime": 1374.5203,
+ "train_tokens_per_second": 4572.731
+ },
+ {
+ "epoch": 0.33170731707317075,
+ "grad_norm": 0.1000369686921647,
+ "learning_rate": 9.906850630697068e-05,
+ "loss": 1.614,
+ "num_input_tokens_seen": 6678144,
+ "step": 17,
+ "train_runtime": 1460.267,
+ "train_tokens_per_second": 4573.235
+ },
+ {
+ "epoch": 0.35121951219512193,
+ "grad_norm": 0.11186890632545277,
+ "learning_rate": 9.894885334863044e-05,
+ "loss": 1.5871,
+ "num_input_tokens_seen": 7070976,
+ "step": 18,
+ "train_runtime": 1545.8916,
+ "train_tokens_per_second": 4574.044
+ },
+ {
+ "epoch": 0.37073170731707317,
+ "grad_norm": 0.12930001237850156,
+ "learning_rate": 9.882205394146361e-05,
+ "loss": 1.5864,
+ "num_input_tokens_seen": 7463808,
+ "step": 19,
+ "train_runtime": 1631.4769,
+ "train_tokens_per_second": 4574.878
+ },
+ {
+ "epoch": 0.3902439024390244,
+ "grad_norm": 0.11111621443548497,
+ "learning_rate": 9.868812659796668e-05,
+ "loss": 1.547,
+ "num_input_tokens_seen": 7856640,
+ "step": 20,
+ "train_runtime": 1717.1186,
+ "train_tokens_per_second": 4575.479
+ },
+ {
+ "epoch": 0.4097560975609756,
+ "grad_norm": 0.10197780896100014,
+ "learning_rate": 9.85470908713026e-05,
+ "loss": 1.5349,
+ "num_input_tokens_seen": 8249472,
+ "step": 21,
+ "train_runtime": 1802.6996,
+ "train_tokens_per_second": 4576.177
+ },
+ {
+ "epoch": 0.4292682926829268,
+ "grad_norm": 0.11362815005489417,
+ "learning_rate": 9.839896735244614e-05,
+ "loss": 1.5123,
+ "num_input_tokens_seen": 8642304,
+ "step": 22,
+ "train_runtime": 1888.2112,
+ "train_tokens_per_second": 4576.98
+ },
+ {
+ "epoch": 0.44878048780487806,
+ "grad_norm": 0.5028658075504846,
+ "learning_rate": 9.824377766717759e-05,
+ "loss": 1.4901,
+ "num_input_tokens_seen": 9035136,
+ "step": 23,
+ "train_runtime": 1973.7336,
+ "train_tokens_per_second": 4577.688
+ },
+ {
+ "epoch": 0.4682926829268293,
+ "grad_norm": 0.12820339687555396,
+ "learning_rate": 9.808154447292539e-05,
+ "loss": 1.4924,
+ "num_input_tokens_seen": 9427968,
+ "step": 24,
+ "train_runtime": 2059.3532,
+ "train_tokens_per_second": 4578.121
+ },
+ {
+ "epoch": 0.4878048780487805,
+ "grad_norm": 0.13588084054069685,
+ "learning_rate": 9.791229145545831e-05,
+ "loss": 1.4489,
+ "num_input_tokens_seen": 9820800,
+ "step": 25,
+ "train_runtime": 2145.0524,
+ "train_tokens_per_second": 4578.35
+ },
+ {
+ "epoch": 0.5073170731707317,
+ "grad_norm": 0.13132153795044452,
+ "learning_rate": 9.773604332542729e-05,
+ "loss": 1.4188,
+ "num_input_tokens_seen": 10213632,
+ "step": 26,
+ "train_runtime": 2230.6879,
+ "train_tokens_per_second": 4578.692
+ },
+ {
+ "epoch": 0.526829268292683,
+ "grad_norm": 0.13496880260647526,
+ "learning_rate": 9.755282581475769e-05,
+ "loss": 1.4272,
+ "num_input_tokens_seen": 10606464,
+ "step": 27,
+ "train_runtime": 2316.1437,
+ "train_tokens_per_second": 4579.364
+ },
+ {
+ "epoch": 0.5463414634146342,
+ "grad_norm": 0.14799450429263764,
+ "learning_rate": 9.736266567289253e-05,
+ "loss": 1.3815,
+ "num_input_tokens_seen": 10999296,
+ "step": 28,
+ "train_runtime": 2401.7043,
+ "train_tokens_per_second": 4579.788
+ },
+ {
+ "epoch": 0.5658536585365853,
+ "grad_norm": 0.32782733922481533,
+ "learning_rate": 9.716559066288715e-05,
+ "loss": 1.3752,
+ "num_input_tokens_seen": 11392128,
+ "step": 29,
+ "train_runtime": 2487.3215,
+ "train_tokens_per_second": 4580.079
+ },
+ {
+ "epoch": 0.5853658536585366,
+ "grad_norm": 0.1534924395682076,
+ "learning_rate": 9.696162955735577e-05,
+ "loss": 1.3629,
+ "num_input_tokens_seen": 11784960,
+ "step": 30,
+ "train_runtime": 2572.9464,
+ "train_tokens_per_second": 4580.336
+ },
+ {
+ "epoch": 0.6048780487804878,
+ "grad_norm": 0.23747387980155885,
+ "learning_rate": 9.675081213427076e-05,
+ "loss": 1.3163,
+ "num_input_tokens_seen": 12177792,
+ "step": 31,
+ "train_runtime": 2658.4902,
+ "train_tokens_per_second": 4580.717
+ },
+ {
+ "epoch": 0.624390243902439,
+ "grad_norm": 0.1724017252468989,
+ "learning_rate": 9.653316917261512e-05,
+ "loss": 1.3058,
+ "num_input_tokens_seen": 12570624,
+ "step": 32,
+ "train_runtime": 2743.9643,
+ "train_tokens_per_second": 4581.191
+ },
+ {
+ "epoch": 0.6439024390243903,
+ "grad_norm": 0.1619965153711787,
+ "learning_rate": 9.630873244788883e-05,
+ "loss": 1.2774,
+ "num_input_tokens_seen": 12963456,
+ "step": 33,
+ "train_runtime": 2829.4055,
+ "train_tokens_per_second": 4581.689
+ },
+ {
+ "epoch": 0.6634146341463415,
+ "grad_norm": 0.18133620259351485,
+ "learning_rate": 9.607753472746966e-05,
+ "loss": 1.2585,
+ "num_input_tokens_seen": 13356288,
+ "step": 34,
+ "train_runtime": 2914.8292,
+ "train_tokens_per_second": 4582.185
+ },
+ {
+ "epoch": 0.6829268292682927,
+ "grad_norm": 0.20121493987838818,
+ "learning_rate": 9.583960976582913e-05,
+ "loss": 1.2429,
+ "num_input_tokens_seen": 13749120,
+ "step": 35,
+ "train_runtime": 3000.3011,
+ "train_tokens_per_second": 4582.58
+ },
+ {
+ "epoch": 0.7024390243902439,
+ "grad_norm": 0.2485985257238104,
+ "learning_rate": 9.559499229960451e-05,
+ "loss": 1.2092,
+ "num_input_tokens_seen": 14141952,
+ "step": 36,
+ "train_runtime": 3085.8763,
+ "train_tokens_per_second": 4582.799
+ },
+ {
+ "epoch": 0.7219512195121951,
+ "grad_norm": 0.24323454189307991,
+ "learning_rate": 9.534371804252728e-05,
+ "loss": 1.1789,
+ "num_input_tokens_seen": 14534784,
+ "step": 37,
+ "train_runtime": 3171.7531,
+ "train_tokens_per_second": 4582.571
+ },
+ {
+ "epoch": 0.7414634146341463,
+ "grad_norm": 0.34801979161039454,
+ "learning_rate": 9.508582368020897e-05,
+ "loss": 1.1495,
+ "num_input_tokens_seen": 14927616,
+ "step": 38,
+ "train_runtime": 3257.3777,
+ "train_tokens_per_second": 4582.71
+ },
+ {
+ "epoch": 0.7609756097560976,
+ "grad_norm": 0.6127611336291061,
+ "learning_rate": 9.482134686478519e-05,
+ "loss": 1.141,
+ "num_input_tokens_seen": 15320448,
+ "step": 39,
+ "train_runtime": 3342.782,
+ "train_tokens_per_second": 4583.143
+ },
+ {
+ "epoch": 0.7804878048780488,
+ "grad_norm": 0.6420115225722821,
+ "learning_rate": 9.45503262094184e-05,
+ "loss": 1.1236,
+ "num_input_tokens_seen": 15713280,
+ "step": 40,
+ "train_runtime": 3428.2665,
+ "train_tokens_per_second": 4583.448
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.28502804596451176,
+ "learning_rate": 9.42728012826605e-05,
+ "loss": 1.1034,
+ "num_input_tokens_seen": 16106112,
+ "step": 41,
+ "train_runtime": 3513.7978,
+ "train_tokens_per_second": 4583.676
+ },
+ {
+ "epoch": 0.8195121951219512,
+ "grad_norm": 0.716313084661846,
+ "learning_rate": 9.398881260267589e-05,
+ "loss": 1.1052,
+ "num_input_tokens_seen": 16498944,
+ "step": 42,
+ "train_runtime": 3599.2462,
+ "train_tokens_per_second": 4584.0
+ },
+ {
+ "epoch": 0.8390243902439024,
+ "grad_norm": 0.3220303313208368,
+ "learning_rate": 9.36984016313259e-05,
+ "loss": 1.0595,
+ "num_input_tokens_seen": 16891776,
+ "step": 43,
+ "train_runtime": 3684.6336,
+ "train_tokens_per_second": 4584.384
+ },
+ {
+ "epoch": 0.8585365853658536,
+ "grad_norm": 0.46297801425335283,
+ "learning_rate": 9.340161076811537e-05,
+ "loss": 1.0507,
+ "num_input_tokens_seen": 17284608,
+ "step": 44,
+ "train_runtime": 3770.0588,
+ "train_tokens_per_second": 4584.705
+ },
+ {
+ "epoch": 0.8780487804878049,
+ "grad_norm": 0.3229025177696428,
+ "learning_rate": 9.309848334400246e-05,
+ "loss": 1.0204,
+ "num_input_tokens_seen": 17677440,
+ "step": 45,
+ "train_runtime": 3855.5127,
+ "train_tokens_per_second": 4584.978
+ },
+ {
+ "epoch": 0.8975609756097561,
+ "grad_norm": 0.4974629296010873,
+ "learning_rate": 9.278906361507238e-05,
+ "loss": 1.0275,
+ "num_input_tokens_seen": 18070272,
+ "step": 46,
+ "train_runtime": 3940.8899,
+ "train_tokens_per_second": 4585.328
+ },
+ {
+ "epoch": 0.9170731707317074,
+ "grad_norm": 0.3252828591259486,
+ "learning_rate": 9.247339675607605e-05,
+ "loss": 0.9942,
+ "num_input_tokens_seen": 18463104,
+ "step": 47,
+ "train_runtime": 4026.4826,
+ "train_tokens_per_second": 4585.418
+ },
+ {
+ "epoch": 0.9365853658536586,
+ "grad_norm": 0.4096451495593207,
+ "learning_rate": 9.215152885383474e-05,
+ "loss": 0.9636,
+ "num_input_tokens_seen": 18855936,
+ "step": 48,
+ "train_runtime": 4111.9693,
+ "train_tokens_per_second": 4585.622
+ },
+ {
+ "epoch": 0.9560975609756097,
+ "grad_norm": 0.34175853473817397,
+ "learning_rate": 9.182350690051133e-05,
+ "loss": 0.9595,
+ "num_input_tokens_seen": 19248768,
+ "step": 49,
+ "train_runtime": 4197.3817,
+ "train_tokens_per_second": 4585.899
+ },
+ {
+ "epoch": 0.975609756097561,
+ "grad_norm": 0.7544878577908084,
+ "learning_rate": 9.148937878674976e-05,
+ "loss": 0.9406,
+ "num_input_tokens_seen": 19641600,
+ "step": 50,
+ "train_runtime": 4282.7892,
+ "train_tokens_per_second": 4586.17
+ },
+ {
+ "epoch": 0.9951219512195122,
+ "grad_norm": 0.45005702668900516,
+ "learning_rate": 9.114919329468282e-05,
+ "loss": 0.9266,
+ "num_input_tokens_seen": 20034432,
+ "step": 51,
+ "train_runtime": 4368.202,
+ "train_tokens_per_second": 4586.425
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.5681712533129535,
+ "learning_rate": 9.080300009081024e-05,
+ "loss": 0.9145,
+ "num_input_tokens_seen": 20132640,
+ "step": 52,
+ "train_runtime": 4389.6005,
+ "train_tokens_per_second": 4586.44
+ },
+ {
+ "epoch": 1.0195121951219512,
+ "grad_norm": 0.5035806305267847,
+ "learning_rate": 9.045084971874738e-05,
+ "loss": 0.8835,
+ "num_input_tokens_seen": 20525472,
+ "step": 53,
+ "train_runtime": 4475.0972,
+ "train_tokens_per_second": 4586.598
+ },
+ {
+ "epoch": 1.0390243902439025,
+ "grad_norm": 1.5437125934865135,
+ "learning_rate": 9.009279359184593e-05,
+ "loss": 0.8894,
+ "num_input_tokens_seen": 20918304,
+ "step": 54,
+ "train_runtime": 4560.5362,
+ "train_tokens_per_second": 4586.808
+ },
+ {
+ "epoch": 1.0585365853658537,
+ "grad_norm": 2.5544072695829527,
+ "learning_rate": 8.972888398568772e-05,
+ "loss": 0.8801,
+ "num_input_tokens_seen": 21311136,
+ "step": 55,
+ "train_runtime": 4646.0135,
+ "train_tokens_per_second": 4586.972
+ },
+ {
+ "epoch": 1.078048780487805,
+ "grad_norm": 6.413568180777226,
+ "learning_rate": 8.935917403045251e-05,
+ "loss": 0.8761,
+ "num_input_tokens_seen": 21703968,
+ "step": 56,
+ "train_runtime": 4731.4435,
+ "train_tokens_per_second": 4587.177
+ },
+ {
+ "epoch": 1.0975609756097562,
+ "grad_norm": 3.6652053687664377,
+ "learning_rate": 8.898371770316111e-05,
+ "loss": 0.8785,
+ "num_input_tokens_seen": 22096800,
+ "step": 57,
+ "train_runtime": 4816.8429,
+ "train_tokens_per_second": 4587.403
+ },
+ {
+ "epoch": 1.1170731707317074,
+ "grad_norm": 2.350713418610044,
+ "learning_rate": 8.860256981979484e-05,
+ "loss": 0.8964,
+ "num_input_tokens_seen": 22489632,
+ "step": 58,
+ "train_runtime": 4902.2866,
+ "train_tokens_per_second": 4587.58
+ },
+ {
+ "epoch": 1.1365853658536587,
+ "grad_norm": 1.458077107848317,
+ "learning_rate": 8.821578602729242e-05,
+ "loss": 0.8833,
+ "num_input_tokens_seen": 22882464,
+ "step": 59,
+ "train_runtime": 4987.6858,
+ "train_tokens_per_second": 4587.792
+ },
+ {
+ "epoch": 1.1560975609756097,
+ "grad_norm": 2.3627094246816553,
+ "learning_rate": 8.782342279542568e-05,
+ "loss": 0.8698,
+ "num_input_tokens_seen": 23275296,
+ "step": 60,
+ "train_runtime": 5073.0959,
+ "train_tokens_per_second": 4587.987
+ },
+ {
+ "epoch": 1.175609756097561,
+ "grad_norm": 1.3869190096740975,
+ "learning_rate": 8.742553740855506e-05,
+ "loss": 0.864,
+ "num_input_tokens_seen": 23668128,
+ "step": 61,
+ "train_runtime": 5158.4893,
+ "train_tokens_per_second": 4588.19
+ },
+ {
+ "epoch": 1.1951219512195121,
+ "grad_norm": 1.1437940013985328,
+ "learning_rate": 8.702218795726617e-05,
+ "loss": 0.8551,
+ "num_input_tokens_seen": 24060960,
+ "step": 62,
+ "train_runtime": 5243.865,
+ "train_tokens_per_second": 4588.402
+ },
+ {
+ "epoch": 1.2146341463414634,
+ "grad_norm": 2.801137587074987,
+ "learning_rate": 8.661343332988869e-05,
+ "loss": 0.8327,
+ "num_input_tokens_seen": 24453792,
+ "step": 63,
+ "train_runtime": 5329.3228,
+ "train_tokens_per_second": 4588.536
+ },
+ {
+ "epoch": 1.2341463414634146,
+ "grad_norm": 1.1087446235917915,
+ "learning_rate": 8.619933320389872e-05,
+ "loss": 0.8003,
+ "num_input_tokens_seen": 24846624,
+ "step": 64,
+ "train_runtime": 5414.7776,
+ "train_tokens_per_second": 4588.669
+ },
+ {
+ "epoch": 1.2536585365853659,
+ "grad_norm": 1.1639583727147402,
+ "learning_rate": 8.577994803720606e-05,
+ "loss": 0.8078,
+ "num_input_tokens_seen": 25239456,
+ "step": 65,
+ "train_runtime": 5500.2413,
+ "train_tokens_per_second": 4588.791
+ },
+ {
+ "epoch": 1.273170731707317,
+ "grad_norm": 0.8450850350334035,
+ "learning_rate": 8.535533905932738e-05,
+ "loss": 0.8033,
+ "num_input_tokens_seen": 25632288,
+ "step": 66,
+ "train_runtime": 5585.6797,
+ "train_tokens_per_second": 4588.929
+ },
+ {
+ "epoch": 1.2926829268292683,
+ "grad_norm": 4.138498725123297,
+ "learning_rate": 8.492556826244687e-05,
+ "loss": 0.7751,
+ "num_input_tokens_seen": 26025120,
+ "step": 67,
+ "train_runtime": 5671.0727,
+ "train_tokens_per_second": 4589.1
+ },
+ {
+ "epoch": 1.3121951219512196,
+ "grad_norm": 1.2519271390201068,
+ "learning_rate": 8.449069839236538e-05,
+ "loss": 0.7998,
+ "num_input_tokens_seen": 26417952,
+ "step": 68,
+ "train_runtime": 5756.4201,
+ "train_tokens_per_second": 4589.302
+ },
+ {
+ "epoch": 1.3317073170731708,
+ "grad_norm": 2.0612640489651586,
+ "learning_rate": 8.405079293933986e-05,
+ "loss": 0.7949,
+ "num_input_tokens_seen": 26810784,
+ "step": 69,
+ "train_runtime": 5841.8015,
+ "train_tokens_per_second": 4589.472
+ },
+ {
+ "epoch": 1.3512195121951218,
+ "grad_norm": 1.6632187783624717,
+ "learning_rate": 8.360591612881364e-05,
+ "loss": 0.7718,
+ "num_input_tokens_seen": 27203616,
+ "step": 70,
+ "train_runtime": 5927.224,
+ "train_tokens_per_second": 4589.605
+ },
+ {
+ "epoch": 1.370731707317073,
+ "grad_norm": 0.8557124244577246,
+ "learning_rate": 8.315613291203976e-05,
+ "loss": 0.7827,
+ "num_input_tokens_seen": 27596448,
+ "step": 71,
+ "train_runtime": 6012.6847,
+ "train_tokens_per_second": 4589.705
+ },
+ {
+ "epoch": 1.3902439024390243,
+ "grad_norm": 1.0542485300201978,
+ "learning_rate": 8.270150895659823e-05,
+ "loss": 0.7595,
+ "num_input_tokens_seen": 27989280,
+ "step": 72,
+ "train_runtime": 6098.1854,
+ "train_tokens_per_second": 4589.772
+ },
+ {
+ "epoch": 1.4097560975609755,
+ "grad_norm": 1.7657947939512917,
+ "learning_rate": 8.224211063680853e-05,
+ "loss": 0.7416,
+ "num_input_tokens_seen": 28382112,
+ "step": 73,
+ "train_runtime": 6183.6199,
+ "train_tokens_per_second": 4589.886
+ },
+ {
+ "epoch": 1.4292682926829268,
+ "grad_norm": 1.8137027126017153,
+ "learning_rate": 8.177800502403928e-05,
+ "loss": 0.749,
+ "num_input_tokens_seen": 28774944,
+ "step": 74,
+ "train_runtime": 6269.1266,
+ "train_tokens_per_second": 4589.945
+ },
+ {
+ "epoch": 1.448780487804878,
+ "grad_norm": 1.3646726182984732,
+ "learning_rate": 8.130925987691569e-05,
+ "loss": 0.72,
+ "num_input_tokens_seen": 29167776,
+ "step": 75,
+ "train_runtime": 6354.5877,
+ "train_tokens_per_second": 4590.034
+ },
+ {
+ "epoch": 1.4682926829268292,
+ "grad_norm": 2.817948027228682,
+ "learning_rate": 8.083594363142717e-05,
+ "loss": 0.7192,
+ "num_input_tokens_seen": 29560608,
+ "step": 76,
+ "train_runtime": 6440.0829,
+ "train_tokens_per_second": 4590.097
+ },
+ {
+ "epoch": 1.4878048780487805,
+ "grad_norm": 2.8685745350018976,
+ "learning_rate": 8.035812539093557e-05,
+ "loss": 0.7221,
+ "num_input_tokens_seen": 29953440,
+ "step": 77,
+ "train_runtime": 6525.6763,
+ "train_tokens_per_second": 4590.09
+ },
+ {
+ "epoch": 1.5073170731707317,
+ "grad_norm": 0.806171095791916,
+ "learning_rate": 7.987587491608637e-05,
+ "loss": 0.7031,
+ "num_input_tokens_seen": 30346272,
+ "step": 78,
+ "train_runtime": 6611.3229,
+ "train_tokens_per_second": 4590.045
+ },
+ {
+ "epoch": 1.526829268292683,
+ "grad_norm": 3.0602847716922237,
+ "learning_rate": 7.938926261462366e-05,
+ "loss": 0.7118,
+ "num_input_tokens_seen": 30739104,
+ "step": 79,
+ "train_runtime": 6696.8792,
+ "train_tokens_per_second": 4590.064
+ },
+ {
+ "epoch": 1.5463414634146342,
+ "grad_norm": 1.0137101758685187,
+ "learning_rate": 7.889835953111075e-05,
+ "loss": 0.6952,
+ "num_input_tokens_seen": 31131936,
+ "step": 80,
+ "train_runtime": 6782.7752,
+ "train_tokens_per_second": 4589.852
+ },
+ {
+ "epoch": 1.5658536585365854,
+ "grad_norm": 1.4320523827445433,
+ "learning_rate": 7.840323733655778e-05,
+ "loss": 0.6693,
+ "num_input_tokens_seen": 31524768,
+ "step": 81,
+ "train_runtime": 6868.9208,
+ "train_tokens_per_second": 4589.479
+ },
+ {
+ "epoch": 1.5853658536585367,
+ "grad_norm": 3.2012899077086177,
+ "learning_rate": 7.790396831795792e-05,
+ "loss": 0.6718,
+ "num_input_tokens_seen": 31917600,
+ "step": 82,
+ "train_runtime": 6954.5194,
+ "train_tokens_per_second": 4589.476
+ },
+ {
+ "epoch": 1.604878048780488,
+ "grad_norm": 0.7553296877905612,
+ "learning_rate": 7.740062536773352e-05,
+ "loss": 0.67,
+ "num_input_tokens_seen": 32310432,
+ "step": 83,
+ "train_runtime": 7040.0533,
+ "train_tokens_per_second": 4589.515
+ },
+ {
+ "epoch": 1.6243902439024391,
+ "grad_norm": 1.069764102117734,
+ "learning_rate": 7.689328197309393e-05,
+ "loss": 0.6688,
+ "num_input_tokens_seen": 32703264,
+ "step": 84,
+ "train_runtime": 7125.6152,
+ "train_tokens_per_second": 4589.536
+ },
+ {
+ "epoch": 1.6439024390243904,
+ "grad_norm": 2.635419412507475,
+ "learning_rate": 7.638201220530665e-05,
+ "loss": 0.6672,
+ "num_input_tokens_seen": 33096096,
+ "step": 85,
+ "train_runtime": 7211.09,
+ "train_tokens_per_second": 4589.611
+ },
+ {
+ "epoch": 1.6634146341463416,
+ "grad_norm": 0.7024644501267906,
+ "learning_rate": 7.586689070888284e-05,
+ "loss": 0.6573,
+ "num_input_tokens_seen": 33488928,
+ "step": 86,
+ "train_runtime": 7296.5933,
+ "train_tokens_per_second": 4589.666
+ },
+ {
+ "epoch": 1.6829268292682928,
+ "grad_norm": 0.44428213007263,
+ "learning_rate": 7.534799269067953e-05,
+ "loss": 0.6422,
+ "num_input_tokens_seen": 33881760,
+ "step": 87,
+ "train_runtime": 7382.0301,
+ "train_tokens_per_second": 4589.762
+ },
+ {
+ "epoch": 1.7024390243902439,
+ "grad_norm": 0.5416450836446436,
+ "learning_rate": 7.48253939089194e-05,
+ "loss": 0.6269,
+ "num_input_tokens_seen": 34274592,
+ "step": 88,
+ "train_runtime": 7467.4502,
+ "train_tokens_per_second": 4589.865
+ },
+ {
+ "epoch": 1.721951219512195,
+ "grad_norm": 0.5642965602823063,
+ "learning_rate": 7.42991706621303e-05,
+ "loss": 0.6357,
+ "num_input_tokens_seen": 34667424,
+ "step": 89,
+ "train_runtime": 7553.0173,
+ "train_tokens_per_second": 4589.878
+ },
+ {
+ "epoch": 1.7414634146341463,
+ "grad_norm": 0.5097755585636675,
+ "learning_rate": 7.376939977800582e-05,
+ "loss": 0.6299,
+ "num_input_tokens_seen": 35060256,
+ "step": 90,
+ "train_runtime": 7638.6282,
+ "train_tokens_per_second": 4589.863
+ },
+ {
+ "epoch": 1.7609756097560976,
+ "grad_norm": 0.45556407265977034,
+ "learning_rate": 7.323615860218843e-05,
+ "loss": 0.634,
+ "num_input_tokens_seen": 35453088,
+ "step": 91,
+ "train_runtime": 7724.1622,
+ "train_tokens_per_second": 4589.894
+ },
+ {
+ "epoch": 1.7804878048780488,
+ "grad_norm": 0.4536282771598066,
+ "learning_rate": 7.269952498697734e-05,
+ "loss": 0.6251,
+ "num_input_tokens_seen": 35845920,
+ "step": 92,
+ "train_runtime": 7809.6142,
+ "train_tokens_per_second": 4589.973
+ },
+ {
+ "epoch": 1.8,
+ "grad_norm": 0.43350812308828435,
+ "learning_rate": 7.215957727996207e-05,
+ "loss": 0.62,
+ "num_input_tokens_seen": 36238752,
+ "step": 93,
+ "train_runtime": 7895.006,
+ "train_tokens_per_second": 4590.085
+ },
+ {
+ "epoch": 1.819512195121951,
+ "grad_norm": 0.40833077692944497,
+ "learning_rate": 7.161639431258387e-05,
+ "loss": 0.6073,
+ "num_input_tokens_seen": 36631584,
+ "step": 94,
+ "train_runtime": 7980.4652,
+ "train_tokens_per_second": 4590.156
+ },
+ {
+ "epoch": 1.8390243902439023,
+ "grad_norm": 0.4235068021237779,
+ "learning_rate": 7.107005538862646e-05,
+ "loss": 0.6074,
+ "num_input_tokens_seen": 37024416,
+ "step": 95,
+ "train_runtime": 8065.9658,
+ "train_tokens_per_second": 4590.202
+ },
+ {
+ "epoch": 1.8585365853658535,
+ "grad_norm": 0.4132415083645228,
+ "learning_rate": 7.052064027263786e-05,
+ "loss": 0.6064,
+ "num_input_tokens_seen": 37417248,
+ "step": 96,
+ "train_runtime": 8151.397,
+ "train_tokens_per_second": 4590.287
+ },
+ {
+ "epoch": 1.8780487804878048,
+ "grad_norm": 0.4322861748139327,
+ "learning_rate": 6.996822917828477e-05,
+ "loss": 0.5922,
+ "num_input_tokens_seen": 37810080,
+ "step": 97,
+ "train_runtime": 8236.8862,
+ "train_tokens_per_second": 4590.337
+ },
+ {
+ "epoch": 1.897560975609756,
+ "grad_norm": 0.38942999050451615,
+ "learning_rate": 6.941290275664174e-05,
+ "loss": 0.5939,
+ "num_input_tokens_seen": 38202912,
+ "step": 98,
+ "train_runtime": 8322.3209,
+ "train_tokens_per_second": 4590.416
+ },
+ {
+ "epoch": 1.9170731707317072,
+ "grad_norm": 0.3753721554361611,
+ "learning_rate": 6.885474208441603e-05,
+ "loss": 0.5913,
+ "num_input_tokens_seen": 38595744,
+ "step": 99,
+ "train_runtime": 8407.7641,
+ "train_tokens_per_second": 4590.488
+ },
+ {
+ "epoch": 1.9365853658536585,
+ "grad_norm": 0.35005150318450906,
+ "learning_rate": 6.829382865211063e-05,
+ "loss": 0.5863,
+ "num_input_tokens_seen": 38988576,
+ "step": 100,
+ "train_runtime": 8493.2736,
+ "train_tokens_per_second": 4590.524
+ },
+ {
+ "epoch": 1.9560975609756097,
+ "grad_norm": 0.37808190956380233,
+ "learning_rate": 6.773024435212678e-05,
+ "loss": 0.5822,
+ "num_input_tokens_seen": 39381408,
+ "step": 101,
+ "train_runtime": 8578.7547,
+ "train_tokens_per_second": 4590.574
+ },
+ {
+ "epoch": 1.975609756097561,
+ "grad_norm": 0.3580568006066326,
+ "learning_rate": 6.716407146680792e-05,
+ "loss": 0.5666,
+ "num_input_tokens_seen": 39774240,
+ "step": 102,
+ "train_runtime": 8664.2328,
+ "train_tokens_per_second": 4590.625
+ },
+ {
+ "epoch": 1.9951219512195122,
+ "grad_norm": 0.39572577046735974,
+ "learning_rate": 6.659539265642643e-05,
+ "loss": 0.5806,
+ "num_input_tokens_seen": 40167072,
+ "step": 103,
+ "train_runtime": 8749.6718,
+ "train_tokens_per_second": 4590.695
+ },
+ {
+ "epoch": 2.0,
+ "grad_norm": 0.5994329532374332,
+ "learning_rate": 6.602429094711548e-05,
+ "loss": 0.5605,
+ "num_input_tokens_seen": 40265280,
+ "step": 104,
+ "train_runtime": 8771.0021,
+ "train_tokens_per_second": 4590.727
+ },
+ {
+ "epoch": 2.0195121951219512,
+ "grad_norm": 0.3746452769053094,
+ "learning_rate": 6.545084971874738e-05,
+ "loss": 0.5462,
+ "num_input_tokens_seen": 40658112,
+ "step": 105,
+ "train_runtime": 8856.5287,
+ "train_tokens_per_second": 4590.75
+ },
+ {
+ "epoch": 2.0390243902439025,
+ "grad_norm": 0.453029236937953,
+ "learning_rate": 6.487515269276016e-05,
+ "loss": 0.5447,
+ "num_input_tokens_seen": 41050944,
+ "step": 106,
+ "train_runtime": 8942.0292,
+ "train_tokens_per_second": 4590.786
+ },
+ {
+ "epoch": 2.0585365853658537,
+ "grad_norm": 0.39272552581140363,
+ "learning_rate": 6.429728391993446e-05,
+ "loss": 0.5405,
+ "num_input_tokens_seen": 41443776,
+ "step": 107,
+ "train_runtime": 9027.5293,
+ "train_tokens_per_second": 4590.822
+ },
+ {
+ "epoch": 2.078048780487805,
+ "grad_norm": 0.35801484704748626,
+ "learning_rate": 6.37173277681223e-05,
+ "loss": 0.5433,
+ "num_input_tokens_seen": 41836608,
+ "step": 108,
+ "train_runtime": 9113.0265,
+ "train_tokens_per_second": 4590.858
+ },
+ {
+ "epoch": 2.097560975609756,
+ "grad_norm": 0.3703575056088343,
+ "learning_rate": 6.313536890992935e-05,
+ "loss": 0.5335,
+ "num_input_tokens_seen": 42229440,
+ "step": 109,
+ "train_runtime": 9198.6667,
+ "train_tokens_per_second": 4590.822
+ },
+ {
+ "epoch": 2.1170731707317074,
+ "grad_norm": 0.3456947105084525,
+ "learning_rate": 6.255149231035309e-05,
+ "loss": 0.5416,
+ "num_input_tokens_seen": 42622272,
+ "step": 110,
+ "train_runtime": 9284.269,
+ "train_tokens_per_second": 4590.805
+ },
+ {
+ "epoch": 2.1365853658536587,
+ "grad_norm": 0.33745389931809666,
+ "learning_rate": 6.19657832143779e-05,
+ "loss": 0.5322,
+ "num_input_tokens_seen": 43015104,
+ "step": 111,
+ "train_runtime": 9369.843,
+ "train_tokens_per_second": 4590.803
+ },
+ {
+ "epoch": 2.15609756097561,
+ "grad_norm": 0.33453831659901945,
+ "learning_rate": 6.13783271345295e-05,
+ "loss": 0.5301,
+ "num_input_tokens_seen": 43407936,
+ "step": 112,
+ "train_runtime": 9455.4833,
+ "train_tokens_per_second": 4590.769
+ },
+ {
+ "epoch": 2.175609756097561,
+ "grad_norm": 0.3355284079398747,
+ "learning_rate": 6.078920983839031e-05,
+ "loss": 0.5422,
+ "num_input_tokens_seen": 43800768,
+ "step": 113,
+ "train_runtime": 9541.0542,
+ "train_tokens_per_second": 4590.768
+ },
+ {
+ "epoch": 2.1951219512195124,
+ "grad_norm": 0.3257056473237624,
+ "learning_rate": 6.019851733607744e-05,
+ "loss": 0.5352,
+ "num_input_tokens_seen": 44193600,
+ "step": 114,
+ "train_runtime": 9626.532,
+ "train_tokens_per_second": 4590.812
+ },
+ {
+ "epoch": 2.2146341463414636,
+ "grad_norm": 0.32389003819610007,
+ "learning_rate": 5.960633586768543e-05,
1156
+ "loss": 0.5112,
1157
+ "num_input_tokens_seen": 44586432,
1158
+ "step": 115,
1159
+ "train_runtime": 9712.1322,
1160
+ "train_tokens_per_second": 4590.797
1161
+ },
1162
+ {
1163
+ "epoch": 2.234146341463415,
1164
+ "grad_norm": 0.34317758929799447,
1165
+ "learning_rate": 5.90127518906953e-05,
1166
+ "loss": 0.5171,
1167
+ "num_input_tokens_seen": 44979264,
1168
+ "step": 116,
1169
+ "train_runtime": 9797.6269,
1170
+ "train_tokens_per_second": 4590.833
1171
+ },
1172
+ {
1173
+ "epoch": 2.253658536585366,
1174
+ "grad_norm": 0.31740577502228223,
1175
+ "learning_rate": 5.841785206735192e-05,
1176
+ "loss": 0.5027,
1177
+ "num_input_tokens_seen": 45372096,
1178
+ "step": 117,
1179
+ "train_runtime": 9883.3105,
1180
+ "train_tokens_per_second": 4590.779
1181
+ },
1182
+ {
1183
+ "epoch": 2.2731707317073173,
1184
+ "grad_norm": 0.3705124747857168,
1185
+ "learning_rate": 5.782172325201155e-05,
1186
+ "loss": 0.5151,
1187
+ "num_input_tokens_seen": 45764928,
1188
+ "step": 118,
1189
+ "train_runtime": 9968.9083,
1190
+ "train_tokens_per_second": 4590.766
1191
+ },
1192
+ {
1193
+ "epoch": 2.292682926829268,
1194
+ "grad_norm": 0.36613629503091577,
1195
+ "learning_rate": 5.7224452478461064e-05,
1196
+ "loss": 0.5152,
1197
+ "num_input_tokens_seen": 46157760,
1198
+ "step": 119,
1199
+ "train_runtime": 10054.434,
1200
+ "train_tokens_per_second": 4590.787
1201
+ },
1202
+ {
1203
+ "epoch": 2.3121951219512193,
1204
+ "grad_norm": 0.3314221690877754,
1205
+ "learning_rate": 5.6626126947211386e-05,
1206
+ "loss": 0.5303,
1207
+ "num_input_tokens_seen": 46550592,
1208
+ "step": 120,
1209
+ "train_runtime": 10139.9414,
1210
+ "train_tokens_per_second": 4590.815
1211
+ },
1212
+ {
1213
+ "epoch": 2.3317073170731706,
1214
+ "grad_norm": 0.3470857883762708,
1215
+ "learning_rate": 5.602683401276615e-05,
1216
+ "loss": 0.5032,
1217
+ "num_input_tokens_seen": 46943424,
1218
+ "step": 121,
1219
+ "train_runtime": 10225.7135,
1220
+ "train_tokens_per_second": 4590.724
1221
+ },
1222
+ {
1223
+ "epoch": 2.351219512195122,
1224
+ "grad_norm": 0.3136083377085674,
1225
+ "learning_rate": 5.542666117086832e-05,
1226
+ "loss": 0.5119,
1227
+ "num_input_tokens_seen": 47336256,
1228
+ "step": 122,
1229
+ "train_runtime": 10311.2617,
1230
+ "train_tokens_per_second": 4590.734
1231
+ },
1232
+ {
1233
+ "epoch": 2.370731707317073,
1234
+ "grad_norm": 0.4227118641608833,
1235
+ "learning_rate": 5.482569604572576e-05,
1236
+ "loss": 0.5045,
1237
+ "num_input_tokens_seen": 47729088,
1238
+ "step": 123,
1239
+ "train_runtime": 10396.9552,
1240
+ "train_tokens_per_second": 4590.679
1241
+ },
1242
+ {
1243
+ "epoch": 2.3902439024390243,
1244
+ "grad_norm": 0.32585539704348276,
1245
+ "learning_rate": 5.422402637721836e-05,
1246
+ "loss": 0.505,
1247
+ "num_input_tokens_seen": 48121920,
1248
+ "step": 124,
1249
+ "train_runtime": 10482.7724,
1250
+ "train_tokens_per_second": 4590.572
1251
+ },
1252
+ {
1253
+ "epoch": 2.4097560975609755,
1254
+ "grad_norm": 0.3224554976871665,
1255
+ "learning_rate": 5.3621740008088126e-05,
1256
+ "loss": 0.5017,
1257
+ "num_input_tokens_seen": 48514752,
1258
+ "step": 125,
1259
+ "train_runtime": 10568.4335,
1260
+ "train_tokens_per_second": 4590.534
1261
+ },
1262
+ {
1263
+ "epoch": 2.4292682926829268,
1264
+ "grad_norm": 0.33513514879191686,
1265
+ "learning_rate": 5.3018924871114305e-05,
1266
+ "loss": 0.5019,
1267
+ "num_input_tokens_seen": 48907584,
1268
+ "step": 126,
1269
+ "train_runtime": 10653.9397,
1270
+ "train_tokens_per_second": 4590.563
1271
+ },
1272
+ {
1273
+ "epoch": 2.448780487804878,
1274
+ "grad_norm": 0.3333825976504885,
1275
+ "learning_rate": 5.2415668976275355e-05,
1276
+ "loss": 0.4921,
1277
+ "num_input_tokens_seen": 49300416,
1278
+ "step": 127,
1279
+ "train_runtime": 10739.489,
1280
+ "train_tokens_per_second": 4590.574
1281
+ },
1282
+ {
1283
+ "epoch": 2.4682926829268292,
1284
+ "grad_norm": 0.32605283067247587,
1285
+ "learning_rate": 5.181206039789962e-05,
1286
+ "loss": 0.4951,
1287
+ "num_input_tokens_seen": 49693248,
1288
+ "step": 128,
1289
+ "train_runtime": 10825.0018,
1290
+ "train_tokens_per_second": 4590.6
1291
+ },
1292
+ {
1293
+ "epoch": 2.4878048780487805,
1294
+ "grad_norm": 0.345821204685631,
1295
+ "learning_rate": 5.1208187261806615e-05,
1296
+ "loss": 0.5076,
1297
+ "num_input_tokens_seen": 50086080,
1298
+ "step": 129,
1299
+ "train_runtime": 10910.4514,
1300
+ "train_tokens_per_second": 4590.652
1301
+ },
1302
+ {
1303
+ "epoch": 2.5073170731707317,
1304
+ "grad_norm": 0.32182997237639754,
1305
+ "learning_rate": 5.060413773244087e-05,
1306
+ "loss": 0.4974,
1307
+ "num_input_tokens_seen": 50478912,
1308
+ "step": 130,
1309
+ "train_runtime": 10996.0971,
1310
+ "train_tokens_per_second": 4590.621
1311
+ },
1312
+ {
1313
+ "epoch": 2.526829268292683,
1314
+ "grad_norm": 0.3424390071269269,
1315
+ "learning_rate": 5e-05,
1316
+ "loss": 0.5063,
1317
+ "num_input_tokens_seen": 50871744,
1318
+ "step": 131,
1319
+ "train_runtime": 11081.5772,
1320
+ "train_tokens_per_second": 4590.659
1321
+ },
1322
+ {
1323
+ "epoch": 2.546341463414634,
1324
+ "grad_norm": 0.32151933822675133,
1325
+ "learning_rate": 4.9395862267559136e-05,
1326
+ "loss": 0.5093,
1327
+ "num_input_tokens_seen": 51264576,
1328
+ "step": 132,
1329
+ "train_runtime": 11167.132,
1330
+ "train_tokens_per_second": 4590.666
1331
+ },
1332
+ {
1333
+ "epoch": 2.5658536585365854,
1334
+ "grad_norm": 0.33150875749489067,
1335
+ "learning_rate": 4.87918127381934e-05,
1336
+ "loss": 0.5019,
1337
+ "num_input_tokens_seen": 51657408,
1338
+ "step": 133,
1339
+ "train_runtime": 11252.7312,
1340
+ "train_tokens_per_second": 4590.655
1341
+ },
1342
+ {
1343
+ "epoch": 2.5853658536585367,
1344
+ "grad_norm": 0.32260765542480235,
1345
+ "learning_rate": 4.81879396021004e-05,
1346
+ "loss": 0.4836,
1347
+ "num_input_tokens_seen": 52050240,
1348
+ "step": 134,
1349
+ "train_runtime": 11338.3707,
1350
+ "train_tokens_per_second": 4590.628
1351
+ },
1352
+ {
1353
+ "epoch": 2.604878048780488,
1354
+ "grad_norm": 0.3284283947692906,
1355
+ "learning_rate": 4.758433102372466e-05,
1356
+ "loss": 0.4878,
1357
+ "num_input_tokens_seen": 52443072,
1358
+ "step": 135,
1359
+ "train_runtime": 11424.0926,
1360
+ "train_tokens_per_second": 4590.568
1361
+ },
1362
+ {
1363
+ "epoch": 2.624390243902439,
1364
+ "grad_norm": 0.3174202627091527,
1365
+ "learning_rate": 4.6981075128885693e-05,
1366
+ "loss": 0.4895,
1367
+ "num_input_tokens_seen": 52835904,
1368
+ "step": 136,
1369
+ "train_runtime": 11509.8066,
1370
+ "train_tokens_per_second": 4590.512
1371
+ },
1372
+ {
1373
+ "epoch": 2.6439024390243904,
1374
+ "grad_norm": 0.3424879966422641,
1375
+ "learning_rate": 4.6378259991911886e-05,
1376
+ "loss": 0.4987,
1377
+ "num_input_tokens_seen": 53228736,
1378
+ "step": 137,
1379
+ "train_runtime": 11595.5338,
1380
+ "train_tokens_per_second": 4590.452
1381
+ },
1382
+ {
1383
+ "epoch": 2.6634146341463416,
1384
+ "grad_norm": 0.31862495887920006,
1385
+ "learning_rate": 4.5775973622781655e-05,
1386
+ "loss": 0.4952,
1387
+ "num_input_tokens_seen": 53621568,
1388
+ "step": 138,
1389
+ "train_runtime": 11681.2149,
1390
+ "train_tokens_per_second": 4590.41
1391
+ },
1392
+ {
1393
+ "epoch": 2.682926829268293,
1394
+ "grad_norm": 0.32882715869402424,
1395
+ "learning_rate": 4.5174303954274244e-05,
1396
+ "loss": 0.4852,
1397
+ "num_input_tokens_seen": 54014400,
1398
+ "step": 139,
1399
+ "train_runtime": 11766.8708,
1400
+ "train_tokens_per_second": 4590.379
1401
+ },
1402
+ {
1403
+ "epoch": 2.7024390243902436,
1404
+ "grad_norm": 0.35853159941406193,
1405
+ "learning_rate": 4.457333882913169e-05,
1406
+ "loss": 0.4966,
1407
+ "num_input_tokens_seen": 54407232,
1408
+ "step": 140,
1409
+ "train_runtime": 11852.5434,
1410
+ "train_tokens_per_second": 4590.342
1411
+ },
1412
+ {
1413
+ "epoch": 2.721951219512195,
1414
+ "grad_norm": 0.3220506479281512,
1415
+ "learning_rate": 4.397316598723385e-05,
1416
+ "loss": 0.4851,
1417
+ "num_input_tokens_seen": 54800064,
1418
+ "step": 141,
1419
+ "train_runtime": 11938.0,
1420
+ "train_tokens_per_second": 4590.389
1421
+ },
1422
+ {
1423
+ "epoch": 2.741463414634146,
1424
+ "grad_norm": 0.338780671076278,
1425
+ "learning_rate": 4.337387305278864e-05,
1426
+ "loss": 0.4897,
1427
+ "num_input_tokens_seen": 55192896,
1428
+ "step": 142,
1429
+ "train_runtime": 12023.5136,
1430
+ "train_tokens_per_second": 4590.413
1431
+ },
1432
+ {
1433
+ "epoch": 2.7609756097560973,
1434
+ "grad_norm": 0.3467288544912658,
1435
+ "learning_rate": 4.277554752153895e-05,
1436
+ "loss": 0.4992,
1437
+ "num_input_tokens_seen": 55585728,
1438
+ "step": 143,
1439
+ "train_runtime": 12109.1196,
1440
+ "train_tokens_per_second": 4590.402
1441
+ },
1442
+ {
1443
+ "epoch": 2.7804878048780486,
1444
+ "grad_norm": 0.3172644652222353,
1445
+ "learning_rate": 4.2178276747988446e-05,
1446
+ "loss": 0.4842,
1447
+ "num_input_tokens_seen": 55978560,
1448
+ "step": 144,
1449
+ "train_runtime": 12194.6967,
1450
+ "train_tokens_per_second": 4590.402
1451
+ },
1452
+ {
1453
+ "epoch": 2.8,
1454
+ "grad_norm": 0.33792230752401564,
1455
+ "learning_rate": 4.1582147932648074e-05,
1456
+ "loss": 0.4941,
1457
+ "num_input_tokens_seen": 56371392,
1458
+ "step": 145,
1459
+ "train_runtime": 12280.2743,
1460
+ "train_tokens_per_second": 4590.402
1461
+ },
1462
+ {
1463
+ "epoch": 2.819512195121951,
1464
+ "grad_norm": 0.33669423565884143,
1465
+ "learning_rate": 4.0987248109304714e-05,
1466
+ "loss": 0.4933,
1467
+ "num_input_tokens_seen": 56764224,
1468
+ "step": 146,
1469
+ "train_runtime": 12365.7877,
1470
+ "train_tokens_per_second": 4590.425
1471
+ },
1472
+ {
1473
+ "epoch": 2.8390243902439023,
1474
+ "grad_norm": 0.32219243021646965,
1475
+ "learning_rate": 4.039366413231458e-05,
1476
+ "loss": 0.4847,
1477
+ "num_input_tokens_seen": 57157056,
1478
+ "step": 147,
1479
+ "train_runtime": 12451.3235,
1480
+ "train_tokens_per_second": 4590.44
1481
+ },
1482
+ {
1483
+ "epoch": 2.8585365853658535,
1484
+ "grad_norm": 0.32466591195010414,
1485
+ "learning_rate": 3.980148266392258e-05,
1486
+ "loss": 0.492,
1487
+ "num_input_tokens_seen": 57549888,
1488
+ "step": 148,
1489
+ "train_runtime": 12536.8522,
1490
+ "train_tokens_per_second": 4590.458
1491
+ },
1492
+ {
1493
+ "epoch": 2.8780487804878048,
1494
+ "grad_norm": 0.3484362997993096,
1495
+ "learning_rate": 3.92107901616097e-05,
1496
+ "loss": 0.4933,
1497
+ "num_input_tokens_seen": 57942720,
1498
+ "step": 149,
1499
+ "train_runtime": 12622.324,
1500
+ "train_tokens_per_second": 4590.495
1501
+ },
1502
+ {
1503
+ "epoch": 2.897560975609756,
1504
+ "grad_norm": 0.30189538682359085,
1505
+ "learning_rate": 3.86216728654705e-05,
1506
+ "loss": 0.4812,
1507
+ "num_input_tokens_seen": 58335552,
1508
+ "step": 150,
1509
+ "train_runtime": 12707.9974,
1510
+ "train_tokens_per_second": 4590.46
1511
+ },
1512
+ {
1513
+ "epoch": 2.9170731707317072,
1514
+ "grad_norm": 0.3427990142333379,
1515
+ "learning_rate": 3.803421678562213e-05,
1516
+ "loss": 0.4822,
1517
+ "num_input_tokens_seen": 58728384,
1518
+ "step": 151,
1519
+ "train_runtime": 12793.4446,
1520
+ "train_tokens_per_second": 4590.506
1521
+ },
1522
+ {
1523
+ "epoch": 2.9365853658536585,
1524
+ "grad_norm": 0.38332155033085324,
1525
+ "learning_rate": 3.744850768964692e-05,
1526
+ "loss": 0.4814,
1527
+ "num_input_tokens_seen": 59121216,
1528
+ "step": 152,
1529
+ "train_runtime": 12878.9545,
1530
+ "train_tokens_per_second": 4590.529
1531
+ },
1532
+ {
1533
+ "epoch": 2.9560975609756097,
1534
+ "grad_norm": 0.33602591668846876,
1535
+ "learning_rate": 3.6864631090070655e-05,
1536
+ "loss": 0.4921,
1537
+ "num_input_tokens_seen": 59514048,
1538
+ "step": 153,
1539
+ "train_runtime": 12964.4255,
1540
+ "train_tokens_per_second": 4590.566
1541
+ },
1542
+ {
1543
+ "epoch": 2.975609756097561,
1544
+ "grad_norm": 0.5609630326555886,
1545
+ "learning_rate": 3.628267223187771e-05,
1546
+ "loss": 0.4768,
1547
+ "num_input_tokens_seen": 59906880,
1548
+ "step": 154,
1549
+ "train_runtime": 13049.8908,
1550
+ "train_tokens_per_second": 4590.604
1551
+ },
1552
+ {
1553
+ "epoch": 2.995121951219512,
1554
+ "grad_norm": 0.3265623784171003,
1555
+ "learning_rate": 3.570271608006555e-05,
1556
+ "loss": 0.4579,
1557
+ "num_input_tokens_seen": 60299712,
1558
+ "step": 155,
1559
+ "train_runtime": 13135.5526,
1560
+ "train_tokens_per_second": 4590.573
1561
+ },
1562
+ {
1563
+ "epoch": 3.0,
1564
+ "grad_norm": 0.5314640992941223,
1565
+ "learning_rate": 3.512484730723986e-05,
1566
+ "loss": 0.4418,
1567
+ "num_input_tokens_seen": 60397920,
1568
+ "step": 156,
1569
+ "train_runtime": 13156.9996,
1570
+ "train_tokens_per_second": 4590.554
1571
+ },
1572
+ {
1573
+ "epoch": 3.0195121951219512,
1574
+ "grad_norm": 0.3203324244604146,
1575
+ "learning_rate": 3.4549150281252636e-05,
1576
+ "loss": 0.4471,
1577
+ "num_input_tokens_seen": 60790752,
1578
+ "step": 157,
1579
+ "train_runtime": 13242.5524,
1580
+ "train_tokens_per_second": 4590.562
1581
+ },
1582
+ {
1583
+ "epoch": 3.0390243902439025,
1584
+ "grad_norm": 0.32182085188193466,
1585
+ "learning_rate": 3.397570905288453e-05,
1586
+ "loss": 0.4543,
1587
+ "num_input_tokens_seen": 61183584,
1588
+ "step": 158,
1589
+ "train_runtime": 13328.1385,
1590
+ "train_tokens_per_second": 4590.557
1591
+ },
1592
+ {
1593
+ "epoch": 3.0585365853658537,
1594
+ "grad_norm": 0.34726844831899983,
1595
+ "learning_rate": 3.340460734357359e-05,
1596
+ "loss": 0.4561,
1597
+ "num_input_tokens_seen": 61576416,
1598
+ "step": 159,
1599
+ "train_runtime": 13413.7011,
1600
+ "train_tokens_per_second": 4590.561
1601
+ },
1602
+ {
1603
+ "epoch": 3.078048780487805,
1604
+ "grad_norm": 0.32939131628064333,
1605
+ "learning_rate": 3.283592853319209e-05,
1606
+ "loss": 0.4521,
1607
+ "num_input_tokens_seen": 61969248,
1608
+ "step": 160,
1609
+ "train_runtime": 13499.28,
1610
+ "train_tokens_per_second": 4590.56
1611
+ },
1612
+ {
1613
+ "epoch": 3.097560975609756,
1614
+ "grad_norm": 0.325117680484632,
1615
+ "learning_rate": 3.226975564787322e-05,
1616
+ "loss": 0.444,
1617
+ "num_input_tokens_seen": 62362080,
1618
+ "step": 161,
1619
+ "train_runtime": 13585.0343,
1620
+ "train_tokens_per_second": 4590.499
1621
+ },
1622
+ {
1623
+ "epoch": 3.1170731707317074,
1624
+ "grad_norm": 0.32737548048736476,
1625
+ "learning_rate": 3.170617134788939e-05,
1626
+ "loss": 0.4541,
1627
+ "num_input_tokens_seen": 62754912,
1628
+ "step": 162,
1629
+ "train_runtime": 13670.9946,
1630
+ "train_tokens_per_second": 4590.369
1631
+ },
1632
+ {
1633
+ "epoch": 3.1365853658536587,
1634
+ "grad_norm": 0.39482140502206775,
1635
+ "learning_rate": 3.114525791558398e-05,
1636
+ "loss": 0.4582,
1637
+ "num_input_tokens_seen": 63147744,
1638
+ "step": 163,
1639
+ "train_runtime": 13756.507,
1640
+ "train_tokens_per_second": 4590.391
1641
+ },
1642
+ {
1643
+ "epoch": 3.15609756097561,
1644
+ "grad_norm": 0.3357742173716643,
1645
+ "learning_rate": 3.0587097243358253e-05,
1646
+ "loss": 0.4429,
1647
+ "num_input_tokens_seen": 63540576,
1648
+ "step": 164,
1649
+ "train_runtime": 13842.0341,
1650
+ "train_tokens_per_second": 4590.407
1651
+ },
1652
+ {
1653
+ "epoch": 3.175609756097561,
1654
+ "grad_norm": 0.3305601057024288,
1655
+ "learning_rate": 3.003177082171523e-05,
1656
+ "loss": 0.4443,
1657
+ "num_input_tokens_seen": 63933408,
1658
+ "step": 165,
1659
+ "train_runtime": 13927.6175,
1660
+ "train_tokens_per_second": 4590.405
1661
+ },
1662
+ {
1663
+ "epoch": 3.1951219512195124,
1664
+ "grad_norm": 0.32083308624205303,
1665
+ "learning_rate": 2.9479359727362173e-05,
1666
+ "loss": 0.4521,
1667
+ "num_input_tokens_seen": 64326240,
1668
+ "step": 166,
1669
+ "train_runtime": 14013.2528,
1670
+ "train_tokens_per_second": 4590.386
1671
+ },
1672
+ {
1673
+ "epoch": 3.2146341463414636,
1674
+ "grad_norm": 0.3335280375327217,
1675
+ "learning_rate": 2.8929944611373554e-05,
1676
+ "loss": 0.4362,
1677
+ "num_input_tokens_seen": 64719072,
1678
+ "step": 167,
1679
+ "train_runtime": 14099.4999,
1680
+ "train_tokens_per_second": 4590.168
1681
+ },
1682
+ {
1683
+ "epoch": 3.234146341463415,
1684
+ "grad_norm": 0.31479589078333686,
1685
+ "learning_rate": 2.8383605687416125e-05,
1686
+ "loss": 0.4503,
1687
+ "num_input_tokens_seen": 65111904,
1688
+ "step": 168,
1689
+ "train_runtime": 14184.9981,
1690
+ "train_tokens_per_second": 4590.195
1691
+ },
1692
+ {
1693
+ "epoch": 3.253658536585366,
1694
+ "grad_norm": 0.3729374255823192,
1695
+ "learning_rate": 2.784042272003794e-05,
1696
+ "loss": 0.448,
1697
+ "num_input_tokens_seen": 65504736,
1698
+ "step": 169,
1699
+ "train_runtime": 14270.519,
1700
+ "train_tokens_per_second": 4590.214
1701
+ },
1702
+ {
1703
+ "epoch": 3.2731707317073173,
1704
+ "grad_norm": 0.32769391044042884,
1705
+ "learning_rate": 2.7300475013022663e-05,
1706
+ "loss": 0.4495,
1707
+ "num_input_tokens_seen": 65897568,
1708
+ "step": 170,
1709
+ "train_runtime": 14355.9895,
1710
+ "train_tokens_per_second": 4590.249
1711
+ },
1712
+ {
1713
+ "epoch": 3.292682926829268,
1714
+ "grad_norm": 0.3747118383247798,
1715
+ "learning_rate": 2.6763841397811573e-05,
1716
+ "loss": 0.4512,
1717
+ "num_input_tokens_seen": 66290400,
1718
+ "step": 171,
1719
+ "train_runtime": 14441.5179,
1720
+ "train_tokens_per_second": 4590.265
1721
+ },
1722
+ {
1723
+ "epoch": 3.3121951219512193,
1724
+ "grad_norm": 0.35364574417316713,
1725
+ "learning_rate": 2.6230600221994196e-05,
1726
+ "loss": 0.4342,
1727
+ "num_input_tokens_seen": 66683232,
1728
+ "step": 172,
1729
+ "train_runtime": 14527.0516,
1730
+ "train_tokens_per_second": 4590.28
1731
+ },
1732
+ {
1733
+ "epoch": 3.3317073170731706,
1734
+ "grad_norm": 0.34897968261286955,
1735
+ "learning_rate": 2.57008293378697e-05,
1736
+ "loss": 0.4456,
1737
+ "num_input_tokens_seen": 67076064,
1738
+ "step": 173,
1739
+ "train_runtime": 14612.5638,
1740
+ "train_tokens_per_second": 4590.301
1741
+ },
1742
+ {
1743
+ "epoch": 3.351219512195122,
1744
+ "grad_norm": 0.3526301707012528,
1745
+ "learning_rate": 2.5174606091080627e-05,
1746
+ "loss": 0.4459,
1747
+ "num_input_tokens_seen": 67468896,
1748
+ "step": 174,
1749
+ "train_runtime": 14698.0406,
1750
+ "train_tokens_per_second": 4590.333
1751
+ },
1752
+ {
1753
+ "epoch": 3.370731707317073,
1754
+ "grad_norm": 0.35117048748443935,
1755
+ "learning_rate": 2.4652007309320498e-05,
1756
+ "loss": 0.4383,
1757
+ "num_input_tokens_seen": 67861728,
1758
+ "step": 175,
1759
+ "train_runtime": 14783.5396,
1760
+ "train_tokens_per_second": 4590.357
1761
+ },
1762
+ {
1763
+ "epoch": 3.3902439024390243,
1764
+ "grad_norm": 0.3122289166658474,
1765
+ "learning_rate": 2.4133109291117156e-05,
1766
+ "loss": 0.4447,
1767
+ "num_input_tokens_seen": 68254560,
1768
+ "step": 176,
1769
+ "train_runtime": 14868.9762,
1770
+ "train_tokens_per_second": 4590.401
1771
+ },
1772
+ {
1773
+ "epoch": 3.4097560975609755,
1774
+ "grad_norm": 0.3521930408336,
1775
+ "learning_rate": 2.361798779469336e-05,
1776
+ "loss": 0.442,
1777
+ "num_input_tokens_seen": 68647392,
1778
+ "step": 177,
1779
+ "train_runtime": 14954.4517,
1780
+ "train_tokens_per_second": 4590.432
1781
+ },
1782
+ {
1783
+ "epoch": 3.4292682926829268,
1784
+ "grad_norm": 0.3077364343260739,
1785
+ "learning_rate": 2.3106718026906072e-05,
1786
+ "loss": 0.4418,
1787
+ "num_input_tokens_seen": 69040224,
1788
+ "step": 178,
1789
+ "train_runtime": 15039.9655,
1790
+ "train_tokens_per_second": 4590.451
1791
+ },
1792
+ {
1793
+ "epoch": 3.448780487804878,
1794
+ "grad_norm": 0.33010430660817575,
1795
+ "learning_rate": 2.259937463226651e-05,
1796
+ "loss": 0.4325,
1797
+ "num_input_tokens_seen": 69433056,
1798
+ "step": 179,
1799
+ "train_runtime": 15125.4865,
1800
+ "train_tokens_per_second": 4590.468
1801
+ },
1802
+ {
1803
+ "epoch": 3.4682926829268292,
1804
+ "grad_norm": 0.325795518062987,
1805
+ "learning_rate": 2.209603168204209e-05,
1806
+ "loss": 0.4319,
1807
+ "num_input_tokens_seen": 69825888,
1808
+ "step": 180,
1809
+ "train_runtime": 15211.012,
1810
+ "train_tokens_per_second": 4590.483
1811
+ },
1812
+ {
1813
+ "epoch": 3.4878048780487805,
1814
+ "grad_norm": 0.3179877620527606,
1815
+ "learning_rate": 2.1596762663442218e-05,
1816
+ "loss": 0.4401,
1817
+ "num_input_tokens_seen": 70218720,
1818
+ "step": 181,
1819
+ "train_runtime": 15296.5238,
1820
+ "train_tokens_per_second": 4590.502
1821
+ },
1822
+ {
1823
+ "epoch": 3.5073170731707317,
1824
+ "grad_norm": 0.3287765696670568,
1825
+ "learning_rate": 2.1101640468889255e-05,
1826
+ "loss": 0.4386,
1827
+ "num_input_tokens_seen": 70611552,
1828
+ "step": 182,
1829
+ "train_runtime": 15382.0311,
1830
+ "train_tokens_per_second": 4590.522
1831
+ },
1832
+ {
1833
+ "epoch": 3.526829268292683,
1834
+ "grad_norm": 0.31971467799401765,
1835
+ "learning_rate": 2.061073738537635e-05,
1836
+ "loss": 0.4202,
1837
+ "num_input_tokens_seen": 71004384,
1838
+ "step": 183,
1839
+ "train_runtime": 15467.521,
1840
+ "train_tokens_per_second": 4590.547
1841
+ },
1842
+ {
1843
+ "epoch": 3.546341463414634,
1844
+ "grad_norm": 0.3239181143254052,
1845
+ "learning_rate": 2.0124125083913637e-05,
1846
+ "loss": 0.4419,
1847
+ "num_input_tokens_seen": 71397216,
1848
+ "step": 184,
1849
+ "train_runtime": 15553.2048,
1850
+ "train_tokens_per_second": 4590.515
1851
+ },
1852
+ {
1853
+ "epoch": 3.5658536585365854,
1854
+ "grad_norm": 0.3040342295851482,
1855
+ "learning_rate": 1.9641874609064443e-05,
1856
+ "loss": 0.4336,
1857
+ "num_input_tokens_seen": 71790048,
1858
+ "step": 185,
1859
+ "train_runtime": 15638.8236,
1860
+ "train_tokens_per_second": 4590.502
1861
+ },
1862
+ {
1863
+ "epoch": 3.5853658536585367,
1864
+ "grad_norm": 0.3144952842609461,
1865
+ "learning_rate": 1.9164056368572846e-05,
1866
+ "loss": 0.4387,
1867
+ "num_input_tokens_seen": 72182880,
1868
+ "step": 186,
1869
+ "train_runtime": 15724.5242,
1870
+ "train_tokens_per_second": 4590.465
1871
+ },
1872
+ {
1873
+ "epoch": 3.604878048780488,
1874
+ "grad_norm": 0.31728573983048836,
1875
+ "learning_rate": 1.8690740123084316e-05,
1876
+ "loss": 0.4297,
1877
+ "num_input_tokens_seen": 72575712,
1878
+ "step": 187,
1879
+ "train_runtime": 15810.1041,
1880
+ "train_tokens_per_second": 4590.464
1881
+ },
1882
+ {
1883
+ "epoch": 3.624390243902439,
1884
+ "grad_norm": 0.35290725632142694,
1885
+ "learning_rate": 1.8221994975960736e-05,
1886
+ "loss": 0.446,
1887
+ "num_input_tokens_seen": 72968544,
1888
+ "step": 188,
1889
+ "train_runtime": 15895.683,
1890
+ "train_tokens_per_second": 4590.463
1891
+ },
1892
+ {
1893
+ "epoch": 3.6439024390243904,
1894
+ "grad_norm": 0.31295107237929615,
1895
+ "learning_rate": 1.7757889363191483e-05,
1896
+ "loss": 0.4318,
1897
+ "num_input_tokens_seen": 73361376,
1898
+ "step": 189,
1899
+ "train_runtime": 15981.3241,
1900
+ "train_tokens_per_second": 4590.444
1901
+ },
1902
+ {
1903
+ "epoch": 3.6634146341463416,
1904
+ "grad_norm": 0.30434189898649333,
1905
+ "learning_rate": 1.7298491043401795e-05,
1906
+ "loss": 0.4393,
1907
+ "num_input_tokens_seen": 73754208,
1908
+ "step": 190,
1909
+ "train_runtime": 16066.87,
1910
+ "train_tokens_per_second": 4590.453
1911
+ },
1912
+ {
1913
+ "epoch": 3.682926829268293,
1914
+ "grad_norm": 0.3000645994101256,
1915
+ "learning_rate": 1.684386708796025e-05,
1916
+ "loss": 0.4437,
1917
+ "num_input_tokens_seen": 74147040,
1918
+ "step": 191,
1919
+ "train_runtime": 16152.3518,
1920
+ "train_tokens_per_second": 4590.48
1921
+ },
1922
+ {
1923
+ "epoch": 3.7024390243902436,
1924
+ "grad_norm": 0.3161645570263139,
1925
+ "learning_rate": 1.6394083871186362e-05,
1926
+ "loss": 0.4378,
1927
+ "num_input_tokens_seen": 74539872,
1928
+ "step": 192,
1929
+ "train_runtime": 16237.7807,
1930
+ "train_tokens_per_second": 4590.521
1931
+ },
1932
+ {
1933
+ "epoch": 3.721951219512195,
1934
+ "grad_norm": 0.3085852095656564,
1935
+ "learning_rate": 1.5949207060660138e-05,
1936
+ "loss": 0.4388,
1937
+ "num_input_tokens_seen": 74932704,
1938
+ "step": 193,
1939
+ "train_runtime": 16323.3128,
1940
+ "train_tokens_per_second": 4590.533
1941
+ },
1942
+ {
1943
+ "epoch": 3.741463414634146,
1944
+ "grad_norm": 0.3214095905441243,
1945
+ "learning_rate": 1.550930160763462e-05,
1946
+ "loss": 0.4483,
1947
+ "num_input_tokens_seen": 75325536,
1948
+ "step": 194,
1949
+ "train_runtime": 16408.7376,
1950
+ "train_tokens_per_second": 4590.575
1951
+ },
1952
+ {
1953
+ "epoch": 3.7609756097560973,
1954
+ "grad_norm": 0.3029571828988472,
1955
+ "learning_rate": 1.5074431737553157e-05,
1956
+ "loss": 0.4336,
1957
+ "num_input_tokens_seen": 75718368,
1958
+ "step": 195,
1959
+ "train_runtime": 16494.1425,
1960
+ "train_tokens_per_second": 4590.622
1961
+ },
1962
+ {
1963
+ "epoch": 3.7804878048780486,
1964
+ "grad_norm": 0.3002722038512263,
1965
+ "learning_rate": 1.4644660940672627e-05,
1966
+ "loss": 0.4334,
1967
+ "num_input_tokens_seen": 76111200,
1968
+ "step": 196,
1969
+ "train_runtime": 16579.5744,
1970
+ "train_tokens_per_second": 4590.661
1971
+ },
1972
+ {
1973
+ "epoch": 3.8,
1974
+ "grad_norm": 0.30486554391008086,
1975
+ "learning_rate": 1.422005196279395e-05,
1976
+ "loss": 0.4352,
1977
+ "num_input_tokens_seen": 76504032,
1978
+ "step": 197,
1979
+ "train_runtime": 16665.0963,
1980
+ "train_tokens_per_second": 4590.674
1981
+ },
1982
+ {
1983
+ "epoch": 3.819512195121951,
1984
+ "grad_norm": 0.3019225596466138,
1985
+ "learning_rate": 1.3800666796101292e-05,
1986
+ "loss": 0.4315,
1987
+ "num_input_tokens_seen": 76896864,
1988
+ "step": 198,
1989
+ "train_runtime": 16750.5821,
1990
+ "train_tokens_per_second": 4590.698
1991
+ },
1992
+ {
1993
+ "epoch": 3.8390243902439023,
1994
+ "grad_norm": 0.29879067901515843,
1995
+ "learning_rate": 1.338656667011134e-05,
1996
+ "loss": 0.4339,
1997
+ "num_input_tokens_seen": 77289696,
1998
+ "step": 199,
1999
+ "train_runtime": 16836.0019,
2000
+ "train_tokens_per_second": 4590.739
2001
+ },
2002
+ {
2003
+ "epoch": 3.8585365853658535,
2004
+ "grad_norm": 0.3064182382633002,
2005
+ "learning_rate": 1.297781204273385e-05,
2006
+ "loss": 0.4374,
2007
+ "num_input_tokens_seen": 77682528,
2008
+ "step": 200,
2009
+ "train_runtime": 16921.5579,
2010
+ "train_tokens_per_second": 4590.743
2011
+ },
2012
+ {
2013
+ "epoch": 3.8780487804878048,
2014
+ "grad_norm": 0.31545962722826715,
2015
+ "learning_rate": 1.257446259144494e-05,
2016
+ "loss": 0.4437,
2017
+ "num_input_tokens_seen": 78075360,
2018
+ "step": 201,
2019
+ "train_runtime": 17007.1429,
2020
+ "train_tokens_per_second": 4590.739
2021
+ },
2022
+ {
2023
+ "epoch": 3.897560975609756,
2024
+ "grad_norm": 0.3062038073298703,
2025
+ "learning_rate": 1.2176577204574318e-05,
2026
+ "loss": 0.4319,
2027
+ "num_input_tokens_seen": 78468192,
2028
+ "step": 202,
2029
+ "train_runtime": 17092.564,
2030
+ "train_tokens_per_second": 4590.779
2031
+ },
2032
+ {
2033
+ "epoch": 3.9170731707317072,
2034
+ "grad_norm": 0.4440664308923016,
2035
+ "learning_rate": 1.178421397270758e-05,
2036
+ "loss": 0.4322,
2037
+ "num_input_tokens_seen": 78861024,
2038
+ "step": 203,
2039
+ "train_runtime": 17178.0234,
2040
+ "train_tokens_per_second": 4590.809
2041
+ },
2042
+ {
2043
+ "epoch": 3.9365853658536585,
2044
+ "grad_norm": 0.3017158051173835,
2045
+ "learning_rate": 1.1397430180205171e-05,
2046
+ "loss": 0.4375,
2047
+ "num_input_tokens_seen": 79253856,
2048
+ "step": 204,
2049
+ "train_runtime": 17263.8463,
2050
+ "train_tokens_per_second": 4590.742
2051
+ },
2052
+ {
2053
+ "epoch": 3.9560975609756097,
2054
+ "grad_norm": 0.3075889998265315,
2055
+ "learning_rate": 1.1016282296838887e-05,
2056
+ "loss": 0.4182,
2057
+ "num_input_tokens_seen": 79646688,
2058
+ "step": 205,
2059
+ "train_runtime": 17349.3107,
2060
+ "train_tokens_per_second": 4590.77
2061
+ },
2062
+ {
2063
+ "epoch": 3.975609756097561,
2064
+ "grad_norm": 0.30086206726131764,
2065
+ "learning_rate": 1.0640825969547496e-05,
2066
+ "loss": 0.4382,
2067
+ "num_input_tokens_seen": 80039520,
2068
+ "step": 206,
2069
+ "train_runtime": 17434.78,
2070
+ "train_tokens_per_second": 4590.796
2071
+ },
2072
+ {
2073
+ "epoch": 3.995121951219512,
2074
+ "grad_norm": 0.30615268970962656,
2075
+ "learning_rate": 1.0271116014312293e-05,
2076
+ "loss": 0.4277,
2077
+ "num_input_tokens_seen": 80432352,
2078
+ "step": 207,
2079
+ "train_runtime": 17520.2702,
2080
+ "train_tokens_per_second": 4590.817
2081
+ },
2082
+ {
2083
+ "epoch": 4.0,
2084
+ "grad_norm": 0.5575452767715656,
2085
+ "learning_rate": 9.90720640815408e-06,
2086
+ "loss": 0.4029,
2087
+ "num_input_tokens_seen": 80530560,
2088
+ "step": 208,
2089
+ "train_runtime": 17541.5926,
2090
+ "train_tokens_per_second": 4590.835
2091
+ },
2092
+ {
2093
+ "epoch": 4.019512195121951,
2094
+ "grad_norm": 0.30272512353274567,
2095
+ "learning_rate": 9.549150281252633e-06,
2096
+ "loss": 0.4243,
2097
+ "num_input_tokens_seen": 80923392,
2098
+ "step": 209,
2099
+ "train_runtime": 17627.0549,
2100
+ "train_tokens_per_second": 4590.863
2101
+ },
2102
+ {
2103
+ "epoch": 4.0390243902439025,
2104
+ "grad_norm": 0.30162450457883366,
2105
+ "learning_rate": 9.196999909189762e-06,
2106
+ "loss": 0.4146,
2107
+ "num_input_tokens_seen": 81316224,
2108
+ "step": 210,
2109
+ "train_runtime": 17713.1477,
2110
+ "train_tokens_per_second": 4590.727
2111
+ },
2112
+ {
2113
+ "epoch": 4.058536585365854,
2114
+ "grad_norm": 0.30526033181573403,
2115
+ "learning_rate": 8.850806705317183e-06,
2116
+ "loss": 0.4287,
2117
+ "num_input_tokens_seen": 81709056,
2118
+ "step": 211,
2119
+ "train_runtime": 17798.6213,
2120
+ "train_tokens_per_second": 4590.752
2121
+ },
2122
+ {
2123
+ "epoch": 4.078048780487805,
2124
+ "grad_norm": 0.3107654508084313,
2125
+ "learning_rate": 8.510621213250247e-06,
2126
+ "loss": 0.4221,
2127
+ "num_input_tokens_seen": 82101888,
2128
+ "step": 212,
2129
+ "train_runtime": 17884.1454,
2130
+ "train_tokens_per_second": 4590.764
2131
+ },
2132
+ {
2133
+ "epoch": 4.097560975609756,
2134
+ "grad_norm": 0.325160265062242,
2135
+ "learning_rate": 8.176493099488663e-06,
2136
+ "loss": 0.4232,
2137
+ "num_input_tokens_seen": 82494720,
2138
+ "step": 213,
2139
+ "train_runtime": 17969.6704,
2140
+ "train_tokens_per_second": 4590.775
2141
+ },
2142
+ {
2143
+ "epoch": 4.117073170731707,
2144
+ "grad_norm": 0.3137465946251142,
2145
+ "learning_rate": 7.848471146165288e-06,
2146
+ "loss": 0.416,
2147
+ "num_input_tokens_seen": 82887552,
2148
+ "step": 214,
2149
+ "train_runtime": 18055.2394,
2150
+ "train_tokens_per_second": 4590.776
2151
+ },
2152
+ {
2153
+ "epoch": 4.136585365853659,
2154
+ "grad_norm": 0.31293023871568965,
2155
+ "learning_rate": 7.526603243923957e-06,
2156
+ "loss": 0.4157,
2157
+ "num_input_tokens_seen": 83280384,
2158
+ "step": 215,
2159
+ "train_runtime": 18140.8949,
2160
+ "train_tokens_per_second": 4590.754
2161
+ },
2162
+ {
2163
+ "epoch": 4.15609756097561,
2164
+ "grad_norm": 0.31719664973482375,
2165
+ "learning_rate": 7.21093638492763e-06,
2166
+ "loss": 0.4174,
2167
+ "num_input_tokens_seen": 83673216,
2168
+ "step": 216,
2169
+ "train_runtime": 18226.5244,
2170
+ "train_tokens_per_second": 4590.739
2171
+ },
2172
+ {
2173
+ "epoch": 4.175609756097561,
2174
+ "grad_norm": 0.3958840925349482,
2175
+ "learning_rate": 6.901516655997536e-06,
2176
+ "loss": 0.4041,
2177
+ "num_input_tokens_seen": 84066048,
2178
+ "step": 217,
2179
+ "train_runtime": 18312.181,
2180
+ "train_tokens_per_second": 4590.717
2181
+ },
2182
+ {
2183
+ "epoch": 4.195121951219512,
2184
+ "grad_norm": 0.30791697730474177,
2185
+ "learning_rate": 6.5983892318846275e-06,
2186
+ "loss": 0.4155,
2187
+ "num_input_tokens_seen": 84458880,
2188
+ "step": 218,
2189
+ "train_runtime": 18397.8319,
2190
+ "train_tokens_per_second": 4590.697
2191
+ },
2192
+ {
2193
+ "epoch": 4.214634146341464,
2194
+ "grad_norm": 0.3034741442256561,
2195
+ "learning_rate": 6.301598368674105e-06,
2196
+ "loss": 0.4214,
2197
+ "num_input_tokens_seen": 84851712,
2198
+ "step": 219,
2199
+ "train_runtime": 18483.4309,
2200
+ "train_tokens_per_second": 4590.691
2201
+ },
2202
+ {
2203
+ "epoch": 4.234146341463415,
2204
+ "grad_norm": 0.32535838114327126,
2205
+ "learning_rate": 6.011187397324114e-06,
2206
+ "loss": 0.4193,
2207
+ "num_input_tokens_seen": 85244544,
2208
+ "step": 220,
2209
+ "train_runtime": 18569.0539,
2210
+ "train_tokens_per_second": 4590.678
2211
+ },
2212
+ {
2213
+ "epoch": 4.253658536585366,
2214
+ "grad_norm": 0.3033400736842611,
2215
+ "learning_rate": 5.727198717339511e-06,
2216
+ "loss": 0.4137,
2217
+ "num_input_tokens_seen": 85637376,
2218
+ "step": 221,
2219
+ "train_runtime": 18654.7358,
2220
+ "train_tokens_per_second": 4590.651
2221
+ },
2222
+ {
2223
+ "epoch": 4.273170731707317,
2224
+ "grad_norm": 0.3293082877684599,
2225
+ "learning_rate": 5.449673790581611e-06,
2226
+ "loss": 0.4069,
2227
+ "num_input_tokens_seen": 86030208,
2228
+ "step": 222,
2229
+ "train_runtime": 18740.4478,
2230
+ "train_tokens_per_second": 4590.616
2231
+ },
2232
+ {
2233
+ "epoch": 4.2926829268292686,
2234
+ "grad_norm": 0.2994148389025656,
2235
+ "learning_rate": 5.178653135214812e-06,
2236
+ "loss": 0.4188,
2237
+ "num_input_tokens_seen": 86423040,
2238
+ "step": 223,
2239
+ "train_runtime": 18826.1093,
2240
+ "train_tokens_per_second": 4590.595
2241
+ },
2242
+ {
2243
+ "epoch": 4.31219512195122,
2244
+ "grad_norm": 0.3140763914390958,
2245
+ "learning_rate": 4.914176319791036e-06,
2246
+ "loss": 0.4133,
2247
+ "num_input_tokens_seen": 86815872,
2248
+ "step": 224,
2249
+ "train_runtime": 18911.7783,
2250
+ "train_tokens_per_second": 4590.572
2251
+ },
2252
+ {
2253
+ "epoch": 4.331707317073171,
2254
+ "grad_norm": 0.3058594326378288,
2255
+ "learning_rate": 4.65628195747273e-06,
2256
+ "loss": 0.4136,
2257
+ "num_input_tokens_seen": 87208704,
2258
+ "step": 225,
2259
+ "train_runtime": 18997.5405,
2260
+ "train_tokens_per_second": 4590.526
2261
+ },
2262
+ {
2263
+ "epoch": 4.351219512195122,
2264
+ "grad_norm": 0.3036952677463643,
2265
+ "learning_rate": 4.405007700395497e-06,
2266
+ "loss": 0.4066,
2267
+ "num_input_tokens_seen": 87601536,
2268
+ "step": 226,
2269
+ "train_runtime": 19083.2658,
2270
+ "train_tokens_per_second": 4590.49
2271
+ },
2272
+ {
2273
+ "epoch": 4.3707317073170735,
2274
+ "grad_norm": 0.2986697055188429,
2275
+ "learning_rate": 4.16039023417088e-06,
2276
+ "loss": 0.4002,
2277
+ "num_input_tokens_seen": 87994368,
2278
+ "step": 227,
2279
+ "train_runtime": 19168.9792,
2280
+ "train_tokens_per_second": 4590.457
2281
+ },
2282
+ {
2283
+ "epoch": 4.390243902439025,
2284
+ "grad_norm": 0.30912351105330554,
2285
+ "learning_rate": 3.922465272530351e-06,
2286
+ "loss": 0.4213,
2287
+ "num_input_tokens_seen": 88387200,
2288
+ "step": 228,
2289
+ "train_runtime": 19254.8776,
2290
+ "train_tokens_per_second": 4590.38
2291
+ },
2292
+ {
2293
+ "epoch": 4.409756097560976,
2294
+ "grad_norm": 0.31390545996696395,
2295
+ "learning_rate": 3.691267552111183e-06,
2296
+ "loss": 0.4276,
2297
+ "num_input_tokens_seen": 88780032,
2298
+ "step": 229,
2299
+ "train_runtime": 19340.5507,
2300
+ "train_tokens_per_second": 4590.357
2301
+ },
2302
+ {
2303
+ "epoch": 4.429268292682927,
2304
+ "grad_norm": 0.3064931328623729,
2305
+ "learning_rate": 3.4668308273848982e-06,
2306
+ "loss": 0.417,
2307
+ "num_input_tokens_seen": 89172864,
2308
+ "step": 230,
2309
+ "train_runtime": 19426.2001,
2310
+ "train_tokens_per_second": 4590.34
2311
+ },
2312
+ {
2313
+ "epoch": 4.4487804878048784,
2314
+ "grad_norm": 0.2938965818242203,
2315
+ "learning_rate": 3.249187865729264e-06,
2316
+ "loss": 0.4036,
2317
+ "num_input_tokens_seen": 89565696,
2318
+ "step": 231,
2319
+ "train_runtime": 19511.8505,
2320
+ "train_tokens_per_second": 4590.323
2321
+ },
2322
+ {
2323
+ "epoch": 4.46829268292683,
2324
+ "grad_norm": 0.3009741802462199,
2325
+ "learning_rate": 3.0383704426442394e-06,
2326
+ "loss": 0.4109,
2327
+ "num_input_tokens_seen": 89958528,
2328
+ "step": 232,
2329
+ "train_runtime": 19597.5003,
2330
+ "train_tokens_per_second": 4590.306
2331
+ },
2332
+ {
2333
+ "epoch": 4.487804878048781,
2334
+ "grad_norm": 0.30572185629569554,
2335
+ "learning_rate": 2.8344093371128424e-06,
2336
+ "loss": 0.4177,
2337
+ "num_input_tokens_seen": 90351360,
2338
+ "step": 233,
2339
+ "train_runtime": 19683.1546,
2340
+ "train_tokens_per_second": 4590.289
2341
+ },
2342
+ {
2343
+ "epoch": 4.507317073170732,
2344
+ "grad_norm": 0.3060090032156781,
2345
+ "learning_rate": 2.637334327107466e-06,
2346
+ "loss": 0.4167,
2347
+ "num_input_tokens_seen": 90744192,
2348
+ "step": 234,
2349
+ "train_runtime": 19768.7311,
2350
+ "train_tokens_per_second": 4590.289
2351
+ },
2352
+ {
2353
+ "epoch": 4.526829268292683,
2354
+ "grad_norm": 0.30158648786459336,
2355
+ "learning_rate": 2.4471741852423237e-06,
2356
+ "loss": 0.4214,
2357
+ "num_input_tokens_seen": 91137024,
2358
+ "step": 235,
2359
+ "train_runtime": 19854.2512,
2360
+ "train_tokens_per_second": 4590.303
2361
+ },
2362
+ {
2363
+ "epoch": 4.546341463414635,
2364
+ "grad_norm": 0.30106548477413225,
2365
+ "learning_rate": 2.2639566745727205e-06,
2366
+ "loss": 0.4038,
2367
+ "num_input_tokens_seen": 91529856,
2368
+ "step": 236,
2369
+ "train_runtime": 19939.85,
2370
+ "train_tokens_per_second": 4590.298
2371
+ },
2372
+ {
2373
+ "epoch": 4.565853658536585,
2374
+ "grad_norm": 0.31280608719577163,
2375
+ "learning_rate": 2.087708544541689e-06,
2376
+ "loss": 0.4087,
2377
+ "num_input_tokens_seen": 91922688,
2378
+ "step": 237,
2379
+ "train_runtime": 20025.4609,
2380
+ "train_tokens_per_second": 4590.291
2381
+ },
2382
+ {
2383
+ "epoch": 4.585365853658536,
2384
+ "grad_norm": 0.3151237023853444,
2385
+ "learning_rate": 1.9184555270746194e-06,
2386
+ "loss": 0.4099,
2387
+ "num_input_tokens_seen": 92315520,
2388
+ "step": 238,
2389
+ "train_runtime": 20111.0379,
2390
+ "train_tokens_per_second": 4590.291
2391
+ },
2392
+ {
2393
+ "epoch": 4.6048780487804875,
2394
+ "grad_norm": 0.2979822004749027,
2395
+ "learning_rate": 1.7562223328224325e-06,
2396
+ "loss": 0.4098,
2397
+ "num_input_tokens_seen": 92708352,
2398
+ "step": 239,
2399
+ "train_runtime": 20196.6063,
2400
+ "train_tokens_per_second": 4590.294
2401
+ },
2402
+ {
2403
+ "epoch": 4.624390243902439,
2404
+ "grad_norm": 0.29641279383853864,
2405
+ "learning_rate": 1.601032647553863e-06,
2406
+ "loss": 0.4106,
2407
+ "num_input_tokens_seen": 93101184,
2408
+ "step": 240,
2409
+ "train_runtime": 20282.2193,
2410
+ "train_tokens_per_second": 4590.286
2411
+ },
2412
+ {
2413
+ "epoch": 4.64390243902439,
2414
+ "grad_norm": 0.30405930218468175,
2415
+ "learning_rate": 1.4529091286973995e-06,
2416
+ "loss": 0.4204,
2417
+ "num_input_tokens_seen": 93494016,
2418
+ "step": 241,
2419
+ "train_runtime": 20367.7887,
2420
+ "train_tokens_per_second": 4590.288
2421
+ },
2422
+ {
2423
+ "epoch": 4.663414634146341,
2424
+ "grad_norm": 0.30816493741451845,
2425
+ "learning_rate": 1.3118734020333256e-06,
2426
+ "loss": 0.4069,
2427
+ "num_input_tokens_seen": 93886848,
2428
+ "step": 242,
2429
+ "train_runtime": 20453.3944,
2430
+ "train_tokens_per_second": 4590.282
2431
+ },
2432
+ {
2433
+ "epoch": 4.682926829268292,
2434
+ "grad_norm": 0.295889585872778,
2435
+ "learning_rate": 1.1779460585363944e-06,
2436
+ "loss": 0.4206,
2437
+ "num_input_tokens_seen": 94279680,
2438
+ "step": 243,
2439
+ "train_runtime": 20538.896,
2440
+ "train_tokens_per_second": 4590.299
2441
+ },
2442
+ {
2443
+ "epoch": 4.702439024390244,
2444
+ "grad_norm": 0.30064754265380783,
2445
+ "learning_rate": 1.0511466513695777e-06,
2446
+ "loss": 0.4216,
2447
+ "num_input_tokens_seen": 94672512,
2448
+ "step": 244,
2449
+ "train_runtime": 20624.455,
2450
+ "train_tokens_per_second": 4590.304
2451
+ },
2452
+ {
2453
+ "epoch": 4.721951219512195,
2454
+ "grad_norm": 0.2956056728342269,
2455
+ "learning_rate": 9.314936930293283e-07,
2456
+ "loss": 0.4067,
2457
+ "num_input_tokens_seen": 95065344,
2458
+ "step": 245,
2459
+ "train_runtime": 20710.4849,
2460
+ "train_tokens_per_second": 4590.204
2461
+ },
2462
+ {
2463
+ "epoch": 4.741463414634146,
2464
+ "grad_norm": 1.2801159100545316,
2465
+ "learning_rate": 8.190046526428242e-07,
2466
+ "loss": 0.4149,
2467
+ "num_input_tokens_seen": 95458176,
2468
+ "step": 246,
2469
+ "train_runtime": 20796.2392,
2470
+ "train_tokens_per_second": 4590.165
2471
+ },
2472
+ {
2473
+ "epoch": 4.760975609756097,
2474
+ "grad_norm": 0.2962901875713787,
2475
+ "learning_rate": 7.136959534174592e-07,
2476
+ "loss": 0.4104,
2477
+ "num_input_tokens_seen": 95851008,
2478
+ "step": 247,
2479
+ "train_runtime": 20881.9173,
2480
+ "train_tokens_per_second": 4590.144
2481
+ },
2482
+ {
2483
+ "epoch": 4.780487804878049,
2484
+ "grad_norm": 0.29775756656133795,
2485
+ "learning_rate": 6.15582970243117e-07,
2486
+ "loss": 0.4275,
2487
+ "num_input_tokens_seen": 96243840,
2488
+ "step": 248,
2489
+ "train_runtime": 20967.6297,
2490
+ "train_tokens_per_second": 4590.115
2491
+ },
2492
+ {
2493
+ "epoch": 4.8,
2494
+ "grad_norm": 0.30152743001548155,
2495
+ "learning_rate": 5.246800274474439e-07,
2496
+ "loss": 0.4168,
2497
+ "num_input_tokens_seen": 96636672,
2498
+ "step": 249,
2499
+ "train_runtime": 21053.3487,
2500
+ "train_tokens_per_second": 4590.086
2501
+ },
2502
+ {
2503
+ "epoch": 4.819512195121951,
2504
+ "grad_norm": 0.3006544700138266,
2505
+ "learning_rate": 4.4100039670454395e-07,
2506
+ "loss": 0.4227,
2507
+ "num_input_tokens_seen": 97029504,
2508
+ "step": 250,
2509
+ "train_runtime": 21139.1446,
2510
+ "train_tokens_per_second": 4590.039
2511
+ },
2512
+ {
2513
+ "epoch": 4.839024390243902,
2514
+ "grad_norm": 0.3694815192213994,
2515
+ "learning_rate": 3.6455629509730136e-07,
2516
+ "loss": 0.4224,
2517
+ "num_input_tokens_seen": 97422336,
2518
+ "step": 251,
2519
+ "train_runtime": 21224.947,
2520
+ "train_tokens_per_second": 4589.992
2521
+ },
2522
+ {
2523
+ "epoch": 4.8585365853658535,
2524
+ "grad_norm": 0.29849891867672973,
2525
+ "learning_rate": 2.953588833337406e-07,
2526
+ "loss": 0.409,
2527
+ "num_input_tokens_seen": 97815168,
2528
+ "step": 252,
2529
+ "train_runtime": 21311.0603,
2530
+ "train_tokens_per_second": 4589.878
2531
+ },
2532
+ {
2533
+ "epoch": 4.878048780487805,
2534
+ "grad_norm": 0.29318102693895126,
2535
+ "learning_rate": 2.334182641175686e-07,
2536
+ "loss": 0.4164,
2537
+ "num_input_tokens_seen": 98208000,
2538
+ "step": 253,
2539
+ "train_runtime": 21396.8922,
2540
+ "train_tokens_per_second": 4589.825
2541
+ },
2542
+ {
2543
+ "epoch": 4.897560975609756,
2544
+ "grad_norm": 0.29392220191104595,
2545
+ "learning_rate": 1.7874348067319912e-07,
2546
+ "loss": 0.4048,
2547
+ "num_input_tokens_seen": 98600832,
2548
+ "step": 254,
2549
+ "train_runtime": 21482.6378,
2550
+ "train_tokens_per_second": 4589.792
2551
+ },
2552
+ {
2553
+ "epoch": 4.917073170731707,
2554
+ "grad_norm": 0.29522364415867186,
2555
+ "learning_rate": 1.3134251542544774e-07,
2556
+ "loss": 0.4082,
2557
+ "num_input_tokens_seen": 98993664,
2558
+ "step": 255,
2559
+ "train_runtime": 21568.4139,
2560
+ "train_tokens_per_second": 4589.752
2561
+ },
2562
+ {
2563
+ "epoch": 4.9365853658536585,
2564
+ "grad_norm": 0.29818003556233796,
2565
+ "learning_rate": 9.12222888341252e-08,
2566
+ "loss": 0.4214,
2567
+ "num_input_tokens_seen": 99386496,
2568
+ "step": 256,
2569
+ "train_runtime": 21654.0911,
2570
+ "train_tokens_per_second": 4589.733
2571
+ },
2572
+ {
2573
+ "epoch": 4.95609756097561,
2574
+ "grad_norm": 0.29688566274903394,
2575
+ "learning_rate": 5.838865838366792e-08,
2576
+ "loss": 0.413,
2577
+ "num_input_tokens_seen": 99779328,
2578
+ "step": 257,
2579
+ "train_runtime": 21739.8502,
2580
+ "train_tokens_per_second": 4589.697
2581
+ },
2582
+ {
2583
+ "epoch": 4.975609756097561,
2584
+ "grad_norm": 0.2972951381912236,
2585
+ "learning_rate": 3.284641772793862e-08,
2586
+ "loss": 0.4118,
2587
+ "num_input_tokens_seen": 100172160,
2588
+ "step": 258,
2589
+ "train_runtime": 21825.6482,
2590
+ "train_tokens_per_second": 4589.653
2591
+ },
2592
+ {
2593
+ "epoch": 4.995121951219512,
2594
+ "grad_norm": 0.2934977397098327,
2595
+ "learning_rate": 1.4599295990352924e-08,
2596
+ "loss": 0.4169,
2597
+ "num_input_tokens_seen": 100564992,
2598
+ "step": 259,
2599
+ "train_runtime": 21911.3681,
2600
+ "train_tokens_per_second": 4589.626
2601
+ },
2602
+ {
2603
+ "epoch": 5.0,
2604
+ "grad_norm": 0.598332221412963,
2605
+ "learning_rate": 3.6499572194648167e-09,
2606
+ "loss": 0.3937,
2607
+ "num_input_tokens_seen": 100663200,
2608
+ "step": 260,
2609
+ "train_runtime": 21932.7813,
2610
+ "train_tokens_per_second": 4589.623
2611
+ }
2612
+ ],
2613
+ "logging_steps": 1,
2614
+ "max_steps": 260,
2615
+ "num_input_tokens_seen": 100663200,
2616
+ "num_train_epochs": 5,
2617
+ "save_steps": 1000,
2618
+ "stateful_callbacks": {
2619
+ "TrainerControl": {
2620
+ "args": {
2621
+ "should_epoch_stop": false,
2622
+ "should_evaluate": false,
2623
+ "should_log": false,
2624
+ "should_save": true,
2625
+ "should_training_stop": true
2626
+ },
2627
+ "attributes": {}
2628
+ }
2629
+ },
2630
+ "total_flos": 2377568804143104.0,
2631
+ "train_batch_size": 24,
2632
+ "trial_name": null,
2633
+ "trial_params": null
2634
+ }
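The per-step records above follow the standard Hugging Face Trainer `trainer_state.json` schema (`epoch`, `step`, `loss`, `learning_rate`, `grad_norm`, plus token and runtime counters). Below is a minimal inspection sketch, not part of the upload itself; it assumes a local clone of this repo and the usual Trainer layout, where the records live under a `log_history` key. Notably, the logged learning rates are consistent with a cosine decay from a peak of 1e-4 over the 260 steps (the peak is inferred from the values, not stated in the file):

```python
import json
import math

# Load the trainer state reproduced above (path assumes a local clone of this repo).
with open("checkpoint-260/trainer_state.json") as f:
    state = json.load(f)

for rec in state["log_history"]:
    if "loss" not in rec:  # skip any summary-only records
        continue
    step, lr = rec["step"], rec["learning_rate"]
    # Cosine-decay check: the logged values match
    #   lr(step) ~= 5e-5 * (1 + cos(pi * (step - 1) / 260)),
    # i.e. a peak of 1e-4 (an inference, not read from the file) annealed to ~0.
    expected = 5e-5 * (1 + math.cos(math.pi * (step - 1) / 260))
    print(step, rec["epoch"], rec["loss"], lr, f"{expected:.3e}")
```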
checkpoint-260/training_args.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d28367a7f01f97d093d01f03db18770fc51c71f24731638d1414a6bbecdecf30
3
+ size 8081
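The three lines above are a Git LFS pointer (`version`, `oid`, `size`) standing in for the actual 8 KB binary. After a `git lfs pull`, `training_args.bin` is the pickled `TrainingArguments` object the Trainer saved; a minimal loading sketch, assuming `torch` and `transformers` are installed:

```python
import torch

# The file is a pickled transformers.TrainingArguments object, not a tensor dict,
# so recent PyTorch versions need weights_only=False to unpickle it.
args = torch.load("checkpoint-260/training_args.bin", weights_only=False)
print(args.num_train_epochs, args.learning_rate, args.per_device_train_batch_size)
```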
checkpoint-260/zero_to_fp32.py ADDED
@@ -0,0 +1,760 @@
1
+ #!/usr/bin/env python
2
+
3
+ # Copyright (c) Microsoft Corporation.
4
+ # SPDX-License-Identifier: Apache-2.0
5
+
6
+ # DeepSpeed Team
7
+
8
+ # This script extracts fp32 consolidated weights from ZeRO stage 1, 2 and 3 DeepSpeed checkpoints. It gets
9
+ # copied into the top level checkpoint dir, so the user can easily do the conversion at any point in
10
+ # the future. Once extracted, the weights don't require DeepSpeed and can be used in any
11
+ # application.
12
+ #
13
+ # example:
14
+ # python zero_to_fp32.py . output_dir/
15
+ # or
16
+ # python zero_to_fp32.py . output_dir/ --safe_serialization
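+ # (For this upload the script sits inside checkpoint-260/, next to the `latest`
+ # tag file, so it can be run from that directory, e.g.
+ # `python zero_to_fp32.py . fp32_out/`, assuming deepspeed is installed.)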
17
+
18
+ import argparse
19
+ import torch
20
+ import glob
21
+ import math
22
+ import os
23
+ import re
24
+ import gc
25
+ import json
26
+ import numpy as np
27
+ from tqdm import tqdm
28
+ from collections import OrderedDict
29
+ from dataclasses import dataclass
30
+
31
+ # While this script doesn't use deepspeed to recover the data, the checkpoints are pickled
32
+ # with DeepSpeed data structures, so deepspeed must be available in the current Python environment.
33
+ from deepspeed.utils import logger
34
+ from deepspeed.checkpoint.constants import (DS_VERSION, OPTIMIZER_STATE_DICT, SINGLE_PARTITION_OF_FP32_GROUPS,
35
+ FP32_FLAT_GROUPS, ZERO_STAGE, PARTITION_COUNT, PARAM_SHAPES, BUFFER_NAMES,
36
+ FROZEN_PARAM_SHAPES, FROZEN_PARAM_FRAGMENTS)
37
+
38
+
39
+ @dataclass
40
+ class zero_model_state:
41
+ buffers: dict
42
+ param_shapes: dict
43
+ shared_params: list
44
+ ds_version: int
45
+ frozen_param_shapes: dict
46
+ frozen_param_fragments: dict
47
+
48
+
49
+ debug = 0
50
+
51
+ # load to cpu
52
+ device = torch.device('cpu')
53
+
54
+
55
+ def atoi(text):
56
+ return int(text) if text.isdigit() else text
57
+
58
+
59
+ def natural_keys(text):
60
+ '''
61
+ alist.sort(key=natural_keys) sorts in human order
62
+ http://nedbatchelder.com/blog/200712/human_sorting.html
63
+ (See Toothy's implementation in the comments)
64
+ '''
65
+ return [atoi(c) for c in re.split(r'(\d+)', text)]
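+ # Illustrative usage: sorted(["rank_10", "rank_2"], key=natural_keys) returns
+ # ["rank_2", "rank_10"], so shard files sort numerically, not lexicographically.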
66
+
67
+
68
+ def get_model_state_file(checkpoint_dir, zero_stage):
69
+ if not os.path.isdir(checkpoint_dir):
70
+ raise FileNotFoundError(f"Directory '{checkpoint_dir}' doesn't exist")
71
+
72
+ # there should be only one file
73
+ if zero_stage <= 2:
74
+ file = os.path.join(checkpoint_dir, "mp_rank_00_model_states.pt")
75
+ elif zero_stage == 3:
76
+ file = os.path.join(checkpoint_dir, "zero_pp_rank_0_mp_rank_00_model_states.pt")
77
+
78
+ if not os.path.exists(file):
79
+ raise FileNotFoundError(f"can't find model states file at '{file}'")
80
+
81
+ return file
82
+
83
+
84
+ def get_checkpoint_files(checkpoint_dir, glob_pattern):
85
+ # XXX: need to test that this simple glob rule works for multi-node setup too
86
+ ckpt_files = sorted(glob.glob(os.path.join(checkpoint_dir, glob_pattern)), key=natural_keys)
87
+
88
+ if len(ckpt_files) == 0:
89
+ raise FileNotFoundError(f"can't find {glob_pattern} files in directory '{checkpoint_dir}'")
90
+
91
+ return ckpt_files
92
+
93
+
94
+ def get_optim_files(checkpoint_dir):
95
+ return get_checkpoint_files(checkpoint_dir, "*_optim_states.pt")
96
+
97
+
98
+ def get_model_state_files(checkpoint_dir):
99
+ return get_checkpoint_files(checkpoint_dir, "*_model_states.pt")
100
+
101
+
102
+ def parse_model_states(files):
103
+ zero_model_states = []
104
+ for file in files:
105
+ state_dict = torch.load(file, map_location=device, weights_only=False)
106
+
107
+ if BUFFER_NAMES not in state_dict:
108
+ raise ValueError(f"{file} is not a model state checkpoint")
109
+ buffer_names = state_dict[BUFFER_NAMES]
110
+ if debug:
111
+ print("Found buffers:", buffer_names)
112
+
113
+ # recover just the buffers while restoring them to fp32 if they were saved in fp16
114
+ buffers = {k: v.float() for k, v in state_dict["module"].items() if k in buffer_names}
115
+ param_shapes = state_dict[PARAM_SHAPES]
116
+
117
+ # collect parameters that are included in param_shapes
118
+ param_names = []
119
+ for s in param_shapes:
120
+ for name in s.keys():
121
+ param_names.append(name)
122
+
123
+ # update with frozen parameters
124
+ frozen_param_shapes = state_dict.get(FROZEN_PARAM_SHAPES, None)
125
+ if frozen_param_shapes is not None:
126
+ if debug:
127
+ print(f"Found frozen_param_shapes: {frozen_param_shapes}")
128
+ param_names += list(frozen_param_shapes.keys())
129
+
130
+ # handle shared params
131
+ shared_params = [[k, v] for k, v in state_dict["shared_params"].items()]
132
+
133
+ ds_version = state_dict.get(DS_VERSION, None)
134
+
135
+ frozen_param_fragments = state_dict.get(FROZEN_PARAM_FRAGMENTS, None)
136
+
137
+ z_model_state = zero_model_state(buffers=buffers,
138
+ param_shapes=param_shapes,
139
+ shared_params=shared_params,
140
+ ds_version=ds_version,
141
+ frozen_param_shapes=frozen_param_shapes,
142
+ frozen_param_fragments=frozen_param_fragments)
143
+ zero_model_states.append(z_model_state)
144
+
145
+ return zero_model_states
146
+
147
+
148
+ def parse_optim_states(files, ds_checkpoint_dir):
149
+ total_files = len(files)
150
+ state_dicts = []
151
+ for f in tqdm(files, desc='Loading checkpoint shards'):
152
+ state_dict = torch.load(f, map_location=device, mmap=True, weights_only=False)
153
+ # immediately discard the two potentially huge optimizer states, as we only care about the fp32 master weights
154
+ # and also handle the case where it was already removed by another helper script
155
+ state_dict["optimizer_state_dict"].pop("optimizer_state_dict", None)
156
+ state_dicts.append(state_dict)
157
+
158
+ if ZERO_STAGE not in state_dicts[0][OPTIMIZER_STATE_DICT]:
159
+ raise ValueError(f"{files[0]} is not a zero checkpoint")
160
+ zero_stage = state_dicts[0][OPTIMIZER_STATE_DICT][ZERO_STAGE]
161
+ world_size = state_dicts[0][OPTIMIZER_STATE_DICT][PARTITION_COUNT]
162
+
163
+ # For ZeRO-2 each param group can have different partition_count as data parallelism for expert
164
+ # parameters can be different from data parallelism for non-expert parameters. So we can just
165
+ # use the max of the partition_count to get the dp world_size.
166
+
167
+ if isinstance(world_size, list):
168
+ world_size = max(world_size)
169
+
170
+ if world_size != total_files:
171
+ raise ValueError(
172
+ f"Expected {world_size} of '*_optim_states.pt' under '{ds_checkpoint_dir}' but found {total_files} files. "
173
+ "Possibly due to an overwrite of an old checkpoint, or a checkpoint didn't get saved by one or more processes."
174
+ )
175
+
176
+ # the groups are named differently in each stage
177
+ if zero_stage <= 2:
178
+ fp32_groups_key = SINGLE_PARTITION_OF_FP32_GROUPS
179
+ elif zero_stage == 3:
180
+ fp32_groups_key = FP32_FLAT_GROUPS
181
+ else:
182
+ raise ValueError(f"unknown zero stage {zero_stage}")
183
+
184
+ fp32_flat_groups = [state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key] for i in range(len(state_dicts))]
185
+ return zero_stage, world_size, fp32_flat_groups
186
+
187
+
188
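# Illustrative sketch of the bookkeeping returned above: one entry per rank, and per rank
# a list of flat fp32 tensors, one per optimizer param group. Path hypothetical; the numbers
# depend on the run.
def _example_parse_optim():
    ds_checkpoint_dir = "checkpoint-260/global_step260"
    files = get_optim_files(ds_checkpoint_dir)
    zero_stage, world_size, flat_groups = parse_optim_states(files, ds_checkpoint_dir)
    assert world_size == len(files)  # enforced by the check above
    print(zero_stage, world_size, [t.numel() for t in flat_groups[0]])
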
def _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters):
    """
    Returns fp32 state_dict reconstructed from ds checkpoint

    Args:
        - ``ds_checkpoint_dir``: path to the deepspeed checkpoint folder (where the optimizer files are)

    """
    print(f"Processing zero checkpoint '{ds_checkpoint_dir}'")

    optim_files = get_optim_files(ds_checkpoint_dir)
    zero_stage, world_size, fp32_flat_groups = parse_optim_states(optim_files, ds_checkpoint_dir)
    print(f"Detected checkpoint of type zero stage {zero_stage}, world_size: {world_size}")

    model_files = get_model_state_files(ds_checkpoint_dir)

    zero_model_states = parse_model_states(model_files)
    print(f'Parsing checkpoint created by deepspeed=={zero_model_states[0].ds_version}')

    if zero_stage <= 2:
        return _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
                                                          exclude_frozen_parameters)
    elif zero_stage == 3:
        return _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
                                                          exclude_frozen_parameters)

def _zero2_merge_frozen_params(state_dict, zero_model_states):
    if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
        return

    frozen_param_shapes = zero_model_states[0].frozen_param_shapes
    frozen_param_fragments = zero_model_states[0].frozen_param_fragments

    if debug:
        num_elem = sum(s.numel() for s in frozen_param_shapes.values())
        print(f'rank 0: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')

    wanted_params = len(frozen_param_shapes)
    wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
    avail_numel = sum([p.numel() for p in frozen_param_fragments.values()])
    print(f'Frozen params: Have {avail_numel} numels to process.')
    print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')

    total_params = 0
    total_numel = 0
    for name, shape in frozen_param_shapes.items():
        total_params += 1
        unpartitioned_numel = shape.numel()
        total_numel += unpartitioned_numel

        state_dict[name] = frozen_param_fragments[name]

        if debug:
            print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")

    print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")


def _has_callable(obj, fn):
    attr = getattr(obj, fn, None)
    return callable(attr)

def _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
    param_shapes = zero_model_states[0].param_shapes

    # Reconstruction protocol:
    #
    # XXX: document this

    if debug:
        for i in range(world_size):
            for j in range(len(fp32_flat_groups[0])):
                print(f"{FP32_FLAT_GROUPS}[{i}][{j}].shape={fp32_flat_groups[i][j].shape}")

    # XXX: memory usage doubles here (zero2)
    num_param_groups = len(fp32_flat_groups[0])
    merged_single_partition_of_fp32_groups = []
    for i in range(num_param_groups):
        merged_partitions = [sd[i] for sd in fp32_flat_groups]
        full_single_fp32_vector = torch.cat(merged_partitions, 0)
        merged_single_partition_of_fp32_groups.append(full_single_fp32_vector)
    avail_numel = sum(
        [full_single_fp32_vector.numel() for full_single_fp32_vector in merged_single_partition_of_fp32_groups])

    if debug:
        wanted_params = sum([len(shapes) for shapes in param_shapes])
        wanted_numel = sum([sum(shape.numel() for shape in shapes.values()) for shapes in param_shapes])
        # not asserting if there is a mismatch due to possible padding
        print(f"Have {avail_numel} numels to process.")
        print(f"Need {wanted_numel} numels in {wanted_params} params.")

    # params
    # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
    # out-of-core computing solution
    total_numel = 0
    total_params = 0
    for shapes, full_single_fp32_vector in zip(param_shapes, merged_single_partition_of_fp32_groups):
        offset = 0
        avail_numel = full_single_fp32_vector.numel()
        for name, shape in shapes.items():

            unpartitioned_numel = shape.numel() if _has_callable(shape, 'numel') else math.prod(shape)
            total_numel += unpartitioned_numel
            total_params += 1

            if debug:
                print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
            state_dict[name] = full_single_fp32_vector.narrow(0, offset, unpartitioned_numel).view(shape)
            offset += unpartitioned_numel

        # Z2 started to align to 2*world_size to improve nccl performance. Therefore both offset and
        # avail_numel can differ by anywhere between 0..2*world_size. Due to two unrelated complex
        # paddings performed in the code it's almost impossible to predict the exact numbers w/o the
        # live optimizer object, so we are checking that the numbers are within the right range
        align_to = 2 * world_size

        def zero2_align(x):
            return align_to * math.ceil(x / align_to)

        if debug:
            print(f"original offset={offset}, avail_numel={avail_numel}")

        offset = zero2_align(offset)
        avail_numel = zero2_align(avail_numel)

        if debug:
            print(f"aligned offset={offset}, avail_numel={avail_numel}")

        # Sanity check
        if offset != avail_numel:
            raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")

    print(f"Reconstructed fp32 state dict with {total_params} params {total_numel} elements")

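# Illustrative sketch of the narrow/view reconstruction used above: params are read back out
# of one flat fp32 vector in declaration order. Toy shapes only, not real model dims.
def _example_flat_unpack():
    import torch
    shapes = {"w": torch.Size([2, 3]), "b": torch.Size([3])}
    flat = torch.arange(9, dtype=torch.float32)  # pretend this is a merged fp32 partition
    offset, out = 0, {}
    for name, shape in shapes.items():
        n = shape.numel()
        out[name] = flat.narrow(0, offset, n).view(shape)
        offset += n
    assert offset == flat.numel()  # same sanity check as above (no alignment padding here)
    return out
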
def _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
                                               exclude_frozen_parameters):
    state_dict = OrderedDict()

    # buffers
    buffers = zero_model_states[0].buffers
    state_dict.update(buffers)
    if debug:
        print(f"added {len(buffers)} buffers")

    if not exclude_frozen_parameters:
        _zero2_merge_frozen_params(state_dict, zero_model_states)

    _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)

    # recover shared parameters
    for pair in zero_model_states[0].shared_params:
        if pair[1] in state_dict:
            state_dict[pair[0]] = state_dict[pair[1]]

    return state_dict

def zero3_partitioned_param_info(unpartitioned_numel, world_size):
    remainder = unpartitioned_numel % world_size
    padding_numel = (world_size - remainder) if remainder else 0
    partitioned_numel = math.ceil(unpartitioned_numel / world_size)
    return partitioned_numel, padding_numel

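# Worked example for the helper above (toy numbers, not from this checkpoint):
# 10 elements across 4 ranks -> ceil(10/4) = 3 kept per rank, with 4 - (10 % 4) = 2 pad elements.
assert zero3_partitioned_param_info(10, 4) == (3, 2)
assert zero3_partitioned_param_info(8, 4) == (2, 0)  # exact split -> no padding
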
def _zero3_merge_frozen_params(state_dict, world_size, zero_model_states):
    if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
        return

    if debug:
        for i in range(world_size):
            num_elem = sum(s.numel() for s in zero_model_states[i].frozen_param_fragments.values())
            print(f'rank {i}: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')

    frozen_param_shapes = zero_model_states[0].frozen_param_shapes
    wanted_params = len(frozen_param_shapes)
    wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
    avail_numel = sum([p.numel() for p in zero_model_states[0].frozen_param_fragments.values()]) * world_size
    print(f'Frozen params: Have {avail_numel} numels to process.')
    print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')

    total_params = 0
    total_numel = 0
    for name, shape in zero_model_states[0].frozen_param_shapes.items():
        total_params += 1
        unpartitioned_numel = shape.numel()
        total_numel += unpartitioned_numel

        param_frags = tuple(model_state.frozen_param_fragments[name] for model_state in zero_model_states)
        state_dict[name] = torch.cat(param_frags, 0).narrow(0, 0, unpartitioned_numel).view(shape)

        partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)

        if debug:
            print(
                f"Frozen params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
            )

    print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")

class GatheredTensor:
    """
    A pseudo tensor that collects partitioned weights.
    It is more memory efficient when there are multiple groups.
    """

    def __init__(self, flat_groups, flat_groups_offset, offset, partitioned_numel, shape):
        self.flat_groups = flat_groups
        self.flat_groups_offset = flat_groups_offset
        self.offset = offset
        self.partitioned_numel = partitioned_numel
        self.shape = shape
        self.dtype = self.flat_groups[0][0].dtype

    def contiguous(self):
        """
        Merge partitioned weights from flat_groups into a single tensor.
        """
        end_idx = self.offset + self.partitioned_numel
        world_size = len(self.flat_groups)
        pad_flat_param_chunks = []

        for rank_i in range(world_size):
            # for each rank, we need to collect weights from related group/groups
            flat_groups_at_rank_i = self.flat_groups[rank_i]
            start_group_id = None
            end_group_id = None
            for group_id in range(len(self.flat_groups_offset)):
                if self.flat_groups_offset[group_id] <= self.offset < self.flat_groups_offset[group_id + 1]:
                    start_group_id = group_id
                if self.flat_groups_offset[group_id] < end_idx <= self.flat_groups_offset[group_id + 1]:
                    end_group_id = group_id
                    break
            # collect weights from related group/groups
            for group_id in range(start_group_id, end_group_id + 1):
                flat_tensor = flat_groups_at_rank_i[group_id]
                start_offset = self.offset - self.flat_groups_offset[group_id]
                end_offset = min(end_idx, self.flat_groups_offset[group_id + 1]) - self.flat_groups_offset[group_id]
                pad_flat_param_chunks.append(flat_tensor[start_offset:end_offset])

        # collect weights from all ranks
        pad_flat_param = torch.cat(pad_flat_param_chunks, dim=0)
        param = pad_flat_param[:self.shape.numel()].view(self.shape).contiguous()
        return param

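# Illustrative sketch: gathering one 2x2 param that was split across two ranks.
# Toy tensors only; real checkpoints pass the per-rank fp32 flat groups loaded above.
def _example_gathered_tensor():
    import torch
    import numpy as np
    rank0 = [torch.tensor([1., 2.])]  # one param group per rank
    rank1 = [torch.tensor([3., 4.])]
    flat_groups = [rank0, rank1]
    flat_groups_offset = [0] + list(np.cumsum([t.numel() for t in rank0]))  # [0, 2]
    t = GatheredTensor(flat_groups, flat_groups_offset, offset=0,
                       partitioned_numel=2, shape=torch.Size([2, 2]))
    return t.contiguous()  # tensor([[1., 2.], [3., 4.]])
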
def _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
    param_shapes = zero_model_states[0].param_shapes
    avail_numel = sum([flat_group.numel() for flat_group in fp32_flat_groups[0]]) * world_size

    # Reconstruction protocol: For zero3 we need to zip the partitions together at boundary of each
    # param, re-consolidating each param, while dealing with padding if any

    # merge list of dicts, preserving order
    param_shapes = {k: v for d in param_shapes for k, v in d.items()}

    if debug:
        for i in range(world_size):
            print(f"{FP32_FLAT_GROUPS}[{i}].shape={[flat_tensor.shape for flat_tensor in fp32_flat_groups[i]]}")

    wanted_params = len(param_shapes)
    wanted_numel = sum(shape.numel() for shape in param_shapes.values())
    # not asserting if there is a mismatch due to possible padding
    # (each rank holds a list of flat tensors, one per param group, so sum their sizes)
    avail_numel = sum(flat_tensor.numel() for flat_tensor in fp32_flat_groups[0]) * world_size
    print(f"Trainable params: Have {avail_numel} numels to process.")
    print(f"Trainable params: Need {wanted_numel} numels in {wanted_params} params.")

    # params
    # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
    # out-of-core computing solution
    offset = 0
    total_numel = 0
    total_params = 0
    flat_groups_offset = [0] + list(np.cumsum([flat_tensor.numel() for flat_tensor in fp32_flat_groups[0]]))
    for name, shape in tqdm(param_shapes.items(), desc='Gathering sharded weights'):
        unpartitioned_numel = shape.numel()
        total_numel += unpartitioned_numel
        total_params += 1
        partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)

        if debug:
            print(
                f"Trainable params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
            )

        # memory efficient tensor
        tensor = GatheredTensor(fp32_flat_groups, flat_groups_offset, offset, partitioned_numel, shape)
        state_dict[name] = tensor
        offset += partitioned_numel

    offset *= world_size

    # Sanity check
    if offset != avail_numel:
        raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")

    print(f"Reconstructed Trainable fp32 state dict with {total_params} params {total_numel} elements")

def _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
                                               exclude_frozen_parameters):
    state_dict = OrderedDict()

    # buffers
    buffers = zero_model_states[0].buffers
    state_dict.update(buffers)
    if debug:
        print(f"added {len(buffers)} buffers")

    if not exclude_frozen_parameters:
        _zero3_merge_frozen_params(state_dict, world_size, zero_model_states)

    _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)

    # recover shared parameters
    for pair in zero_model_states[0].shared_params:
        if pair[1] in state_dict:
            state_dict[pair[0]] = state_dict[pair[1]]

    return state_dict

def to_torch_tensor(state_dict, return_empty_tensor=False):
    """
    Convert state_dict of GatheredTensor to torch tensor
    """
    torch_state_dict = {}
    converted_tensors = {}
    for name, tensor in state_dict.items():
        tensor_id = id(tensor)
        if tensor_id in converted_tensors:  # shared tensors
            shared_tensor = torch_state_dict[converted_tensors[tensor_id]]
            torch_state_dict[name] = shared_tensor
        else:
            converted_tensors[tensor_id] = name
            if return_empty_tensor:
                torch_state_dict[name] = torch.empty(tensor.shape, dtype=tensor.dtype)
            else:
                torch_state_dict[name] = tensor.contiguous()
    return torch_state_dict

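# Illustrative sketch: entries that alias the same underlying object stay shared after
# conversion, thanks to the id() bookkeeping above. Toy tensors only.
def _example_shared_conversion():
    import torch
    t = torch.ones(2)
    sd = {"a": t, "b": t}  # "b" aliases "a"
    out = to_torch_tensor(sd)
    assert out["a"] is out["b"]
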
def get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir,
                                             tag=None,
                                             exclude_frozen_parameters=False,
                                             lazy_mode=False):
    """
    Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated state_dict that can be loaded with
    ``load_state_dict()`` and used for training without DeepSpeed or shared with others, for example
    via a model hub.

    Args:
        - ``checkpoint_dir``: path to the desired checkpoint folder
        - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load the tag from the 'latest' file. e.g., ``global_step14``
        - ``exclude_frozen_parameters``: exclude frozen parameters
        - ``lazy_mode``: get state_dict in lazy mode. It returns a dict of pseudo tensors instead of torch tensors, which is more memory efficient.
          Convert a pseudo tensor to a torch tensor by calling ``.contiguous()``

    Returns:
        - pytorch ``state_dict``

    A typical usage might be ::

        from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
        # do the training and checkpoint saving
        state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir) # already on cpu
        model = model.cpu() # move to cpu
        model.load_state_dict(state_dict)
        # submit to model hub or save the model to share with others

    In this example the ``model`` will no longer be usable in the deepspeed context of the same
    application. i.e. you will need to re-initialize the deepspeed engine, since
    ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.

    If you want it all done for you, use ``load_state_dict_from_zero_checkpoint`` instead.

    Note: the above usage may not work if your application doesn't have sufficient free CPU memory.
    You may need to use the offline approach using the ``zero_to_fp32.py`` script that is saved with
    the checkpoint. Or you can load the state_dict in lazy mode ::

        from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
        state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, lazy_mode=True) # not on cpu
        for name, lazy_tensor in state_dict.items():
            tensor = lazy_tensor.contiguous() # to cpu
            print(name, tensor)
            # del tensor to release memory if it is no longer in use
    """
    if tag is None:
        latest_path = os.path.join(checkpoint_dir, 'latest')
        if os.path.isfile(latest_path):
            with open(latest_path, 'r') as fd:
                tag = fd.read().strip()
        else:
            raise ValueError(f"Unable to find 'latest' file at {latest_path}")

    ds_checkpoint_dir = os.path.join(checkpoint_dir, tag)

    if not os.path.isdir(ds_checkpoint_dir):
        raise FileNotFoundError(f"Directory '{ds_checkpoint_dir}' doesn't exist")

    state_dict = _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters)
    if lazy_mode:
        return state_dict
    else:
        return to_torch_tensor(state_dict)

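# Illustrative sketch, relevant to the LoRA run this repo came from: with
# exclude_frozen_parameters=True only the trainable (adapter) weights are reconstructed,
# which keeps the result small. Path hypothetical; key filtering assumes PEFT's usual
# lora_A/lora_B naming.
def _example_extract_trainable_only():
    sd = get_fp32_state_dict_from_zero_checkpoint("checkpoint-260", exclude_frozen_parameters=True)
    lora_keys = [k for k in sd if "lora_" in k]
    print(f"{len(lora_keys)} LoRA tensors reconstructed")
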
def convert_zero_checkpoint_to_fp32_state_dict(checkpoint_dir,
                                               output_dir,
                                               max_shard_size="5GB",
                                               safe_serialization=False,
                                               tag=None,
                                               exclude_frozen_parameters=False):
    """
    Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` file that can be
    loaded with ``torch.load(file)`` + ``load_state_dict()`` and used for training without DeepSpeed.

    Args:
        - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
        - ``output_dir``: directory for the pytorch fp32 state_dict output files
        - ``max_shard_size``: the maximum size for a checkpoint before being sharded, default value is 5GB
        - ``safe_serialization``: whether to save the model using `safetensors` or the traditional PyTorch way (that uses `pickle`).
        - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load the tag from the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
        - ``exclude_frozen_parameters``: exclude frozen parameters
    """

    # Dependency pre-check
    if safe_serialization:
        try:
            from safetensors.torch import save_file
        except ImportError:
            print('If you want to use `safe_serialization`, please `pip install safetensors`')
            raise
    if max_shard_size is not None:
        try:
            from huggingface_hub import split_torch_state_dict_into_shards
        except ImportError:
            print('If you want to use `max_shard_size`, please `pip install huggingface_hub`')
            raise

    # Convert zero checkpoint to state_dict
    state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir,
                                                          tag,
                                                          exclude_frozen_parameters,
                                                          lazy_mode=True)

    # Shard the model if it is too big.
    weights_name = "model.safetensors" if safe_serialization else "pytorch_model.bin"
    if max_shard_size is not None:
        filename_pattern = weights_name.replace(".bin", "{suffix}.bin").replace(".safetensors", "{suffix}.safetensors")
        # a memory-efficient approach for sharding
        empty_state_dict = to_torch_tensor(state_dict, return_empty_tensor=True)
        state_dict_split = split_torch_state_dict_into_shards(empty_state_dict,
                                                              filename_pattern=filename_pattern,
                                                              max_shard_size=max_shard_size)
    else:
        from collections import namedtuple
        StateDictSplit = namedtuple("StateDictSplit", ["is_sharded", "filename_to_tensors"])
        state_dict_split = StateDictSplit(is_sharded=False,
                                          filename_to_tensors={weights_name: list(state_dict.keys())})

    # Save the model by shard
    os.makedirs(output_dir, exist_ok=True)
    filename_to_tensors = state_dict_split.filename_to_tensors.items()
    for shard_file, tensors in tqdm(filename_to_tensors, desc="Saving checkpoint shards"):
        shard_state_dict = {tensor_name: state_dict[tensor_name] for tensor_name in tensors}
        shard_state_dict = to_torch_tensor(shard_state_dict)
        output_path = os.path.join(output_dir, shard_file)
        if safe_serialization:
            save_file(shard_state_dict, output_path, metadata={"format": "pt"})
        else:
            torch.save(shard_state_dict, output_path)
        # release the memory of the current shard
        for tensor_name in list(shard_state_dict.keys()):
            del state_dict[tensor_name]
            del shard_state_dict[tensor_name]
        del shard_state_dict
        gc.collect()

    # Save index if sharded
    if state_dict_split.is_sharded:
        index = {
            "metadata": state_dict_split.metadata,
            "weight_map": state_dict_split.tensor_to_filename,
        }
        save_index_file = "model.safetensors.index.json" if safe_serialization else "pytorch_model.bin.index.json"
        save_index_file = os.path.join(output_dir, save_index_file)
        with open(save_index_file, "w", encoding="utf-8") as f:
            content = json.dumps(index, indent=2, sort_keys=True) + "\n"
            f.write(content)

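# Illustrative usage of the converter above against this repo's layout (paths hypothetical):
#
#   convert_zero_checkpoint_to_fp32_state_dict("checkpoint-260", "checkpoint-260-fp32",
#                                              safe_serialization=True)
#
# which writes one or more model*.safetensors shards (plus a model.safetensors.index.json
# when sharded), mirroring what the CLI entry point at the bottom of this file does.
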
def load_state_dict_from_zero_checkpoint(model, checkpoint_dir, tag=None):
    """
    1. Put the provided model to cpu
    2. Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict``
    3. Load it into the provided model

    Args:
        - ``model``: the model object to update
        - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
        - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load the tag from the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``

    Returns:
        - ``model``: modified model

    Make sure you have plenty of CPU memory available before you call this function. If you don't
    have enough use the ``zero_to_fp32.py`` utility to do the conversion. You will find it
    conveniently placed for you in the checkpoint folder.

    A typical usage might be ::

        from deepspeed.utils.zero_to_fp32 import load_state_dict_from_zero_checkpoint
        model = load_state_dict_from_zero_checkpoint(trainer.model, checkpoint_dir)
        # submit to model hub or save the model to share with others

    Note that once this has run, the ``model`` will no longer be usable in the deepspeed context
    of the same application. i.e. you will need to re-initialize the deepspeed engine, since
    ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.

    """
    logger.info("Extracting fp32 weights")
    state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)

    logger.info("Overwriting model with fp32 weights")
    model = model.cpu()
    model.load_state_dict(state_dict, strict=False)

    return model

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("checkpoint_dir",
                        type=str,
                        help="path to the desired checkpoint folder, e.g., path/checkpoint-12")
    parser.add_argument("output_dir",
                        type=str,
                        help="directory for the pytorch fp32 state_dict output files "
                        "(e.g. path/checkpoint-12-output/)")
    parser.add_argument(
        "--max_shard_size",
        type=str,
        default="5GB",
        help="The maximum size for a checkpoint before being sharded. Checkpoint shards will then each be of a size "
        "lower than this size. If expressed as a string, needs to be digits followed by a unit (like `5MB`). "
        "We default it to 5GB in order for models to be able to run easily on free-tier google colab instances "
        "without CPU OOM issues.")
    parser.add_argument(
        "--safe_serialization",
        default=False,
        action='store_true',
        help="Whether to save the model using `safetensors` or the traditional PyTorch way (that uses `pickle`).")
    parser.add_argument("-t",
                        "--tag",
                        type=str,
                        default=None,
                        help="checkpoint tag used as a unique identifier for checkpoint. e.g., global_step1")
    parser.add_argument("--exclude_frozen_parameters", action='store_true', help="exclude frozen parameters")
    parser.add_argument("-d", "--debug", action='store_true', help="enable debug")
    args = parser.parse_args()

    debug = args.debug

    convert_zero_checkpoint_to_fp32_state_dict(args.checkpoint_dir,
                                               args.output_dir,
                                               max_shard_size=args.max_shard_size,
                                               safe_serialization=args.safe_serialization,
                                               tag=args.tag,
                                               exclude_frozen_parameters=args.exclude_frozen_parameters)
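
# Example invocations against this repo's layout (illustrative; output paths hypothetical):
#
#   python zero_to_fp32.py checkpoint-260 checkpoint-260-fp32
#   python zero_to_fp32.py checkpoint-260 checkpoint-260-fp32 --safe_serialization -t global_step260
#
# checkpoint-260 contains a 'latest' file naming the global_step260 tag folder, so the -t flag
# is optional here.
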
special_tokens_map.json ADDED
@@ -0,0 +1,17 @@
{
  "bos_token": {
    "content": "<|begin_of_text|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "<|end_of_text|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": "<|end_of_text|>"
}
tokenizer.json ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
size 17209920
tokenizer_config.json ADDED
@@ -0,0 +1,2065 @@
1
+ {
2
+ "added_tokens_decoder": {
3
+ "128000": {
4
+ "content": "<|begin_of_text|>",
5
+ "lstrip": false,
6
+ "normalized": false,
7
+ "rstrip": false,
8
+ "single_word": false,
9
+ "special": true
10
+ },
11
+ "128001": {
12
+ "content": "<|end_of_text|>",
13
+ "lstrip": false,
14
+ "normalized": false,
15
+ "rstrip": false,
16
+ "single_word": false,
17
+ "special": true
18
+ },
19
+ "128002": {
20
+ "content": "<|reserved_special_token_0|>",
21
+ "lstrip": false,
22
+ "normalized": false,
23
+ "rstrip": false,
24
+ "single_word": false,
25
+ "special": true
26
+ },
27
+ "128003": {
28
+ "content": "<|reserved_special_token_1|>",
29
+ "lstrip": false,
30
+ "normalized": false,
31
+ "rstrip": false,
32
+ "single_word": false,
33
+ "special": true
34
+ },
35
+ "128004": {
36
+ "content": "<|finetune_right_pad_id|>",
37
+ "lstrip": false,
38
+ "normalized": false,
39
+ "rstrip": false,
40
+ "single_word": false,
41
+ "special": true
42
+ },
43
+ "128005": {
44
+ "content": "<|reserved_special_token_2|>",
45
+ "lstrip": false,
46
+ "normalized": false,
47
+ "rstrip": false,
48
+ "single_word": false,
49
+ "special": true
50
+ },
51
+ "128006": {
52
+ "content": "<|start_header_id|>",
53
+ "lstrip": false,
54
+ "normalized": false,
55
+ "rstrip": false,
56
+ "single_word": false,
57
+ "special": true
58
+ },
59
+ "128007": {
60
+ "content": "<|end_header_id|>",
61
+ "lstrip": false,
62
+ "normalized": false,
63
+ "rstrip": false,
64
+ "single_word": false,
65
+ "special": true
66
+ },
67
+ "128008": {
68
+ "content": "<|eom_id|>",
69
+ "lstrip": false,
70
+ "normalized": false,
71
+ "rstrip": false,
72
+ "single_word": false,
73
+ "special": true
74
+ },
75
+ "128009": {
76
+ "content": "<|eot_id|>",
77
+ "lstrip": false,
78
+ "normalized": false,
79
+ "rstrip": false,
80
+ "single_word": false,
81
+ "special": true
82
+ },
83
+ "128010": {
84
+ "content": "<|python_tag|>",
85
+ "lstrip": false,
86
+ "normalized": false,
87
+ "rstrip": false,
88
+ "single_word": false,
89
+ "special": true
90
+ },
91
+ "128011": {
92
+ "content": "<|reserved_special_token_3|>",
93
+ "lstrip": false,
94
+ "normalized": false,
95
+ "rstrip": false,
96
+ "single_word": false,
97
+ "special": true
98
+ },
99
+ "128012": {
100
+ "content": "<|reserved_special_token_4|>",
101
+ "lstrip": false,
102
+ "normalized": false,
103
+ "rstrip": false,
104
+ "single_word": false,
105
+ "special": true
106
+ },
107
+ "128013": {
108
+ "content": "<|reserved_special_token_5|>",
109
+ "lstrip": false,
110
+ "normalized": false,
111
+ "rstrip": false,
112
+ "single_word": false,
113
+ "special": true
114
+ },
115
+ "128014": {
116
+ "content": "<|reserved_special_token_6|>",
117
+ "lstrip": false,
118
+ "normalized": false,
119
+ "rstrip": false,
120
+ "single_word": false,
121
+ "special": true
122
+ },
123
+ "128015": {
124
+ "content": "<|reserved_special_token_7|>",
125
+ "lstrip": false,
126
+ "normalized": false,
127
+ "rstrip": false,
128
+ "single_word": false,
129
+ "special": true
130
+ },
131
+ "128016": {
132
+ "content": "<|reserved_special_token_8|>",
133
+ "lstrip": false,
134
+ "normalized": false,
135
+ "rstrip": false,
136
+ "single_word": false,
137
+ "special": true
138
+ },
139
+ "128017": {
140
+ "content": "<|reserved_special_token_9|>",
141
+ "lstrip": false,
142
+ "normalized": false,
143
+ "rstrip": false,
144
+ "single_word": false,
145
+ "special": true
146
+ },
147
+ "128018": {
148
+ "content": "<|reserved_special_token_10|>",
149
+ "lstrip": false,
150
+ "normalized": false,
151
+ "rstrip": false,
152
+ "single_word": false,
153
+ "special": true
154
+ },
155
+ "128019": {
156
+ "content": "<|reserved_special_token_11|>",
157
+ "lstrip": false,
158
+ "normalized": false,
159
+ "rstrip": false,
160
+ "single_word": false,
161
+ "special": true
162
+ },
163
+ "128020": {
164
+ "content": "<|reserved_special_token_12|>",
165
+ "lstrip": false,
166
+ "normalized": false,
167
+ "rstrip": false,
168
+ "single_word": false,
169
+ "special": true
170
+ },
171
+ "128021": {
172
+ "content": "<|reserved_special_token_13|>",
173
+ "lstrip": false,
174
+ "normalized": false,
175
+ "rstrip": false,
176
+ "single_word": false,
177
+ "special": true
178
+ },
179
+ "128022": {
180
+ "content": "<|reserved_special_token_14|>",
181
+ "lstrip": false,
182
+ "normalized": false,
183
+ "rstrip": false,
184
+ "single_word": false,
185
+ "special": true
186
+ },
187
+ "128023": {
188
+ "content": "<|reserved_special_token_15|>",
189
+ "lstrip": false,
190
+ "normalized": false,
191
+ "rstrip": false,
192
+ "single_word": false,
193
+ "special": true
194
+ },
195
+ "128024": {
196
+ "content": "<|reserved_special_token_16|>",
197
+ "lstrip": false,
198
+ "normalized": false,
199
+ "rstrip": false,
200
+ "single_word": false,
201
+ "special": true
202
+ },
203
+ "128025": {
204
+ "content": "<|reserved_special_token_17|>",
205
+ "lstrip": false,
206
+ "normalized": false,
207
+ "rstrip": false,
208
+ "single_word": false,
209
+ "special": true
210
+ },
211
+ "128026": {
212
+ "content": "<|reserved_special_token_18|>",
213
+ "lstrip": false,
214
+ "normalized": false,
215
+ "rstrip": false,
216
+ "single_word": false,
217
+ "special": true
218
+ },
219
+ "128027": {
220
+ "content": "<|reserved_special_token_19|>",
221
+ "lstrip": false,
222
+ "normalized": false,
223
+ "rstrip": false,
224
+ "single_word": false,
225
+ "special": true
226
+ },
227
+ "128028": {
228
+ "content": "<|reserved_special_token_20|>",
229
+ "lstrip": false,
230
+ "normalized": false,
231
+ "rstrip": false,
232
+ "single_word": false,
233
+ "special": true
234
+ },
235
+ "128029": {
236
+ "content": "<|reserved_special_token_21|>",
237
+ "lstrip": false,
238
+ "normalized": false,
239
+ "rstrip": false,
240
+ "single_word": false,
241
+ "special": true
242
+ },
243
+ "128030": {
244
+ "content": "<|reserved_special_token_22|>",
245
+ "lstrip": false,
246
+ "normalized": false,
247
+ "rstrip": false,
248
+ "single_word": false,
249
+ "special": true
250
+ },
251
+ "128031": {
252
+ "content": "<|reserved_special_token_23|>",
253
+ "lstrip": false,
254
+ "normalized": false,
255
+ "rstrip": false,
256
+ "single_word": false,
257
+ "special": true
258
+ },
259
+ "128032": {
260
+ "content": "<|reserved_special_token_24|>",
261
+ "lstrip": false,
262
+ "normalized": false,
263
+ "rstrip": false,
264
+ "single_word": false,
265
+ "special": true
266
+ },
267
+ "128033": {
268
+ "content": "<|reserved_special_token_25|>",
269
+ "lstrip": false,
270
+ "normalized": false,
271
+ "rstrip": false,
272
+ "single_word": false,
273
+ "special": true
274
+ },
275
+ "128034": {
276
+ "content": "<|reserved_special_token_26|>",
277
+ "lstrip": false,
278
+ "normalized": false,
279
+ "rstrip": false,
280
+ "single_word": false,
281
+ "special": true
282
+ },
283
+ "128035": {
284
+ "content": "<|reserved_special_token_27|>",
285
+ "lstrip": false,
286
+ "normalized": false,
287
+ "rstrip": false,
288
+ "single_word": false,
289
+ "special": true
290
+ },
291
+ "128036": {
292
+ "content": "<|reserved_special_token_28|>",
293
+ "lstrip": false,
294
+ "normalized": false,
295
+ "rstrip": false,
296
+ "single_word": false,
297
+ "special": true
298
+ },
299
+ "128037": {
300
+ "content": "<|reserved_special_token_29|>",
301
+ "lstrip": false,
302
+ "normalized": false,
303
+ "rstrip": false,
304
+ "single_word": false,
305
+ "special": true
306
+ },
307
+ "128038": {
308
+ "content": "<|reserved_special_token_30|>",
309
+ "lstrip": false,
310
+ "normalized": false,
311
+ "rstrip": false,
312
+ "single_word": false,
313
+ "special": true
314
+ },
315
+ "128039": {
316
+ "content": "<|reserved_special_token_31|>",
317
+ "lstrip": false,
318
+ "normalized": false,
319
+ "rstrip": false,
320
+ "single_word": false,
321
+ "special": true
322
+ },
323
+ "128040": {
324
+ "content": "<|reserved_special_token_32|>",
325
+ "lstrip": false,
326
+ "normalized": false,
327
+ "rstrip": false,
328
+ "single_word": false,
329
+ "special": true
330
+ },
331
+ "128041": {
332
+ "content": "<|reserved_special_token_33|>",
333
+ "lstrip": false,
334
+ "normalized": false,
335
+ "rstrip": false,
336
+ "single_word": false,
337
+ "special": true
338
+ },
339
+ "128042": {
340
+ "content": "<|reserved_special_token_34|>",
341
+ "lstrip": false,
342
+ "normalized": false,
343
+ "rstrip": false,
344
+ "single_word": false,
345
+ "special": true
346
+ },
347
+ "128043": {
348
+ "content": "<|reserved_special_token_35|>",
349
+ "lstrip": false,
350
+ "normalized": false,
351
+ "rstrip": false,
352
+ "single_word": false,
353
+ "special": true
354
+ },
355
+ "128044": {
356
+ "content": "<|reserved_special_token_36|>",
357
+ "lstrip": false,
358
+ "normalized": false,
359
+ "rstrip": false,
360
+ "single_word": false,
361
+ "special": true
362
+ },
363
+ "128045": {
364
+ "content": "<|reserved_special_token_37|>",
365
+ "lstrip": false,
366
+ "normalized": false,
367
+ "rstrip": false,
368
+ "single_word": false,
369
+ "special": true
370
+ },
371
+ "128046": {
372
+ "content": "<|reserved_special_token_38|>",
373
+ "lstrip": false,
374
+ "normalized": false,
375
+ "rstrip": false,
376
+ "single_word": false,
377
+ "special": true
378
+ },
379
+ "128047": {
380
+ "content": "<|reserved_special_token_39|>",
381
+ "lstrip": false,
382
+ "normalized": false,
383
+ "rstrip": false,
384
+ "single_word": false,
385
+ "special": true
386
+ },
387
+ "128048": {
388
+ "content": "<|reserved_special_token_40|>",
389
+ "lstrip": false,
390
+ "normalized": false,
391
+ "rstrip": false,
392
+ "single_word": false,
393
+ "special": true
394
+ },
395
+ "128049": {
396
+ "content": "<|reserved_special_token_41|>",
397
+ "lstrip": false,
398
+ "normalized": false,
399
+ "rstrip": false,
400
+ "single_word": false,
401
+ "special": true
402
+ },
403
+ "128050": {
404
+ "content": "<|reserved_special_token_42|>",
405
+ "lstrip": false,
406
+ "normalized": false,
407
+ "rstrip": false,
408
+ "single_word": false,
409
+ "special": true
410
+ },
411
+ "128051": {
412
+ "content": "<|reserved_special_token_43|>",
413
+ "lstrip": false,
414
+ "normalized": false,
415
+ "rstrip": false,
416
+ "single_word": false,
417
+ "special": true
418
+ },
419
+ "128052": {
420
+ "content": "<|reserved_special_token_44|>",
421
+ "lstrip": false,
422
+ "normalized": false,
423
+ "rstrip": false,
424
+ "single_word": false,
425
+ "special": true
426
+ },
427
+ "128053": {
428
+ "content": "<|reserved_special_token_45|>",
429
+ "lstrip": false,
430
+ "normalized": false,
431
+ "rstrip": false,
432
+ "single_word": false,
433
+ "special": true
434
+ },
435
+ "128054": {
436
+ "content": "<|reserved_special_token_46|>",
437
+ "lstrip": false,
438
+ "normalized": false,
439
+ "rstrip": false,
440
+ "single_word": false,
441
+ "special": true
442
+ },
443
+ "128055": {
444
+ "content": "<|reserved_special_token_47|>",
445
+ "lstrip": false,
446
+ "normalized": false,
447
+ "rstrip": false,
448
+ "single_word": false,
449
+ "special": true
450
+ },
451
+ "128056": {
452
+ "content": "<|reserved_special_token_48|>",
453
+ "lstrip": false,
454
+ "normalized": false,
455
+ "rstrip": false,
456
+ "single_word": false,
457
+ "special": true
458
+ },
459
+ "128057": {
460
+ "content": "<|reserved_special_token_49|>",
461
+ "lstrip": false,
462
+ "normalized": false,
463
+ "rstrip": false,
464
+ "single_word": false,
465
+ "special": true
466
+ },
467
+ "128058": {
468
+ "content": "<|reserved_special_token_50|>",
469
+ "lstrip": false,
470
+ "normalized": false,
471
+ "rstrip": false,
472
+ "single_word": false,
473
+ "special": true
474
+ },
475
+ "128059": {
476
+ "content": "<|reserved_special_token_51|>",
477
+ "lstrip": false,
478
+ "normalized": false,
479
+ "rstrip": false,
480
+ "single_word": false,
481
+ "special": true
482
+ },
483
+ "128060": {
484
+ "content": "<|reserved_special_token_52|>",
485
+ "lstrip": false,
486
+ "normalized": false,
487
+ "rstrip": false,
488
+ "single_word": false,
489
+ "special": true
490
+ },
491
+ "128061": {
492
+ "content": "<|reserved_special_token_53|>",
493
+ "lstrip": false,
494
+ "normalized": false,
495
+ "rstrip": false,
496
+ "single_word": false,
497
+ "special": true
498
+ },
499
+ "128062": {
500
+ "content": "<|reserved_special_token_54|>",
501
+ "lstrip": false,
502
+ "normalized": false,
503
+ "rstrip": false,
504
+ "single_word": false,
505
+ "special": true
506
+ },
507
+ "128063": {
508
+ "content": "<|reserved_special_token_55|>",
509
+ "lstrip": false,
510
+ "normalized": false,
511
+ "rstrip": false,
512
+ "single_word": false,
513
+ "special": true
514
+ },
515
+ "128064": {
516
+ "content": "<|reserved_special_token_56|>",
517
+ "lstrip": false,
518
+ "normalized": false,
519
+ "rstrip": false,
520
+ "single_word": false,
521
+ "special": true
522
+ },
523
+ "128065": {
524
+ "content": "<|reserved_special_token_57|>",
525
+ "lstrip": false,
526
+ "normalized": false,
527
+ "rstrip": false,
528
+ "single_word": false,
529
+ "special": true
530
+ },
531
+ "128066": {
532
+ "content": "<|reserved_special_token_58|>",
533
+ "lstrip": false,
534
+ "normalized": false,
535
+ "rstrip": false,
536
+ "single_word": false,
537
+ "special": true
538
+ },
539
+ "128067": {
540
+ "content": "<|reserved_special_token_59|>",
541
+ "lstrip": false,
542
+ "normalized": false,
543
+ "rstrip": false,
544
+ "single_word": false,
545
+ "special": true
546
+ },
547
+ "128068": {
548
+ "content": "<|reserved_special_token_60|>",
549
+ "lstrip": false,
550
+ "normalized": false,
551
+ "rstrip": false,
552
+ "single_word": false,
553
+ "special": true
554
+ },
555
+ "128069": {
556
+ "content": "<|reserved_special_token_61|>",
557
+ "lstrip": false,
558
+ "normalized": false,
559
+ "rstrip": false,
560
+ "single_word": false,
561
+ "special": true
562
+ },
563
+ "128070": {
564
+ "content": "<|reserved_special_token_62|>",
565
+ "lstrip": false,
566
+ "normalized": false,
567
+ "rstrip": false,
568
+ "single_word": false,
569
+ "special": true
570
+ },
571
+ "128071": {
572
+ "content": "<|reserved_special_token_63|>",
573
+ "lstrip": false,
574
+ "normalized": false,
575
+ "rstrip": false,
576
+ "single_word": false,
577
+ "special": true
578
+ },
579
+ "128072": {
580
+ "content": "<|reserved_special_token_64|>",
581
+ "lstrip": false,
582
+ "normalized": false,
583
+ "rstrip": false,
584
+ "single_word": false,
585
+ "special": true
586
+ },
587
+ "128073": {
588
+ "content": "<|reserved_special_token_65|>",
589
+ "lstrip": false,
590
+ "normalized": false,
591
+ "rstrip": false,
592
+ "single_word": false,
593
+ "special": true
594
+ },
595
+ "128074": {
596
+ "content": "<|reserved_special_token_66|>",
597
+ "lstrip": false,
598
+ "normalized": false,
599
+ "rstrip": false,
600
+ "single_word": false,
601
+ "special": true
602
+ },
603
+ "128075": {
604
+ "content": "<|reserved_special_token_67|>",
605
+ "lstrip": false,
606
+ "normalized": false,
607
+ "rstrip": false,
608
+ "single_word": false,
609
+ "special": true
610
+ },
611
+ "128076": {
612
+ "content": "<|reserved_special_token_68|>",
613
+ "lstrip": false,
614
+ "normalized": false,
615
+ "rstrip": false,
616
+ "single_word": false,
617
+ "special": true
618
+ },
619
+ "128077": {
620
+ "content": "<|reserved_special_token_69|>",
621
+ "lstrip": false,
622
+ "normalized": false,
623
+ "rstrip": false,
624
+ "single_word": false,
625
+ "special": true
626
+ },
627
+ "128078": {
628
+ "content": "<|reserved_special_token_70|>",
629
+ "lstrip": false,
630
+ "normalized": false,
631
+ "rstrip": false,
632
+ "single_word": false,
633
+ "special": true
634
+ },
635
+ "128079": {
636
+ "content": "<|reserved_special_token_71|>",
637
+ "lstrip": false,
638
+ "normalized": false,
639
+ "rstrip": false,
640
+ "single_word": false,
641
+ "special": true
642
+ },
643
+ "128080": {
644
+ "content": "<|reserved_special_token_72|>",
645
+ "lstrip": false,
646
+ "normalized": false,
647
+ "rstrip": false,
648
+ "single_word": false,
649
+ "special": true
650
+ },
651
+ "128081": {
652
+ "content": "<|reserved_special_token_73|>",
653
+ "lstrip": false,
654
+ "normalized": false,
655
+ "rstrip": false,
656
+ "single_word": false,
657
+ "special": true
658
+ },
659
+ "128082": {
660
+ "content": "<|reserved_special_token_74|>",
661
+ "lstrip": false,
662
+ "normalized": false,
663
+ "rstrip": false,
664
+ "single_word": false,
665
+ "special": true
666
+ },
667
+ "128083": {
668
+ "content": "<|reserved_special_token_75|>",
669
+ "lstrip": false,
670
+ "normalized": false,
671
+ "rstrip": false,
672
+ "single_word": false,
673
+ "special": true
674
+ },
675
+ "128084": {
676
+ "content": "<|reserved_special_token_76|>",
677
+ "lstrip": false,
678
+ "normalized": false,
679
+ "rstrip": false,
680
+ "single_word": false,
681
+ "special": true
682
+ },
683
+ "128085": {
684
+ "content": "<|reserved_special_token_77|>",
685
+ "lstrip": false,
686
+ "normalized": false,
687
+ "rstrip": false,
688
+ "single_word": false,
689
+ "special": true
690
+ },
691
+ "128086": {
692
+ "content": "<|reserved_special_token_78|>",
693
+ "lstrip": false,
694
+ "normalized": false,
695
+ "rstrip": false,
696
+ "single_word": false,
697
+ "special": true
698
+ },
699
+ "128087": {
700
+ "content": "<|reserved_special_token_79|>",
701
+ "lstrip": false,
702
+ "normalized": false,
703
+ "rstrip": false,
704
+ "single_word": false,
705
+ "special": true
706
+ },
707
+ "128088": {
708
+ "content": "<|reserved_special_token_80|>",
709
+ "lstrip": false,
710
+ "normalized": false,
711
+ "rstrip": false,
712
+ "single_word": false,
713
+ "special": true
714
+ },
715
+ "128089": {
716
+ "content": "<|reserved_special_token_81|>",
717
+ "lstrip": false,
718
+ "normalized": false,
719
+ "rstrip": false,
720
+ "single_word": false,
721
+ "special": true
722
+ },
723
+ "128090": {
724
+ "content": "<|reserved_special_token_82|>",
725
+ "lstrip": false,
726
+ "normalized": false,
727
+ "rstrip": false,
728
+ "single_word": false,
729
+ "special": true
730
+ },
731
+ "128091": {
732
+ "content": "<|reserved_special_token_83|>",
733
+ "lstrip": false,
734
+ "normalized": false,
735
+ "rstrip": false,
736
+ "single_word": false,
737
+ "special": true
738
+ },
739
+ "128092": {
740
+ "content": "<|reserved_special_token_84|>",
741
+ "lstrip": false,
742
+ "normalized": false,
743
+ "rstrip": false,
744
+ "single_word": false,
745
+ "special": true
746
+ },
747
+ "128093": {
748
+ "content": "<|reserved_special_token_85|>",
749
+ "lstrip": false,
750
+ "normalized": false,
751
+ "rstrip": false,
752
+ "single_word": false,
753
+ "special": true
754
+ },
755
+ "128094": {
756
+ "content": "<|reserved_special_token_86|>",
757
+ "lstrip": false,
758
+ "normalized": false,
759
+ "rstrip": false,
760
+ "single_word": false,
761
+ "special": true
762
+ },
763
+ "128095": {
764
+ "content": "<|reserved_special_token_87|>",
765
+ "lstrip": false,
766
+ "normalized": false,
767
+ "rstrip": false,
768
+ "single_word": false,
769
+ "special": true
770
+ },
771
+ "128096": {
772
+ "content": "<|reserved_special_token_88|>",
773
+ "lstrip": false,
774
+ "normalized": false,
775
+ "rstrip": false,
776
+ "single_word": false,
777
+ "special": true
778
+ },
779
+ "128097": {
780
+ "content": "<|reserved_special_token_89|>",
781
+ "lstrip": false,
782
+ "normalized": false,
783
+ "rstrip": false,
784
+ "single_word": false,
785
+ "special": true
786
+ },
787
+ "128098": {
788
+ "content": "<|reserved_special_token_90|>",
789
+ "lstrip": false,
790
+ "normalized": false,
791
+ "rstrip": false,
792
+ "single_word": false,
793
+ "special": true
794
+ },
795
+ "128099": {
796
+ "content": "<|reserved_special_token_91|>",
797
+ "lstrip": false,
798
+ "normalized": false,
799
+ "rstrip": false,
800
+ "single_word": false,
801
+ "special": true
802
+ },
803
+ "128100": {
804
+ "content": "<|reserved_special_token_92|>",
805
+ "lstrip": false,
806
+ "normalized": false,
807
+ "rstrip": false,
808
+ "single_word": false,
809
+ "special": true
810
+ },
811
+ "128101": {
812
+ "content": "<|reserved_special_token_93|>",
813
+ "lstrip": false,
814
+ "normalized": false,
815
+ "rstrip": false,
816
+ "single_word": false,
817
+ "special": true
818
+ },
819
+ "128102": {
820
+ "content": "<|reserved_special_token_94|>",
821
+ "lstrip": false,
822
+ "normalized": false,
823
+ "rstrip": false,
824
+ "single_word": false,
825
+ "special": true
826
+ },
827
+ "128103": {
828
+ "content": "<|reserved_special_token_95|>",
829
+ "lstrip": false,
830
+ "normalized": false,
831
+ "rstrip": false,
832
+ "single_word": false,
833
+ "special": true
834
+ },
835
+ "128104": {
836
+ "content": "<|reserved_special_token_96|>",
837
+ "lstrip": false,
838
+ "normalized": false,
839
+ "rstrip": false,
840
+ "single_word": false,
841
+ "special": true
842
+ },
843
+ "128105": {
844
+ "content": "<|reserved_special_token_97|>",
845
+ "lstrip": false,
846
+ "normalized": false,
847
+ "rstrip": false,
848
+ "single_word": false,
849
+ "special": true
850
+ },
851
+ "128106": {
852
+ "content": "<|reserved_special_token_98|>",
853
+ "lstrip": false,
854
+ "normalized": false,
855
+ "rstrip": false,
856
+ "single_word": false,
857
+ "special": true
858
+ },
859
+ "128107": {
860
+ "content": "<|reserved_special_token_99|>",
861
+ "lstrip": false,
862
+ "normalized": false,
863
+ "rstrip": false,
864
+ "single_word": false,
865
+ "special": true
866
+ },
867
+ "128108": {
868
+ "content": "<|reserved_special_token_100|>",
869
+ "lstrip": false,
870
+ "normalized": false,
871
+ "rstrip": false,
872
+ "single_word": false,
873
+ "special": true
874
+ },
875
+ "128109": {
876
+ "content": "<|reserved_special_token_101|>",
877
+ "lstrip": false,
878
+ "normalized": false,
879
+ "rstrip": false,
880
+ "single_word": false,
881
+ "special": true
882
+ },
883
+ "128110": {
884
+ "content": "<|reserved_special_token_102|>",
885
+ "lstrip": false,
886
+ "normalized": false,
887
+ "rstrip": false,
888
+ "single_word": false,
889
+ "special": true
890
+ },
891
+ "128111": {
892
+ "content": "<|reserved_special_token_103|>",
893
+ "lstrip": false,
894
+ "normalized": false,
895
+ "rstrip": false,
896
+ "single_word": false,
897
+ "special": true
898
+ },
899
+ "128112": {
900
+ "content": "<|reserved_special_token_104|>",
901
+ "lstrip": false,
902
+ "normalized": false,
903
+ "rstrip": false,
904
+ "single_word": false,
905
+ "special": true
906
+ },
907
+ "128113": {
908
+ "content": "<|reserved_special_token_105|>",
909
+ "lstrip": false,
910
+ "normalized": false,
911
+ "rstrip": false,
912
+ "single_word": false,
913
+ "special": true
914
+ },
915
+ "128114": {
916
+ "content": "<|reserved_special_token_106|>",
917
+ "lstrip": false,
918
+ "normalized": false,
919
+ "rstrip": false,
920
+ "single_word": false,
921
+ "special": true
922
+ },
923
+ "128115": {
924
+ "content": "<|reserved_special_token_107|>",
925
+ "lstrip": false,
926
+ "normalized": false,
927
+ "rstrip": false,
928
+ "single_word": false,
929
+ "special": true
930
+ },
931
+ "128116": {
932
+ "content": "<|reserved_special_token_108|>",
933
+ "lstrip": false,
934
+ "normalized": false,
935
+ "rstrip": false,
936
+ "single_word": false,
937
+ "special": true
938
+ },
939
+ "128117": {
940
+ "content": "<|reserved_special_token_109|>",
941
+ "lstrip": false,
942
+ "normalized": false,
943
+ "rstrip": false,
944
+ "single_word": false,
945
+ "special": true
946
+ },
947
+ "128118": {
948
+ "content": "<|reserved_special_token_110|>",
949
+ "lstrip": false,
950
+ "normalized": false,
951
+ "rstrip": false,
952
+ "single_word": false,
953
+ "special": true
954
+ },
955
+ "128119": {
956
+ "content": "<|reserved_special_token_111|>",
957
+ "lstrip": false,
958
+ "normalized": false,
959
+ "rstrip": false,
960
+ "single_word": false,
961
+ "special": true
962
+ },
963
+ "128120": {
964
+ "content": "<|reserved_special_token_112|>",
965
+ "lstrip": false,
966
+ "normalized": false,
967
+ "rstrip": false,
968
+ "single_word": false,
969
+ "special": true
970
+ },
971
+ "128121": {
972
+ "content": "<|reserved_special_token_113|>",
973
+ "lstrip": false,
974
+ "normalized": false,
975
+ "rstrip": false,
976
+ "single_word": false,
977
+ "special": true
978
+ },
979
+ "128122": {
980
+ "content": "<|reserved_special_token_114|>",
981
+ "lstrip": false,
982
+ "normalized": false,
983
+ "rstrip": false,
984
+ "single_word": false,
985
+ "special": true
986
+ },
987
+ "128123": {
988
+ "content": "<|reserved_special_token_115|>",
989
+ "lstrip": false,
990
+ "normalized": false,
991
+ "rstrip": false,
992
+ "single_word": false,
993
+ "special": true
994
+ },
995
+ "128124": {
996
+ "content": "<|reserved_special_token_116|>",
997
+ "lstrip": false,
998
+ "normalized": false,
999
+ "rstrip": false,
1000
+ "single_word": false,
1001
+ "special": true
1002
+ },
1003
+ "128125": {
1004
+ "content": "<|reserved_special_token_117|>",
1005
+ "lstrip": false,
1006
+ "normalized": false,
1007
+ "rstrip": false,
1008
+ "single_word": false,
1009
+ "special": true
1010
+ },
1011
+ "128126": {
1012
+ "content": "<|reserved_special_token_118|>",
1013
+ "lstrip": false,
1014
+ "normalized": false,
1015
+ "rstrip": false,
1016
+ "single_word": false,
1017
+ "special": true
1018
+ },
1019
+ "128127": {
1020
+ "content": "<|reserved_special_token_119|>",
1021
+ "lstrip": false,
1022
+ "normalized": false,
1023
+ "rstrip": false,
1024
+ "single_word": false,
1025
+ "special": true
1026
+ },
1027
+ "128128": {
1028
+ "content": "<|reserved_special_token_120|>",
1029
+ "lstrip": false,
1030
+ "normalized": false,
1031
+ "rstrip": false,
1032
+ "single_word": false,
1033
+ "special": true
1034
+ },
1035
+ "128129": {
1036
+ "content": "<|reserved_special_token_121|>",
1037
+ "lstrip": false,
1038
+ "normalized": false,
1039
+ "rstrip": false,
1040
+ "single_word": false,
1041
+ "special": true
1042
+ },
1043
+ "128130": {
1044
+ "content": "<|reserved_special_token_122|>",
1045
+ "lstrip": false,
1046
+ "normalized": false,
1047
+ "rstrip": false,
1048
+ "single_word": false,
1049
+ "special": true
1050
+ },
1051
+ "128131": {
1052
+ "content": "<|reserved_special_token_123|>",
1053
+ "lstrip": false,
1054
+ "normalized": false,
1055
+ "rstrip": false,
1056
+ "single_word": false,
1057
+ "special": true
1058
+ },
1059
+ "128132": {
1060
+ "content": "<|reserved_special_token_124|>",
1061
+ "lstrip": false,
1062
+ "normalized": false,
1063
+ "rstrip": false,
1064
+ "single_word": false,
1065
+ "special": true
1066
+ },
1067
+ "128133": {
1068
+ "content": "<|reserved_special_token_125|>",
1069
+ "lstrip": false,
1070
+ "normalized": false,
1071
+ "rstrip": false,
1072
+ "single_word": false,
1073
+ "special": true
1074
+ },
1075
+ "128134": {
1076
+ "content": "<|reserved_special_token_126|>",
1077
+ "lstrip": false,
1078
+ "normalized": false,
1079
+ "rstrip": false,
1080
+ "single_word": false,
1081
+ "special": true
1082
+ },
1083
+ "128135": {
1084
+ "content": "<|reserved_special_token_127|>",
1085
+ "lstrip": false,
1086
+ "normalized": false,
1087
+ "rstrip": false,
1088
+ "single_word": false,
1089
+ "special": true
1090
+ },
1091
+ "128136": {
1092
+ "content": "<|reserved_special_token_128|>",
1093
+ "lstrip": false,
1094
+ "normalized": false,
1095
+ "rstrip": false,
1096
+ "single_word": false,
1097
+ "special": true
1098
+ },
1099
+ "128137": {
1100
+ "content": "<|reserved_special_token_129|>",
1101
+ "lstrip": false,
1102
+ "normalized": false,
1103
+ "rstrip": false,
1104
+ "single_word": false,
1105
+ "special": true
1106
+ },
1107
+ "128138": {
1108
+ "content": "<|reserved_special_token_130|>",
1109
+ "lstrip": false,
1110
+ "normalized": false,
1111
+ "rstrip": false,
1112
+ "single_word": false,
1113
+ "special": true
1114
+ },
1115
+ "128139": {
1116
+ "content": "<|reserved_special_token_131|>",
1117
+ "lstrip": false,
1118
+ "normalized": false,
1119
+ "rstrip": false,
1120
+ "single_word": false,
1121
+ "special": true
1122
+ },
1123
+ "128140": {
1124
+ "content": "<|reserved_special_token_132|>",
1125
+ "lstrip": false,
1126
+ "normalized": false,
1127
+ "rstrip": false,
1128
+ "single_word": false,
1129
+ "special": true
1130
+ },
1131
+ "128141": {
1132
+ "content": "<|reserved_special_token_133|>",
1133
+ "lstrip": false,
1134
+ "normalized": false,
1135
+ "rstrip": false,
1136
+ "single_word": false,
1137
+ "special": true
1138
+ },
1139
+ "128142": {
1140
+ "content": "<|reserved_special_token_134|>",
1141
+ "lstrip": false,
1142
+ "normalized": false,
1143
+ "rstrip": false,
1144
+ "single_word": false,
1145
+ "special": true
1146
+ },
1147
+ "128143": {
1148
+ "content": "<|reserved_special_token_135|>",
1149
+ "lstrip": false,
1150
+ "normalized": false,
1151
+ "rstrip": false,
1152
+ "single_word": false,
1153
+ "special": true
1154
+ },
1155
+ "128144": {
1156
+ "content": "<|reserved_special_token_136|>",
1157
+ "lstrip": false,
1158
+ "normalized": false,
1159
+ "rstrip": false,
1160
+ "single_word": false,
1161
+ "special": true
1162
+ },
1163
+ "128145": {
1164
+ "content": "<|reserved_special_token_137|>",
1165
+ "lstrip": false,
1166
+ "normalized": false,
1167
+ "rstrip": false,
1168
+ "single_word": false,
1169
+ "special": true
1170
+ },
1171
+ "128146": {
1172
+ "content": "<|reserved_special_token_138|>",
1173
+ "lstrip": false,
1174
+ "normalized": false,
1175
+ "rstrip": false,
1176
+ "single_word": false,
1177
+ "special": true
1178
+ },
1179
+ "128147": {
1180
+ "content": "<|reserved_special_token_139|>",
1181
+ "lstrip": false,
1182
+ "normalized": false,
1183
+ "rstrip": false,
1184
+ "single_word": false,
1185
+ "special": true
1186
+ },
1187
+ "128148": {
1188
+ "content": "<|reserved_special_token_140|>",
1189
+ "lstrip": false,
1190
+ "normalized": false,
1191
+ "rstrip": false,
1192
+ "single_word": false,
1193
+ "special": true
1194
+ },
1195
+ "128149": {
1196
+ "content": "<|reserved_special_token_141|>",
1197
+ "lstrip": false,
1198
+ "normalized": false,
1199
+ "rstrip": false,
1200
+ "single_word": false,
1201
+ "special": true
1202
+ },
1203
+ "128150": {
1204
+ "content": "<|reserved_special_token_142|>",
1205
+ "lstrip": false,
1206
+ "normalized": false,
1207
+ "rstrip": false,
1208
+ "single_word": false,
1209
+ "special": true
1210
+ },
1211
+ "128151": {
1212
+ "content": "<|reserved_special_token_143|>",
1213
+ "lstrip": false,
1214
+ "normalized": false,
1215
+ "rstrip": false,
1216
+ "single_word": false,
1217
+ "special": true
1218
+ },
1219
+ "128152": {
1220
+ "content": "<|reserved_special_token_144|>",
1221
+ "lstrip": false,
1222
+ "normalized": false,
1223
+ "rstrip": false,
1224
+ "single_word": false,
1225
+ "special": true
1226
+ },
1227
+ "128153": {
1228
+ "content": "<|reserved_special_token_145|>",
1229
+ "lstrip": false,
1230
+ "normalized": false,
1231
+ "rstrip": false,
1232
+ "single_word": false,
1233
+ "special": true
1234
+ },
1235
+ "128154": {
1236
+ "content": "<|reserved_special_token_146|>",
1237
+ "lstrip": false,
1238
+ "normalized": false,
1239
+ "rstrip": false,
1240
+ "single_word": false,
1241
+ "special": true
1242
+ },
1243
+ "128155": {
1244
+ "content": "<|reserved_special_token_147|>",
1245
+ "lstrip": false,
1246
+ "normalized": false,
1247
+ "rstrip": false,
1248
+ "single_word": false,
1249
+ "special": true
1250
+ },
1251
+ "128156": {
1252
+ "content": "<|reserved_special_token_148|>",
1253
+ "lstrip": false,
1254
+ "normalized": false,
1255
+ "rstrip": false,
1256
+ "single_word": false,
1257
+ "special": true
1258
+ },
1259
+ "128157": {
1260
+ "content": "<|reserved_special_token_149|>",
1261
+ "lstrip": false,
1262
+ "normalized": false,
1263
+ "rstrip": false,
1264
+ "single_word": false,
1265
+ "special": true
1266
+ },
1267
+ "128158": {
1268
+ "content": "<|reserved_special_token_150|>",
1269
+ "lstrip": false,
1270
+ "normalized": false,
1271
+ "rstrip": false,
1272
+ "single_word": false,
1273
+ "special": true
1274
+ },
1275
+ "128159": {
1276
+ "content": "<|reserved_special_token_151|>",
1277
+ "lstrip": false,
1278
+ "normalized": false,
1279
+ "rstrip": false,
1280
+ "single_word": false,
1281
+ "special": true
1282
+ },
1283
+ "128160": {
1284
+ "content": "<|reserved_special_token_152|>",
1285
+ "lstrip": false,
1286
+ "normalized": false,
1287
+ "rstrip": false,
1288
+ "single_word": false,
1289
+ "special": true
1290
+ },
1291
+ "128161": {
1292
+ "content": "<|reserved_special_token_153|>",
1293
+ "lstrip": false,
1294
+ "normalized": false,
1295
+ "rstrip": false,
1296
+ "single_word": false,
1297
+ "special": true
1298
+ },
1299
+ "128162": {
1300
+ "content": "<|reserved_special_token_154|>",
1301
+ "lstrip": false,
1302
+ "normalized": false,
1303
+ "rstrip": false,
1304
+ "single_word": false,
1305
+ "special": true
1306
+ },
1307
+ "128163": {
1308
+ "content": "<|reserved_special_token_155|>",
1309
+ "lstrip": false,
1310
+ "normalized": false,
1311
+ "rstrip": false,
1312
+ "single_word": false,
1313
+ "special": true
1314
+ },
1315
+ "128164": {
1316
+ "content": "<|reserved_special_token_156|>",
1317
+ "lstrip": false,
1318
+ "normalized": false,
1319
+ "rstrip": false,
1320
+ "single_word": false,
1321
+ "special": true
1322
+ },
1323
+ "128165": {
1324
+ "content": "<|reserved_special_token_157|>",
1325
+ "lstrip": false,
1326
+ "normalized": false,
1327
+ "rstrip": false,
1328
+ "single_word": false,
1329
+ "special": true
1330
+ },
1331
+ "128166": {
1332
+ "content": "<|reserved_special_token_158|>",
1333
+ "lstrip": false,
1334
+ "normalized": false,
1335
+ "rstrip": false,
1336
+ "single_word": false,
1337
+ "special": true
1338
+ },
1339
+ "128167": {
1340
+ "content": "<|reserved_special_token_159|>",
1341
+ "lstrip": false,
1342
+ "normalized": false,
1343
+ "rstrip": false,
1344
+ "single_word": false,
1345
+ "special": true
1346
+ },
1347
+ "128168": {
1348
+ "content": "<|reserved_special_token_160|>",
1349
+ "lstrip": false,
1350
+ "normalized": false,
1351
+ "rstrip": false,
1352
+ "single_word": false,
1353
+ "special": true
1354
+ },
1355
+ "128169": {
1356
+ "content": "<|reserved_special_token_161|>",
1357
+ "lstrip": false,
1358
+ "normalized": false,
1359
+ "rstrip": false,
1360
+ "single_word": false,
1361
+ "special": true
1362
+ },
1363
+ "128170": {
1364
+ "content": "<|reserved_special_token_162|>",
1365
+ "lstrip": false,
1366
+ "normalized": false,
1367
+ "rstrip": false,
1368
+ "single_word": false,
1369
+ "special": true
1370
+ },
1371
+ "128171": {
1372
+ "content": "<|reserved_special_token_163|>",
1373
+ "lstrip": false,
1374
+ "normalized": false,
1375
+ "rstrip": false,
1376
+ "single_word": false,
1377
+ "special": true
1378
+ },
1379
+ "128172": {
1380
+ "content": "<|reserved_special_token_164|>",
1381
+ "lstrip": false,
1382
+ "normalized": false,
1383
+ "rstrip": false,
1384
+ "single_word": false,
1385
+ "special": true
1386
+ },
1387
+ "128173": {
1388
+ "content": "<|reserved_special_token_165|>",
1389
+ "lstrip": false,
1390
+ "normalized": false,
1391
+ "rstrip": false,
1392
+ "single_word": false,
1393
+ "special": true
1394
+ },
1395
+ "128174": {
1396
+ "content": "<|reserved_special_token_166|>",
1397
+ "lstrip": false,
1398
+ "normalized": false,
1399
+ "rstrip": false,
1400
+ "single_word": false,
1401
+ "special": true
1402
+ },
1403
+ "128175": {
1404
+ "content": "<|reserved_special_token_167|>",
1405
+ "lstrip": false,
1406
+ "normalized": false,
1407
+ "rstrip": false,
1408
+ "single_word": false,
1409
+ "special": true
1410
+ },
1411
+ "128176": {
1412
+ "content": "<|reserved_special_token_168|>",
1413
+ "lstrip": false,
1414
+ "normalized": false,
1415
+ "rstrip": false,
1416
+ "single_word": false,
1417
+ "special": true
1418
+ },
1419
+ "128177": {
1420
+ "content": "<|reserved_special_token_169|>",
1421
+ "lstrip": false,
1422
+ "normalized": false,
1423
+ "rstrip": false,
1424
+ "single_word": false,
1425
+ "special": true
1426
+ },
1427
+ "128178": {
1428
+ "content": "<|reserved_special_token_170|>",
1429
+ "lstrip": false,
1430
+ "normalized": false,
1431
+ "rstrip": false,
1432
+ "single_word": false,
1433
+ "special": true
1434
+ },
1435
+ "128179": {
1436
+ "content": "<|reserved_special_token_171|>",
1437
+ "lstrip": false,
1438
+ "normalized": false,
1439
+ "rstrip": false,
1440
+ "single_word": false,
1441
+ "special": true
1442
+ },
1443
+ "128180": {
1444
+ "content": "<|reserved_special_token_172|>",
1445
+ "lstrip": false,
1446
+ "normalized": false,
1447
+ "rstrip": false,
1448
+ "single_word": false,
1449
+ "special": true
1450
+ },
1451
+ "128181": {
1452
+ "content": "<|reserved_special_token_173|>",
1453
+ "lstrip": false,
1454
+ "normalized": false,
1455
+ "rstrip": false,
1456
+ "single_word": false,
1457
+ "special": true
1458
+ },
1459
+ "128182": {
1460
+ "content": "<|reserved_special_token_174|>",
1461
+ "lstrip": false,
1462
+ "normalized": false,
1463
+ "rstrip": false,
1464
+ "single_word": false,
1465
+ "special": true
1466
+ },
1467
+ "128183": {
1468
+ "content": "<|reserved_special_token_175|>",
1469
+ "lstrip": false,
1470
+ "normalized": false,
1471
+ "rstrip": false,
1472
+ "single_word": false,
1473
+ "special": true
1474
+ },
1475
+ "128184": {
1476
+ "content": "<|reserved_special_token_176|>",
1477
+ "lstrip": false,
1478
+ "normalized": false,
1479
+ "rstrip": false,
1480
+ "single_word": false,
1481
+ "special": true
1482
+ },
1483
+ "128185": {
1484
+ "content": "<|reserved_special_token_177|>",
1485
+ "lstrip": false,
1486
+ "normalized": false,
1487
+ "rstrip": false,
1488
+ "single_word": false,
1489
+ "special": true
1490
+ },
1491
+ "128186": {
1492
+ "content": "<|reserved_special_token_178|>",
1493
+ "lstrip": false,
1494
+ "normalized": false,
1495
+ "rstrip": false,
1496
+ "single_word": false,
1497
+ "special": true
1498
+ },
1499
+ "128187": {
1500
+ "content": "<|reserved_special_token_179|>",
1501
+ "lstrip": false,
1502
+ "normalized": false,
1503
+ "rstrip": false,
1504
+ "single_word": false,
1505
+ "special": true
1506
+ },
1507
+ "128188": {
1508
+ "content": "<|reserved_special_token_180|>",
1509
+ "lstrip": false,
1510
+ "normalized": false,
1511
+ "rstrip": false,
1512
+ "single_word": false,
1513
+ "special": true
1514
+ },
1515
+ "128189": {
1516
+ "content": "<|reserved_special_token_181|>",
1517
+ "lstrip": false,
1518
+ "normalized": false,
1519
+ "rstrip": false,
1520
+ "single_word": false,
1521
+ "special": true
1522
+ },
1523
+ "128190": {
1524
+ "content": "<|reserved_special_token_182|>",
1525
+ "lstrip": false,
1526
+ "normalized": false,
1527
+ "rstrip": false,
1528
+ "single_word": false,
1529
+ "special": true
1530
+ },
1531
+ "128191": {
1532
+ "content": "<|reserved_special_token_183|>",
1533
+ "lstrip": false,
1534
+ "normalized": false,
1535
+ "rstrip": false,
1536
+ "single_word": false,
1537
+ "special": true
1538
+ },
1539
+ "128192": {
1540
+ "content": "<|reserved_special_token_184|>",
1541
+ "lstrip": false,
1542
+ "normalized": false,
1543
+ "rstrip": false,
1544
+ "single_word": false,
1545
+ "special": true
1546
+ },
1547
+ "128193": {
1548
+ "content": "<|reserved_special_token_185|>",
1549
+ "lstrip": false,
1550
+ "normalized": false,
1551
+ "rstrip": false,
1552
+ "single_word": false,
1553
+ "special": true
1554
+ },
1555
+ "128194": {
1556
+ "content": "<|reserved_special_token_186|>",
1557
+ "lstrip": false,
1558
+ "normalized": false,
1559
+ "rstrip": false,
1560
+ "single_word": false,
1561
+ "special": true
1562
+ },
1563
+ "128195": {
1564
+ "content": "<|reserved_special_token_187|>",
1565
+ "lstrip": false,
1566
+ "normalized": false,
1567
+ "rstrip": false,
1568
+ "single_word": false,
1569
+ "special": true
1570
+ },
1571
+ "128196": {
1572
+ "content": "<|reserved_special_token_188|>",
1573
+ "lstrip": false,
1574
+ "normalized": false,
1575
+ "rstrip": false,
1576
+ "single_word": false,
1577
+ "special": true
1578
+ },
1579
+ "128197": {
1580
+ "content": "<|reserved_special_token_189|>",
1581
+ "lstrip": false,
1582
+ "normalized": false,
1583
+ "rstrip": false,
1584
+ "single_word": false,
1585
+ "special": true
1586
+ },
1587
+ "128198": {
1588
+ "content": "<|reserved_special_token_190|>",
1589
+ "lstrip": false,
1590
+ "normalized": false,
1591
+ "rstrip": false,
1592
+ "single_word": false,
1593
+ "special": true
1594
+ },
1595
+ "128199": {
1596
+ "content": "<|reserved_special_token_191|>",
1597
+ "lstrip": false,
1598
+ "normalized": false,
1599
+ "rstrip": false,
1600
+ "single_word": false,
1601
+ "special": true
1602
+ },
1603
+ "128200": {
1604
+ "content": "<|reserved_special_token_192|>",
1605
+ "lstrip": false,
1606
+ "normalized": false,
1607
+ "rstrip": false,
1608
+ "single_word": false,
1609
+ "special": true
1610
+ },
1611
+ "128201": {
1612
+ "content": "<|reserved_special_token_193|>",
1613
+ "lstrip": false,
1614
+ "normalized": false,
1615
+ "rstrip": false,
1616
+ "single_word": false,
1617
+ "special": true
1618
+ },
1619
+ "128202": {
1620
+ "content": "<|reserved_special_token_194|>",
1621
+ "lstrip": false,
1622
+ "normalized": false,
1623
+ "rstrip": false,
1624
+ "single_word": false,
1625
+ "special": true
1626
+ },
1627
+ "128203": {
1628
+ "content": "<|reserved_special_token_195|>",
1629
+ "lstrip": false,
1630
+ "normalized": false,
1631
+ "rstrip": false,
1632
+ "single_word": false,
1633
+ "special": true
1634
+ },
1635
+ "128204": {
1636
+ "content": "<|reserved_special_token_196|>",
1637
+ "lstrip": false,
1638
+ "normalized": false,
1639
+ "rstrip": false,
1640
+ "single_word": false,
1641
+ "special": true
1642
+ },
1643
+ "128205": {
1644
+ "content": "<|reserved_special_token_197|>",
1645
+ "lstrip": false,
1646
+ "normalized": false,
1647
+ "rstrip": false,
1648
+ "single_word": false,
1649
+ "special": true
1650
+ },
1651
+ "128206": {
1652
+ "content": "<|reserved_special_token_198|>",
1653
+ "lstrip": false,
1654
+ "normalized": false,
1655
+ "rstrip": false,
1656
+ "single_word": false,
1657
+ "special": true
1658
+ },
1659
+ "128207": {
1660
+ "content": "<|reserved_special_token_199|>",
1661
+ "lstrip": false,
1662
+ "normalized": false,
1663
+ "rstrip": false,
1664
+ "single_word": false,
1665
+ "special": true
1666
+ },
1667
+ "128208": {
1668
+ "content": "<|reserved_special_token_200|>",
1669
+ "lstrip": false,
1670
+ "normalized": false,
1671
+ "rstrip": false,
1672
+ "single_word": false,
1673
+ "special": true
1674
+ },
1675
+ "128209": {
1676
+ "content": "<|reserved_special_token_201|>",
1677
+ "lstrip": false,
1678
+ "normalized": false,
1679
+ "rstrip": false,
1680
+ "single_word": false,
1681
+ "special": true
1682
+ },
1683
+ "128210": {
1684
+ "content": "<|reserved_special_token_202|>",
1685
+ "lstrip": false,
1686
+ "normalized": false,
1687
+ "rstrip": false,
1688
+ "single_word": false,
1689
+ "special": true
1690
+ },
1691
+ "128211": {
1692
+ "content": "<|reserved_special_token_203|>",
1693
+ "lstrip": false,
1694
+ "normalized": false,
1695
+ "rstrip": false,
1696
+ "single_word": false,
1697
+ "special": true
1698
+ },
1699
+ "128212": {
1700
+ "content": "<|reserved_special_token_204|>",
1701
+ "lstrip": false,
1702
+ "normalized": false,
1703
+ "rstrip": false,
1704
+ "single_word": false,
1705
+ "special": true
1706
+ },
1707
+ "128213": {
1708
+ "content": "<|reserved_special_token_205|>",
1709
+ "lstrip": false,
1710
+ "normalized": false,
1711
+ "rstrip": false,
1712
+ "single_word": false,
1713
+ "special": true
1714
+ },
1715
+ "128214": {
1716
+ "content": "<|reserved_special_token_206|>",
1717
+ "lstrip": false,
1718
+ "normalized": false,
1719
+ "rstrip": false,
1720
+ "single_word": false,
1721
+ "special": true
1722
+ },
1723
+ "128215": {
1724
+ "content": "<|reserved_special_token_207|>",
1725
+ "lstrip": false,
1726
+ "normalized": false,
1727
+ "rstrip": false,
1728
+ "single_word": false,
1729
+ "special": true
1730
+ },
1731
+ "128216": {
1732
+ "content": "<|reserved_special_token_208|>",
1733
+ "lstrip": false,
1734
+ "normalized": false,
1735
+ "rstrip": false,
1736
+ "single_word": false,
1737
+ "special": true
1738
+ },
1739
+ "128217": {
1740
+ "content": "<|reserved_special_token_209|>",
1741
+ "lstrip": false,
1742
+ "normalized": false,
1743
+ "rstrip": false,
1744
+ "single_word": false,
1745
+ "special": true
1746
+ },
1747
+ "128218": {
1748
+ "content": "<|reserved_special_token_210|>",
1749
+ "lstrip": false,
1750
+ "normalized": false,
1751
+ "rstrip": false,
1752
+ "single_word": false,
1753
+ "special": true
1754
+ },
1755
+ "128219": {
1756
+ "content": "<|reserved_special_token_211|>",
1757
+ "lstrip": false,
1758
+ "normalized": false,
1759
+ "rstrip": false,
1760
+ "single_word": false,
1761
+ "special": true
1762
+ },
1763
+ "128220": {
1764
+ "content": "<|reserved_special_token_212|>",
1765
+ "lstrip": false,
1766
+ "normalized": false,
1767
+ "rstrip": false,
1768
+ "single_word": false,
1769
+ "special": true
1770
+ },
1771
+ "128221": {
1772
+ "content": "<|reserved_special_token_213|>",
1773
+ "lstrip": false,
1774
+ "normalized": false,
1775
+ "rstrip": false,
1776
+ "single_word": false,
1777
+ "special": true
1778
+ },
1779
+ "128222": {
1780
+ "content": "<|reserved_special_token_214|>",
1781
+ "lstrip": false,
1782
+ "normalized": false,
1783
+ "rstrip": false,
1784
+ "single_word": false,
1785
+ "special": true
1786
+ },
1787
+ "128223": {
1788
+ "content": "<|reserved_special_token_215|>",
1789
+ "lstrip": false,
1790
+ "normalized": false,
1791
+ "rstrip": false,
1792
+ "single_word": false,
1793
+ "special": true
1794
+ },
1795
+ "128224": {
1796
+ "content": "<|reserved_special_token_216|>",
1797
+ "lstrip": false,
1798
+ "normalized": false,
1799
+ "rstrip": false,
1800
+ "single_word": false,
1801
+ "special": true
1802
+ },
1803
+ "128225": {
1804
+ "content": "<|reserved_special_token_217|>",
1805
+ "lstrip": false,
1806
+ "normalized": false,
1807
+ "rstrip": false,
1808
+ "single_word": false,
1809
+ "special": true
1810
+ },
1811
+ "128226": {
1812
+ "content": "<|reserved_special_token_218|>",
1813
+ "lstrip": false,
1814
+ "normalized": false,
1815
+ "rstrip": false,
1816
+ "single_word": false,
1817
+ "special": true
1818
+ },
1819
+ "128227": {
1820
+ "content": "<|reserved_special_token_219|>",
1821
+ "lstrip": false,
1822
+ "normalized": false,
1823
+ "rstrip": false,
1824
+ "single_word": false,
1825
+ "special": true
1826
+ },
1827
+ "128228": {
1828
+ "content": "<|reserved_special_token_220|>",
1829
+ "lstrip": false,
1830
+ "normalized": false,
1831
+ "rstrip": false,
1832
+ "single_word": false,
1833
+ "special": true
1834
+ },
1835
+ "128229": {
1836
+ "content": "<|reserved_special_token_221|>",
1837
+ "lstrip": false,
1838
+ "normalized": false,
1839
+ "rstrip": false,
1840
+ "single_word": false,
1841
+ "special": true
1842
+ },
1843
+ "128230": {
1844
+ "content": "<|reserved_special_token_222|>",
1845
+ "lstrip": false,
1846
+ "normalized": false,
1847
+ "rstrip": false,
1848
+ "single_word": false,
1849
+ "special": true
1850
+ },
1851
+ "128231": {
1852
+ "content": "<|reserved_special_token_223|>",
1853
+ "lstrip": false,
1854
+ "normalized": false,
1855
+ "rstrip": false,
1856
+ "single_word": false,
1857
+ "special": true
1858
+ },
1859
+ "128232": {
1860
+ "content": "<|reserved_special_token_224|>",
1861
+ "lstrip": false,
1862
+ "normalized": false,
1863
+ "rstrip": false,
1864
+ "single_word": false,
1865
+ "special": true
1866
+ },
1867
+ "128233": {
1868
+ "content": "<|reserved_special_token_225|>",
1869
+ "lstrip": false,
1870
+ "normalized": false,
1871
+ "rstrip": false,
1872
+ "single_word": false,
1873
+ "special": true
1874
+ },
1875
+ "128234": {
1876
+ "content": "<|reserved_special_token_226|>",
1877
+ "lstrip": false,
1878
+ "normalized": false,
1879
+ "rstrip": false,
1880
+ "single_word": false,
1881
+ "special": true
1882
+ },
1883
+ "128235": {
1884
+ "content": "<|reserved_special_token_227|>",
1885
+ "lstrip": false,
1886
+ "normalized": false,
1887
+ "rstrip": false,
1888
+ "single_word": false,
1889
+ "special": true
1890
+ },
1891
+ "128236": {
1892
+ "content": "<|reserved_special_token_228|>",
1893
+ "lstrip": false,
1894
+ "normalized": false,
1895
+ "rstrip": false,
1896
+ "single_word": false,
1897
+ "special": true
1898
+ },
1899
+ "128237": {
1900
+ "content": "<|reserved_special_token_229|>",
1901
+ "lstrip": false,
1902
+ "normalized": false,
1903
+ "rstrip": false,
1904
+ "single_word": false,
1905
+ "special": true
1906
+ },
1907
+ "128238": {
1908
+ "content": "<|reserved_special_token_230|>",
1909
+ "lstrip": false,
1910
+ "normalized": false,
1911
+ "rstrip": false,
1912
+ "single_word": false,
1913
+ "special": true
1914
+ },
1915
+ "128239": {
1916
+ "content": "<|reserved_special_token_231|>",
1917
+ "lstrip": false,
1918
+ "normalized": false,
1919
+ "rstrip": false,
1920
+ "single_word": false,
1921
+ "special": true
1922
+ },
1923
+ "128240": {
1924
+ "content": "<|reserved_special_token_232|>",
1925
+ "lstrip": false,
1926
+ "normalized": false,
1927
+ "rstrip": false,
1928
+ "single_word": false,
1929
+ "special": true
1930
+ },
1931
+ "128241": {
1932
+ "content": "<|reserved_special_token_233|>",
1933
+ "lstrip": false,
1934
+ "normalized": false,
1935
+ "rstrip": false,
1936
+ "single_word": false,
1937
+ "special": true
1938
+ },
1939
+ "128242": {
1940
+ "content": "<|reserved_special_token_234|>",
1941
+ "lstrip": false,
1942
+ "normalized": false,
1943
+ "rstrip": false,
1944
+ "single_word": false,
1945
+ "special": true
1946
+ },
1947
+ "128243": {
1948
+ "content": "<|reserved_special_token_235|>",
1949
+ "lstrip": false,
1950
+ "normalized": false,
1951
+ "rstrip": false,
1952
+ "single_word": false,
1953
+ "special": true
1954
+ },
1955
+ "128244": {
1956
+ "content": "<|reserved_special_token_236|>",
1957
+ "lstrip": false,
1958
+ "normalized": false,
1959
+ "rstrip": false,
1960
+ "single_word": false,
1961
+ "special": true
1962
+ },
1963
+ "128245": {
1964
+ "content": "<|reserved_special_token_237|>",
1965
+ "lstrip": false,
1966
+ "normalized": false,
1967
+ "rstrip": false,
1968
+ "single_word": false,
1969
+ "special": true
1970
+ },
1971
+ "128246": {
1972
+ "content": "<|reserved_special_token_238|>",
1973
+ "lstrip": false,
1974
+ "normalized": false,
1975
+ "rstrip": false,
1976
+ "single_word": false,
1977
+ "special": true
1978
+ },
1979
+ "128247": {
1980
+ "content": "<|reserved_special_token_239|>",
1981
+ "lstrip": false,
1982
+ "normalized": false,
1983
+ "rstrip": false,
1984
+ "single_word": false,
1985
+ "special": true
1986
+ },
1987
+ "128248": {
1988
+ "content": "<|reserved_special_token_240|>",
1989
+ "lstrip": false,
1990
+ "normalized": false,
1991
+ "rstrip": false,
1992
+ "single_word": false,
1993
+ "special": true
1994
+ },
1995
+ "128249": {
1996
+ "content": "<|reserved_special_token_241|>",
1997
+ "lstrip": false,
1998
+ "normalized": false,
1999
+ "rstrip": false,
2000
+ "single_word": false,
2001
+ "special": true
2002
+ },
2003
+ "128250": {
2004
+ "content": "<|reserved_special_token_242|>",
2005
+ "lstrip": false,
2006
+ "normalized": false,
2007
+ "rstrip": false,
2008
+ "single_word": false,
2009
+ "special": true
2010
+ },
2011
+ "128251": {
2012
+ "content": "<|reserved_special_token_243|>",
2013
+ "lstrip": false,
2014
+ "normalized": false,
2015
+ "rstrip": false,
2016
+ "single_word": false,
2017
+ "special": true
2018
+ },
2019
+ "128252": {
2020
+ "content": "<|reserved_special_token_244|>",
2021
+ "lstrip": false,
2022
+ "normalized": false,
2023
+ "rstrip": false,
2024
+ "single_word": false,
2025
+ "special": true
2026
+ },
2027
+ "128253": {
2028
+ "content": "<|reserved_special_token_245|>",
2029
+ "lstrip": false,
2030
+ "normalized": false,
2031
+ "rstrip": false,
2032
+ "single_word": false,
2033
+ "special": true
2034
+ },
2035
+ "128254": {
2036
+ "content": "<|reserved_special_token_246|>",
2037
+ "lstrip": false,
2038
+ "normalized": false,
2039
+ "rstrip": false,
2040
+ "single_word": false,
2041
+ "special": true
2042
+ },
2043
+ "128255": {
2044
+ "content": "<|reserved_special_token_247|>",
2045
+ "lstrip": false,
2046
+ "normalized": false,
2047
+ "rstrip": false,
2048
+ "single_word": false,
2049
+ "special": true
2050
+ }
2051
+ },
2052
+ "bos_token": "<|begin_of_text|>",
2053
+ "clean_up_tokenization_spaces": true,
2054
+ "eos_token": "<|end_of_text|>",
2055
+ "extra_special_tokens": {},
2056
+ "model_input_names": [
2057
+ "input_ids",
2058
+ "attention_mask"
2059
+ ],
2060
+ "model_max_length": 131072,
2061
+ "pad_token": "<|end_of_text|>",
2062
+ "padding_side": "right",
2063
+ "split_special_tokens": false,
2064
+ "tokenizer_class": "PreTrainedTokenizerFast"
2065
+ }
train_results.json ADDED
@@ -0,0 +1,9 @@
+ {
+   "epoch": 5.0,
+   "num_input_tokens_seen": 100663200,
+   "total_flos": 2377568804143104.0,
+   "train_loss": 0.697018449810835,
+   "train_runtime": 22008.2973,
+   "train_samples_per_second": 4.45,
+   "train_steps_per_second": 0.012
+ }
trainer_log.jsonl ADDED
@@ -0,0 +1,273 @@
+ {"current_steps": 1, "total_steps": 385, "loss": 1.9495, "lr": 0.0001, "epoch": 0.013029315960912053, "percentage": 0.26, "elapsed_time": "0:01:01", "remaining_time": "6:33:13", "throughput": 4262.41, "total_tokens": 261888}
+ {"current_steps": 2, "total_steps": 385, "loss": 1.9305, "lr": 9.999833537702192e-05, "epoch": 0.026058631921824105, "percentage": 0.52, "elapsed_time": "0:01:59", "remaining_time": "6:20:29", "throughput": 4393.49, "total_tokens": 523776}
+ {"current_steps": 3, "total_steps": 385, "loss": 1.9086, "lr": 9.999334161892648e-05, "epoch": 0.03908794788273615, "percentage": 0.78, "elapsed_time": "0:02:57", "remaining_time": "6:15:46", "throughput": 4437.13, "total_tokens": 785664}
+ {"current_steps": 1, "total_steps": 195, "loss": 1.9471, "lr": 0.0001, "epoch": 0.025974025974025976, "percentage": 0.51, "elapsed_time": "0:02:03", "remaining_time": "6:39:29", "throughput": 4239.3, "total_tokens": 523776}
+ {"current_steps": 2, "total_steps": 195, "loss": 1.9384, "lr": 9.999351124856874e-05, "epoch": 0.05194805194805195, "percentage": 1.03, "elapsed_time": "0:04:02", "remaining_time": "6:30:22", "throughput": 4315.8, "total_tokens": 1047552}
+ {"current_steps": 1, "total_steps": 195, "loss": 1.9471, "lr": 0.0001, "epoch": 0.026058631921824105, "percentage": 0.51, "elapsed_time": "0:01:58", "remaining_time": "6:23:41", "throughput": 4413.77, "total_tokens": 523776}
+ {"current_steps": 1, "total_steps": 385, "loss": 1.9495, "lr": 0.0001, "epoch": 0.013029315960912053, "percentage": 0.26, "elapsed_time": "0:01:01", "remaining_time": "6:36:33", "throughput": 4226.57, "total_tokens": 261888}
+ {"current_steps": 2, "total_steps": 385, "loss": 1.9304, "lr": 9.999833537702192e-05, "epoch": 0.026058631921824105, "percentage": 0.52, "elapsed_time": "0:01:59", "remaining_time": "6:22:12", "throughput": 4373.84, "total_tokens": 523776}
+ {"current_steps": 3, "total_steps": 385, "loss": 1.9078, "lr": 9.999334161892648e-05, "epoch": 0.03908794788273615, "percentage": 0.78, "elapsed_time": "0:02:57", "remaining_time": "6:17:06", "throughput": 4421.44, "total_tokens": 785664}
+ {"current_steps": 4, "total_steps": 385, "loss": 1.8593, "lr": 9.998501905822266e-05, "epoch": 0.05211726384364821, "percentage": 1.04, "elapsed_time": "0:03:55", "remaining_time": "6:14:23", "throughput": 4441.89, "total_tokens": 1047552}
+ {"current_steps": 5, "total_steps": 385, "loss": 1.8125, "lr": 9.997336824906748e-05, "epoch": 0.06514657980456026, "percentage": 1.3, "elapsed_time": "0:04:54", "remaining_time": "6:12:28", "throughput": 4452.97, "total_tokens": 1309440}
+ {"current_steps": 6, "total_steps": 385, "loss": 1.8287, "lr": 9.995838996722914e-05, "epoch": 0.0781758957654723, "percentage": 1.56, "elapsed_time": "0:05:51", "remaining_time": "6:10:32", "throughput": 4464.52, "total_tokens": 1571328}
+ {"current_steps": 1, "total_steps": 260, "loss": 1.9481, "lr": 0.0001, "epoch": 0.01951219512195122, "percentage": 0.38, "elapsed_time": "0:01:29", "remaining_time": "6:25:15", "throughput": 4401.53, "total_tokens": 392832}
… [records for steps 2–124 of the 260-step run continue in the same one-record-per-step JSONL format] …
+ {"current_steps": 124, "total_steps": 260, "loss": 0.505, "lr": 5.422402637721836e-05, "epoch": 2.3902439024390243, "percentage": 47.69, "elapsed_time": "2:54:41", "remaining_time": "3:11:35", "throughput": 4591.14, "total_tokens": 48121920}
137
+ {"current_steps": 125, "total_steps": 260, "loss": 0.5017, "lr": 5.3621740008088126e-05, "epoch": 2.4097560975609755, "percentage": 48.08, "elapsed_time": "2:56:07", "remaining_time": "3:10:12", "throughput": 4591.09, "total_tokens": 48514752}
138
+ {"current_steps": 126, "total_steps": 260, "loss": 0.5019, "lr": 5.3018924871114305e-05, "epoch": 2.4292682926829268, "percentage": 48.46, "elapsed_time": "2:57:32", "remaining_time": "3:08:49", "throughput": 4591.12, "total_tokens": 48907584}
139
+ {"current_steps": 127, "total_steps": 260, "loss": 0.4921, "lr": 5.2415668976275355e-05, "epoch": 2.448780487804878, "percentage": 48.85, "elapsed_time": "2:58:58", "remaining_time": "3:07:25", "throughput": 4591.12, "total_tokens": 49300416}
140
+ {"current_steps": 128, "total_steps": 260, "loss": 0.4951, "lr": 5.181206039789962e-05, "epoch": 2.4682926829268292, "percentage": 49.23, "elapsed_time": "3:00:23", "remaining_time": "3:06:01", "throughput": 4591.15, "total_tokens": 49693248}
141
+ {"current_steps": 129, "total_steps": 260, "loss": 0.5076, "lr": 5.1208187261806615e-05, "epoch": 2.4878048780487805, "percentage": 49.62, "elapsed_time": "3:01:49", "remaining_time": "3:04:38", "throughput": 4591.19, "total_tokens": 50086080}
142
+ {"current_steps": 130, "total_steps": 260, "loss": 0.4974, "lr": 5.060413773244087e-05, "epoch": 2.5073170731707317, "percentage": 50.0, "elapsed_time": "3:03:14", "remaining_time": "3:03:14", "throughput": 4591.16, "total_tokens": 50478912}
143
+ {"current_steps": 131, "total_steps": 260, "loss": 0.5063, "lr": 5e-05, "epoch": 2.526829268292683, "percentage": 50.38, "elapsed_time": "3:04:40", "remaining_time": "3:01:51", "throughput": 4591.19, "total_tokens": 50871744}
144
+ {"current_steps": 132, "total_steps": 260, "loss": 0.5093, "lr": 4.9395862267559136e-05, "epoch": 2.546341463414634, "percentage": 50.77, "elapsed_time": "3:06:05", "remaining_time": "3:00:27", "throughput": 4591.2, "total_tokens": 51264576}
145
+ {"current_steps": 133, "total_steps": 260, "loss": 0.5019, "lr": 4.87918127381934e-05, "epoch": 2.5658536585365854, "percentage": 51.15, "elapsed_time": "3:07:31", "remaining_time": "2:59:03", "throughput": 4591.18, "total_tokens": 51657408}
146
+ {"current_steps": 134, "total_steps": 260, "loss": 0.4836, "lr": 4.81879396021004e-05, "epoch": 2.5853658536585367, "percentage": 51.54, "elapsed_time": "3:08:57", "remaining_time": "2:57:40", "throughput": 4591.15, "total_tokens": 52050240}
147
+ {"current_steps": 135, "total_steps": 260, "loss": 0.4878, "lr": 4.758433102372466e-05, "epoch": 2.604878048780488, "percentage": 51.92, "elapsed_time": "3:10:22", "remaining_time": "2:56:16", "throughput": 4591.09, "total_tokens": 52443072}
148
+ {"current_steps": 136, "total_steps": 260, "loss": 0.4895, "lr": 4.6981075128885693e-05, "epoch": 2.624390243902439, "percentage": 52.31, "elapsed_time": "3:11:48", "remaining_time": "2:54:53", "throughput": 4591.03, "total_tokens": 52835904}
149
+ {"current_steps": 137, "total_steps": 260, "loss": 0.4987, "lr": 4.6378259991911886e-05, "epoch": 2.6439024390243904, "percentage": 52.69, "elapsed_time": "3:13:14", "remaining_time": "2:53:29", "throughput": 4590.96, "total_tokens": 53228736}
150
+ {"current_steps": 138, "total_steps": 260, "loss": 0.4952, "lr": 4.5775973622781655e-05, "epoch": 2.6634146341463416, "percentage": 53.08, "elapsed_time": "3:14:39", "remaining_time": "2:52:05", "throughput": 4590.92, "total_tokens": 53621568}
151
+ {"current_steps": 139, "total_steps": 260, "loss": 0.4852, "lr": 4.5174303954274244e-05, "epoch": 2.682926829268293, "percentage": 53.46, "elapsed_time": "3:16:05", "remaining_time": "2:50:41", "throughput": 4590.88, "total_tokens": 54014400}
152
+ {"current_steps": 140, "total_steps": 260, "loss": 0.4966, "lr": 4.457333882913169e-05, "epoch": 2.7024390243902436, "percentage": 53.85, "elapsed_time": "3:17:31", "remaining_time": "2:49:18", "throughput": 4590.84, "total_tokens": 54407232}
153
+ {"current_steps": 141, "total_steps": 260, "loss": 0.4851, "lr": 4.397316598723385e-05, "epoch": 2.721951219512195, "percentage": 54.23, "elapsed_time": "3:18:56", "remaining_time": "2:47:54", "throughput": 4590.88, "total_tokens": 54800064}
154
+ {"current_steps": 142, "total_steps": 260, "loss": 0.4897, "lr": 4.337387305278864e-05, "epoch": 2.741463414634146, "percentage": 54.62, "elapsed_time": "3:20:22", "remaining_time": "2:46:30", "throughput": 4590.9, "total_tokens": 55192896}
155
+ {"current_steps": 143, "total_steps": 260, "loss": 0.4992, "lr": 4.277554752153895e-05, "epoch": 2.7609756097560973, "percentage": 55.0, "elapsed_time": "3:21:47", "remaining_time": "2:45:06", "throughput": 4590.89, "total_tokens": 55585728}
156
+ {"current_steps": 144, "total_steps": 260, "loss": 0.4842, "lr": 4.2178276747988446e-05, "epoch": 2.7804878048780486, "percentage": 55.38, "elapsed_time": "3:23:13", "remaining_time": "2:43:42", "throughput": 4590.89, "total_tokens": 55978560}
157
+ {"current_steps": 145, "total_steps": 260, "loss": 0.4941, "lr": 4.1582147932648074e-05, "epoch": 2.8, "percentage": 55.77, "elapsed_time": "3:24:38", "remaining_time": "2:42:18", "throughput": 4590.88, "total_tokens": 56371392}
158
+ {"current_steps": 146, "total_steps": 260, "loss": 0.4933, "lr": 4.0987248109304714e-05, "epoch": 2.819512195121951, "percentage": 56.15, "elapsed_time": "3:26:04", "remaining_time": "2:40:54", "throughput": 4590.9, "total_tokens": 56764224}
159
+ {"current_steps": 147, "total_steps": 260, "loss": 0.4847, "lr": 4.039366413231458e-05, "epoch": 2.8390243902439023, "percentage": 56.54, "elapsed_time": "3:27:30", "remaining_time": "2:39:30", "throughput": 4590.91, "total_tokens": 57157056}
160
+ {"current_steps": 148, "total_steps": 260, "loss": 0.492, "lr": 3.980148266392258e-05, "epoch": 2.8585365853658535, "percentage": 56.92, "elapsed_time": "3:28:55", "remaining_time": "2:38:06", "throughput": 4590.93, "total_tokens": 57549888}
161
+ {"current_steps": 149, "total_steps": 260, "loss": 0.4933, "lr": 3.92107901616097e-05, "epoch": 2.8780487804878048, "percentage": 57.31, "elapsed_time": "3:30:21", "remaining_time": "2:36:42", "throughput": 4590.96, "total_tokens": 57942720}
162
+ {"current_steps": 150, "total_steps": 260, "loss": 0.4812, "lr": 3.86216728654705e-05, "epoch": 2.897560975609756, "percentage": 57.69, "elapsed_time": "3:31:46", "remaining_time": "2:35:18", "throughput": 4590.93, "total_tokens": 58335552}
163
+ {"current_steps": 151, "total_steps": 260, "loss": 0.4822, "lr": 3.803421678562213e-05, "epoch": 2.9170731707317072, "percentage": 58.08, "elapsed_time": "3:33:12", "remaining_time": "2:33:54", "throughput": 4590.97, "total_tokens": 58728384}
164
+ {"current_steps": 152, "total_steps": 260, "loss": 0.4814, "lr": 3.744850768964692e-05, "epoch": 2.9365853658536585, "percentage": 58.46, "elapsed_time": "3:34:37", "remaining_time": "2:32:29", "throughput": 4590.99, "total_tokens": 59121216}
165
+ {"current_steps": 153, "total_steps": 260, "loss": 0.4921, "lr": 3.6864631090070655e-05, "epoch": 2.9560975609756097, "percentage": 58.85, "elapsed_time": "3:36:03", "remaining_time": "2:31:05", "throughput": 4591.02, "total_tokens": 59514048}
166
+ {"current_steps": 154, "total_steps": 260, "loss": 0.4768, "lr": 3.628267223187771e-05, "epoch": 2.975609756097561, "percentage": 59.23, "elapsed_time": "3:37:28", "remaining_time": "2:29:41", "throughput": 4591.06, "total_tokens": 59906880}
167
+ {"current_steps": 155, "total_steps": 260, "loss": 0.4579, "lr": 3.570271608006555e-05, "epoch": 2.995121951219512, "percentage": 59.62, "elapsed_time": "3:38:54", "remaining_time": "2:28:17", "throughput": 4591.02, "total_tokens": 60299712}
168
+ {"current_steps": 156, "total_steps": 260, "loss": 0.4418, "lr": 3.512484730723986e-05, "epoch": 3.0, "percentage": 60.0, "elapsed_time": "3:39:15", "remaining_time": "2:26:10", "throughput": 4591.0, "total_tokens": 60397920}
169
+ {"current_steps": 157, "total_steps": 260, "loss": 0.4471, "lr": 3.4549150281252636e-05, "epoch": 3.0195121951219512, "percentage": 60.38, "elapsed_time": "3:40:41", "remaining_time": "2:24:46", "throughput": 4591.01, "total_tokens": 60790752}
170
+ {"current_steps": 158, "total_steps": 260, "loss": 0.4543, "lr": 3.397570905288453e-05, "epoch": 3.0390243902439025, "percentage": 60.77, "elapsed_time": "3:42:06", "remaining_time": "2:23:23", "throughput": 4591.0, "total_tokens": 61183584}
171
+ {"current_steps": 159, "total_steps": 260, "loss": 0.4561, "lr": 3.340460734357359e-05, "epoch": 3.0585365853658537, "percentage": 61.15, "elapsed_time": "3:43:32", "remaining_time": "2:21:59", "throughput": 4591.0, "total_tokens": 61576416}
172
+ {"current_steps": 160, "total_steps": 260, "loss": 0.4521, "lr": 3.283592853319209e-05, "epoch": 3.078048780487805, "percentage": 61.54, "elapsed_time": "3:44:57", "remaining_time": "2:20:36", "throughput": 4591.0, "total_tokens": 61969248}
173
+ {"current_steps": 161, "total_steps": 260, "loss": 0.444, "lr": 3.226975564787322e-05, "epoch": 3.097560975609756, "percentage": 61.92, "elapsed_time": "3:46:23", "remaining_time": "2:19:12", "throughput": 4590.93, "total_tokens": 62362080}
174
+ {"current_steps": 162, "total_steps": 260, "loss": 0.4541, "lr": 3.170617134788939e-05, "epoch": 3.1170731707317074, "percentage": 62.31, "elapsed_time": "3:47:49", "remaining_time": "2:17:49", "throughput": 4590.8, "total_tokens": 62754912}
175
+ {"current_steps": 163, "total_steps": 260, "loss": 0.4582, "lr": 3.114525791558398e-05, "epoch": 3.1365853658536587, "percentage": 62.69, "elapsed_time": "3:49:15", "remaining_time": "2:16:25", "throughput": 4590.82, "total_tokens": 63147744}
176
+ {"current_steps": 164, "total_steps": 260, "loss": 0.4429, "lr": 3.0587097243358253e-05, "epoch": 3.15609756097561, "percentage": 63.08, "elapsed_time": "3:50:40", "remaining_time": "2:15:01", "throughput": 4590.83, "total_tokens": 63540576}
177
+ {"current_steps": 165, "total_steps": 260, "loss": 0.4443, "lr": 3.003177082171523e-05, "epoch": 3.175609756097561, "percentage": 63.46, "elapsed_time": "3:52:06", "remaining_time": "2:13:38", "throughput": 4590.83, "total_tokens": 63933408}
178
+ {"current_steps": 166, "total_steps": 260, "loss": 0.4521, "lr": 2.9479359727362173e-05, "epoch": 3.1951219512195124, "percentage": 63.85, "elapsed_time": "3:53:31", "remaining_time": "2:12:14", "throughput": 4590.81, "total_tokens": 64326240}
179
+ {"current_steps": 167, "total_steps": 260, "loss": 0.4362, "lr": 2.8929944611373554e-05, "epoch": 3.2146341463414636, "percentage": 64.23, "elapsed_time": "3:54:58", "remaining_time": "2:10:51", "throughput": 4590.59, "total_tokens": 64719072}
180
+ {"current_steps": 168, "total_steps": 260, "loss": 0.4503, "lr": 2.8383605687416125e-05, "epoch": 3.234146341463415, "percentage": 64.62, "elapsed_time": "3:56:23", "remaining_time": "2:09:27", "throughput": 4590.61, "total_tokens": 65111904}
181
+ {"current_steps": 169, "total_steps": 260, "loss": 0.448, "lr": 2.784042272003794e-05, "epoch": 3.253658536585366, "percentage": 65.0, "elapsed_time": "3:57:49", "remaining_time": "2:08:03", "throughput": 4590.63, "total_tokens": 65504736}
182
+ {"current_steps": 170, "total_steps": 260, "loss": 0.4495, "lr": 2.7300475013022663e-05, "epoch": 3.2731707317073173, "percentage": 65.38, "elapsed_time": "3:59:14", "remaining_time": "2:06:39", "throughput": 4590.66, "total_tokens": 65897568}
183
+ {"current_steps": 171, "total_steps": 260, "loss": 0.4512, "lr": 2.6763841397811573e-05, "epoch": 3.292682926829268, "percentage": 65.77, "elapsed_time": "4:00:40", "remaining_time": "2:05:15", "throughput": 4590.67, "total_tokens": 66290400}
184
+ {"current_steps": 172, "total_steps": 260, "loss": 0.4342, "lr": 2.6230600221994196e-05, "epoch": 3.3121951219512193, "percentage": 66.15, "elapsed_time": "4:02:05", "remaining_time": "2:03:51", "throughput": 4590.69, "total_tokens": 66683232}
185
+ {"current_steps": 173, "total_steps": 260, "loss": 0.4456, "lr": 2.57008293378697e-05, "epoch": 3.3317073170731706, "percentage": 66.54, "elapsed_time": "4:03:31", "remaining_time": "2:02:27", "throughput": 4590.71, "total_tokens": 67076064}
186
+ {"current_steps": 174, "total_steps": 260, "loss": 0.4459, "lr": 2.5174606091080627e-05, "epoch": 3.351219512195122, "percentage": 66.92, "elapsed_time": "4:04:56", "remaining_time": "2:01:03", "throughput": 4590.73, "total_tokens": 67468896}
187
+ {"current_steps": 175, "total_steps": 260, "loss": 0.4383, "lr": 2.4652007309320498e-05, "epoch": 3.370731707317073, "percentage": 67.31, "elapsed_time": "4:06:22", "remaining_time": "1:59:39", "throughput": 4590.76, "total_tokens": 67861728}
188
+ {"current_steps": 176, "total_steps": 260, "loss": 0.4447, "lr": 2.4133109291117156e-05, "epoch": 3.3902439024390243, "percentage": 67.69, "elapsed_time": "4:07:47", "remaining_time": "1:58:15", "throughput": 4590.8, "total_tokens": 68254560}
189
+ {"current_steps": 177, "total_steps": 260, "loss": 0.442, "lr": 2.361798779469336e-05, "epoch": 3.4097560975609755, "percentage": 68.08, "elapsed_time": "4:09:13", "remaining_time": "1:56:51", "throughput": 4590.83, "total_tokens": 68647392}
190
+ {"current_steps": 178, "total_steps": 260, "loss": 0.4418, "lr": 2.3106718026906072e-05, "epoch": 3.4292682926829268, "percentage": 68.46, "elapsed_time": "4:10:38", "remaining_time": "1:55:27", "throughput": 4590.84, "total_tokens": 69040224}
191
+ {"current_steps": 179, "total_steps": 260, "loss": 0.4325, "lr": 2.259937463226651e-05, "epoch": 3.448780487804878, "percentage": 68.85, "elapsed_time": "4:12:04", "remaining_time": "1:54:03", "throughput": 4590.86, "total_tokens": 69433056}
192
+ {"current_steps": 180, "total_steps": 260, "loss": 0.4319, "lr": 2.209603168204209e-05, "epoch": 3.4682926829268292, "percentage": 69.23, "elapsed_time": "4:13:29", "remaining_time": "1:52:39", "throughput": 4590.87, "total_tokens": 69825888}
193
+ {"current_steps": 181, "total_steps": 260, "loss": 0.4401, "lr": 2.1596762663442218e-05, "epoch": 3.4878048780487805, "percentage": 69.62, "elapsed_time": "4:14:55", "remaining_time": "1:51:15", "throughput": 4590.89, "total_tokens": 70218720}
194
+ {"current_steps": 182, "total_steps": 260, "loss": 0.4386, "lr": 2.1101640468889255e-05, "epoch": 3.5073170731707317, "percentage": 70.0, "elapsed_time": "4:16:20", "remaining_time": "1:49:51", "throughput": 4590.91, "total_tokens": 70611552}
195
+ {"current_steps": 183, "total_steps": 260, "loss": 0.4202, "lr": 2.061073738537635e-05, "epoch": 3.526829268292683, "percentage": 70.38, "elapsed_time": "4:17:46", "remaining_time": "1:48:27", "throughput": 4590.93, "total_tokens": 71004384}
196
+ {"current_steps": 184, "total_steps": 260, "loss": 0.4419, "lr": 2.0124125083913637e-05, "epoch": 3.546341463414634, "percentage": 70.77, "elapsed_time": "4:19:11", "remaining_time": "1:47:03", "throughput": 4590.89, "total_tokens": 71397216}
197
+ {"current_steps": 185, "total_steps": 260, "loss": 0.4336, "lr": 1.9641874609064443e-05, "epoch": 3.5658536585365854, "percentage": 71.15, "elapsed_time": "4:20:37", "remaining_time": "1:45:39", "throughput": 4590.88, "total_tokens": 71790048}
198
+ {"current_steps": 186, "total_steps": 260, "loss": 0.4387, "lr": 1.9164056368572846e-05, "epoch": 3.5853658536585367, "percentage": 71.54, "elapsed_time": "4:22:03", "remaining_time": "1:44:15", "throughput": 4590.84, "total_tokens": 72182880}
199
+ {"current_steps": 187, "total_steps": 260, "loss": 0.4297, "lr": 1.8690740123084316e-05, "epoch": 3.604878048780488, "percentage": 71.92, "elapsed_time": "4:23:28", "remaining_time": "1:42:51", "throughput": 4590.84, "total_tokens": 72575712}
200
+ {"current_steps": 188, "total_steps": 260, "loss": 0.446, "lr": 1.8221994975960736e-05, "epoch": 3.624390243902439, "percentage": 72.31, "elapsed_time": "4:24:54", "remaining_time": "1:41:27", "throughput": 4590.83, "total_tokens": 72968544}
201
+ {"current_steps": 189, "total_steps": 260, "loss": 0.4318, "lr": 1.7757889363191483e-05, "epoch": 3.6439024390243904, "percentage": 72.69, "elapsed_time": "4:26:20", "remaining_time": "1:40:03", "throughput": 4590.81, "total_tokens": 73361376}
202
+ {"current_steps": 190, "total_steps": 260, "loss": 0.4393, "lr": 1.7298491043401795e-05, "epoch": 3.6634146341463416, "percentage": 73.08, "elapsed_time": "4:27:45", "remaining_time": "1:38:38", "throughput": 4590.82, "total_tokens": 73754208}
203
+ {"current_steps": 191, "total_steps": 260, "loss": 0.4437, "lr": 1.684386708796025e-05, "epoch": 3.682926829268293, "percentage": 73.46, "elapsed_time": "4:29:11", "remaining_time": "1:37:14", "throughput": 4590.85, "total_tokens": 74147040}
204
+ {"current_steps": 192, "total_steps": 260, "loss": 0.4378, "lr": 1.6394083871186362e-05, "epoch": 3.7024390243902436, "percentage": 73.85, "elapsed_time": "4:30:36", "remaining_time": "1:35:50", "throughput": 4590.88, "total_tokens": 74539872}
205
+ {"current_steps": 193, "total_steps": 260, "loss": 0.4388, "lr": 1.5949207060660138e-05, "epoch": 3.721951219512195, "percentage": 74.23, "elapsed_time": "4:32:02", "remaining_time": "1:34:26", "throughput": 4590.9, "total_tokens": 74932704}
206
+ {"current_steps": 194, "total_steps": 260, "loss": 0.4483, "lr": 1.550930160763462e-05, "epoch": 3.741463414634146, "percentage": 74.62, "elapsed_time": "4:33:27", "remaining_time": "1:33:01", "throughput": 4590.93, "total_tokens": 75325536}
207
+ {"current_steps": 195, "total_steps": 260, "loss": 0.4336, "lr": 1.5074431737553157e-05, "epoch": 3.7609756097560973, "percentage": 75.0, "elapsed_time": "4:34:52", "remaining_time": "1:31:37", "throughput": 4590.98, "total_tokens": 75718368}
208
+ {"current_steps": 196, "total_steps": 260, "loss": 0.4334, "lr": 1.4644660940672627e-05, "epoch": 3.7804878048780486, "percentage": 75.38, "elapsed_time": "4:36:18", "remaining_time": "1:30:13", "throughput": 4591.02, "total_tokens": 76111200}
209
+ {"current_steps": 197, "total_steps": 260, "loss": 0.4352, "lr": 1.422005196279395e-05, "epoch": 3.8, "percentage": 75.77, "elapsed_time": "4:37:43", "remaining_time": "1:28:49", "throughput": 4591.03, "total_tokens": 76504032}
210
+ {"current_steps": 198, "total_steps": 260, "loss": 0.4315, "lr": 1.3800666796101292e-05, "epoch": 3.819512195121951, "percentage": 76.15, "elapsed_time": "4:39:09", "remaining_time": "1:27:24", "throughput": 4591.05, "total_tokens": 76896864}
211
+ {"current_steps": 199, "total_steps": 260, "loss": 0.4339, "lr": 1.338656667011134e-05, "epoch": 3.8390243902439023, "percentage": 76.54, "elapsed_time": "4:40:34", "remaining_time": "1:26:00", "throughput": 4591.09, "total_tokens": 77289696}
212
+ {"current_steps": 200, "total_steps": 260, "loss": 0.4374, "lr": 1.297781204273385e-05, "epoch": 3.8585365853658535, "percentage": 76.92, "elapsed_time": "4:42:00", "remaining_time": "1:24:36", "throughput": 4591.09, "total_tokens": 77682528}
213
+ {"current_steps": 201, "total_steps": 260, "loss": 0.4437, "lr": 1.257446259144494e-05, "epoch": 3.8780487804878048, "percentage": 77.31, "elapsed_time": "4:43:25", "remaining_time": "1:23:11", "throughput": 4591.09, "total_tokens": 78075360}
214
+ {"current_steps": 202, "total_steps": 260, "loss": 0.4319, "lr": 1.2176577204574318e-05, "epoch": 3.897560975609756, "percentage": 77.69, "elapsed_time": "4:44:51", "remaining_time": "1:21:47", "throughput": 4591.13, "total_tokens": 78468192}
215
+ {"current_steps": 203, "total_steps": 260, "loss": 0.4322, "lr": 1.178421397270758e-05, "epoch": 3.9170731707317072, "percentage": 78.08, "elapsed_time": "4:46:16", "remaining_time": "1:20:23", "throughput": 4591.15, "total_tokens": 78861024}
216
+ {"current_steps": 204, "total_steps": 260, "loss": 0.4375, "lr": 1.1397430180205171e-05, "epoch": 3.9365853658536585, "percentage": 78.46, "elapsed_time": "4:47:42", "remaining_time": "1:18:58", "throughput": 4591.08, "total_tokens": 79253856}
217
+ {"current_steps": 205, "total_steps": 260, "loss": 0.4182, "lr": 1.1016282296838887e-05, "epoch": 3.9560975609756097, "percentage": 78.85, "elapsed_time": "4:49:08", "remaining_time": "1:17:34", "throughput": 4591.11, "total_tokens": 79646688}
218
+ {"current_steps": 206, "total_steps": 260, "loss": 0.4382, "lr": 1.0640825969547496e-05, "epoch": 3.975609756097561, "percentage": 79.23, "elapsed_time": "4:50:33", "remaining_time": "1:16:09", "throughput": 4591.14, "total_tokens": 80039520}
219
+ {"current_steps": 207, "total_steps": 260, "loss": 0.4277, "lr": 1.0271116014312293e-05, "epoch": 3.995121951219512, "percentage": 79.62, "elapsed_time": "4:51:58", "remaining_time": "1:14:45", "throughput": 4591.15, "total_tokens": 80432352}
220
+ {"current_steps": 208, "total_steps": 260, "loss": 0.4029, "lr": 9.90720640815408e-06, "epoch": 4.0, "percentage": 80.0, "elapsed_time": "4:52:20", "remaining_time": "1:13:05", "throughput": 4591.17, "total_tokens": 80530560}
221
+ {"current_steps": 209, "total_steps": 260, "loss": 0.4243, "lr": 9.549150281252633e-06, "epoch": 4.019512195121951, "percentage": 80.38, "elapsed_time": "4:53:45", "remaining_time": "1:11:41", "throughput": 4591.2, "total_tokens": 80923392}
222
+ {"current_steps": 210, "total_steps": 260, "loss": 0.4146, "lr": 9.196999909189762e-06, "epoch": 4.0390243902439025, "percentage": 80.77, "elapsed_time": "4:55:11", "remaining_time": "1:10:17", "throughput": 4591.06, "total_tokens": 81316224}
223
+ {"current_steps": 211, "total_steps": 260, "loss": 0.4287, "lr": 8.850806705317183e-06, "epoch": 4.058536585365854, "percentage": 81.15, "elapsed_time": "4:56:37", "remaining_time": "1:08:53", "throughput": 4591.08, "total_tokens": 81709056}
224
+ {"current_steps": 212, "total_steps": 260, "loss": 0.4221, "lr": 8.510621213250247e-06, "epoch": 4.078048780487805, "percentage": 81.54, "elapsed_time": "4:58:02", "remaining_time": "1:07:28", "throughput": 4591.09, "total_tokens": 82101888}
225
+ {"current_steps": 213, "total_steps": 260, "loss": 0.4232, "lr": 8.176493099488663e-06, "epoch": 4.097560975609756, "percentage": 81.92, "elapsed_time": "4:59:28", "remaining_time": "1:06:04", "throughput": 4591.1, "total_tokens": 82494720}
226
+ {"current_steps": 214, "total_steps": 260, "loss": 0.416, "lr": 7.848471146165288e-06, "epoch": 4.117073170731707, "percentage": 82.31, "elapsed_time": "5:00:53", "remaining_time": "1:04:40", "throughput": 4591.1, "total_tokens": 82887552}
227
+ {"current_steps": 215, "total_steps": 260, "loss": 0.4157, "lr": 7.526603243923957e-06, "epoch": 4.136585365853659, "percentage": 82.69, "elapsed_time": "5:02:19", "remaining_time": "1:03:16", "throughput": 4591.08, "total_tokens": 83280384}
228
+ {"current_steps": 216, "total_steps": 260, "loss": 0.4174, "lr": 7.21093638492763e-06, "epoch": 4.15609756097561, "percentage": 83.08, "elapsed_time": "5:03:45", "remaining_time": "1:01:52", "throughput": 4591.06, "total_tokens": 83673216}
229
+ {"current_steps": 217, "total_steps": 260, "loss": 0.4041, "lr": 6.901516655997536e-06, "epoch": 4.175609756097561, "percentage": 83.46, "elapsed_time": "5:05:10", "remaining_time": "1:00:28", "throughput": 4591.04, "total_tokens": 84066048}
230
+ {"current_steps": 218, "total_steps": 260, "loss": 0.4155, "lr": 6.5983892318846275e-06, "epoch": 4.195121951219512, "percentage": 83.85, "elapsed_time": "5:06:36", "remaining_time": "0:59:04", "throughput": 4591.02, "total_tokens": 84458880}
231
+ {"current_steps": 219, "total_steps": 260, "loss": 0.4214, "lr": 6.301598368674105e-06, "epoch": 4.214634146341464, "percentage": 84.23, "elapsed_time": "5:08:02", "remaining_time": "0:57:40", "throughput": 4591.01, "total_tokens": 84851712}
232
+ {"current_steps": 220, "total_steps": 260, "loss": 0.4193, "lr": 6.011187397324114e-06, "epoch": 4.234146341463415, "percentage": 84.62, "elapsed_time": "5:09:27", "remaining_time": "0:56:15", "throughput": 4591.0, "total_tokens": 85244544}
233
+ {"current_steps": 221, "total_steps": 260, "loss": 0.4137, "lr": 5.727198717339511e-06, "epoch": 4.253658536585366, "percentage": 85.0, "elapsed_time": "5:10:53", "remaining_time": "0:54:51", "throughput": 4590.97, "total_tokens": 85637376}
234
+ {"current_steps": 222, "total_steps": 260, "loss": 0.4069, "lr": 5.449673790581611e-06, "epoch": 4.273170731707317, "percentage": 85.38, "elapsed_time": "5:12:19", "remaining_time": "0:53:27", "throughput": 4590.93, "total_tokens": 86030208}
235
+ {"current_steps": 223, "total_steps": 260, "loss": 0.4188, "lr": 5.178653135214812e-06, "epoch": 4.2926829268292686, "percentage": 85.77, "elapsed_time": "5:13:44", "remaining_time": "0:52:03", "throughput": 4590.91, "total_tokens": 86423040}
236
+ {"current_steps": 224, "total_steps": 260, "loss": 0.4133, "lr": 4.914176319791036e-06, "epoch": 4.31219512195122, "percentage": 86.15, "elapsed_time": "5:15:10", "remaining_time": "0:50:39", "throughput": 4590.88, "total_tokens": 86815872}
237
+ {"current_steps": 225, "total_steps": 260, "loss": 0.4136, "lr": 4.65628195747273e-06, "epoch": 4.331707317073171, "percentage": 86.54, "elapsed_time": "5:16:36", "remaining_time": "0:49:14", "throughput": 4590.84, "total_tokens": 87208704}
238
+ {"current_steps": 226, "total_steps": 260, "loss": 0.4066, "lr": 4.405007700395497e-06, "epoch": 4.351219512195122, "percentage": 86.92, "elapsed_time": "5:18:01", "remaining_time": "0:47:50", "throughput": 4590.8, "total_tokens": 87601536}
239
+ {"current_steps": 227, "total_steps": 260, "loss": 0.4002, "lr": 4.16039023417088e-06, "epoch": 4.3707317073170735, "percentage": 87.31, "elapsed_time": "5:19:27", "remaining_time": "0:46:26", "throughput": 4590.77, "total_tokens": 87994368}
240
+ {"current_steps": 228, "total_steps": 260, "loss": 0.4213, "lr": 3.922465272530351e-06, "epoch": 4.390243902439025, "percentage": 87.69, "elapsed_time": "5:20:53", "remaining_time": "0:45:02", "throughput": 4590.69, "total_tokens": 88387200}
241
+ {"current_steps": 229, "total_steps": 260, "loss": 0.4276, "lr": 3.691267552111183e-06, "epoch": 4.409756097560976, "percentage": 88.08, "elapsed_time": "5:22:19", "remaining_time": "0:43:37", "throughput": 4590.66, "total_tokens": 88780032}
242
+ {"current_steps": 230, "total_steps": 260, "loss": 0.417, "lr": 3.4668308273848982e-06, "epoch": 4.429268292682927, "percentage": 88.46, "elapsed_time": "5:23:44", "remaining_time": "0:42:13", "throughput": 4590.64, "total_tokens": 89172864}
243
+ {"current_steps": 231, "total_steps": 260, "loss": 0.4036, "lr": 3.249187865729264e-06, "epoch": 4.4487804878048784, "percentage": 88.85, "elapsed_time": "5:25:10", "remaining_time": "0:40:49", "throughput": 4590.63, "total_tokens": 89565696}
244
+ {"current_steps": 232, "total_steps": 260, "loss": 0.4109, "lr": 3.0383704426442394e-06, "epoch": 4.46829268292683, "percentage": 89.23, "elapsed_time": "5:26:36", "remaining_time": "0:39:25", "throughput": 4590.61, "total_tokens": 89958528}
245
+ {"current_steps": 233, "total_steps": 260, "loss": 0.4177, "lr": 2.8344093371128424e-06, "epoch": 4.487804878048781, "percentage": 89.62, "elapsed_time": "5:28:01", "remaining_time": "0:38:00", "throughput": 4590.59, "total_tokens": 90351360}
246
+ {"current_steps": 234, "total_steps": 260, "loss": 0.4167, "lr": 2.637334327107466e-06, "epoch": 4.507317073170732, "percentage": 90.0, "elapsed_time": "5:29:27", "remaining_time": "0:36:36", "throughput": 4590.59, "total_tokens": 90744192}
247
+ {"current_steps": 235, "total_steps": 260, "loss": 0.4214, "lr": 2.4471741852423237e-06, "epoch": 4.526829268292683, "percentage": 90.38, "elapsed_time": "5:30:52", "remaining_time": "0:35:12", "throughput": 4590.6, "total_tokens": 91137024}
248
+ {"current_steps": 236, "total_steps": 260, "loss": 0.4038, "lr": 2.2639566745727205e-06, "epoch": 4.546341463414635, "percentage": 90.77, "elapsed_time": "5:32:18", "remaining_time": "0:33:47", "throughput": 4590.59, "total_tokens": 91529856}
249
+ {"current_steps": 237, "total_steps": 260, "loss": 0.4087, "lr": 2.087708544541689e-06, "epoch": 4.565853658536585, "percentage": 91.15, "elapsed_time": "5:33:44", "remaining_time": "0:32:23", "throughput": 4590.59, "total_tokens": 91922688}
250
+ {"current_steps": 238, "total_steps": 260, "loss": 0.4099, "lr": 1.9184555270746194e-06, "epoch": 4.585365853658536, "percentage": 91.54, "elapsed_time": "5:35:09", "remaining_time": "0:30:58", "throughput": 4590.59, "total_tokens": 92315520}
251
+ {"current_steps": 239, "total_steps": 260, "loss": 0.4098, "lr": 1.7562223328224325e-06, "epoch": 4.6048780487804875, "percentage": 91.92, "elapsed_time": "5:36:35", "remaining_time": "0:29:34", "throughput": 4590.59, "total_tokens": 92708352}
252
+ {"current_steps": 240, "total_steps": 260, "loss": 0.4106, "lr": 1.601032647553863e-06, "epoch": 4.624390243902439, "percentage": 92.31, "elapsed_time": "5:38:00", "remaining_time": "0:28:10", "throughput": 4590.58, "total_tokens": 93101184}
253
+ {"current_steps": 241, "total_steps": 260, "loss": 0.4204, "lr": 1.4529091286973995e-06, "epoch": 4.64390243902439, "percentage": 92.69, "elapsed_time": "5:39:26", "remaining_time": "0:26:45", "throughput": 4590.58, "total_tokens": 93494016}
254
+ {"current_steps": 242, "total_steps": 260, "loss": 0.4069, "lr": 1.3118734020333256e-06, "epoch": 4.663414634146341, "percentage": 93.08, "elapsed_time": "5:40:52", "remaining_time": "0:25:21", "throughput": 4590.57, "total_tokens": 93886848}
255
+ {"current_steps": 243, "total_steps": 260, "loss": 0.4206, "lr": 1.1779460585363944e-06, "epoch": 4.682926829268292, "percentage": 93.46, "elapsed_time": "5:42:17", "remaining_time": "0:23:56", "throughput": 4590.59, "total_tokens": 94279680}
256
+ {"current_steps": 244, "total_steps": 260, "loss": 0.4216, "lr": 1.0511466513695777e-06, "epoch": 4.702439024390244, "percentage": 93.85, "elapsed_time": "5:43:43", "remaining_time": "0:22:32", "throughput": 4590.59, "total_tokens": 94672512}
257
+ {"current_steps": 245, "total_steps": 260, "loss": 0.4067, "lr": 9.314936930293283e-07, "epoch": 4.721951219512195, "percentage": 94.23, "elapsed_time": "5:45:09", "remaining_time": "0:21:07", "throughput": 4590.49, "total_tokens": 95065344}
258
+ {"current_steps": 246, "total_steps": 260, "loss": 0.4149, "lr": 8.190046526428242e-07, "epoch": 4.741463414634146, "percentage": 94.62, "elapsed_time": "5:46:34", "remaining_time": "0:19:43", "throughput": 4590.45, "total_tokens": 95458176}
259
+ {"current_steps": 247, "total_steps": 260, "loss": 0.4104, "lr": 7.136959534174592e-07, "epoch": 4.760975609756097, "percentage": 95.0, "elapsed_time": "5:48:00", "remaining_time": "0:18:18", "throughput": 4590.43, "total_tokens": 95851008}
260
+ {"current_steps": 248, "total_steps": 260, "loss": 0.4275, "lr": 6.15582970243117e-07, "epoch": 4.780487804878049, "percentage": 95.38, "elapsed_time": "5:49:26", "remaining_time": "0:16:54", "throughput": 4590.4, "total_tokens": 96243840}
261
+ {"current_steps": 249, "total_steps": 260, "loss": 0.4168, "lr": 5.246800274474439e-07, "epoch": 4.8, "percentage": 95.77, "elapsed_time": "5:50:52", "remaining_time": "0:15:30", "throughput": 4590.37, "total_tokens": 96636672}
262
+ {"current_steps": 250, "total_steps": 260, "loss": 0.4227, "lr": 4.4100039670454395e-07, "epoch": 4.819512195121951, "percentage": 96.15, "elapsed_time": "5:52:17", "remaining_time": "0:14:05", "throughput": 4590.32, "total_tokens": 97029504}
263
+ {"current_steps": 251, "total_steps": 260, "loss": 0.4224, "lr": 3.6455629509730136e-07, "epoch": 4.839024390243902, "percentage": 96.54, "elapsed_time": "5:53:43", "remaining_time": "0:12:41", "throughput": 4590.27, "total_tokens": 97422336}
264
+ {"current_steps": 252, "total_steps": 260, "loss": 0.409, "lr": 2.953588833337406e-07, "epoch": 4.8585365853658535, "percentage": 96.92, "elapsed_time": "5:55:09", "remaining_time": "0:11:16", "throughput": 4590.16, "total_tokens": 97815168}
265
+ {"current_steps": 253, "total_steps": 260, "loss": 0.4164, "lr": 2.334182641175686e-07, "epoch": 4.878048780487805, "percentage": 97.31, "elapsed_time": "5:56:35", "remaining_time": "0:09:51", "throughput": 4590.1, "total_tokens": 98208000}
266
+ {"current_steps": 254, "total_steps": 260, "loss": 0.4048, "lr": 1.7874348067319912e-07, "epoch": 4.897560975609756, "percentage": 97.69, "elapsed_time": "5:58:01", "remaining_time": "0:08:27", "throughput": 4590.07, "total_tokens": 98600832}
267
+ {"current_steps": 255, "total_steps": 260, "loss": 0.4082, "lr": 1.3134251542544774e-07, "epoch": 4.917073170731707, "percentage": 98.08, "elapsed_time": "5:59:27", "remaining_time": "0:07:02", "throughput": 4590.03, "total_tokens": 98993664}
268
+ {"current_steps": 256, "total_steps": 260, "loss": 0.4214, "lr": 9.12222888341252e-08, "epoch": 4.9365853658536585, "percentage": 98.46, "elapsed_time": "6:00:52", "remaining_time": "0:05:38", "throughput": 4590.01, "total_tokens": 99386496}
269
+ {"current_steps": 257, "total_steps": 260, "loss": 0.413, "lr": 5.838865838366792e-08, "epoch": 4.95609756097561, "percentage": 98.85, "elapsed_time": "6:02:18", "remaining_time": "0:04:13", "throughput": 4589.97, "total_tokens": 99779328}
270
+ {"current_steps": 258, "total_steps": 260, "loss": 0.4118, "lr": 3.284641772793862e-08, "epoch": 4.975609756097561, "percentage": 99.23, "elapsed_time": "6:03:44", "remaining_time": "0:02:49", "throughput": 4589.92, "total_tokens": 100172160}
271
+ {"current_steps": 259, "total_steps": 260, "loss": 0.4169, "lr": 1.4599295990352924e-08, "epoch": 4.995121951219512, "percentage": 99.62, "elapsed_time": "6:05:10", "remaining_time": "0:01:24", "throughput": 4589.9, "total_tokens": 100564992}
272
+ {"current_steps": 260, "total_steps": 260, "loss": 0.3937, "lr": 3.6499572194648167e-09, "epoch": 5.0, "percentage": 100.0, "elapsed_time": "6:05:31", "remaining_time": "0:00:00", "throughput": 4589.89, "total_tokens": 100663200}
273
+ {"current_steps": 260, "total_steps": 260, "epoch": 5.0, "percentage": 100.0, "elapsed_time": "6:06:47", "remaining_time": "0:00:00", "throughput": 4574.14, "total_tokens": 100663200}
trainer_state.json ADDED
@@ -0,0 +1,2644 @@
1
+ {
2
+ "best_global_step": null,
3
+ "best_metric": null,
4
+ "best_model_checkpoint": null,
5
+ "epoch": 5.0,
6
+ "eval_steps": 500,
7
+ "global_step": 260,
8
+ "is_hyper_param_search": false,
9
+ "is_local_process_zero": true,
10
+ "is_world_process_zero": true,
11
+ "log_history": [
12
+ {
13
+ "epoch": 0.01951219512195122,
14
+ "grad_norm": 0.15858030701789594,
15
+ "learning_rate": 0.0001,
16
+ "loss": 1.9481,
17
+ "num_input_tokens_seen": 392832,
18
+ "step": 1,
19
+ "train_runtime": 90.5356,
20
+ "train_tokens_per_second": 4338.977
21
+ },
22
+ {
23
+ "epoch": 0.03902439024390244,
24
+ "grad_norm": 0.20614269950817926,
25
+ "learning_rate": 9.999635004278054e-05,
26
+ "loss": 1.9426,
27
+ "num_input_tokens_seen": 785664,
28
+ "step": 2,
29
+ "train_runtime": 176.3121,
30
+ "train_tokens_per_second": 4456.097
31
+ },
32
+ {
33
+ "epoch": 0.05853658536585366,
34
+ "grad_norm": 0.11931287845406353,
35
+ "learning_rate": 9.998540070400966e-05,
36
+ "loss": 1.8808,
37
+ "num_input_tokens_seen": 1178496,
38
+ "step": 3,
39
+ "train_runtime": 261.8343,
40
+ "train_tokens_per_second": 4500.924
41
+ },
42
+ {
43
+ "epoch": 0.07804878048780488,
44
+ "grad_norm": 0.1061877717922271,
45
+ "learning_rate": 9.996715358227206e-05,
46
+ "loss": 1.8827,
47
+ "num_input_tokens_seen": 1571328,
48
+ "step": 4,
49
+ "train_runtime": 347.2723,
50
+ "train_tokens_per_second": 4524.772
51
+ },
52
+ {
53
+ "epoch": 0.0975609756097561,
54
+ "grad_norm": 0.2303512070226615,
55
+ "learning_rate": 9.994161134161634e-05,
56
+ "loss": 1.8196,
57
+ "num_input_tokens_seen": 1964160,
58
+ "step": 5,
59
+ "train_runtime": 432.7972,
60
+ "train_tokens_per_second": 4538.292
61
+ },
62
+ {
63
+ "epoch": 0.11707317073170732,
64
+ "grad_norm": 0.10282606368495031,
65
+ "learning_rate": 9.990877771116589e-05,
66
+ "loss": 1.7928,
67
+ "num_input_tokens_seen": 2356992,
68
+ "step": 6,
69
+ "train_runtime": 518.285,
70
+ "train_tokens_per_second": 4547.676
71
+ },
72
+ {
73
+ "epoch": 0.13658536585365855,
74
+ "grad_norm": 0.09334879403165187,
75
+ "learning_rate": 9.986865748457457e-05,
76
+ "loss": 1.7726,
77
+ "num_input_tokens_seen": 2749824,
78
+ "step": 7,
79
+ "train_runtime": 603.8579,
80
+ "train_tokens_per_second": 4553.76
81
+ },
82
+ {
83
+ "epoch": 0.15609756097560976,
84
+ "grad_norm": 0.08922352642855319,
85
+ "learning_rate": 9.982125651932681e-05,
86
+ "loss": 1.7569,
87
+ "num_input_tokens_seen": 3142656,
88
+ "step": 8,
89
+ "train_runtime": 689.4943,
90
+ "train_tokens_per_second": 4557.915
91
+ },
92
+ {
93
+ "epoch": 0.17560975609756097,
94
+ "grad_norm": 0.08017329983520603,
95
+ "learning_rate": 9.976658173588244e-05,
96
+ "loss": 1.7263,
97
+ "num_input_tokens_seen": 3535488,
98
+ "step": 9,
99
+ "train_runtime": 775.2129,
100
+ "train_tokens_per_second": 4560.667
101
+ },
102
+ {
103
+ "epoch": 0.1951219512195122,
104
+ "grad_norm": 0.09081767257053708,
105
+ "learning_rate": 9.970464111666626e-05,
106
+ "loss": 1.7278,
107
+ "num_input_tokens_seen": 3928320,
108
+ "step": 10,
109
+ "train_runtime": 860.7493,
110
+ "train_tokens_per_second": 4563.837
111
+ },
112
+ {
113
+ "epoch": 0.2146341463414634,
114
+ "grad_norm": 0.09473566656122094,
115
+ "learning_rate": 9.96354437049027e-05,
116
+ "loss": 1.719,
117
+ "num_input_tokens_seen": 4321152,
118
+ "step": 11,
119
+ "train_runtime": 946.3458,
120
+ "train_tokens_per_second": 4566.145
121
+ },
122
+ {
123
+ "epoch": 0.23414634146341465,
124
+ "grad_norm": 0.09855985009583386,
125
+ "learning_rate": 9.955899960329547e-05,
126
+ "loss": 1.6873,
127
+ "num_input_tokens_seen": 4713984,
128
+ "step": 12,
129
+ "train_runtime": 1031.9306,
130
+ "train_tokens_per_second": 4568.121
131
+ },
132
+ {
133
+ "epoch": 0.25365853658536586,
134
+ "grad_norm": 0.08976785941545291,
135
+ "learning_rate": 9.947531997255256e-05,
136
+ "loss": 1.6583,
137
+ "num_input_tokens_seen": 5106816,
138
+ "step": 13,
139
+ "train_runtime": 1117.6812,
140
+ "train_tokens_per_second": 4569.117
141
+ },
142
+ {
143
+ "epoch": 0.2731707317073171,
144
+ "grad_norm": 0.13069965458083008,
145
+ "learning_rate": 9.938441702975689e-05,
146
+ "loss": 1.6484,
147
+ "num_input_tokens_seen": 5499648,
148
+ "step": 14,
149
+ "train_runtime": 1203.3824,
150
+ "train_tokens_per_second": 4570.158
151
+ },
152
+ {
153
+ "epoch": 0.2926829268292683,
154
+ "grad_norm": 0.09614160184332628,
155
+ "learning_rate": 9.928630404658255e-05,
156
+ "loss": 1.6392,
157
+ "num_input_tokens_seen": 5892480,
158
+ "step": 15,
159
+ "train_runtime": 1288.9765,
160
+ "train_tokens_per_second": 4571.441
161
+ },
162
+ {
163
+ "epoch": 0.3121951219512195,
164
+ "grad_norm": 0.11451137531983133,
165
+ "learning_rate": 9.918099534735718e-05,
166
+ "loss": 1.6079,
167
+ "num_input_tokens_seen": 6285312,
168
+ "step": 16,
169
+ "train_runtime": 1374.5203,
170
+ "train_tokens_per_second": 4572.731
171
+ },
172
+ {
173
+ "epoch": 0.33170731707317075,
174
+ "grad_norm": 0.1000369686921647,
175
+ "learning_rate": 9.906850630697068e-05,
176
+ "loss": 1.614,
177
+ "num_input_tokens_seen": 6678144,
178
+ "step": 17,
179
+ "train_runtime": 1460.267,
180
+ "train_tokens_per_second": 4573.235
181
+ },
182
+ {
183
+ "epoch": 0.35121951219512193,
184
+ "grad_norm": 0.11186890632545277,
185
+ "learning_rate": 9.894885334863044e-05,
186
+ "loss": 1.5871,
187
+ "num_input_tokens_seen": 7070976,
188
+ "step": 18,
189
+ "train_runtime": 1545.8916,
190
+ "train_tokens_per_second": 4574.044
191
+ },
192
+ {
193
+ "epoch": 0.37073170731707317,
194
+ "grad_norm": 0.12930001237850156,
195
+ "learning_rate": 9.882205394146361e-05,
196
+ "loss": 1.5864,
197
+ "num_input_tokens_seen": 7463808,
198
+ "step": 19,
199
+ "train_runtime": 1631.4769,
200
+ "train_tokens_per_second": 4574.878
201
+ },
202
+ {
203
+ "epoch": 0.3902439024390244,
204
+ "grad_norm": 0.11111621443548497,
205
+ "learning_rate": 9.868812659796668e-05,
206
+ "loss": 1.547,
207
+ "num_input_tokens_seen": 7856640,
208
+ "step": 20,
209
+ "train_runtime": 1717.1186,
210
+ "train_tokens_per_second": 4575.479
211
+ },
212
+ {
213
+ "epoch": 0.4097560975609756,
214
+ "grad_norm": 0.10197780896100014,
215
+ "learning_rate": 9.85470908713026e-05,
216
+ "loss": 1.5349,
217
+ "num_input_tokens_seen": 8249472,
218
+ "step": 21,
219
+ "train_runtime": 1802.6996,
220
+ "train_tokens_per_second": 4576.177
221
+ },
222
+ {
223
+ "epoch": 0.4292682926829268,
224
+ "grad_norm": 0.11362815005489417,
225
+ "learning_rate": 9.839896735244614e-05,
226
+ "loss": 1.5123,
227
+ "num_input_tokens_seen": 8642304,
228
+ "step": 22,
229
+ "train_runtime": 1888.2112,
230
+ "train_tokens_per_second": 4576.98
231
+ },
232
+ {
233
+ "epoch": 0.44878048780487806,
234
+ "grad_norm": 0.5028658075504846,
235
+ "learning_rate": 9.824377766717759e-05,
236
+ "loss": 1.4901,
237
+ "num_input_tokens_seen": 9035136,
238
+ "step": 23,
239
+ "train_runtime": 1973.7336,
240
+ "train_tokens_per_second": 4577.688
241
+ },
242
+ {
243
+ "epoch": 0.4682926829268293,
244
+ "grad_norm": 0.12820339687555396,
245
+ "learning_rate": 9.808154447292539e-05,
246
+ "loss": 1.4924,
247
+ "num_input_tokens_seen": 9427968,
248
+ "step": 24,
249
+ "train_runtime": 2059.3532,
250
+ "train_tokens_per_second": 4578.121
251
+ },
252
+ {
253
+ "epoch": 0.4878048780487805,
254
+ "grad_norm": 0.13588084054069685,
255
+ "learning_rate": 9.791229145545831e-05,
256
+ "loss": 1.4489,
257
+ "num_input_tokens_seen": 9820800,
258
+ "step": 25,
259
+ "train_runtime": 2145.0524,
260
+ "train_tokens_per_second": 4578.35
261
+ },
262
+ {
263
+ "epoch": 0.5073170731707317,
264
+ "grad_norm": 0.13132153795044452,
265
+ "learning_rate": 9.773604332542729e-05,
266
+ "loss": 1.4188,
267
+ "num_input_tokens_seen": 10213632,
268
+ "step": 26,
269
+ "train_runtime": 2230.6879,
270
+ "train_tokens_per_second": 4578.692
271
+ },
272
+ {
273
+ "epoch": 0.526829268292683,
274
+ "grad_norm": 0.13496880260647526,
275
+ "learning_rate": 9.755282581475769e-05,
276
+ "loss": 1.4272,
277
+ "num_input_tokens_seen": 10606464,
278
+ "step": 27,
279
+ "train_runtime": 2316.1437,
280
+ "train_tokens_per_second": 4579.364
281
+ },
282
+ {
283
+ "epoch": 0.5463414634146342,
284
+ "grad_norm": 0.14799450429263764,
285
+ "learning_rate": 9.736266567289253e-05,
286
+ "loss": 1.3815,
287
+ "num_input_tokens_seen": 10999296,
288
+ "step": 28,
289
+ "train_runtime": 2401.7043,
290
+ "train_tokens_per_second": 4579.788
291
+ },
292
+ {
293
+ "epoch": 0.5658536585365853,
294
+ "grad_norm": 0.32782733922481533,
295
+ "learning_rate": 9.716559066288715e-05,
296
+ "loss": 1.3752,
297
+ "num_input_tokens_seen": 11392128,
298
+ "step": 29,
299
+ "train_runtime": 2487.3215,
300
+ "train_tokens_per_second": 4580.079
301
+ },
302
+ {
303
+ "epoch": 0.5853658536585366,
304
+ "grad_norm": 0.1534924395682076,
305
+ "learning_rate": 9.696162955735577e-05,
306
+ "loss": 1.3629,
307
+ "num_input_tokens_seen": 11784960,
308
+ "step": 30,
309
+ "train_runtime": 2572.9464,
310
+ "train_tokens_per_second": 4580.336
311
+ },
312
+ {
313
+ "epoch": 0.6048780487804878,
314
+ "grad_norm": 0.23747387980155885,
315
+ "learning_rate": 9.675081213427076e-05,
316
+ "loss": 1.3163,
317
+ "num_input_tokens_seen": 12177792,
318
+ "step": 31,
319
+ "train_runtime": 2658.4902,
320
+ "train_tokens_per_second": 4580.717
321
+ },
322
+ {
323
+ "epoch": 0.624390243902439,
324
+ "grad_norm": 0.1724017252468989,
325
+ "learning_rate": 9.653316917261512e-05,
326
+ "loss": 1.3058,
327
+ "num_input_tokens_seen": 12570624,
328
+ "step": 32,
329
+ "train_runtime": 2743.9643,
330
+ "train_tokens_per_second": 4581.191
331
+ },
332
+ {
333
+ "epoch": 0.6439024390243903,
334
+ "grad_norm": 0.1619965153711787,
335
+ "learning_rate": 9.630873244788883e-05,
336
+ "loss": 1.2774,
337
+ "num_input_tokens_seen": 12963456,
338
+ "step": 33,
339
+ "train_runtime": 2829.4055,
340
+ "train_tokens_per_second": 4581.689
341
+ },
342
+ {
343
+ "epoch": 0.6634146341463415,
344
+ "grad_norm": 0.18133620259351485,
345
+ "learning_rate": 9.607753472746966e-05,
346
+ "loss": 1.2585,
347
+ "num_input_tokens_seen": 13356288,
348
+ "step": 34,
349
+ "train_runtime": 2914.8292,
350
+ "train_tokens_per_second": 4582.185
351
+ },
352
+ {
353
+ "epoch": 0.6829268292682927,
354
+ "grad_norm": 0.20121493987838818,
355
+ "learning_rate": 9.583960976582913e-05,
356
+ "loss": 1.2429,
357
+ "num_input_tokens_seen": 13749120,
358
+ "step": 35,
359
+ "train_runtime": 3000.3011,
+ "train_tokens_per_second": 4582.58
+ },
+ {
+ "epoch": 0.7024390243902439,
+ "grad_norm": 0.2485985257238104,
+ "learning_rate": 9.559499229960451e-05,
+ "loss": 1.2092,
+ "num_input_tokens_seen": 14141952,
+ "step": 36,
+ "train_runtime": 3085.8763,
+ "train_tokens_per_second": 4582.799
+ },
+ {
+ "epoch": 0.7219512195121951,
+ "grad_norm": 0.24323454189307991,
+ "learning_rate": 9.534371804252728e-05,
+ "loss": 1.1789,
+ "num_input_tokens_seen": 14534784,
+ "step": 37,
+ "train_runtime": 3171.7531,
+ "train_tokens_per_second": 4582.571
+ },
+ {
+ "epoch": 0.7414634146341463,
+ "grad_norm": 0.34801979161039454,
+ "learning_rate": 9.508582368020897e-05,
+ "loss": 1.1495,
+ "num_input_tokens_seen": 14927616,
+ "step": 38,
+ "train_runtime": 3257.3777,
+ "train_tokens_per_second": 4582.71
+ },
+ {
+ "epoch": 0.7609756097560976,
+ "grad_norm": 0.6127611336291061,
+ "learning_rate": 9.482134686478519e-05,
+ "loss": 1.141,
+ "num_input_tokens_seen": 15320448,
+ "step": 39,
+ "train_runtime": 3342.782,
+ "train_tokens_per_second": 4583.143
+ },
+ {
+ "epoch": 0.7804878048780488,
+ "grad_norm": 0.6420115225722821,
+ "learning_rate": 9.45503262094184e-05,
+ "loss": 1.1236,
+ "num_input_tokens_seen": 15713280,
+ "step": 40,
+ "train_runtime": 3428.2665,
+ "train_tokens_per_second": 4583.448
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 0.28502804596451176,
+ "learning_rate": 9.42728012826605e-05,
+ "loss": 1.1034,
+ "num_input_tokens_seen": 16106112,
+ "step": 41,
+ "train_runtime": 3513.7978,
+ "train_tokens_per_second": 4583.676
+ },
+ {
+ "epoch": 0.8195121951219512,
+ "grad_norm": 0.716313084661846,
+ "learning_rate": 9.398881260267589e-05,
+ "loss": 1.1052,
+ "num_input_tokens_seen": 16498944,
+ "step": 42,
+ "train_runtime": 3599.2462,
+ "train_tokens_per_second": 4584.0
+ },
+ {
+ "epoch": 0.8390243902439024,
+ "grad_norm": 0.3220303313208368,
+ "learning_rate": 9.36984016313259e-05,
+ "loss": 1.0595,
+ "num_input_tokens_seen": 16891776,
+ "step": 43,
+ "train_runtime": 3684.6336,
+ "train_tokens_per_second": 4584.384
+ },
+ {
+ "epoch": 0.8585365853658536,
+ "grad_norm": 0.46297801425335283,
+ "learning_rate": 9.340161076811537e-05,
+ "loss": 1.0507,
+ "num_input_tokens_seen": 17284608,
+ "step": 44,
+ "train_runtime": 3770.0588,
+ "train_tokens_per_second": 4584.705
+ },
+ {
+ "epoch": 0.8780487804878049,
+ "grad_norm": 0.3229025177696428,
+ "learning_rate": 9.309848334400246e-05,
+ "loss": 1.0204,
+ "num_input_tokens_seen": 17677440,
+ "step": 45,
+ "train_runtime": 3855.5127,
+ "train_tokens_per_second": 4584.978
+ },
+ {
+ "epoch": 0.8975609756097561,
+ "grad_norm": 0.4974629296010873,
+ "learning_rate": 9.278906361507238e-05,
+ "loss": 1.0275,
+ "num_input_tokens_seen": 18070272,
+ "step": 46,
+ "train_runtime": 3940.8899,
+ "train_tokens_per_second": 4585.328
+ },
+ {
+ "epoch": 0.9170731707317074,
+ "grad_norm": 0.3252828591259486,
+ "learning_rate": 9.247339675607605e-05,
+ "loss": 0.9942,
+ "num_input_tokens_seen": 18463104,
+ "step": 47,
+ "train_runtime": 4026.4826,
+ "train_tokens_per_second": 4585.418
+ },
+ {
+ "epoch": 0.9365853658536586,
+ "grad_norm": 0.4096451495593207,
+ "learning_rate": 9.215152885383474e-05,
+ "loss": 0.9636,
+ "num_input_tokens_seen": 18855936,
+ "step": 48,
+ "train_runtime": 4111.9693,
+ "train_tokens_per_second": 4585.622
+ },
+ {
+ "epoch": 0.9560975609756097,
+ "grad_norm": 0.34175853473817397,
+ "learning_rate": 9.182350690051133e-05,
+ "loss": 0.9595,
+ "num_input_tokens_seen": 19248768,
+ "step": 49,
+ "train_runtime": 4197.3817,
+ "train_tokens_per_second": 4585.899
+ },
+ {
+ "epoch": 0.975609756097561,
+ "grad_norm": 0.7544878577908084,
+ "learning_rate": 9.148937878674976e-05,
+ "loss": 0.9406,
+ "num_input_tokens_seen": 19641600,
+ "step": 50,
+ "train_runtime": 4282.7892,
+ "train_tokens_per_second": 4586.17
+ },
+ {
+ "epoch": 0.9951219512195122,
+ "grad_norm": 0.45005702668900516,
+ "learning_rate": 9.114919329468282e-05,
+ "loss": 0.9266,
+ "num_input_tokens_seen": 20034432,
+ "step": 51,
+ "train_runtime": 4368.202,
+ "train_tokens_per_second": 4586.425
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.5681712533129535,
+ "learning_rate": 9.080300009081024e-05,
+ "loss": 0.9145,
+ "num_input_tokens_seen": 20132640,
+ "step": 52,
+ "train_runtime": 4389.6005,
+ "train_tokens_per_second": 4586.44
+ },
+ {
+ "epoch": 1.0195121951219512,
+ "grad_norm": 0.5035806305267847,
+ "learning_rate": 9.045084971874738e-05,
+ "loss": 0.8835,
+ "num_input_tokens_seen": 20525472,
+ "step": 53,
+ "train_runtime": 4475.0972,
+ "train_tokens_per_second": 4586.598
+ },
+ {
+ "epoch": 1.0390243902439025,
+ "grad_norm": 1.5437125934865135,
+ "learning_rate": 9.009279359184593e-05,
+ "loss": 0.8894,
+ "num_input_tokens_seen": 20918304,
+ "step": 54,
+ "train_runtime": 4560.5362,
+ "train_tokens_per_second": 4586.808
+ },
+ {
+ "epoch": 1.0585365853658537,
+ "grad_norm": 2.5544072695829527,
+ "learning_rate": 8.972888398568772e-05,
+ "loss": 0.8801,
+ "num_input_tokens_seen": 21311136,
+ "step": 55,
+ "train_runtime": 4646.0135,
+ "train_tokens_per_second": 4586.972
+ },
+ {
+ "epoch": 1.078048780487805,
+ "grad_norm": 6.413568180777226,
+ "learning_rate": 8.935917403045251e-05,
+ "loss": 0.8761,
+ "num_input_tokens_seen": 21703968,
+ "step": 56,
+ "train_runtime": 4731.4435,
+ "train_tokens_per_second": 4587.177
+ },
+ {
+ "epoch": 1.0975609756097562,
+ "grad_norm": 3.6652053687664377,
+ "learning_rate": 8.898371770316111e-05,
+ "loss": 0.8785,
+ "num_input_tokens_seen": 22096800,
+ "step": 57,
+ "train_runtime": 4816.8429,
+ "train_tokens_per_second": 4587.403
+ },
+ {
+ "epoch": 1.1170731707317074,
+ "grad_norm": 2.350713418610044,
+ "learning_rate": 8.860256981979484e-05,
+ "loss": 0.8964,
+ "num_input_tokens_seen": 22489632,
+ "step": 58,
+ "train_runtime": 4902.2866,
+ "train_tokens_per_second": 4587.58
+ },
+ {
+ "epoch": 1.1365853658536587,
+ "grad_norm": 1.458077107848317,
+ "learning_rate": 8.821578602729242e-05,
+ "loss": 0.8833,
+ "num_input_tokens_seen": 22882464,
+ "step": 59,
+ "train_runtime": 4987.6858,
+ "train_tokens_per_second": 4587.792
+ },
+ {
+ "epoch": 1.1560975609756097,
+ "grad_norm": 2.3627094246816553,
+ "learning_rate": 8.782342279542568e-05,
+ "loss": 0.8698,
+ "num_input_tokens_seen": 23275296,
+ "step": 60,
+ "train_runtime": 5073.0959,
+ "train_tokens_per_second": 4587.987
+ },
+ {
+ "epoch": 1.175609756097561,
+ "grad_norm": 1.3869190096740975,
+ "learning_rate": 8.742553740855506e-05,
+ "loss": 0.864,
+ "num_input_tokens_seen": 23668128,
+ "step": 61,
+ "train_runtime": 5158.4893,
+ "train_tokens_per_second": 4588.19
+ },
+ {
+ "epoch": 1.1951219512195121,
+ "grad_norm": 1.1437940013985328,
+ "learning_rate": 8.702218795726617e-05,
+ "loss": 0.8551,
+ "num_input_tokens_seen": 24060960,
+ "step": 62,
+ "train_runtime": 5243.865,
+ "train_tokens_per_second": 4588.402
+ },
+ {
+ "epoch": 1.2146341463414634,
+ "grad_norm": 2.801137587074987,
+ "learning_rate": 8.661343332988869e-05,
+ "loss": 0.8327,
+ "num_input_tokens_seen": 24453792,
+ "step": 63,
+ "train_runtime": 5329.3228,
+ "train_tokens_per_second": 4588.536
+ },
+ {
+ "epoch": 1.2341463414634146,
+ "grad_norm": 1.1087446235917915,
+ "learning_rate": 8.619933320389872e-05,
+ "loss": 0.8003,
+ "num_input_tokens_seen": 24846624,
+ "step": 64,
+ "train_runtime": 5414.7776,
+ "train_tokens_per_second": 4588.669
+ },
+ {
+ "epoch": 1.2536585365853659,
+ "grad_norm": 1.1639583727147402,
+ "learning_rate": 8.577994803720606e-05,
+ "loss": 0.8078,
+ "num_input_tokens_seen": 25239456,
+ "step": 65,
+ "train_runtime": 5500.2413,
+ "train_tokens_per_second": 4588.791
+ },
+ {
+ "epoch": 1.273170731707317,
+ "grad_norm": 0.8450850350334035,
+ "learning_rate": 8.535533905932738e-05,
+ "loss": 0.8033,
+ "num_input_tokens_seen": 25632288,
+ "step": 66,
+ "train_runtime": 5585.6797,
+ "train_tokens_per_second": 4588.929
+ },
+ {
+ "epoch": 1.2926829268292683,
+ "grad_norm": 4.138498725123297,
+ "learning_rate": 8.492556826244687e-05,
+ "loss": 0.7751,
+ "num_input_tokens_seen": 26025120,
+ "step": 67,
+ "train_runtime": 5671.0727,
+ "train_tokens_per_second": 4589.1
+ },
+ {
+ "epoch": 1.3121951219512196,
+ "grad_norm": 1.2519271390201068,
+ "learning_rate": 8.449069839236538e-05,
+ "loss": 0.7998,
+ "num_input_tokens_seen": 26417952,
+ "step": 68,
+ "train_runtime": 5756.4201,
+ "train_tokens_per_second": 4589.302
+ },
+ {
+ "epoch": 1.3317073170731708,
+ "grad_norm": 2.0612640489651586,
+ "learning_rate": 8.405079293933986e-05,
+ "loss": 0.7949,
+ "num_input_tokens_seen": 26810784,
+ "step": 69,
+ "train_runtime": 5841.8015,
+ "train_tokens_per_second": 4589.472
+ },
+ {
+ "epoch": 1.3512195121951218,
+ "grad_norm": 1.6632187783624717,
+ "learning_rate": 8.360591612881364e-05,
+ "loss": 0.7718,
+ "num_input_tokens_seen": 27203616,
+ "step": 70,
+ "train_runtime": 5927.224,
+ "train_tokens_per_second": 4589.605
+ },
+ {
+ "epoch": 1.370731707317073,
+ "grad_norm": 0.8557124244577246,
+ "learning_rate": 8.315613291203976e-05,
+ "loss": 0.7827,
+ "num_input_tokens_seen": 27596448,
+ "step": 71,
+ "train_runtime": 6012.6847,
+ "train_tokens_per_second": 4589.705
+ },
+ {
+ "epoch": 1.3902439024390243,
+ "grad_norm": 1.0542485300201978,
+ "learning_rate": 8.270150895659823e-05,
+ "loss": 0.7595,
+ "num_input_tokens_seen": 27989280,
+ "step": 72,
+ "train_runtime": 6098.1854,
+ "train_tokens_per_second": 4589.772
+ },
+ {
+ "epoch": 1.4097560975609755,
+ "grad_norm": 1.7657947939512917,
+ "learning_rate": 8.224211063680853e-05,
+ "loss": 0.7416,
+ "num_input_tokens_seen": 28382112,
+ "step": 73,
+ "train_runtime": 6183.6199,
+ "train_tokens_per_second": 4589.886
+ },
+ {
+ "epoch": 1.4292682926829268,
+ "grad_norm": 1.8137027126017153,
+ "learning_rate": 8.177800502403928e-05,
+ "loss": 0.749,
+ "num_input_tokens_seen": 28774944,
+ "step": 74,
+ "train_runtime": 6269.1266,
+ "train_tokens_per_second": 4589.945
+ },
+ {
+ "epoch": 1.448780487804878,
+ "grad_norm": 1.3646726182984732,
+ "learning_rate": 8.130925987691569e-05,
+ "loss": 0.72,
+ "num_input_tokens_seen": 29167776,
+ "step": 75,
+ "train_runtime": 6354.5877,
+ "train_tokens_per_second": 4590.034
+ },
+ {
+ "epoch": 1.4682926829268292,
+ "grad_norm": 2.817948027228682,
+ "learning_rate": 8.083594363142717e-05,
+ "loss": 0.7192,
+ "num_input_tokens_seen": 29560608,
+ "step": 76,
+ "train_runtime": 6440.0829,
+ "train_tokens_per_second": 4590.097
+ },
+ {
+ "epoch": 1.4878048780487805,
+ "grad_norm": 2.8685745350018976,
+ "learning_rate": 8.035812539093557e-05,
+ "loss": 0.7221,
+ "num_input_tokens_seen": 29953440,
+ "step": 77,
+ "train_runtime": 6525.6763,
+ "train_tokens_per_second": 4590.09
+ },
+ {
+ "epoch": 1.5073170731707317,
+ "grad_norm": 0.806171095791916,
+ "learning_rate": 7.987587491608637e-05,
+ "loss": 0.7031,
+ "num_input_tokens_seen": 30346272,
+ "step": 78,
+ "train_runtime": 6611.3229,
+ "train_tokens_per_second": 4590.045
+ },
+ {
+ "epoch": 1.526829268292683,
+ "grad_norm": 3.0602847716922237,
+ "learning_rate": 7.938926261462366e-05,
+ "loss": 0.7118,
+ "num_input_tokens_seen": 30739104,
+ "step": 79,
+ "train_runtime": 6696.8792,
+ "train_tokens_per_second": 4590.064
+ },
+ {
+ "epoch": 1.5463414634146342,
+ "grad_norm": 1.0137101758685187,
+ "learning_rate": 7.889835953111075e-05,
+ "loss": 0.6952,
+ "num_input_tokens_seen": 31131936,
+ "step": 80,
+ "train_runtime": 6782.7752,
+ "train_tokens_per_second": 4589.852
+ },
+ {
+ "epoch": 1.5658536585365854,
+ "grad_norm": 1.4320523827445433,
+ "learning_rate": 7.840323733655778e-05,
+ "loss": 0.6693,
+ "num_input_tokens_seen": 31524768,
+ "step": 81,
+ "train_runtime": 6868.9208,
+ "train_tokens_per_second": 4589.479
+ },
+ {
+ "epoch": 1.5853658536585367,
+ "grad_norm": 3.2012899077086177,
+ "learning_rate": 7.790396831795792e-05,
+ "loss": 0.6718,
+ "num_input_tokens_seen": 31917600,
+ "step": 82,
+ "train_runtime": 6954.5194,
+ "train_tokens_per_second": 4589.476
+ },
+ {
+ "epoch": 1.604878048780488,
+ "grad_norm": 0.7553296877905612,
+ "learning_rate": 7.740062536773352e-05,
+ "loss": 0.67,
+ "num_input_tokens_seen": 32310432,
+ "step": 83,
+ "train_runtime": 7040.0533,
+ "train_tokens_per_second": 4589.515
+ },
+ {
+ "epoch": 1.6243902439024391,
+ "grad_norm": 1.069764102117734,
+ "learning_rate": 7.689328197309393e-05,
+ "loss": 0.6688,
+ "num_input_tokens_seen": 32703264,
+ "step": 84,
+ "train_runtime": 7125.6152,
+ "train_tokens_per_second": 4589.536
+ },
+ {
+ "epoch": 1.6439024390243904,
+ "grad_norm": 2.635419412507475,
+ "learning_rate": 7.638201220530665e-05,
+ "loss": 0.6672,
+ "num_input_tokens_seen": 33096096,
+ "step": 85,
+ "train_runtime": 7211.09,
+ "train_tokens_per_second": 4589.611
+ },
+ {
+ "epoch": 1.6634146341463416,
+ "grad_norm": 0.7024644501267906,
+ "learning_rate": 7.586689070888284e-05,
+ "loss": 0.6573,
+ "num_input_tokens_seen": 33488928,
+ "step": 86,
+ "train_runtime": 7296.5933,
+ "train_tokens_per_second": 4589.666
+ },
+ {
+ "epoch": 1.6829268292682928,
+ "grad_norm": 0.44428213007263,
+ "learning_rate": 7.534799269067953e-05,
+ "loss": 0.6422,
+ "num_input_tokens_seen": 33881760,
+ "step": 87,
+ "train_runtime": 7382.0301,
+ "train_tokens_per_second": 4589.762
+ },
+ {
+ "epoch": 1.7024390243902439,
+ "grad_norm": 0.5416450836446436,
+ "learning_rate": 7.48253939089194e-05,
+ "loss": 0.6269,
+ "num_input_tokens_seen": 34274592,
+ "step": 88,
+ "train_runtime": 7467.4502,
+ "train_tokens_per_second": 4589.865
+ },
+ {
+ "epoch": 1.721951219512195,
+ "grad_norm": 0.5642965602823063,
+ "learning_rate": 7.42991706621303e-05,
+ "loss": 0.6357,
+ "num_input_tokens_seen": 34667424,
+ "step": 89,
+ "train_runtime": 7553.0173,
+ "train_tokens_per_second": 4589.878
+ },
+ {
+ "epoch": 1.7414634146341463,
+ "grad_norm": 0.5097755585636675,
+ "learning_rate": 7.376939977800582e-05,
+ "loss": 0.6299,
+ "num_input_tokens_seen": 35060256,
+ "step": 90,
+ "train_runtime": 7638.6282,
+ "train_tokens_per_second": 4589.863
+ },
+ {
+ "epoch": 1.7609756097560976,
+ "grad_norm": 0.45556407265977034,
+ "learning_rate": 7.323615860218843e-05,
+ "loss": 0.634,
+ "num_input_tokens_seen": 35453088,
+ "step": 91,
+ "train_runtime": 7724.1622,
+ "train_tokens_per_second": 4589.894
+ },
+ {
+ "epoch": 1.7804878048780488,
+ "grad_norm": 0.4536282771598066,
+ "learning_rate": 7.269952498697734e-05,
+ "loss": 0.6251,
+ "num_input_tokens_seen": 35845920,
+ "step": 92,
+ "train_runtime": 7809.6142,
+ "train_tokens_per_second": 4589.973
+ },
+ {
+ "epoch": 1.8,
+ "grad_norm": 0.43350812308828435,
+ "learning_rate": 7.215957727996207e-05,
+ "loss": 0.62,
+ "num_input_tokens_seen": 36238752,
+ "step": 93,
+ "train_runtime": 7895.006,
+ "train_tokens_per_second": 4590.085
+ },
+ {
+ "epoch": 1.819512195121951,
+ "grad_norm": 0.40833077692944497,
+ "learning_rate": 7.161639431258387e-05,
+ "loss": 0.6073,
+ "num_input_tokens_seen": 36631584,
+ "step": 94,
+ "train_runtime": 7980.4652,
+ "train_tokens_per_second": 4590.156
+ },
+ {
+ "epoch": 1.8390243902439023,
+ "grad_norm": 0.4235068021237779,
+ "learning_rate": 7.107005538862646e-05,
+ "loss": 0.6074,
+ "num_input_tokens_seen": 37024416,
+ "step": 95,
+ "train_runtime": 8065.9658,
+ "train_tokens_per_second": 4590.202
+ },
+ {
+ "epoch": 1.8585365853658535,
+ "grad_norm": 0.4132415083645228,
+ "learning_rate": 7.052064027263786e-05,
+ "loss": 0.6064,
+ "num_input_tokens_seen": 37417248,
+ "step": 96,
+ "train_runtime": 8151.397,
+ "train_tokens_per_second": 4590.287
+ },
+ {
+ "epoch": 1.8780487804878048,
+ "grad_norm": 0.4322861748139327,
+ "learning_rate": 6.996822917828477e-05,
+ "loss": 0.5922,
+ "num_input_tokens_seen": 37810080,
+ "step": 97,
+ "train_runtime": 8236.8862,
+ "train_tokens_per_second": 4590.337
+ },
+ {
+ "epoch": 1.897560975609756,
+ "grad_norm": 0.38942999050451615,
+ "learning_rate": 6.941290275664174e-05,
+ "loss": 0.5939,
+ "num_input_tokens_seen": 38202912,
+ "step": 98,
+ "train_runtime": 8322.3209,
+ "train_tokens_per_second": 4590.416
+ },
+ {
+ "epoch": 1.9170731707317072,
+ "grad_norm": 0.3753721554361611,
+ "learning_rate": 6.885474208441603e-05,
+ "loss": 0.5913,
+ "num_input_tokens_seen": 38595744,
+ "step": 99,
+ "train_runtime": 8407.7641,
+ "train_tokens_per_second": 4590.488
+ },
+ {
+ "epoch": 1.9365853658536585,
+ "grad_norm": 0.35005150318450906,
+ "learning_rate": 6.829382865211063e-05,
+ "loss": 0.5863,
+ "num_input_tokens_seen": 38988576,
+ "step": 100,
+ "train_runtime": 8493.2736,
+ "train_tokens_per_second": 4590.524
+ },
+ {
+ "epoch": 1.9560975609756097,
+ "grad_norm": 0.37808190956380233,
+ "learning_rate": 6.773024435212678e-05,
+ "loss": 0.5822,
+ "num_input_tokens_seen": 39381408,
+ "step": 101,
+ "train_runtime": 8578.7547,
+ "train_tokens_per_second": 4590.574
+ },
+ {
+ "epoch": 1.975609756097561,
+ "grad_norm": 0.3580568006066326,
+ "learning_rate": 6.716407146680792e-05,
+ "loss": 0.5666,
+ "num_input_tokens_seen": 39774240,
+ "step": 102,
+ "train_runtime": 8664.2328,
+ "train_tokens_per_second": 4590.625
+ },
+ {
+ "epoch": 1.9951219512195122,
+ "grad_norm": 0.39572577046735974,
+ "learning_rate": 6.659539265642643e-05,
+ "loss": 0.5806,
+ "num_input_tokens_seen": 40167072,
+ "step": 103,
+ "train_runtime": 8749.6718,
+ "train_tokens_per_second": 4590.695
+ },
+ {
+ "epoch": 2.0,
+ "grad_norm": 0.5994329532374332,
+ "learning_rate": 6.602429094711548e-05,
+ "loss": 0.5605,
+ "num_input_tokens_seen": 40265280,
+ "step": 104,
+ "train_runtime": 8771.0021,
+ "train_tokens_per_second": 4590.727
+ },
+ {
+ "epoch": 2.0195121951219512,
+ "grad_norm": 0.3746452769053094,
+ "learning_rate": 6.545084971874738e-05,
+ "loss": 0.5462,
+ "num_input_tokens_seen": 40658112,
+ "step": 105,
+ "train_runtime": 8856.5287,
+ "train_tokens_per_second": 4590.75
+ },
+ {
+ "epoch": 2.0390243902439025,
+ "grad_norm": 0.453029236937953,
+ "learning_rate": 6.487515269276016e-05,
+ "loss": 0.5447,
+ "num_input_tokens_seen": 41050944,
+ "step": 106,
+ "train_runtime": 8942.0292,
+ "train_tokens_per_second": 4590.786
+ },
+ {
+ "epoch": 2.0585365853658537,
+ "grad_norm": 0.39272552581140363,
+ "learning_rate": 6.429728391993446e-05,
+ "loss": 0.5405,
+ "num_input_tokens_seen": 41443776,
+ "step": 107,
+ "train_runtime": 9027.5293,
+ "train_tokens_per_second": 4590.822
+ },
+ {
+ "epoch": 2.078048780487805,
+ "grad_norm": 0.35801484704748626,
+ "learning_rate": 6.37173277681223e-05,
+ "loss": 0.5433,
+ "num_input_tokens_seen": 41836608,
+ "step": 108,
+ "train_runtime": 9113.0265,
+ "train_tokens_per_second": 4590.858
+ },
+ {
+ "epoch": 2.097560975609756,
+ "grad_norm": 0.3703575056088343,
+ "learning_rate": 6.313536890992935e-05,
+ "loss": 0.5335,
+ "num_input_tokens_seen": 42229440,
+ "step": 109,
+ "train_runtime": 9198.6667,
+ "train_tokens_per_second": 4590.822
+ },
+ {
+ "epoch": 2.1170731707317074,
+ "grad_norm": 0.3456947105084525,
+ "learning_rate": 6.255149231035309e-05,
+ "loss": 0.5416,
+ "num_input_tokens_seen": 42622272,
+ "step": 110,
+ "train_runtime": 9284.269,
+ "train_tokens_per_second": 4590.805
+ },
+ {
+ "epoch": 2.1365853658536587,
+ "grad_norm": 0.33745389931809666,
+ "learning_rate": 6.19657832143779e-05,
+ "loss": 0.5322,
+ "num_input_tokens_seen": 43015104,
+ "step": 111,
+ "train_runtime": 9369.843,
+ "train_tokens_per_second": 4590.803
+ },
+ {
+ "epoch": 2.15609756097561,
+ "grad_norm": 0.33453831659901945,
+ "learning_rate": 6.13783271345295e-05,
+ "loss": 0.5301,
+ "num_input_tokens_seen": 43407936,
+ "step": 112,
+ "train_runtime": 9455.4833,
+ "train_tokens_per_second": 4590.769
+ },
+ {
+ "epoch": 2.175609756097561,
+ "grad_norm": 0.3355284079398747,
+ "learning_rate": 6.078920983839031e-05,
+ "loss": 0.5422,
+ "num_input_tokens_seen": 43800768,
+ "step": 113,
+ "train_runtime": 9541.0542,
+ "train_tokens_per_second": 4590.768
+ },
+ {
+ "epoch": 2.1951219512195124,
+ "grad_norm": 0.3257056473237624,
+ "learning_rate": 6.019851733607744e-05,
+ "loss": 0.5352,
+ "num_input_tokens_seen": 44193600,
+ "step": 114,
+ "train_runtime": 9626.532,
+ "train_tokens_per_second": 4590.812
+ },
+ {
+ "epoch": 2.2146341463414636,
+ "grad_norm": 0.32389003819610007,
+ "learning_rate": 5.960633586768543e-05,
+ "loss": 0.5112,
+ "num_input_tokens_seen": 44586432,
+ "step": 115,
+ "train_runtime": 9712.1322,
+ "train_tokens_per_second": 4590.797
+ },
+ {
+ "epoch": 2.234146341463415,
+ "grad_norm": 0.34317758929799447,
+ "learning_rate": 5.90127518906953e-05,
+ "loss": 0.5171,
+ "num_input_tokens_seen": 44979264,
+ "step": 116,
+ "train_runtime": 9797.6269,
+ "train_tokens_per_second": 4590.833
+ },
+ {
+ "epoch": 2.253658536585366,
+ "grad_norm": 0.31740577502228223,
+ "learning_rate": 5.841785206735192e-05,
+ "loss": 0.5027,
+ "num_input_tokens_seen": 45372096,
+ "step": 117,
+ "train_runtime": 9883.3105,
+ "train_tokens_per_second": 4590.779
+ },
+ {
+ "epoch": 2.2731707317073173,
+ "grad_norm": 0.3705124747857168,
+ "learning_rate": 5.782172325201155e-05,
+ "loss": 0.5151,
+ "num_input_tokens_seen": 45764928,
+ "step": 118,
+ "train_runtime": 9968.9083,
+ "train_tokens_per_second": 4590.766
+ },
+ {
+ "epoch": 2.292682926829268,
+ "grad_norm": 0.36613629503091577,
+ "learning_rate": 5.7224452478461064e-05,
+ "loss": 0.5152,
+ "num_input_tokens_seen": 46157760,
+ "step": 119,
+ "train_runtime": 10054.434,
+ "train_tokens_per_second": 4590.787
+ },
+ {
+ "epoch": 2.3121951219512193,
+ "grad_norm": 0.3314221690877754,
+ "learning_rate": 5.6626126947211386e-05,
+ "loss": 0.5303,
+ "num_input_tokens_seen": 46550592,
+ "step": 120,
+ "train_runtime": 10139.9414,
+ "train_tokens_per_second": 4590.815
+ },
+ {
+ "epoch": 2.3317073170731706,
+ "grad_norm": 0.3470857883762708,
+ "learning_rate": 5.602683401276615e-05,
+ "loss": 0.5032,
+ "num_input_tokens_seen": 46943424,
+ "step": 121,
+ "train_runtime": 10225.7135,
+ "train_tokens_per_second": 4590.724
+ },
+ {
+ "epoch": 2.351219512195122,
+ "grad_norm": 0.3136083377085674,
+ "learning_rate": 5.542666117086832e-05,
+ "loss": 0.5119,
+ "num_input_tokens_seen": 47336256,
+ "step": 122,
+ "train_runtime": 10311.2617,
+ "train_tokens_per_second": 4590.734
+ },
+ {
+ "epoch": 2.370731707317073,
+ "grad_norm": 0.4227118641608833,
+ "learning_rate": 5.482569604572576e-05,
+ "loss": 0.5045,
+ "num_input_tokens_seen": 47729088,
+ "step": 123,
+ "train_runtime": 10396.9552,
+ "train_tokens_per_second": 4590.679
+ },
+ {
+ "epoch": 2.3902439024390243,
+ "grad_norm": 0.32585539704348276,
+ "learning_rate": 5.422402637721836e-05,
+ "loss": 0.505,
+ "num_input_tokens_seen": 48121920,
+ "step": 124,
+ "train_runtime": 10482.7724,
+ "train_tokens_per_second": 4590.572
+ },
+ {
+ "epoch": 2.4097560975609755,
+ "grad_norm": 0.3224554976871665,
+ "learning_rate": 5.3621740008088126e-05,
+ "loss": 0.5017,
+ "num_input_tokens_seen": 48514752,
+ "step": 125,
+ "train_runtime": 10568.4335,
+ "train_tokens_per_second": 4590.534
+ },
+ {
+ "epoch": 2.4292682926829268,
+ "grad_norm": 0.33513514879191686,
+ "learning_rate": 5.3018924871114305e-05,
+ "loss": 0.5019,
+ "num_input_tokens_seen": 48907584,
+ "step": 126,
+ "train_runtime": 10653.9397,
+ "train_tokens_per_second": 4590.563
+ },
+ {
+ "epoch": 2.448780487804878,
+ "grad_norm": 0.3333825976504885,
+ "learning_rate": 5.2415668976275355e-05,
+ "loss": 0.4921,
+ "num_input_tokens_seen": 49300416,
+ "step": 127,
+ "train_runtime": 10739.489,
+ "train_tokens_per_second": 4590.574
+ },
+ {
+ "epoch": 2.4682926829268292,
+ "grad_norm": 0.32605283067247587,
+ "learning_rate": 5.181206039789962e-05,
+ "loss": 0.4951,
+ "num_input_tokens_seen": 49693248,
+ "step": 128,
+ "train_runtime": 10825.0018,
+ "train_tokens_per_second": 4590.6
+ },
+ {
+ "epoch": 2.4878048780487805,
+ "grad_norm": 0.345821204685631,
+ "learning_rate": 5.1208187261806615e-05,
+ "loss": 0.5076,
+ "num_input_tokens_seen": 50086080,
+ "step": 129,
+ "train_runtime": 10910.4514,
+ "train_tokens_per_second": 4590.652
+ },
+ {
+ "epoch": 2.5073170731707317,
+ "grad_norm": 0.32182997237639754,
+ "learning_rate": 5.060413773244087e-05,
+ "loss": 0.4974,
+ "num_input_tokens_seen": 50478912,
+ "step": 130,
+ "train_runtime": 10996.0971,
+ "train_tokens_per_second": 4590.621
+ },
+ {
+ "epoch": 2.526829268292683,
+ "grad_norm": 0.3424390071269269,
+ "learning_rate": 5e-05,
+ "loss": 0.5063,
+ "num_input_tokens_seen": 50871744,
+ "step": 131,
+ "train_runtime": 11081.5772,
+ "train_tokens_per_second": 4590.659
+ },
+ {
+ "epoch": 2.546341463414634,
+ "grad_norm": 0.32151933822675133,
+ "learning_rate": 4.9395862267559136e-05,
+ "loss": 0.5093,
+ "num_input_tokens_seen": 51264576,
+ "step": 132,
+ "train_runtime": 11167.132,
+ "train_tokens_per_second": 4590.666
+ },
+ {
+ "epoch": 2.5658536585365854,
+ "grad_norm": 0.33150875749489067,
+ "learning_rate": 4.87918127381934e-05,
+ "loss": 0.5019,
+ "num_input_tokens_seen": 51657408,
+ "step": 133,
+ "train_runtime": 11252.7312,
+ "train_tokens_per_second": 4590.655
+ },
+ {
+ "epoch": 2.5853658536585367,
+ "grad_norm": 0.32260765542480235,
+ "learning_rate": 4.81879396021004e-05,
+ "loss": 0.4836,
+ "num_input_tokens_seen": 52050240,
+ "step": 134,
+ "train_runtime": 11338.3707,
+ "train_tokens_per_second": 4590.628
+ },
+ {
+ "epoch": 2.604878048780488,
+ "grad_norm": 0.3284283947692906,
+ "learning_rate": 4.758433102372466e-05,
+ "loss": 0.4878,
+ "num_input_tokens_seen": 52443072,
+ "step": 135,
+ "train_runtime": 11424.0926,
+ "train_tokens_per_second": 4590.568
+ },
+ {
+ "epoch": 2.624390243902439,
+ "grad_norm": 0.3174202627091527,
+ "learning_rate": 4.6981075128885693e-05,
+ "loss": 0.4895,
+ "num_input_tokens_seen": 52835904,
+ "step": 136,
+ "train_runtime": 11509.8066,
+ "train_tokens_per_second": 4590.512
+ },
+ {
+ "epoch": 2.6439024390243904,
+ "grad_norm": 0.3424879966422641,
+ "learning_rate": 4.6378259991911886e-05,
+ "loss": 0.4987,
+ "num_input_tokens_seen": 53228736,
+ "step": 137,
+ "train_runtime": 11595.5338,
+ "train_tokens_per_second": 4590.452
+ },
+ {
+ "epoch": 2.6634146341463416,
+ "grad_norm": 0.31862495887920006,
+ "learning_rate": 4.5775973622781655e-05,
+ "loss": 0.4952,
+ "num_input_tokens_seen": 53621568,
+ "step": 138,
+ "train_runtime": 11681.2149,
+ "train_tokens_per_second": 4590.41
+ },
+ {
+ "epoch": 2.682926829268293,
+ "grad_norm": 0.32882715869402424,
+ "learning_rate": 4.5174303954274244e-05,
+ "loss": 0.4852,
+ "num_input_tokens_seen": 54014400,
+ "step": 139,
+ "train_runtime": 11766.8708,
+ "train_tokens_per_second": 4590.379
+ },
+ {
+ "epoch": 2.7024390243902436,
+ "grad_norm": 0.35853159941406193,
+ "learning_rate": 4.457333882913169e-05,
+ "loss": 0.4966,
+ "num_input_tokens_seen": 54407232,
+ "step": 140,
+ "train_runtime": 11852.5434,
+ "train_tokens_per_second": 4590.342
+ },
+ {
+ "epoch": 2.721951219512195,
+ "grad_norm": 0.3220506479281512,
+ "learning_rate": 4.397316598723385e-05,
+ "loss": 0.4851,
+ "num_input_tokens_seen": 54800064,
+ "step": 141,
+ "train_runtime": 11938.0,
+ "train_tokens_per_second": 4590.389
+ },
+ {
+ "epoch": 2.741463414634146,
+ "grad_norm": 0.338780671076278,
+ "learning_rate": 4.337387305278864e-05,
+ "loss": 0.4897,
+ "num_input_tokens_seen": 55192896,
+ "step": 142,
+ "train_runtime": 12023.5136,
+ "train_tokens_per_second": 4590.413
+ },
+ {
+ "epoch": 2.7609756097560973,
+ "grad_norm": 0.3467288544912658,
+ "learning_rate": 4.277554752153895e-05,
+ "loss": 0.4992,
+ "num_input_tokens_seen": 55585728,
+ "step": 143,
+ "train_runtime": 12109.1196,
+ "train_tokens_per_second": 4590.402
+ },
+ {
+ "epoch": 2.7804878048780486,
+ "grad_norm": 0.3172644652222353,
+ "learning_rate": 4.2178276747988446e-05,
+ "loss": 0.4842,
+ "num_input_tokens_seen": 55978560,
+ "step": 144,
+ "train_runtime": 12194.6967,
+ "train_tokens_per_second": 4590.402
+ },
+ {
+ "epoch": 2.8,
+ "grad_norm": 0.33792230752401564,
+ "learning_rate": 4.1582147932648074e-05,
+ "loss": 0.4941,
+ "num_input_tokens_seen": 56371392,
+ "step": 145,
+ "train_runtime": 12280.2743,
+ "train_tokens_per_second": 4590.402
+ },
+ {
+ "epoch": 2.819512195121951,
+ "grad_norm": 0.33669423565884143,
+ "learning_rate": 4.0987248109304714e-05,
+ "loss": 0.4933,
+ "num_input_tokens_seen": 56764224,
+ "step": 146,
+ "train_runtime": 12365.7877,
+ "train_tokens_per_second": 4590.425
+ },
+ {
+ "epoch": 2.8390243902439023,
+ "grad_norm": 0.32219243021646965,
+ "learning_rate": 4.039366413231458e-05,
+ "loss": 0.4847,
+ "num_input_tokens_seen": 57157056,
+ "step": 147,
+ "train_runtime": 12451.3235,
+ "train_tokens_per_second": 4590.44
+ },
+ {
+ "epoch": 2.8585365853658535,
+ "grad_norm": 0.32466591195010414,
+ "learning_rate": 3.980148266392258e-05,
+ "loss": 0.492,
+ "num_input_tokens_seen": 57549888,
+ "step": 148,
+ "train_runtime": 12536.8522,
+ "train_tokens_per_second": 4590.458
+ },
+ {
+ "epoch": 2.8780487804878048,
+ "grad_norm": 0.3484362997993096,
+ "learning_rate": 3.92107901616097e-05,
+ "loss": 0.4933,
+ "num_input_tokens_seen": 57942720,
+ "step": 149,
+ "train_runtime": 12622.324,
+ "train_tokens_per_second": 4590.495
+ },
+ {
+ "epoch": 2.897560975609756,
+ "grad_norm": 0.30189538682359085,
+ "learning_rate": 3.86216728654705e-05,
+ "loss": 0.4812,
+ "num_input_tokens_seen": 58335552,
+ "step": 150,
+ "train_runtime": 12707.9974,
+ "train_tokens_per_second": 4590.46
+ },
+ {
+ "epoch": 2.9170731707317072,
+ "grad_norm": 0.3427990142333379,
+ "learning_rate": 3.803421678562213e-05,
+ "loss": 0.4822,
+ "num_input_tokens_seen": 58728384,
+ "step": 151,
+ "train_runtime": 12793.4446,
+ "train_tokens_per_second": 4590.506
+ },
+ {
+ "epoch": 2.9365853658536585,
+ "grad_norm": 0.38332155033085324,
+ "learning_rate": 3.744850768964692e-05,
+ "loss": 0.4814,
+ "num_input_tokens_seen": 59121216,
+ "step": 152,
+ "train_runtime": 12878.9545,
+ "train_tokens_per_second": 4590.529
+ },
+ {
+ "epoch": 2.9560975609756097,
+ "grad_norm": 0.33602591668846876,
+ "learning_rate": 3.6864631090070655e-05,
+ "loss": 0.4921,
+ "num_input_tokens_seen": 59514048,
+ "step": 153,
+ "train_runtime": 12964.4255,
+ "train_tokens_per_second": 4590.566
+ },
+ {
+ "epoch": 2.975609756097561,
+ "grad_norm": 0.5609630326555886,
+ "learning_rate": 3.628267223187771e-05,
+ "loss": 0.4768,
+ "num_input_tokens_seen": 59906880,
+ "step": 154,
+ "train_runtime": 13049.8908,
+ "train_tokens_per_second": 4590.604
+ },
+ {
+ "epoch": 2.995121951219512,
+ "grad_norm": 0.3265623784171003,
+ "learning_rate": 3.570271608006555e-05,
+ "loss": 0.4579,
+ "num_input_tokens_seen": 60299712,
+ "step": 155,
+ "train_runtime": 13135.5526,
+ "train_tokens_per_second": 4590.573
+ },
+ {
+ "epoch": 3.0,
+ "grad_norm": 0.5314640992941223,
+ "learning_rate": 3.512484730723986e-05,
+ "loss": 0.4418,
+ "num_input_tokens_seen": 60397920,
+ "step": 156,
+ "train_runtime": 13156.9996,
+ "train_tokens_per_second": 4590.554
+ },
+ {
+ "epoch": 3.0195121951219512,
+ "grad_norm": 0.3203324244604146,
+ "learning_rate": 3.4549150281252636e-05,
+ "loss": 0.4471,
+ "num_input_tokens_seen": 60790752,
+ "step": 157,
+ "train_runtime": 13242.5524,
+ "train_tokens_per_second": 4590.562
+ },
+ {
+ "epoch": 3.0390243902439025,
+ "grad_norm": 0.32182085188193466,
+ "learning_rate": 3.397570905288453e-05,
+ "loss": 0.4543,
+ "num_input_tokens_seen": 61183584,
+ "step": 158,
+ "train_runtime": 13328.1385,
+ "train_tokens_per_second": 4590.557
+ },
+ {
+ "epoch": 3.0585365853658537,
+ "grad_norm": 0.34726844831899983,
+ "learning_rate": 3.340460734357359e-05,
+ "loss": 0.4561,
+ "num_input_tokens_seen": 61576416,
+ "step": 159,
+ "train_runtime": 13413.7011,
+ "train_tokens_per_second": 4590.561
+ },
+ {
+ "epoch": 3.078048780487805,
+ "grad_norm": 0.32939131628064333,
+ "learning_rate": 3.283592853319209e-05,
+ "loss": 0.4521,
+ "num_input_tokens_seen": 61969248,
+ "step": 160,
+ "train_runtime": 13499.28,
+ "train_tokens_per_second": 4590.56
+ },
+ {
+ "epoch": 3.097560975609756,
+ "grad_norm": 0.325117680484632,
+ "learning_rate": 3.226975564787322e-05,
+ "loss": 0.444,
+ "num_input_tokens_seen": 62362080,
+ "step": 161,
+ "train_runtime": 13585.0343,
+ "train_tokens_per_second": 4590.499
+ },
+ {
+ "epoch": 3.1170731707317074,
+ "grad_norm": 0.32737548048736476,
+ "learning_rate": 3.170617134788939e-05,
+ "loss": 0.4541,
+ "num_input_tokens_seen": 62754912,
+ "step": 162,
+ "train_runtime": 13670.9946,
+ "train_tokens_per_second": 4590.369
+ },
+ {
+ "epoch": 3.1365853658536587,
+ "grad_norm": 0.39482140502206775,
+ "learning_rate": 3.114525791558398e-05,
+ "loss": 0.4582,
+ "num_input_tokens_seen": 63147744,
+ "step": 163,
+ "train_runtime": 13756.507,
+ "train_tokens_per_second": 4590.391
+ },
+ {
+ "epoch": 3.15609756097561,
+ "grad_norm": 0.3357742173716643,
+ "learning_rate": 3.0587097243358253e-05,
+ "loss": 0.4429,
+ "num_input_tokens_seen": 63540576,
+ "step": 164,
+ "train_runtime": 13842.0341,
+ "train_tokens_per_second": 4590.407
+ },
+ {
+ "epoch": 3.175609756097561,
+ "grad_norm": 0.3305601057024288,
+ "learning_rate": 3.003177082171523e-05,
+ "loss": 0.4443,
+ "num_input_tokens_seen": 63933408,
+ "step": 165,
+ "train_runtime": 13927.6175,
+ "train_tokens_per_second": 4590.405
+ },
+ {
+ "epoch": 3.1951219512195124,
+ "grad_norm": 0.32083308624205303,
+ "learning_rate": 2.9479359727362173e-05,
+ "loss": 0.4521,
+ "num_input_tokens_seen": 64326240,
+ "step": 166,
+ "train_runtime": 14013.2528,
+ "train_tokens_per_second": 4590.386
+ },
+ {
+ "epoch": 3.2146341463414636,
+ "grad_norm": 0.3335280375327217,
+ "learning_rate": 2.8929944611373554e-05,
+ "loss": 0.4362,
+ "num_input_tokens_seen": 64719072,
+ "step": 167,
+ "train_runtime": 14099.4999,
+ "train_tokens_per_second": 4590.168
+ },
+ {
+ "epoch": 3.234146341463415,
+ "grad_norm": 0.31479589078333686,
+ "learning_rate": 2.8383605687416125e-05,
+ "loss": 0.4503,
+ "num_input_tokens_seen": 65111904,
+ "step": 168,
+ "train_runtime": 14184.9981,
+ "train_tokens_per_second": 4590.195
+ },
+ {
+ "epoch": 3.253658536585366,
+ "grad_norm": 0.3729374255823192,
+ "learning_rate": 2.784042272003794e-05,
+ "loss": 0.448,
+ "num_input_tokens_seen": 65504736,
+ "step": 169,
+ "train_runtime": 14270.519,
+ "train_tokens_per_second": 4590.214
+ },
+ {
+ "epoch": 3.2731707317073173,
+ "grad_norm": 0.32769391044042884,
+ "learning_rate": 2.7300475013022663e-05,
+ "loss": 0.4495,
+ "num_input_tokens_seen": 65897568,
+ "step": 170,
+ "train_runtime": 14355.9895,
+ "train_tokens_per_second": 4590.249
+ },
+ {
+ "epoch": 3.292682926829268,
+ "grad_norm": 0.3747118383247798,
+ "learning_rate": 2.6763841397811573e-05,
+ "loss": 0.4512,
+ "num_input_tokens_seen": 66290400,
+ "step": 171,
+ "train_runtime": 14441.5179,
+ "train_tokens_per_second": 4590.265
+ },
+ {
+ "epoch": 3.3121951219512193,
+ "grad_norm": 0.35364574417316713,
+ "learning_rate": 2.6230600221994196e-05,
+ "loss": 0.4342,
+ "num_input_tokens_seen": 66683232,
+ "step": 172,
+ "train_runtime": 14527.0516,
+ "train_tokens_per_second": 4590.28
+ },
+ {
+ "epoch": 3.3317073170731706,
+ "grad_norm": 0.34897968261286955,
+ "learning_rate": 2.57008293378697e-05,
+ "loss": 0.4456,
+ "num_input_tokens_seen": 67076064,
+ "step": 173,
+ "train_runtime": 14612.5638,
+ "train_tokens_per_second": 4590.301
+ },
+ {
+ "epoch": 3.351219512195122,
+ "grad_norm": 0.3526301707012528,
+ "learning_rate": 2.5174606091080627e-05,
+ "loss": 0.4459,
+ "num_input_tokens_seen": 67468896,
+ "step": 174,
+ "train_runtime": 14698.0406,
+ "train_tokens_per_second": 4590.333
+ },
+ {
+ "epoch": 3.370731707317073,
+ "grad_norm": 0.35117048748443935,
+ "learning_rate": 2.4652007309320498e-05,
+ "loss": 0.4383,
+ "num_input_tokens_seen": 67861728,
+ "step": 175,
+ "train_runtime": 14783.5396,
+ "train_tokens_per_second": 4590.357
+ },
+ {
+ "epoch": 3.3902439024390243,
+ "grad_norm": 0.3122289166658474,
+ "learning_rate": 2.4133109291117156e-05,
+ "loss": 0.4447,
+ "num_input_tokens_seen": 68254560,
+ "step": 176,
+ "train_runtime": 14868.9762,
+ "train_tokens_per_second": 4590.401
+ },
+ {
+ "epoch": 3.4097560975609755,
+ "grad_norm": 0.3521930408336,
+ "learning_rate": 2.361798779469336e-05,
+ "loss": 0.442,
+ "num_input_tokens_seen": 68647392,
+ "step": 177,
+ "train_runtime": 14954.4517,
+ "train_tokens_per_second": 4590.432
+ },
+ {
+ "epoch": 3.4292682926829268,
+ "grad_norm": 0.3077364343260739,
+ "learning_rate": 2.3106718026906072e-05,
+ "loss": 0.4418,
+ "num_input_tokens_seen": 69040224,
+ "step": 178,
+ "train_runtime": 15039.9655,
+ "train_tokens_per_second": 4590.451
+ },
+ {
+ "epoch": 3.448780487804878,
+ "grad_norm": 0.33010430660817575,
+ "learning_rate": 2.259937463226651e-05,
+ "loss": 0.4325,
+ "num_input_tokens_seen": 69433056,
+ "step": 179,
+ "train_runtime": 15125.4865,
+ "train_tokens_per_second": 4590.468
+ },
+ {
+ "epoch": 3.4682926829268292,
+ "grad_norm": 0.325795518062987,
+ "learning_rate": 2.209603168204209e-05,
+ "loss": 0.4319,
+ "num_input_tokens_seen": 69825888,
+ "step": 180,
+ "train_runtime": 15211.012,
+ "train_tokens_per_second": 4590.483
+ },
+ {
+ "epoch": 3.4878048780487805,
+ "grad_norm": 0.3179877620527606,
+ "learning_rate": 2.1596762663442218e-05,
+ "loss": 0.4401,
+ "num_input_tokens_seen": 70218720,
+ "step": 181,
+ "train_runtime": 15296.5238,
+ "train_tokens_per_second": 4590.502
+ },
+ {
+ "epoch": 3.5073170731707317,
+ "grad_norm": 0.3287765696670568,
+ "learning_rate": 2.1101640468889255e-05,
+ "loss": 0.4386,
+ "num_input_tokens_seen": 70611552,
+ "step": 182,
+ "train_runtime": 15382.0311,
+ "train_tokens_per_second": 4590.522
+ },
+ {
+ "epoch": 3.526829268292683,
+ "grad_norm": 0.31971467799401765,
+ "learning_rate": 2.061073738537635e-05,
+ "loss": 0.4202,
+ "num_input_tokens_seen": 71004384,
+ "step": 183,
+ "train_runtime": 15467.521,
+ "train_tokens_per_second": 4590.547
+ },
+ {
+ "epoch": 3.546341463414634,
+ "grad_norm": 0.3239181143254052,
+ "learning_rate": 2.0124125083913637e-05,
+ "loss": 0.4419,
+ "num_input_tokens_seen": 71397216,
+ "step": 184,
+ "train_runtime": 15553.2048,
+ "train_tokens_per_second": 4590.515
+ },
+ {
+ "epoch": 3.5658536585365854,
+ "grad_norm": 0.3040342295851482,
+ "learning_rate": 1.9641874609064443e-05,
+ "loss": 0.4336,
+ "num_input_tokens_seen": 71790048,
+ "step": 185,
+ "train_runtime": 15638.8236,
+ "train_tokens_per_second": 4590.502
+ },
+ {
+ "epoch": 3.5853658536585367,
+ "grad_norm": 0.3144952842609461,
+ "learning_rate": 1.9164056368572846e-05,
+ "loss": 0.4387,
+ "num_input_tokens_seen": 72182880,
+ "step": 186,
+ "train_runtime": 15724.5242,
+ "train_tokens_per_second": 4590.465
+ },
+ {
+ "epoch": 3.604878048780488,
+ "grad_norm": 0.31728573983048836,
+ "learning_rate": 1.8690740123084316e-05,
+ "loss": 0.4297,
+ "num_input_tokens_seen": 72575712,
+ "step": 187,
+ "train_runtime": 15810.1041,
+ "train_tokens_per_second": 4590.464
+ },
+ {
+ "epoch": 3.624390243902439,
+ "grad_norm": 0.35290725632142694,
+ "learning_rate": 1.8221994975960736e-05,
+ "loss": 0.446,
+ "num_input_tokens_seen": 72968544,
+ "step": 188,
+ "train_runtime": 15895.683,
+ "train_tokens_per_second": 4590.463
+ },
+ {
+ "epoch": 3.6439024390243904,
+ "grad_norm": 0.31295107237929615,
+ "learning_rate": 1.7757889363191483e-05,
+ "loss": 0.4318,
+ "num_input_tokens_seen": 73361376,
+ "step": 189,
+ "train_runtime": 15981.3241,
+ "train_tokens_per_second": 4590.444
+ },
+ {
+ "epoch": 3.6634146341463416,
+ "grad_norm": 0.30434189898649333,
+ "learning_rate": 1.7298491043401795e-05,
+ "loss": 0.4393,
+ "num_input_tokens_seen": 73754208,
+ "step": 190,
+ "train_runtime": 16066.87,
+ "train_tokens_per_second": 4590.453
+ },
+ {
+ "epoch": 3.682926829268293,
+ "grad_norm": 0.3000645994101256,
+ "learning_rate": 1.684386708796025e-05,
+ "loss": 0.4437,
+ "num_input_tokens_seen": 74147040,
+ "step": 191,
+ "train_runtime": 16152.3518,
+ "train_tokens_per_second": 4590.48
+ },
+ {
+ "epoch": 3.7024390243902436,
+ "grad_norm": 0.3161645570263139,
+ "learning_rate": 1.6394083871186362e-05,
+ "loss": 0.4378,
+ "num_input_tokens_seen": 74539872,
+ "step": 192,
+ "train_runtime": 16237.7807,
+ "train_tokens_per_second": 4590.521
+ },
+ {
+ "epoch": 3.721951219512195,
+ "grad_norm": 0.3085852095656564,
+ "learning_rate": 1.5949207060660138e-05,
+ "loss": 0.4388,
+ "num_input_tokens_seen": 74932704,
+ "step": 193,
+ "train_runtime": 16323.3128,
+ "train_tokens_per_second": 4590.533
+ },
+ {
+ "epoch": 3.741463414634146,
+ "grad_norm": 0.3214095905441243,
+ "learning_rate": 1.550930160763462e-05,
+ "loss": 0.4483,
+ "num_input_tokens_seen": 75325536,
+ "step": 194,
+ "train_runtime": 16408.7376,
+ "train_tokens_per_second": 4590.575
+ },
+ {
+ "epoch": 3.7609756097560973,
+ "grad_norm": 0.3029571828988472,
+ "learning_rate": 1.5074431737553157e-05,
+ "loss": 0.4336,
+ "num_input_tokens_seen": 75718368,
+ "step": 195,
+ "train_runtime": 16494.1425,
+ "train_tokens_per_second": 4590.622
+ },
+ {
+ "epoch": 3.7804878048780486,
+ "grad_norm": 0.3002722038512263,
+ "learning_rate": 1.4644660940672627e-05,
+ "loss": 0.4334,
+ "num_input_tokens_seen": 76111200,
+ "step": 196,
+ "train_runtime": 16579.5744,
+ "train_tokens_per_second": 4590.661
+ },
+ {
+ "epoch": 3.8,
+ "grad_norm": 0.30486554391008086,
+ "learning_rate": 1.422005196279395e-05,
+ "loss": 0.4352,
+ "num_input_tokens_seen": 76504032,
+ "step": 197,
+ "train_runtime": 16665.0963,
+ "train_tokens_per_second": 4590.674
+ },
+ {
+ "epoch": 3.819512195121951,
+ "grad_norm": 0.3019225596466138,
+ "learning_rate": 1.3800666796101292e-05,
+ "loss": 0.4315,
+ "num_input_tokens_seen": 76896864,
+ "step": 198,
+ "train_runtime": 16750.5821,
+ "train_tokens_per_second": 4590.698
+ },
+ {
+ "epoch": 3.8390243902439023,
+ "grad_norm": 0.29879067901515843,
+ "learning_rate": 1.338656667011134e-05,
+ "loss": 0.4339,
+ "num_input_tokens_seen": 77289696,
+ "step": 199,
+ "train_runtime": 16836.0019,
+ "train_tokens_per_second": 4590.739
+ },
+ {
+ "epoch": 3.8585365853658535,
+ "grad_norm": 0.3064182382633002,
+ "learning_rate": 1.297781204273385e-05,
+ "loss": 0.4374,
+ "num_input_tokens_seen": 77682528,
+ "step": 200,
+ "train_runtime": 16921.5579,
+ "train_tokens_per_second": 4590.743
+ },
+ {
+ "epoch": 3.8780487804878048,
+ "grad_norm": 0.31545962722826715,
+ "learning_rate": 1.257446259144494e-05,
+ "loss": 0.4437,
+ "num_input_tokens_seen": 78075360,
+ "step": 201,
+ "train_runtime": 17007.1429,
+ "train_tokens_per_second": 4590.739
+ },
+ {
+ "epoch": 3.897560975609756,
+ "grad_norm": 0.3062038073298703,
+ "learning_rate": 1.2176577204574318e-05,
+ "loss": 0.4319,
+ "num_input_tokens_seen": 78468192,
+ "step": 202,
+ "train_runtime": 17092.564,
+ "train_tokens_per_second": 4590.779
+ },
+ {
+ "epoch": 3.9170731707317072,
+ "grad_norm": 0.4440664308923016,
+ "learning_rate": 1.178421397270758e-05,
+ "loss": 0.4322,
+ "num_input_tokens_seen": 78861024,
+ "step": 203,
+ "train_runtime": 17178.0234,
+ "train_tokens_per_second": 4590.809
+ },
+ {
+ "epoch": 3.9365853658536585,
+ "grad_norm": 0.3017158051173835,
+ "learning_rate": 1.1397430180205171e-05,
+ "loss": 0.4375,
+ "num_input_tokens_seen": 79253856,
+ "step": 204,
+ "train_runtime": 17263.8463,
+ "train_tokens_per_second": 4590.742
+ },
+ {
+ "epoch": 3.9560975609756097,
+ "grad_norm": 0.3075889998265315,
+ "learning_rate": 1.1016282296838887e-05,
+ "loss": 0.4182,
+ "num_input_tokens_seen": 79646688,
+ "step": 205,
+ "train_runtime": 17349.3107,
+ "train_tokens_per_second": 4590.77
+ },
+ {
+ "epoch": 3.975609756097561,
+ "grad_norm": 0.30086206726131764,
+ "learning_rate": 1.0640825969547496e-05,
+ "loss": 0.4382,
+ "num_input_tokens_seen": 80039520,
+ "step": 206,
+ "train_runtime": 17434.78,
+ "train_tokens_per_second": 4590.796
+ },
+ {
+ "epoch": 3.995121951219512,
+ "grad_norm": 0.30615268970962656,
+ "learning_rate": 1.0271116014312293e-05,
+ "loss": 0.4277,
+ "num_input_tokens_seen": 80432352,
+ "step": 207,
+ "train_runtime": 17520.2702,
+ "train_tokens_per_second": 4590.817
+ },
+ {
+ "epoch": 4.0,
+ "grad_norm": 0.5575452767715656,
+ "learning_rate": 9.90720640815408e-06,
+ "loss": 0.4029,
+ "num_input_tokens_seen": 80530560,
+ "step": 208,
+ "train_runtime": 17541.5926,
+ "train_tokens_per_second": 4590.835
+ },
+ {
+ "epoch": 4.019512195121951,
+ "grad_norm": 0.30272512353274567,
+ "learning_rate": 9.549150281252633e-06,
+ "loss": 0.4243,
+ "num_input_tokens_seen": 80923392,
+ "step": 209,
+ "train_runtime": 17627.0549,
+ "train_tokens_per_second": 4590.863
+ },
+ {
+ "epoch": 4.0390243902439025,
+ "grad_norm": 0.30162450457883366,
+ "learning_rate": 9.196999909189762e-06,
+ "loss": 0.4146,
+ "num_input_tokens_seen": 81316224,
+ "step": 210,
+ "train_runtime": 17713.1477,
+ "train_tokens_per_second": 4590.727
+ },
+ {
+ "epoch": 4.058536585365854,
+ "grad_norm": 0.30526033181573403,
+ "learning_rate": 8.850806705317183e-06,
+ "loss": 0.4287,
+ "num_input_tokens_seen": 81709056,
+ "step": 211,
+ "train_runtime": 17798.6213,
+ "train_tokens_per_second": 4590.752
+ },
+ {
+ "epoch": 4.078048780487805,
+ "grad_norm": 0.3107654508084313,
+ "learning_rate": 8.510621213250247e-06,
+ "loss": 0.4221,
+ "num_input_tokens_seen": 82101888,
+ "step": 212,
+ "train_runtime": 17884.1454,
+ "train_tokens_per_second": 4590.764
+ },
+ {
+ "epoch": 4.097560975609756,
+ "grad_norm": 0.325160265062242,
+ "learning_rate": 8.176493099488663e-06,
+ "loss": 0.4232,
+ "num_input_tokens_seen": 82494720,
+ "step": 213,
+ "train_runtime": 17969.6704,
+ "train_tokens_per_second": 4590.775
+ },
+ {
+ "epoch": 4.117073170731707,
+ "grad_norm": 0.3137465946251142,
+ "learning_rate": 7.848471146165288e-06,
+ "loss": 0.416,
+ "num_input_tokens_seen": 82887552,
+ "step": 214,
+ "train_runtime": 18055.2394,
+ "train_tokens_per_second": 4590.776
+ },
+ {
+ "epoch": 4.136585365853659,
+ "grad_norm": 0.31293023871568965,
+ "learning_rate": 7.526603243923957e-06,
+ "loss": 0.4157,
+ "num_input_tokens_seen": 83280384,
+ "step": 215,
+ "train_runtime": 18140.8949,
+ "train_tokens_per_second": 4590.754
+ },
+ {
+ "epoch": 4.15609756097561,
+ "grad_norm": 0.31719664973482375,
+ "learning_rate": 7.21093638492763e-06,
+ "loss": 0.4174,
+ "num_input_tokens_seen": 83673216,
+ "step": 216,
+ "train_runtime": 18226.5244,
+ "train_tokens_per_second": 4590.739
+ },
+ {
+ "epoch": 4.175609756097561,
+ "grad_norm": 0.3958840925349482,
+ "learning_rate": 6.901516655997536e-06,
+ "loss": 0.4041,
+ "num_input_tokens_seen": 84066048,
+ "step": 217,
+ "train_runtime": 18312.181,
+ "train_tokens_per_second": 4590.717
+ },
+ {
+ "epoch": 4.195121951219512,
+ "grad_norm": 0.30791697730474177,
+ "learning_rate": 6.5983892318846275e-06,
+ "loss": 0.4155,
+ "num_input_tokens_seen": 84458880,
+ "step": 218,
+ "train_runtime": 18397.8319,
+ "train_tokens_per_second": 4590.697
+ },
+ {
+ "epoch": 4.214634146341464,
+ "grad_norm": 0.3034741442256561,
+ "learning_rate": 6.301598368674105e-06,
+ "loss": 0.4214,
+ "num_input_tokens_seen": 84851712,
+ "step": 219,
+ "train_runtime": 18483.4309,
+ "train_tokens_per_second": 4590.691
+ },
+ {
+ "epoch": 4.234146341463415,
+ "grad_norm": 0.32535838114327126,
+ "learning_rate": 6.011187397324114e-06,
+ "loss": 0.4193,
+ "num_input_tokens_seen": 85244544,
+ "step": 220,
+ "train_runtime": 18569.0539,
+ "train_tokens_per_second": 4590.678
+ },
+ {
+ "epoch": 4.253658536585366,
+ "grad_norm": 0.3033400736842611,
+ "learning_rate": 5.727198717339511e-06,
+ "loss": 0.4137,
+ "num_input_tokens_seen": 85637376,
+ "step": 221,
+ "train_runtime": 18654.7358,
+ "train_tokens_per_second": 4590.651
+ },
+ {
+ "epoch": 4.273170731707317,
+ "grad_norm": 0.3293082877684599,
+ "learning_rate": 5.449673790581611e-06,
+ "loss": 0.4069,
+ "num_input_tokens_seen": 86030208,
+ "step": 222,
+ "train_runtime": 18740.4478,
+ "train_tokens_per_second": 4590.616
+ },
+ {
+ "epoch": 4.2926829268292686,
+ "grad_norm": 0.2994148389025656,
+ "learning_rate": 5.178653135214812e-06,
+ "loss": 0.4188,
+ "num_input_tokens_seen": 86423040,
+ "step": 223,
+ "train_runtime": 18826.1093,
+ "train_tokens_per_second": 4590.595
+ },
+ {
+ "epoch": 4.31219512195122,
+ "grad_norm": 0.3140763914390958,
+ "learning_rate": 4.914176319791036e-06,
+ "loss": 0.4133,
+ "num_input_tokens_seen": 86815872,
+ "step": 224,
+ "train_runtime": 18911.7783,
+ "train_tokens_per_second": 4590.572
+ },
+ {
+ "epoch": 4.331707317073171,
+ "grad_norm": 0.3058594326378288,
+ "learning_rate": 4.65628195747273e-06,
+ "loss": 0.4136,
+ "num_input_tokens_seen": 87208704,
+ "step": 225,
+ "train_runtime": 18997.5405,
+ "train_tokens_per_second": 4590.526
+ },
+ {
+ "epoch": 4.351219512195122,
+ "grad_norm": 0.3036952677463643,
+ "learning_rate": 4.405007700395497e-06,
+ "loss": 0.4066,
+ "num_input_tokens_seen": 87601536,
+ "step": 226,
+ "train_runtime": 19083.2658,
+ "train_tokens_per_second": 4590.49
+ },
+ {
+ "epoch": 4.3707317073170735,
+ "grad_norm": 0.2986697055188429,
+ "learning_rate": 4.16039023417088e-06,
+ "loss": 0.4002,
+ "num_input_tokens_seen": 87994368,
+ "step": 227,
+ "train_runtime": 19168.9792,
+ "train_tokens_per_second": 4590.457
+ },
+ {
+ "epoch": 4.390243902439025,
+ "grad_norm": 0.30912351105330554,
+ "learning_rate": 3.922465272530351e-06,
+ "loss": 0.4213,
+ "num_input_tokens_seen": 88387200,
+ "step": 228,
+ "train_runtime": 19254.8776,
+ "train_tokens_per_second": 4590.38
+ },
+ {
+ "epoch": 4.409756097560976,
+ "grad_norm": 0.31390545996696395,
+ "learning_rate": 3.691267552111183e-06,
+ "loss": 0.4276,
+ "num_input_tokens_seen": 88780032,
+ "step": 229,
+ "train_runtime": 19340.5507,
+ "train_tokens_per_second": 4590.357
+ },
+ {
+ "epoch": 4.429268292682927,
+ "grad_norm": 0.3064931328623729,
+ "learning_rate": 3.4668308273848982e-06,
+ "loss": 0.417,
+ "num_input_tokens_seen": 89172864,
+ "step": 230,
+ "train_runtime": 19426.2001,
+ "train_tokens_per_second": 4590.34
+ },
+ {
+ "epoch": 4.4487804878048784,
+ "grad_norm": 0.2938965818242203,
+ "learning_rate": 3.249187865729264e-06,
+ "loss": 0.4036,
+ "num_input_tokens_seen": 89565696,
+ "step": 231,
+ "train_runtime": 19511.8505,
+ "train_tokens_per_second": 4590.323
+ },
+ {
+ "epoch": 4.46829268292683,
+ "grad_norm": 0.3009741802462199,
+ "learning_rate": 3.0383704426442394e-06,
+ "loss": 0.4109,
+ "num_input_tokens_seen": 89958528,
+ "step": 232,
+ "train_runtime": 19597.5003,
+ "train_tokens_per_second": 4590.306
+ },
+ {
+ "epoch": 4.487804878048781,
+ "grad_norm": 0.30572185629569554,
+ "learning_rate": 2.8344093371128424e-06,
+ "loss": 0.4177,
+ "num_input_tokens_seen": 90351360,
+ "step": 233,
+ "train_runtime": 19683.1546,
+ "train_tokens_per_second": 4590.289
+ },
+ {
+ "epoch": 4.507317073170732,
+ "grad_norm": 0.3060090032156781,
+ "learning_rate": 2.637334327107466e-06,
+ "loss": 0.4167,
+ "num_input_tokens_seen": 90744192,
+ "step": 234,
+ "train_runtime": 19768.7311,
+ "train_tokens_per_second": 4590.289
+ },
+ {
+ "epoch": 4.526829268292683,
+ "grad_norm": 0.30158648786459336,
+ "learning_rate": 2.4471741852423237e-06,
+ "loss": 0.4214,
+ "num_input_tokens_seen": 91137024,
+ "step": 235,
+ "train_runtime": 19854.2512,
+ "train_tokens_per_second": 4590.303
+ },
+ {
+ "epoch": 4.546341463414635,
+ "grad_norm": 0.30106548477413225,
+ "learning_rate": 2.2639566745727205e-06,
+ "loss": 0.4038,
+ "num_input_tokens_seen": 91529856,
+ "step": 236,
+ "train_runtime": 19939.85,
+ "train_tokens_per_second": 4590.298
+ },
+ {
+ "epoch": 4.565853658536585,
+ "grad_norm": 0.31280608719577163,
+ "learning_rate": 2.087708544541689e-06,
+ "loss": 0.4087,
+ "num_input_tokens_seen": 91922688,
+ "step": 237,
+ "train_runtime": 20025.4609,
+ "train_tokens_per_second": 4590.291
+ },
+ {
+ "epoch": 4.585365853658536,
+ "grad_norm": 0.3151237023853444,
+ "learning_rate": 1.9184555270746194e-06,
+ "loss": 0.4099,
+ "num_input_tokens_seen": 92315520,
+ "step": 238,
+ "train_runtime": 20111.0379,
+ "train_tokens_per_second": 4590.291
+ },
+ {
+ "epoch": 4.6048780487804875,
+ "grad_norm": 0.2979822004749027,
+ "learning_rate": 1.7562223328224325e-06,
+ "loss": 0.4098,
+ "num_input_tokens_seen": 92708352,
+ "step": 239,
+ "train_runtime": 20196.6063,
+ "train_tokens_per_second": 4590.294
+ },
+ {
+ "epoch": 4.624390243902439,
+ "grad_norm": 0.29641279383853864,
+ "learning_rate": 1.601032647553863e-06,
+ "loss": 0.4106,
+ "num_input_tokens_seen": 93101184,
+ "step": 240,
+ "train_runtime": 20282.2193,
+ "train_tokens_per_second": 4590.286
+ },
+ {
+ "epoch": 4.64390243902439,
+ "grad_norm": 0.30405930218468175,
+ "learning_rate": 1.4529091286973995e-06,
+ "loss": 0.4204,
+ "num_input_tokens_seen": 93494016,
+ "step": 241,
+ "train_runtime": 20367.7887,
+ "train_tokens_per_second": 4590.288
+ },
+ {
+ "epoch": 4.663414634146341,
+ "grad_norm": 0.30816493741451845,
+ "learning_rate": 1.3118734020333256e-06,
+ "loss": 0.4069,
+ "num_input_tokens_seen": 93886848,
+ "step": 242,
+ "train_runtime": 20453.3944,
+ "train_tokens_per_second": 4590.282
+ },
+ {
+ "epoch": 4.682926829268292,
+ "grad_norm": 0.295889585872778,
+ "learning_rate": 1.1779460585363944e-06,
+ "loss": 0.4206,
+ "num_input_tokens_seen": 94279680,
+ "step": 243,
+ "train_runtime": 20538.896,
+ "train_tokens_per_second": 4590.299
+ },
+ {
+ "epoch": 4.702439024390244,
+ "grad_norm": 0.30064754265380783,
+ "learning_rate": 1.0511466513695777e-06,
+ "loss": 0.4216,
+ "num_input_tokens_seen": 94672512,
+ "step": 244,
+ "train_runtime": 20624.455,
+ "train_tokens_per_second": 4590.304
+ },
+ {
+ "epoch": 4.721951219512195,
+ "grad_norm": 0.2956056728342269,
+ "learning_rate": 9.314936930293283e-07,
+ "loss": 0.4067,
+ "num_input_tokens_seen": 95065344,
+ "step": 245,
+ "train_runtime": 20710.4849,
+ "train_tokens_per_second": 4590.204
+ },
+ {
+ "epoch": 4.741463414634146,
+ "grad_norm": 1.2801159100545316,
+ "learning_rate": 8.190046526428242e-07,
+ "loss": 0.4149,
+ "num_input_tokens_seen": 95458176,
+ "step": 246,
+ "train_runtime": 20796.2392,
+ "train_tokens_per_second": 4590.165
+ },
+ {
+ "epoch": 4.760975609756097,
+ "grad_norm": 0.2962901875713787,
+ "learning_rate": 7.136959534174592e-07,
+ "loss": 0.4104,
+ "num_input_tokens_seen": 95851008,
+ "step": 247,
+ "train_runtime": 20881.9173,
+ "train_tokens_per_second": 4590.144
+ },
+ {
+ "epoch": 4.780487804878049,
+ "grad_norm": 0.29775756656133795,
2485
+ "learning_rate": 6.15582970243117e-07,
2486
+ "loss": 0.4275,
2487
+ "num_input_tokens_seen": 96243840,
2488
+ "step": 248,
2489
+ "train_runtime": 20967.6297,
2490
+ "train_tokens_per_second": 4590.115
2491
+ },
2492
+ {
2493
+ "epoch": 4.8,
2494
+ "grad_norm": 0.30152743001548155,
2495
+ "learning_rate": 5.246800274474439e-07,
2496
+ "loss": 0.4168,
2497
+ "num_input_tokens_seen": 96636672,
2498
+ "step": 249,
2499
+ "train_runtime": 21053.3487,
2500
+ "train_tokens_per_second": 4590.086
2501
+ },
2502
+ {
2503
+ "epoch": 4.819512195121951,
2504
+ "grad_norm": 0.3006544700138266,
2505
+ "learning_rate": 4.4100039670454395e-07,
2506
+ "loss": 0.4227,
2507
+ "num_input_tokens_seen": 97029504,
2508
+ "step": 250,
2509
+ "train_runtime": 21139.1446,
2510
+ "train_tokens_per_second": 4590.039
2511
+ },
2512
+ {
2513
+ "epoch": 4.839024390243902,
2514
+ "grad_norm": 0.3694815192213994,
2515
+ "learning_rate": 3.6455629509730136e-07,
2516
+ "loss": 0.4224,
2517
+ "num_input_tokens_seen": 97422336,
2518
+ "step": 251,
2519
+ "train_runtime": 21224.947,
2520
+ "train_tokens_per_second": 4589.992
2521
+ },
2522
+ {
2523
+ "epoch": 4.8585365853658535,
2524
+ "grad_norm": 0.29849891867672973,
2525
+ "learning_rate": 2.953588833337406e-07,
2526
+ "loss": 0.409,
2527
+ "num_input_tokens_seen": 97815168,
2528
+ "step": 252,
2529
+ "train_runtime": 21311.0603,
2530
+ "train_tokens_per_second": 4589.878
2531
+ },
2532
+ {
2533
+ "epoch": 4.878048780487805,
2534
+ "grad_norm": 0.29318102693895126,
2535
+ "learning_rate": 2.334182641175686e-07,
2536
+ "loss": 0.4164,
2537
+ "num_input_tokens_seen": 98208000,
2538
+ "step": 253,
2539
+ "train_runtime": 21396.8922,
2540
+ "train_tokens_per_second": 4589.825
2541
+ },
2542
+ {
2543
+ "epoch": 4.897560975609756,
2544
+ "grad_norm": 0.29392220191104595,
2545
+ "learning_rate": 1.7874348067319912e-07,
2546
+ "loss": 0.4048,
2547
+ "num_input_tokens_seen": 98600832,
2548
+ "step": 254,
2549
+ "train_runtime": 21482.6378,
2550
+ "train_tokens_per_second": 4589.792
2551
+ },
2552
+ {
2553
+ "epoch": 4.917073170731707,
2554
+ "grad_norm": 0.29522364415867186,
2555
+ "learning_rate": 1.3134251542544774e-07,
2556
+ "loss": 0.4082,
2557
+ "num_input_tokens_seen": 98993664,
2558
+ "step": 255,
2559
+ "train_runtime": 21568.4139,
2560
+ "train_tokens_per_second": 4589.752
2561
+ },
2562
+ {
2563
+ "epoch": 4.9365853658536585,
2564
+ "grad_norm": 0.29818003556233796,
2565
+ "learning_rate": 9.12222888341252e-08,
2566
+ "loss": 0.4214,
2567
+ "num_input_tokens_seen": 99386496,
2568
+ "step": 256,
2569
+ "train_runtime": 21654.0911,
2570
+ "train_tokens_per_second": 4589.733
2571
+ },
2572
+ {
2573
+ "epoch": 4.95609756097561,
2574
+ "grad_norm": 0.29688566274903394,
2575
+ "learning_rate": 5.838865838366792e-08,
2576
+ "loss": 0.413,
2577
+ "num_input_tokens_seen": 99779328,
2578
+ "step": 257,
2579
+ "train_runtime": 21739.8502,
2580
+ "train_tokens_per_second": 4589.697
2581
+ },
2582
+ {
2583
+ "epoch": 4.975609756097561,
2584
+ "grad_norm": 0.2972951381912236,
2585
+ "learning_rate": 3.284641772793862e-08,
2586
+ "loss": 0.4118,
2587
+ "num_input_tokens_seen": 100172160,
2588
+ "step": 258,
2589
+ "train_runtime": 21825.6482,
2590
+ "train_tokens_per_second": 4589.653
2591
+ },
2592
+ {
2593
+ "epoch": 4.995121951219512,
2594
+ "grad_norm": 0.2934977397098327,
2595
+ "learning_rate": 1.4599295990352924e-08,
2596
+ "loss": 0.4169,
2597
+ "num_input_tokens_seen": 100564992,
2598
+ "step": 259,
2599
+ "train_runtime": 21911.3681,
2600
+ "train_tokens_per_second": 4589.626
2601
+ },
2602
+ {
2603
+ "epoch": 5.0,
2604
+ "grad_norm": 0.598332221412963,
2605
+ "learning_rate": 3.6499572194648167e-09,
2606
+ "loss": 0.3937,
2607
+ "num_input_tokens_seen": 100663200,
2608
+ "step": 260,
2609
+ "train_runtime": 21932.7813,
2610
+ "train_tokens_per_second": 4589.623
2611
+ },
2612
+ {
2613
+ "epoch": 5.0,
2614
+ "num_input_tokens_seen": 100663200,
2615
+ "step": 260,
2616
+ "total_flos": 2377568804143104.0,
2617
+ "train_loss": 0.697018449810835,
2618
+ "train_runtime": 22008.2973,
2619
+ "train_samples_per_second": 4.45,
2620
+ "train_steps_per_second": 0.012
2621
+ }
2622
+ ],
2623
+ "logging_steps": 1,
2624
+ "max_steps": 260,
2625
+ "num_input_tokens_seen": 100663200,
2626
+ "num_train_epochs": 5,
2627
+ "save_steps": 1000,
2628
+ "stateful_callbacks": {
2629
+ "TrainerControl": {
2630
+ "args": {
2631
+ "should_epoch_stop": false,
2632
+ "should_evaluate": false,
2633
+ "should_log": false,
2634
+ "should_save": true,
2635
+ "should_training_stop": true
2636
+ },
2637
+ "attributes": {}
2638
+ }
2639
+ },
2640
+ "total_flos": 2377568804143104.0,
2641
+ "train_batch_size": 24,
2642
+ "trial_name": null,
2643
+ "trial_params": null
2644
+ }
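The `log_history` entries above close with a summary record (`total_flos`, the `train_loss` average over all 260 steps, and overall runtime). A minimal sketch of how those per-step records can be turned back into a loss curve like the `training_loss.png` added at the end of this commit — this helper is not part of the upload, and the local file path and matplotlib dependency are assumptions:

```python
# Minimal sketch (assumed helper, not part of this commit): rebuild the
# training-loss curve from the log_history records in trainer_state.json.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Each log_history entry is one logging step (logging_steps = 1 here);
# the final summary record carries no "loss" key, so it is filtered out.
steps = [e["step"] for e in state["log_history"] if "loss" in e]
losses = [e["loss"] for e in state["log_history"] if "loss" in e]

plt.plot(steps, losses)
plt.xlabel("step")
plt.ylabel("training loss")
plt.savefig("training_loss.png")
```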
training_args.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d28367a7f01f97d093d01f03db18770fc51c71f24731638d1414a6bbecdecf30
3
+ size 8081
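The three lines above are a Git LFS pointer, not the 8 KB binary itself. A minimal sketch of retrieving and inspecting the real file — the repo id is an assumption inferred from this upload, and `training_args.bin` is unpickled on the assumption that it stores a standard `transformers.TrainingArguments` object, as HF Trainer checkpoints usually do:

```python
# Minimal sketch (assumed usage, not part of this commit): resolve the LFS
# pointer through the Hub and load the pickled TrainingArguments.
import torch
from huggingface_hub import hf_hub_download

path = hf_hub_download(
    repo_id="Linksome/IEEE-DLD-llama3_70b_LoRA",  # assumed repo id
    filename="training_args.bin",
)
args = torch.load(path, weights_only=False)  # pickled object, not tensors
print(args.per_device_train_batch_size)
```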
training_loss.png ADDED