Batch upload part 7
- nl_tasks/exps/run_ex21/ft2/adapter_config.json +18 -0
- nl_tasks/exps/run_ex22/ft/adapter_config.json +18 -0
- nl_tasks/exps/run_ex22/ft/special_tokens_map.json +24 -0
- nl_tasks/exps/run_ex22/ft/tokenizer.json +0 -0
- nl_tasks/exps/run_ex22/ft/tokenizer.model +3 -0
- nl_tasks/exps/run_ex22/ft/tokenizer_config.json +43 -0
- nl_tasks/exps/run_ex22/ft2/adapter_config.json +18 -0
- nl_tasks/exps/run_ex22/ft2/adapter_model.bin +3 -0
- nl_tasks/exps/run_ex22/trainer_state.json +743 -0
- nl_tasks/exps/run_ex23/ft/adapter_config.json +18 -0
- nl_tasks/exps/run_ex23/ft/special_tokens_map.json +24 -0
- nl_tasks/exps/run_ex23/ft/tokenizer.json +0 -0
- nl_tasks/exps/run_ex23/ft/tokenizer.model +3 -0
- nl_tasks/exps/run_ex23/ft/tokenizer_config.json +43 -0
- nl_tasks/exps/run_ex23/ft2/adapter_config.json +18 -0
- nl_tasks/exps/run_ex23/ft2/adapter_model.bin +3 -0
- nl_tasks/exps/run_ex23/trainer_state.json +1093 -0
- nl_tasks/exps/run_ex24/ft/adapter_config.json +18 -0
- nl_tasks/exps/run_ex24/ft/special_tokens_map.json +24 -0
- nl_tasks/exps/run_ex24/ft/tokenizer.json +0 -0
- nl_tasks/exps/run_ex24/ft/tokenizer.model +3 -0
- nl_tasks/exps/run_ex24/ft/tokenizer_config.json +43 -0
- nl_tasks/exps/run_ex24/ft2/adapter_config.json +18 -0
- nl_tasks/exps/run_ex24/ft2/adapter_model.bin +3 -0
- nl_tasks/exps/run_ex24/trainer_state.json +1093 -0
- nl_tasks/exps/run_ex25/ft/adapter_config.json +18 -0
- nl_tasks/exps/run_ex25/ft/special_tokens_map.json +24 -0
- nl_tasks/exps/run_ex25/ft/tokenizer.json +0 -0
- nl_tasks/exps/run_ex25/ft/tokenizer.model +3 -0
- nl_tasks/exps/run_ex25/ft/tokenizer_config.json +43 -0
- nl_tasks/exps/run_ex25/ft2/adapter_config.json +18 -0
- nl_tasks/exps/run_ex25/ft2/adapter_model.bin +3 -0
- nl_tasks/exps/run_ex25/trainer_state.json +1093 -0
- nl_tasks/exps/run_ex26/ft/adapter_config.json +18 -0
- nl_tasks/exps/run_ex26/ft/special_tokens_map.json +24 -0
- nl_tasks/exps/run_ex26/ft/tokenizer.json +0 -0
- nl_tasks/exps/run_ex26/ft/tokenizer.model +3 -0
- nl_tasks/exps/run_ex26/ft/tokenizer_config.json +43 -0
- nl_tasks/exps/run_ex26/ft2/adapter_config.json +18 -0
- nl_tasks/exps/run_ex26/ft2/adapter_model.bin +3 -0
- nl_tasks/exps/run_ex26/trainer_state.json +1093 -0
- nl_tasks/exps/run_ex27/ft/adapter_config.json +18 -0
- nl_tasks/exps/run_ex27/ft/special_tokens_map.json +24 -0
- nl_tasks/exps/run_ex27/ft/tokenizer.json +0 -0
- nl_tasks/exps/run_ex27/ft/tokenizer.model +3 -0
- nl_tasks/exps/run_ex27/ft/tokenizer_config.json +43 -0
- nl_tasks/exps/run_ex27/ft2/adapter_config.json +18 -0
- nl_tasks/exps/run_ex27/ft2/adapter_model.bin +3 -0
- nl_tasks/exps/run_ex27/trainer_state.json +1093 -0
- nl_tasks/exps/run_ex28/trainer_state.json +1093 -0
nl_tasks/exps/run_ex21/ft2/adapter_config.json
ADDED
@@ -0,0 +1,18 @@
+{
+  "T": 1.0,
+  "base_model_name_or_path": "meta-llama/Llama-2-7b-hf",
+  "bias": "none",
+  "inference_mode": true,
+  "layers_to_transform": null,
+  "modules_to_save": null,
+  "num_rotations": 1,
+  "peft_type": "ROTATION",
+  "r": 16,
+  "revision": null,
+  "target_modules": [
+    "q_proj",
+    "v_proj"
+  ],
+  "target_modules_to_skip": null,
+  "task_type": "CAUSAL_LM"
+}
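
The adapter_config.json files in this upload all share the same shape: a custom ROTATION PEFT adapter over q_proj/v_proj of Llama-2-7b with r=16. Below is a minimal sketch (standard library only) for reading one of them; it makes no assumption about how the ROTATION peft_type is registered or applied at load time.

import json
from pathlib import Path

# Read one of the adapter_config.json files added in this commit and summarize it.
cfg = json.loads(Path("nl_tasks/exps/run_ex21/ft2/adapter_config.json").read_text())

print("base model:    ", cfg["base_model_name_or_path"])
print("peft type:     ", cfg["peft_type"], "r =", cfg["r"], "num_rotations =", cfg["num_rotations"])
print("target modules:", ", ".join(cfg["target_modules"]))
print("inference mode:", cfg["inference_mode"])
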
nl_tasks/exps/run_ex22/ft/adapter_config.json
ADDED
@@ -0,0 +1,18 @@
+{
+  "T": 1.0,
+  "base_model_name_or_path": "meta-llama/Llama-2-7b-hf",
+  "bias": "none",
+  "inference_mode": false,
+  "layers_to_transform": null,
+  "modules_to_save": null,
+  "num_rotations": 1,
+  "peft_type": "ROTATION",
+  "r": 16,
+  "revision": null,
+  "target_modules": [
+    "q_proj",
+    "v_proj"
+  ],
+  "target_modules_to_skip": null,
+  "task_type": "CAUSAL_LM"
+}
nl_tasks/exps/run_ex22/ft/special_tokens_map.json
ADDED
@@ -0,0 +1,24 @@
+{
+  "bos_token": {
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": "<unk>",
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
nl_tasks/exps/run_ex22/ft/tokenizer.json
ADDED
The diff for this file is too large to render.
nl_tasks/exps/run_ex22/ft/tokenizer.model
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
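
tokenizer.model and the adapter_model.bin files are stored as Git LFS pointers (version/oid/size lines), not as the blobs themselves. A minimal sketch for checking a separately downloaded blob against such a pointer; the blob path used in the example is hypothetical.

import hashlib
from pathlib import Path

def lfs_pointer_matches(pointer_text: str, blob_path: str) -> bool:
    """Compare a downloaded blob against the oid/size recorded in a Git LFS pointer."""
    fields = dict(line.split(" ", 1) for line in pointer_text.splitlines() if " " in line)
    expected_oid = fields["oid"].split(":", 1)[1]   # strip the "sha256:" prefix
    expected_size = int(fields["size"])
    data = Path(blob_path).read_bytes()
    return len(data) == expected_size and hashlib.sha256(data).hexdigest() == expected_oid

# The repository itself stores only the pointer file; "/tmp/tokenizer.model" is a
# hypothetical location for the actual blob fetched via git-lfs or the Hub.
pointer = Path("nl_tasks/exps/run_ex22/ft/tokenizer.model").read_text()
print(lfs_pointer_matches(pointer, "/tmp/tokenizer.model"))
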
nl_tasks/exps/run_ex22/ft/tokenizer_config.json
ADDED
@@ -0,0 +1,43 @@
+{
+  "add_bos_token": true,
+  "add_eos_token": false,
+  "add_prefix_space": null,
+  "added_tokens_decoder": {
+    "0": {
+      "content": "<unk>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "bos_token": "<s>",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "</s>",
+  "extra_special_tokens": {},
+  "legacy": false,
+  "model_max_length": 512,
+  "pad_token": "<unk>",
+  "padding_side": "right",
+  "sp_model_kwargs": {},
+  "tokenizer_class": "LlamaTokenizer",
+  "unk_token": "<unk>",
+  "use_default_system_prompt": false
+}
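
A minimal sketch for loading the tokenizer saved under nl_tasks/exps/run_ex22/ft, assuming the transformers package is installed and the Git LFS blobs (tokenizer.model / tokenizer.json) have actually been fetched.

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("nl_tasks/exps/run_ex22/ft")
print(type(tok).__name__)                            # LlamaTokenizer(Fast), per tokenizer_config.json
print(tok.bos_token, tok.eos_token, tok.pad_token)   # <s> </s> <unk>
print(tok("hello").input_ids[0])                     # 1, the BOS id, since add_bos_token is true
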
nl_tasks/exps/run_ex22/ft2/adapter_config.json
ADDED
@@ -0,0 +1,18 @@
+{
+  "T": 1.0,
+  "base_model_name_or_path": "meta-llama/Llama-2-7b-hf",
+  "bias": "none",
+  "inference_mode": true,
+  "layers_to_transform": null,
+  "modules_to_save": null,
+  "num_rotations": 1,
+  "peft_type": "ROTATION",
+  "r": 16,
+  "revision": null,
+  "target_modules": [
+    "q_proj",
+    "v_proj"
+  ],
+  "target_modules_to_skip": null,
+  "task_type": "CAUSAL_LM"
+}
nl_tasks/exps/run_ex22/ft2/adapter_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:92f573cb0b4609d2cb0f402d7e70f2c7fdbe010e9626324a83ade3e5a4836266
+size 33602915
nl_tasks/exps/run_ex22/trainer_state.json
ADDED
@@ -0,0 +1,743 @@
| 1 |
+
{
|
| 2 |
+
"best_global_step": null,
|
| 3 |
+
"best_metric": null,
|
| 4 |
+
"best_model_checkpoint": null,
|
| 5 |
+
"epoch": 2.0,
|
| 6 |
+
"eval_steps": 500,
|
| 7 |
+
"global_step": 2500,
|
| 8 |
+
"is_hyper_param_search": false,
|
| 9 |
+
"is_local_process_zero": true,
|
| 10 |
+
"is_world_process_zero": true,
|
| 11 |
+
"log_history": [
|
| 12 |
+
{
|
| 13 |
+
"epoch": 0.02,
|
| 14 |
+
"grad_norm": 0.5767698884010315,
|
| 15 |
+
"learning_rate": 9.6e-05,
|
| 16 |
+
"loss": 0.8383,
|
| 17 |
+
"step": 25
|
| 18 |
+
},
|
| 19 |
+
{
|
| 20 |
+
"epoch": 0.04,
|
| 21 |
+
"grad_norm": 0.22841285169124603,
|
| 22 |
+
"learning_rate": 0.00019600000000000002,
|
| 23 |
+
"loss": 0.4641,
|
| 24 |
+
"step": 50
|
| 25 |
+
},
|
| 26 |
+
{
|
| 27 |
+
"epoch": 0.06,
|
| 28 |
+
"grad_norm": 0.20203162729740143,
|
| 29 |
+
"learning_rate": 0.000296,
|
| 30 |
+
"loss": 0.4022,
|
| 31 |
+
"step": 75
|
| 32 |
+
},
|
| 33 |
+
{
|
| 34 |
+
"epoch": 0.08,
|
| 35 |
+
"grad_norm": 0.15483643114566803,
|
| 36 |
+
"learning_rate": 0.00039600000000000003,
|
| 37 |
+
"loss": 0.3678,
|
| 38 |
+
"step": 100
|
| 39 |
+
},
|
| 40 |
+
{
|
| 41 |
+
"epoch": 0.1,
|
| 42 |
+
"grad_norm": 0.2592383027076721,
|
| 43 |
+
"learning_rate": 0.000496,
|
| 44 |
+
"loss": 0.3497,
|
| 45 |
+
"step": 125
|
| 46 |
+
},
|
| 47 |
+
{
|
| 48 |
+
"epoch": 0.12,
|
| 49 |
+
"grad_norm": 0.1872025728225708,
|
| 50 |
+
"learning_rate": 0.000596,
|
| 51 |
+
"loss": 0.3412,
|
| 52 |
+
"step": 150
|
| 53 |
+
},
|
| 54 |
+
{
|
| 55 |
+
"epoch": 0.14,
|
| 56 |
+
"grad_norm": 0.18230123817920685,
|
| 57 |
+
"learning_rate": 0.000696,
|
| 58 |
+
"loss": 0.335,
|
| 59 |
+
"step": 175
|
| 60 |
+
},
|
| 61 |
+
{
|
| 62 |
+
"epoch": 0.16,
|
| 63 |
+
"grad_norm": 0.21839694678783417,
|
| 64 |
+
"learning_rate": 0.000796,
|
| 65 |
+
"loss": 0.3342,
|
| 66 |
+
"step": 200
|
| 67 |
+
},
|
| 68 |
+
{
|
| 69 |
+
"epoch": 0.18,
|
| 70 |
+
"grad_norm": 0.1940021961927414,
|
| 71 |
+
"learning_rate": 0.000896,
|
| 72 |
+
"loss": 0.3304,
|
| 73 |
+
"step": 225
|
| 74 |
+
},
|
| 75 |
+
{
|
| 76 |
+
"epoch": 0.2,
|
| 77 |
+
"grad_norm": 0.1906810700893402,
|
| 78 |
+
"learning_rate": 0.000996,
|
| 79 |
+
"loss": 0.3266,
|
| 80 |
+
"step": 250
|
| 81 |
+
},
|
| 82 |
+
{
|
| 83 |
+
"epoch": 0.22,
|
| 84 |
+
"grad_norm": 0.26560282707214355,
|
| 85 |
+
"learning_rate": 0.0009997192908557321,
|
| 86 |
+
"loss": 0.3267,
|
| 87 |
+
"step": 275
|
| 88 |
+
},
|
| 89 |
+
{
|
| 90 |
+
"epoch": 0.24,
|
| 91 |
+
"grad_norm": 0.24125456809997559,
|
| 92 |
+
"learning_rate": 0.000998830238119205,
|
| 93 |
+
"loss": 0.336,
|
| 94 |
+
"step": 300
|
| 95 |
+
},
|
| 96 |
+
{
|
| 97 |
+
"epoch": 0.26,
|
| 98 |
+
"grad_norm": 0.197306290268898,
|
| 99 |
+
"learning_rate": 0.000997333437576437,
|
| 100 |
+
"loss": 0.3246,
|
| 101 |
+
"step": 325
|
| 102 |
+
},
|
| 103 |
+
{
|
| 104 |
+
"epoch": 0.28,
|
| 105 |
+
"grad_norm": 0.22942069172859192,
|
| 106 |
+
"learning_rate": 0.0009952307128483257,
|
| 107 |
+
"loss": 0.3277,
|
| 108 |
+
"step": 350
|
| 109 |
+
},
|
| 110 |
+
{
|
| 111 |
+
"epoch": 0.3,
|
| 112 |
+
"grad_norm": 0.28770017623901367,
|
| 113 |
+
"learning_rate": 0.0009925246257810518,
|
| 114 |
+
"loss": 0.3185,
|
| 115 |
+
"step": 375
|
| 116 |
+
},
|
| 117 |
+
{
|
| 118 |
+
"epoch": 0.32,
|
| 119 |
+
"grad_norm": 0.30775460600852966,
|
| 120 |
+
"learning_rate": 0.0009892184733248665,
|
| 121 |
+
"loss": 0.3169,
|
| 122 |
+
"step": 400
|
| 123 |
+
},
|
| 124 |
+
{
|
| 125 |
+
"epoch": 0.34,
|
| 126 |
+
"grad_norm": 0.19444003701210022,
|
| 127 |
+
"learning_rate": 0.0009853162835172637,
|
| 128 |
+
"loss": 0.3039,
|
| 129 |
+
"step": 425
|
| 130 |
+
},
|
| 131 |
+
{
|
| 132 |
+
"epoch": 0.36,
|
| 133 |
+
"grad_norm": 0.23387335240840912,
|
| 134 |
+
"learning_rate": 0.0009808228105754376,
|
| 135 |
+
"loss": 0.3074,
|
| 136 |
+
"step": 450
|
| 137 |
+
},
|
| 138 |
+
{
|
| 139 |
+
"epoch": 0.38,
|
| 140 |
+
"grad_norm": 0.2246161848306656,
|
| 141 |
+
"learning_rate": 0.0009757435291040016,
|
| 142 |
+
"loss": 0.3023,
|
| 143 |
+
"step": 475
|
| 144 |
+
},
|
| 145 |
+
{
|
| 146 |
+
"epoch": 0.4,
|
| 147 |
+
"grad_norm": 0.23986800014972687,
|
| 148 |
+
"learning_rate": 0.0009700846274250251,
|
| 149 |
+
"loss": 0.3,
|
| 150 |
+
"step": 500
|
| 151 |
+
},
|
| 152 |
+
{
|
| 153 |
+
"epoch": 0.42,
|
| 154 |
+
"grad_norm": 0.1957164704799652,
|
| 155 |
+
"learning_rate": 0.000963853000038517,
|
| 156 |
+
"loss": 0.2987,
|
| 157 |
+
"step": 525
|
| 158 |
+
},
|
| 159 |
+
{
|
| 160 |
+
"epoch": 0.44,
|
| 161 |
+
"grad_norm": 0.24343261122703552,
|
| 162 |
+
"learning_rate": 0.0009570562392225395,
|
| 163 |
+
"loss": 0.2847,
|
| 164 |
+
"step": 550
|
| 165 |
+
},
|
| 166 |
+
{
|
| 167 |
+
"epoch": 0.46,
|
| 168 |
+
"grad_norm": 0.2263241708278656,
|
| 169 |
+
"learning_rate": 0.0009497026257831855,
|
| 170 |
+
"loss": 0.3051,
|
| 171 |
+
"step": 575
|
| 172 |
+
},
|
| 173 |
+
{
|
| 174 |
+
"epoch": 0.48,
|
| 175 |
+
"grad_norm": 0.22311192750930786,
|
| 176 |
+
"learning_rate": 0.0009418011189656941,
|
| 177 |
+
"loss": 0.2891,
|
| 178 |
+
"step": 600
|
| 179 |
+
},
|
| 180 |
+
{
|
| 181 |
+
"epoch": 0.5,
|
| 182 |
+
"grad_norm": 0.21272866427898407,
|
| 183 |
+
"learning_rate": 0.0009333613455389882,
|
| 184 |
+
"loss": 0.303,
|
| 185 |
+
"step": 625
|
| 186 |
+
},
|
| 187 |
+
{
|
| 188 |
+
"epoch": 0.52,
|
| 189 |
+
"grad_norm": 0.2171148806810379,
|
| 190 |
+
"learning_rate": 0.000924393588066941,
|
| 191 |
+
"loss": 0.2924,
|
| 192 |
+
"step": 650
|
| 193 |
+
},
|
| 194 |
+
{
|
| 195 |
+
"epoch": 0.54,
|
| 196 |
+
"grad_norm": 0.20269569754600525,
|
| 197 |
+
"learning_rate": 0.0009149087723806549,
|
| 198 |
+
"loss": 0.2799,
|
| 199 |
+
"step": 675
|
| 200 |
+
},
|
| 201 |
+
{
|
| 202 |
+
"epoch": 0.56,
|
| 203 |
+
"grad_norm": 0.22589229047298431,
|
| 204 |
+
"learning_rate": 0.0009049184542670199,
|
| 205 |
+
"loss": 0.2809,
|
| 206 |
+
"step": 700
|
| 207 |
+
},
|
| 208 |
+
{
|
| 209 |
+
"epoch": 0.58,
|
| 210 |
+
"grad_norm": 0.23199684917926788,
|
| 211 |
+
"learning_rate": 0.0008944348053897671,
|
| 212 |
+
"loss": 0.2872,
|
| 213 |
+
"step": 725
|
| 214 |
+
},
|
| 215 |
+
{
|
| 216 |
+
"epoch": 0.6,
|
| 217 |
+
"grad_norm": 0.20177872478961945,
|
| 218 |
+
"learning_rate": 0.0008834705984601709,
|
| 219 |
+
"loss": 0.2834,
|
| 220 |
+
"step": 750
|
| 221 |
+
},
|
| 222 |
+
{
|
| 223 |
+
"epoch": 0.62,
|
| 224 |
+
"grad_norm": 0.23552103340625763,
|
| 225 |
+
"learning_rate": 0.0008720391916754683,
|
| 226 |
+
"loss": 0.2713,
|
| 227 |
+
"step": 775
|
| 228 |
+
},
|
| 229 |
+
{
|
| 230 |
+
"epoch": 0.64,
|
| 231 |
+
"grad_norm": 0.23510734736919403,
|
| 232 |
+
"learning_rate": 0.0008601545124439535,
|
| 233 |
+
"loss": 0.2763,
|
| 234 |
+
"step": 800
|
| 235 |
+
},
|
| 236 |
+
{
|
| 237 |
+
"epoch": 0.66,
|
| 238 |
+
"grad_norm": 0.2040044516324997,
|
| 239 |
+
"learning_rate": 0.0008478310404165754,
|
| 240 |
+
"loss": 0.2682,
|
| 241 |
+
"step": 825
|
| 242 |
+
},
|
| 243 |
+
{
|
| 244 |
+
"epoch": 0.68,
|
| 245 |
+
"grad_norm": 0.23244167864322662,
|
| 246 |
+
"learning_rate": 0.0008350837898457143,
|
| 247 |
+
"loss": 0.2702,
|
| 248 |
+
"step": 850
|
| 249 |
+
},
|
| 250 |
+
{
|
| 251 |
+
"epoch": 0.7,
|
| 252 |
+
"grad_norm": 0.16786731779575348,
|
| 253 |
+
"learning_rate": 0.0008219282912926269,
|
| 254 |
+
"loss": 0.2776,
|
| 255 |
+
"step": 875
|
| 256 |
+
},
|
| 257 |
+
{
|
| 258 |
+
"epoch": 0.72,
|
| 259 |
+
"grad_norm": 0.18036706745624542,
|
| 260 |
+
"learning_rate": 0.0008083805727058513,
|
| 261 |
+
"loss": 0.2702,
|
| 262 |
+
"step": 900
|
| 263 |
+
},
|
| 264 |
+
{
|
| 265 |
+
"epoch": 0.74,
|
| 266 |
+
"grad_norm": 0.1846761405467987,
|
| 267 |
+
"learning_rate": 0.0007944571398936193,
|
| 268 |
+
"loss": 0.2675,
|
| 269 |
+
"step": 925
|
| 270 |
+
},
|
| 271 |
+
{
|
| 272 |
+
"epoch": 0.76,
|
| 273 |
+
"grad_norm": 0.18422862887382507,
|
| 274 |
+
"learning_rate": 0.0007801749564140723,
|
| 275 |
+
"loss": 0.2638,
|
| 276 |
+
"step": 950
|
| 277 |
+
},
|
| 278 |
+
{
|
| 279 |
+
"epoch": 0.78,
|
| 280 |
+
"grad_norm": 0.17170637845993042,
|
| 281 |
+
"learning_rate": 0.0007655514229077783,
|
| 282 |
+
"loss": 0.261,
|
| 283 |
+
"step": 975
|
| 284 |
+
},
|
| 285 |
+
{
|
| 286 |
+
"epoch": 0.8,
|
| 287 |
+
"grad_norm": 0.22848133742809296,
|
| 288 |
+
"learning_rate": 0.0007506043558977322,
|
| 289 |
+
"loss": 0.2679,
|
| 290 |
+
"step": 1000
|
| 291 |
+
},
|
| 292 |
+
{
|
| 293 |
+
"epoch": 0.82,
|
| 294 |
+
"grad_norm": 0.18595680594444275,
|
| 295 |
+
"learning_rate": 0.0007353519660826664,
|
| 296 |
+
"loss": 0.2659,
|
| 297 |
+
"step": 1025
|
| 298 |
+
},
|
| 299 |
+
{
|
| 300 |
+
"epoch": 0.84,
|
| 301 |
+
"grad_norm": 0.19739358127117157,
|
| 302 |
+
"learning_rate": 0.00071981283615012,
|
| 303 |
+
"loss": 0.2561,
|
| 304 |
+
"step": 1050
|
| 305 |
+
},
|
| 306 |
+
{
|
| 307 |
+
"epoch": 0.86,
|
| 308 |
+
"grad_norm": 0.18625503778457642,
|
| 309 |
+
"learning_rate": 0.0007040058981362964,
|
| 310 |
+
"loss": 0.2521,
|
| 311 |
+
"step": 1075
|
| 312 |
+
},
|
| 313 |
+
{
|
| 314 |
+
"epoch": 0.88,
|
| 315 |
+
"grad_norm": 0.20161661505699158,
|
| 316 |
+
"learning_rate": 0.0006879504103602934,
|
| 317 |
+
"loss": 0.2518,
|
| 318 |
+
"step": 1100
|
| 319 |
+
},
|
| 320 |
+
{
|
| 321 |
+
"epoch": 0.9,
|
| 322 |
+
"grad_norm": 0.18369227647781372,
|
| 323 |
+
"learning_rate": 0.0006716659339608077,
|
| 324 |
+
"loss": 0.2626,
|
| 325 |
+
"step": 1125
|
| 326 |
+
},
|
| 327 |
+
{
|
| 328 |
+
"epoch": 0.92,
|
| 329 |
+
"grad_norm": 0.15661346912384033,
|
| 330 |
+
"learning_rate": 0.0006551723090639006,
|
| 331 |
+
"loss": 0.2535,
|
| 332 |
+
"step": 1150
|
| 333 |
+
},
|
| 334 |
+
{
|
| 335 |
+
"epoch": 0.94,
|
| 336 |
+
"grad_norm": 0.19566485285758972,
|
| 337 |
+
"learning_rate": 0.0006384896306108612,
|
| 338 |
+
"loss": 0.256,
|
| 339 |
+
"step": 1175
|
| 340 |
+
},
|
| 341 |
+
{
|
| 342 |
+
"epoch": 0.96,
|
| 343 |
+
"grad_norm": 0.1498168408870697,
|
| 344 |
+
"learning_rate": 0.0006216382238756146,
|
| 345 |
+
"loss": 0.2557,
|
| 346 |
+
"step": 1200
|
| 347 |
+
},
|
| 348 |
+
{
|
| 349 |
+
"epoch": 0.98,
|
| 350 |
+
"grad_norm": 0.18538439273834229,
|
| 351 |
+
"learning_rate": 0.0006046386197015076,
|
| 352 |
+
"loss": 0.2586,
|
| 353 |
+
"step": 1225
|
| 354 |
+
},
|
| 355 |
+
{
|
| 356 |
+
"epoch": 1.0,
|
| 357 |
+
"grad_norm": 0.17976002395153046,
|
| 358 |
+
"learning_rate": 0.0005875115294876381,
|
| 359 |
+
"loss": 0.2464,
|
| 360 |
+
"step": 1250
|
| 361 |
+
},
|
| 362 |
+
{
|
| 363 |
+
"epoch": 1.02,
|
| 364 |
+
"grad_norm": 0.18410831689834595,
|
| 365 |
+
"learning_rate": 0.0005702778199552054,
|
| 366 |
+
"loss": 0.2234,
|
| 367 |
+
"step": 1275
|
| 368 |
+
},
|
| 369 |
+
{
|
| 370 |
+
"epoch": 1.04,
|
| 371 |
+
"grad_norm": 0.14994341135025024,
|
| 372 |
+
"learning_rate": 0.000552958487724626,
|
| 373 |
+
"loss": 0.2305,
|
| 374 |
+
"step": 1300
|
| 375 |
+
},
|
| 376 |
+
{
|
| 377 |
+
"epoch": 1.06,
|
| 378 |
+
"grad_norm": 0.16480422019958496,
|
| 379 |
+
"learning_rate": 0.0005355746337343836,
|
| 380 |
+
"loss": 0.2271,
|
| 381 |
+
"step": 1325
|
| 382 |
+
},
|
| 383 |
+
{
|
| 384 |
+
"epoch": 1.08,
|
| 385 |
+
"grad_norm": 0.16974572837352753,
|
| 386 |
+
"learning_rate": 0.0005181474375327879,
|
| 387 |
+
"loss": 0.2311,
|
| 388 |
+
"step": 1350
|
| 389 |
+
},
|
| 390 |
+
{
|
| 391 |
+
"epoch": 1.1,
|
| 392 |
+
"grad_norm": 0.16218312084674835,
|
| 393 |
+
"learning_rate": 0.0005006981314739573,
|
| 394 |
+
"loss": 0.2351,
|
| 395 |
+
"step": 1375
|
| 396 |
+
},
|
| 397 |
+
{
|
| 398 |
+
"epoch": 1.12,
|
| 399 |
+
"grad_norm": 0.1601516455411911,
|
| 400 |
+
"learning_rate": 0.00048324797484946424,
|
| 401 |
+
"loss": 0.2329,
|
| 402 |
+
"step": 1400
|
| 403 |
+
},
|
| 404 |
+
{
|
| 405 |
+
"epoch": 1.1400000000000001,
|
| 406 |
+
"grad_norm": 0.15736624598503113,
|
| 407 |
+
"learning_rate": 0.0004658182279871657,
|
| 408 |
+
"loss": 0.2245,
|
| 409 |
+
"step": 1425
|
| 410 |
+
},
|
| 411 |
+
{
|
| 412 |
+
"epoch": 1.16,
|
| 413 |
+
"grad_norm": 0.16985422372817993,
|
| 414 |
+
"learning_rate": 0.00044843012634876645,
|
| 415 |
+
"loss": 0.2305,
|
| 416 |
+
"step": 1450
|
| 417 |
+
},
|
| 418 |
+
{
|
| 419 |
+
"epoch": 1.18,
|
| 420 |
+
"grad_norm": 0.15030717849731445,
|
| 421 |
+
"learning_rate": 0.000431104854657681,
|
| 422 |
+
"loss": 0.2274,
|
| 423 |
+
"step": 1475
|
| 424 |
+
},
|
| 425 |
+
{
|
| 426 |
+
"epoch": 1.2,
|
| 427 |
+
"grad_norm": 0.1833723485469818,
|
| 428 |
+
"learning_rate": 0.0004138635210887117,
|
| 429 |
+
"loss": 0.2292,
|
| 430 |
+
"step": 1500
|
| 431 |
+
},
|
| 432 |
+
{
|
| 433 |
+
"epoch": 1.22,
|
| 434 |
+
"grad_norm": 0.1704292595386505,
|
| 435 |
+
"learning_rate": 0.0003967271315509884,
|
| 436 |
+
"loss": 0.2317,
|
| 437 |
+
"step": 1525
|
| 438 |
+
},
|
| 439 |
+
{
|
| 440 |
+
"epoch": 1.24,
|
| 441 |
+
"grad_norm": 0.15471942722797394,
|
| 442 |
+
"learning_rate": 0.0003797165640955041,
|
| 443 |
+
"loss": 0.2234,
|
| 444 |
+
"step": 1550
|
| 445 |
+
},
|
| 446 |
+
{
|
| 447 |
+
"epoch": 1.26,
|
| 448 |
+
"grad_norm": 0.14952746033668518,
|
| 449 |
+
"learning_rate": 0.0003628525434784268,
|
| 450 |
+
"loss": 0.2214,
|
| 451 |
+
"step": 1575
|
| 452 |
+
},
|
| 453 |
+
{
|
| 454 |
+
"epoch": 1.28,
|
| 455 |
+
"grad_norm": 0.1543506234884262,
|
| 456 |
+
"learning_rate": 0.0003461556159111748,
|
| 457 |
+
"loss": 0.2159,
|
| 458 |
+
"step": 1600
|
| 459 |
+
},
|
| 460 |
+
{
|
| 461 |
+
"epoch": 1.3,
|
| 462 |
+
"grad_norm": 0.1592603474855423,
|
| 463 |
+
"learning_rate": 0.0003296461240280242,
|
| 464 |
+
"loss": 0.2173,
|
| 465 |
+
"step": 1625
|
| 466 |
+
},
|
| 467 |
+
{
|
| 468 |
+
"epoch": 1.32,
|
| 469 |
+
"grad_norm": 0.16056233644485474,
|
| 470 |
+
"learning_rate": 0.00031334418210174266,
|
| 471 |
+
"loss": 0.2123,
|
| 472 |
+
"step": 1650
|
| 473 |
+
},
|
| 474 |
+
{
|
| 475 |
+
"epoch": 1.34,
|
| 476 |
+
"grad_norm": 0.14286746084690094,
|
| 477 |
+
"learning_rate": 0.0002972696515374455,
|
| 478 |
+
"loss": 0.2152,
|
| 479 |
+
"step": 1675
|
| 480 |
+
},
|
| 481 |
+
{
|
| 482 |
+
"epoch": 1.3599999999999999,
|
| 483 |
+
"grad_norm": 0.16939015686511993,
|
| 484 |
+
"learning_rate": 0.00028144211667453366,
|
| 485 |
+
"loss": 0.2145,
|
| 486 |
+
"step": 1700
|
| 487 |
+
},
|
| 488 |
+
{
|
| 489 |
+
"epoch": 1.38,
|
| 490 |
+
"grad_norm": 0.15761587023735046,
|
| 491 |
+
"learning_rate": 0.00026588086092619277,
|
| 492 |
+
"loss": 0.2153,
|
| 493 |
+
"step": 1725
|
| 494 |
+
},
|
| 495 |
+
{
|
| 496 |
+
"epoch": 1.4,
|
| 497 |
+
"grad_norm": 0.17639687657356262,
|
| 498 |
+
"learning_rate": 0.00025060484328552466,
|
| 499 |
+
"loss": 0.2156,
|
| 500 |
+
"step": 1750
|
| 501 |
+
},
|
| 502 |
+
{
|
| 503 |
+
"epoch": 1.42,
|
| 504 |
+
"grad_norm": 0.15203754603862762,
|
| 505 |
+
"learning_rate": 0.00023563267522693415,
|
| 506 |
+
"loss": 0.2165,
|
| 507 |
+
"step": 1775
|
| 508 |
+
},
|
| 509 |
+
{
|
| 510 |
+
"epoch": 1.44,
|
| 511 |
+
"grad_norm": 0.1759003847837448,
|
| 512 |
+
"learning_rate": 0.0002209825980309151,
|
| 513 |
+
"loss": 0.2028,
|
| 514 |
+
"step": 1800
|
| 515 |
+
},
|
| 516 |
+
{
|
| 517 |
+
"epoch": 1.46,
|
| 518 |
+
"grad_norm": 0.15083126723766327,
|
| 519 |
+
"learning_rate": 0.00020667246055985938,
|
| 520 |
+
"loss": 0.2097,
|
| 521 |
+
"step": 1825
|
| 522 |
+
},
|
| 523 |
+
{
|
| 524 |
+
"epoch": 1.48,
|
| 525 |
+
"grad_norm": 0.19524288177490234,
|
| 526 |
+
"learning_rate": 0.00019271969751196778,
|
| 527 |
+
"loss": 0.2146,
|
| 528 |
+
"step": 1850
|
| 529 |
+
},
|
| 530 |
+
{
|
| 531 |
+
"epoch": 1.5,
|
| 532 |
+
"grad_norm": 0.1552341729402542,
|
| 533 |
+
"learning_rate": 0.00017914130817975592,
|
| 534 |
+
"loss": 0.2146,
|
| 535 |
+
"step": 1875
|
| 536 |
+
},
|
| 537 |
+
{
|
| 538 |
+
"epoch": 1.52,
|
| 539 |
+
"grad_norm": 0.17365647852420807,
|
| 540 |
+
"learning_rate": 0.00016595383573903412,
|
| 541 |
+
"loss": 0.2116,
|
| 542 |
+
"step": 1900
|
| 543 |
+
},
|
| 544 |
+
{
|
| 545 |
+
"epoch": 1.54,
|
| 546 |
+
"grad_norm": 0.17446695268154144,
|
| 547 |
+
"learning_rate": 0.0001531733470935976,
|
| 548 |
+
"loss": 0.2101,
|
| 549 |
+
"step": 1925
|
| 550 |
+
},
|
| 551 |
+
{
|
| 552 |
+
"epoch": 1.56,
|
| 553 |
+
"grad_norm": 0.15145274996757507,
|
| 554 |
+
"learning_rate": 0.00014081541330017704,
|
| 555 |
+
"loss": 0.2001,
|
| 556 |
+
"step": 1950
|
| 557 |
+
},
|
| 558 |
+
{
|
| 559 |
+
"epoch": 1.58,
|
| 560 |
+
"grad_norm": 0.16965870559215546,
|
| 561 |
+
"learning_rate": 0.00012889509059750602,
|
| 562 |
+
"loss": 0.2146,
|
| 563 |
+
"step": 1975
|
| 564 |
+
},
|
| 565 |
+
{
|
| 566 |
+
"epoch": 1.6,
|
| 567 |
+
"grad_norm": 0.1585461050271988,
|
| 568 |
+
"learning_rate": 0.00011742690206261292,
|
| 569 |
+
"loss": 0.2126,
|
| 570 |
+
"step": 2000
|
| 571 |
+
},
|
| 572 |
+
{
|
| 573 |
+
"epoch": 1.62,
|
| 574 |
+
"grad_norm": 0.15166988968849182,
|
| 575 |
+
"learning_rate": 0.0001064248199166884,
|
| 576 |
+
"loss": 0.1989,
|
| 577 |
+
"step": 2025
|
| 578 |
+
},
|
| 579 |
+
{
|
| 580 |
+
"epoch": 1.6400000000000001,
|
| 581 |
+
"grad_norm": 0.16476476192474365,
|
| 582 |
+
"learning_rate": 9.590224850208645e-05,
|
| 583 |
+
"loss": 0.208,
|
| 584 |
+
"step": 2050
|
| 585 |
+
},
|
| 586 |
+
{
|
| 587 |
+
"epoch": 1.6600000000000001,
|
| 588 |
+
"grad_norm": 0.13906484842300415,
|
| 589 |
+
"learning_rate": 8.587200795119792e-05,
|
| 590 |
+
"loss": 0.2127,
|
| 591 |
+
"step": 2075
|
| 592 |
+
},
|
| 593 |
+
{
|
| 594 |
+
"epoch": 1.6800000000000002,
|
| 595 |
+
"grad_norm": 0.15508808195590973,
|
| 596 |
+
"learning_rate": 7.634631856709389e-05,
|
| 597 |
+
"loss": 0.2087,
|
| 598 |
+
"step": 2100
|
| 599 |
+
},
|
| 600 |
+
{
|
| 601 |
+
"epoch": 1.7,
|
| 602 |
+
"grad_norm": 0.1457161009311676,
|
| 603 |
+
"learning_rate": 6.733678593496901e-05,
|
| 604 |
+
"loss": 0.2074,
|
| 605 |
+
"step": 2125
|
| 606 |
+
},
|
| 607 |
+
{
|
| 608 |
+
"epoch": 1.72,
|
| 609 |
+
"grad_norm": 0.1518443375825882,
|
| 610 |
+
"learning_rate": 5.885438678252342e-05,
|
| 611 |
+
"loss": 0.1996,
|
| 612 |
+
"step": 2150
|
| 613 |
+
},
|
| 614 |
+
{
|
| 615 |
+
"epoch": 1.74,
|
| 616 |
+
"grad_norm": 0.1431756168603897,
|
| 617 |
+
"learning_rate": 5.0909455606510726e-05,
|
| 618 |
+
"loss": 0.2175,
|
| 619 |
+
"step": 2175
|
| 620 |
+
},
|
| 621 |
+
{
|
| 622 |
+
"epoch": 1.76,
|
| 623 |
+
"grad_norm": 0.15276211500167847,
|
| 624 |
+
"learning_rate": 4.3511672081746386e-05,
|
| 625 |
+
"loss": 0.2072,
|
| 626 |
+
"step": 2200
|
| 627 |
+
},
|
| 628 |
+
{
|
| 629 |
+
"epoch": 1.78,
|
| 630 |
+
"grad_norm": 0.15774431824684143,
|
| 631 |
+
"learning_rate": 3.667004926791395e-05,
|
| 632 |
+
"loss": 0.2004,
|
| 633 |
+
"step": 2225
|
| 634 |
+
},
|
| 635 |
+
{
|
| 636 |
+
"epoch": 1.8,
|
| 637 |
+
"grad_norm": 0.1650596261024475,
|
| 638 |
+
"learning_rate": 3.0392922628540875e-05,
|
| 639 |
+
"loss": 0.209,
|
| 640 |
+
"step": 2250
|
| 641 |
+
},
|
| 642 |
+
{
|
| 643 |
+
"epoch": 1.8199999999999998,
|
| 644 |
+
"grad_norm": 0.14472806453704834,
|
| 645 |
+
"learning_rate": 2.468793987551998e-05,
|
| 646 |
+
"loss": 0.2014,
|
| 647 |
+
"step": 2275
|
| 648 |
+
},
|
| 649 |
+
{
|
| 650 |
+
"epoch": 1.8399999999999999,
|
| 651 |
+
"grad_norm": 0.16512498259544373,
|
| 652 |
+
"learning_rate": 1.9562051651550784e-05,
|
| 653 |
+
"loss": 0.2066,
|
| 654 |
+
"step": 2300
|
| 655 |
+
},
|
| 656 |
+
{
|
| 657 |
+
"epoch": 1.8599999999999999,
|
| 658 |
+
"grad_norm": 0.16627739369869232,
|
| 659 |
+
"learning_rate": 1.5021503061851349e-05,
|
| 660 |
+
"loss": 0.2082,
|
| 661 |
+
"step": 2325
|
| 662 |
+
},
|
| 663 |
+
{
|
| 664 |
+
"epoch": 1.88,
|
| 665 |
+
"grad_norm": 0.1565823256969452,
|
| 666 |
+
"learning_rate": 1.1071826065460589e-05,
|
| 667 |
+
"loss": 0.2,
|
| 668 |
+
"step": 2350
|
| 669 |
+
},
|
| 670 |
+
{
|
| 671 |
+
"epoch": 1.9,
|
| 672 |
+
"grad_norm": 0.17333842813968658,
|
| 673 |
+
"learning_rate": 7.717832735397334e-06,
|
| 674 |
+
"loss": 0.2243,
|
| 675 |
+
"step": 2375
|
| 676 |
+
},
|
| 677 |
+
{
|
| 678 |
+
"epoch": 1.92,
|
| 679 |
+
"grad_norm": 0.15922032296657562,
|
| 680 |
+
"learning_rate": 4.963609395891299e-06,
|
| 681 |
+
"loss": 0.2091,
|
| 682 |
+
"step": 2400
|
| 683 |
+
},
|
| 684 |
+
{
|
| 685 |
+
"epoch": 1.94,
|
| 686 |
+
"grad_norm": 0.1890290379524231,
|
| 687 |
+
"learning_rate": 2.81251164382601e-06,
|
| 688 |
+
"loss": 0.2019,
|
| 689 |
+
"step": 2425
|
| 690 |
+
},
|
| 691 |
+
{
|
| 692 |
+
"epoch": 1.96,
|
| 693 |
+
"grad_norm": 0.16079087555408478,
|
| 694 |
+
"learning_rate": 1.267160260461253e-06,
|
| 695 |
+
"loss": 0.2036,
|
| 696 |
+
"step": 2450
|
| 697 |
+
},
|
| 698 |
+
{
|
| 699 |
+
"epoch": 1.98,
|
| 700 |
+
"grad_norm": 0.1514529585838318,
|
| 701 |
+
"learning_rate": 3.2943801841439634e-07,
|
| 702 |
+
"loss": 0.2098,
|
| 703 |
+
"step": 2475
|
| 704 |
+
},
|
| 705 |
+
{
|
| 706 |
+
"epoch": 2.0,
|
| 707 |
+
"grad_norm": 0.14568041265010834,
|
| 708 |
+
"learning_rate": 4.873877924582715e-10,
|
| 709 |
+
"loss": 0.2068,
|
| 710 |
+
"step": 2500
|
| 711 |
+
},
|
| 712 |
+
{
|
| 713 |
+
"epoch": 2.0,
|
| 714 |
+
"step": 2500,
|
| 715 |
+
"total_flos": 1.62588235137024e+18,
|
| 716 |
+
"train_loss": 0.2613293670654297,
|
| 717 |
+
"train_runtime": 2323.6228,
|
| 718 |
+
"train_samples_per_second": 34.429,
|
| 719 |
+
"train_steps_per_second": 1.076
|
| 720 |
+
}
|
| 721 |
+
],
|
| 722 |
+
"logging_steps": 25,
|
| 723 |
+
"max_steps": 2500,
|
| 724 |
+
"num_input_tokens_seen": 0,
|
| 725 |
+
"num_train_epochs": 2,
|
| 726 |
+
"save_steps": 0,
|
| 727 |
+
"stateful_callbacks": {
|
| 728 |
+
"TrainerControl": {
|
| 729 |
+
"args": {
|
| 730 |
+
"should_epoch_stop": false,
|
| 731 |
+
"should_evaluate": false,
|
| 732 |
+
"should_log": false,
|
| 733 |
+
"should_save": true,
|
| 734 |
+
"should_training_stop": true
|
| 735 |
+
},
|
| 736 |
+
"attributes": {}
|
| 737 |
+
}
|
| 738 |
+
},
|
| 739 |
+
"total_flos": 1.62588235137024e+18,
|
| 740 |
+
"train_batch_size": 32,
|
| 741 |
+
"trial_name": null,
|
| 742 |
+
"trial_params": null
|
| 743 |
+
}
|
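
trainer_state.json records one log_history entry every 25 optimizer steps plus a final run-level summary entry. A minimal sketch (standard library only) for pulling the loss curve and summary stats back out of it:

import json

with open("nl_tasks/exps/run_ex22/trainer_state.json") as f:
    state = json.load(f)

# Per-step entries carry "loss"; the last entry is the run summary and does not.
curve = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]
summary = state["log_history"][-1]
print(f"{len(curve)} points, last logged loss {curve[-1][1]:.4f} at step {curve[-1][0]}")
print(f"train_loss={summary['train_loss']:.4f}, runtime={summary['train_runtime']:.0f}s, "
      f"epochs={state['num_train_epochs']}, max_steps={state['max_steps']}")
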
nl_tasks/exps/run_ex23/ft/adapter_config.json
ADDED
@@ -0,0 +1,18 @@
+{
+  "T": 1.0,
+  "base_model_name_or_path": "meta-llama/Llama-2-7b-hf",
+  "bias": "none",
+  "inference_mode": false,
+  "layers_to_transform": null,
+  "modules_to_save": null,
+  "num_rotations": 1,
+  "peft_type": "ROTATION",
+  "r": 16,
+  "revision": null,
+  "target_modules": [
+    "q_proj",
+    "v_proj"
+  ],
+  "target_modules_to_skip": null,
+  "task_type": "CAUSAL_LM"
+}
nl_tasks/exps/run_ex23/ft/special_tokens_map.json
ADDED
@@ -0,0 +1,24 @@
+{
+  "bos_token": {
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": "<unk>",
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
nl_tasks/exps/run_ex23/ft/tokenizer.json
ADDED
The diff for this file is too large to render.
nl_tasks/exps/run_ex23/ft/tokenizer.model
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
nl_tasks/exps/run_ex23/ft/tokenizer_config.json
ADDED
@@ -0,0 +1,43 @@
+{
+  "add_bos_token": true,
+  "add_eos_token": false,
+  "add_prefix_space": null,
+  "added_tokens_decoder": {
+    "0": {
+      "content": "<unk>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "bos_token": "<s>",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "</s>",
+  "extra_special_tokens": {},
+  "legacy": false,
+  "model_max_length": 512,
+  "pad_token": "<unk>",
+  "padding_side": "right",
+  "sp_model_kwargs": {},
+  "tokenizer_class": "LlamaTokenizer",
+  "unk_token": "<unk>",
+  "use_default_system_prompt": false
+}
nl_tasks/exps/run_ex23/ft2/adapter_config.json
ADDED
@@ -0,0 +1,18 @@
+{
+  "T": 1.0,
+  "base_model_name_or_path": "meta-llama/Llama-2-7b-hf",
+  "bias": "none",
+  "inference_mode": true,
+  "layers_to_transform": null,
+  "modules_to_save": null,
+  "num_rotations": 1,
+  "peft_type": "ROTATION",
+  "r": 16,
+  "revision": null,
+  "target_modules": [
+    "q_proj",
+    "v_proj"
+  ],
+  "target_modules_to_skip": null,
+  "task_type": "CAUSAL_LM"
+}
nl_tasks/exps/run_ex23/ft2/adapter_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7fe184c2f0054501fd2277b1721d9d9e59e0db117f73a18649bff587c9e5ef2c
+size 33602915
nl_tasks/exps/run_ex23/trainer_state.json
ADDED
@@ -0,0 +1,1093 @@
| 1 |
+
{
|
| 2 |
+
"best_global_step": null,
|
| 3 |
+
"best_metric": null,
|
| 4 |
+
"best_model_checkpoint": null,
|
| 5 |
+
"epoch": 3.0,
|
| 6 |
+
"eval_steps": 500,
|
| 7 |
+
"global_step": 3750,
|
| 8 |
+
"is_hyper_param_search": false,
|
| 9 |
+
"is_local_process_zero": true,
|
| 10 |
+
"is_world_process_zero": true,
|
| 11 |
+
"log_history": [
|
| 12 |
+
{
|
| 13 |
+
"epoch": 0.02,
|
| 14 |
+
"grad_norm": 0.48084303736686707,
|
| 15 |
+
"learning_rate": 6.4e-05,
|
| 16 |
+
"loss": 0.8758,
|
| 17 |
+
"step": 25
|
| 18 |
+
},
|
| 19 |
+
{
|
| 20 |
+
"epoch": 0.04,
|
| 21 |
+
"grad_norm": 0.2218693643808365,
|
| 22 |
+
"learning_rate": 0.00013066666666666665,
|
| 23 |
+
"loss": 0.5086,
|
| 24 |
+
"step": 50
|
| 25 |
+
},
|
| 26 |
+
{
|
| 27 |
+
"epoch": 0.06,
|
| 28 |
+
"grad_norm": 0.1952614188194275,
|
| 29 |
+
"learning_rate": 0.00019733333333333335,
|
| 30 |
+
"loss": 0.4161,
|
| 31 |
+
"step": 75
|
| 32 |
+
},
|
| 33 |
+
{
|
| 34 |
+
"epoch": 0.08,
|
| 35 |
+
"grad_norm": 0.17632220685482025,
|
| 36 |
+
"learning_rate": 0.000264,
|
| 37 |
+
"loss": 0.3766,
|
| 38 |
+
"step": 100
|
| 39 |
+
},
|
| 40 |
+
{
|
| 41 |
+
"epoch": 0.1,
|
| 42 |
+
"grad_norm": 0.19628162682056427,
|
| 43 |
+
"learning_rate": 0.00033066666666666666,
|
| 44 |
+
"loss": 0.3566,
|
| 45 |
+
"step": 125
|
| 46 |
+
},
|
| 47 |
+
{
|
| 48 |
+
"epoch": 0.12,
|
| 49 |
+
"grad_norm": 0.2977882921695709,
|
| 50 |
+
"learning_rate": 0.0003973333333333333,
|
| 51 |
+
"loss": 0.3472,
|
| 52 |
+
"step": 150
|
| 53 |
+
},
|
| 54 |
+
{
|
| 55 |
+
"epoch": 0.14,
|
| 56 |
+
"grad_norm": 0.17526467144489288,
|
| 57 |
+
"learning_rate": 0.00046400000000000006,
|
| 58 |
+
"loss": 0.3401,
|
| 59 |
+
"step": 175
|
| 60 |
+
},
|
| 61 |
+
{
|
| 62 |
+
"epoch": 0.16,
|
| 63 |
+
"grad_norm": 0.21100583672523499,
|
| 64 |
+
"learning_rate": 0.0005306666666666666,
|
| 65 |
+
"loss": 0.3369,
|
| 66 |
+
"step": 200
|
| 67 |
+
},
|
| 68 |
+
{
|
| 69 |
+
"epoch": 0.18,
|
| 70 |
+
"grad_norm": 0.1849939078092575,
|
| 71 |
+
"learning_rate": 0.0005973333333333334,
|
| 72 |
+
"loss": 0.3319,
|
| 73 |
+
"step": 225
|
| 74 |
+
},
|
| 75 |
+
{
|
| 76 |
+
"epoch": 0.2,
|
| 77 |
+
"grad_norm": 0.1735353320837021,
|
| 78 |
+
"learning_rate": 0.0006640000000000001,
|
| 79 |
+
"loss": 0.3244,
|
| 80 |
+
"step": 250
|
| 81 |
+
},
|
| 82 |
+
{
|
| 83 |
+
"epoch": 0.22,
|
| 84 |
+
"grad_norm": 0.1939871460199356,
|
| 85 |
+
"learning_rate": 0.0007306666666666667,
|
| 86 |
+
"loss": 0.3236,
|
| 87 |
+
"step": 275
|
| 88 |
+
},
|
| 89 |
+
{
|
| 90 |
+
"epoch": 0.24,
|
| 91 |
+
"grad_norm": 0.20389701426029205,
|
| 92 |
+
"learning_rate": 0.0007973333333333334,
|
| 93 |
+
"loss": 0.3336,
|
| 94 |
+
"step": 300
|
| 95 |
+
},
|
| 96 |
+
{
|
| 97 |
+
"epoch": 0.26,
|
| 98 |
+
"grad_norm": 0.18185636401176453,
|
| 99 |
+
"learning_rate": 0.000864,
|
| 100 |
+
"loss": 0.3241,
|
| 101 |
+
"step": 325
|
| 102 |
+
},
|
| 103 |
+
{
|
| 104 |
+
"epoch": 0.28,
|
| 105 |
+
"grad_norm": 0.22774292528629303,
|
| 106 |
+
"learning_rate": 0.0009306666666666667,
|
| 107 |
+
"loss": 0.3284,
|
| 108 |
+
"step": 350
|
| 109 |
+
},
|
| 110 |
+
{
|
| 111 |
+
"epoch": 0.3,
|
| 112 |
+
"grad_norm": 0.24220959842205048,
|
| 113 |
+
"learning_rate": 0.0009973333333333334,
|
| 114 |
+
"loss": 0.3239,
|
| 115 |
+
"step": 375
|
| 116 |
+
},
|
| 117 |
+
{
|
| 118 |
+
"epoch": 0.32,
|
| 119 |
+
"grad_norm": 0.2494906187057495,
|
| 120 |
+
"learning_rate": 0.0009998752338940611,
|
| 121 |
+
"loss": 0.3234,
|
| 122 |
+
"step": 400
|
| 123 |
+
},
|
| 124 |
+
{
|
| 125 |
+
"epoch": 0.34,
|
| 126 |
+
"grad_norm": 0.20020228624343872,
|
| 127 |
+
"learning_rate": 0.0009994799931474475,
|
| 128 |
+
"loss": 0.311,
|
| 129 |
+
"step": 425
|
| 130 |
+
},
|
| 131 |
+
{
|
| 132 |
+
"epoch": 0.36,
|
| 133 |
+
"grad_norm": 0.21591724455356598,
|
| 134 |
+
"learning_rate": 0.0009988142751731796,
|
| 135 |
+
"loss": 0.3148,
|
| 136 |
+
"step": 450
|
| 137 |
+
},
|
| 138 |
+
{
|
| 139 |
+
"epoch": 0.38,
|
| 140 |
+
"grad_norm": 0.22068051993846893,
|
| 141 |
+
"learning_rate": 0.0009978784404692847,
|
| 142 |
+
"loss": 0.3071,
|
| 143 |
+
"step": 475
|
| 144 |
+
},
|
| 145 |
+
{
|
| 146 |
+
"epoch": 0.4,
|
| 147 |
+
"grad_norm": 0.24510782957077026,
|
| 148 |
+
"learning_rate": 0.0009966729958067638,
|
| 149 |
+
"loss": 0.3075,
|
| 150 |
+
"step": 500
|
| 151 |
+
},
|
| 152 |
+
{
|
| 153 |
+
"epoch": 0.42,
|
| 154 |
+
"grad_norm": 0.2199760228395462,
|
| 155 |
+
"learning_rate": 0.0009951985939551673,
|
| 156 |
+
"loss": 0.3064,
|
| 157 |
+
"step": 525
|
| 158 |
+
},
|
| 159 |
+
{
|
| 160 |
+
"epoch": 0.44,
|
| 161 |
+
"grad_norm": 0.237228661775589,
|
| 162 |
+
"learning_rate": 0.0009934560333291077,
|
| 163 |
+
"loss": 0.2927,
|
| 164 |
+
"step": 550
|
| 165 |
+
},
|
| 166 |
+
{
|
| 167 |
+
"epoch": 0.46,
|
| 168 |
+
"grad_norm": 0.24607574939727783,
|
| 169 |
+
"learning_rate": 0.0009914462575559045,
|
| 170 |
+
"loss": 0.3116,
|
| 171 |
+
"step": 575
|
| 172 |
+
},
|
| 173 |
+
{
|
| 174 |
+
"epoch": 0.48,
|
| 175 |
+
"grad_norm": 0.23642563819885254,
|
| 176 |
+
"learning_rate": 0.000989170354964594,
|
| 177 |
+
"loss": 0.2974,
|
| 178 |
+
"step": 600
|
| 179 |
+
},
|
| 180 |
+
{
|
| 181 |
+
"epoch": 0.5,
|
| 182 |
+
"grad_norm": 0.22067102789878845,
|
| 183 |
+
"learning_rate": 0.0009866295579965781,
|
| 184 |
+
"loss": 0.3096,
|
| 185 |
+
"step": 625
|
| 186 |
+
},
|
| 187 |
+
{
|
| 188 |
+
"epoch": 0.52,
|
| 189 |
+
"grad_norm": 0.2489147186279297,
|
| 190 |
+
"learning_rate": 0.000983825242538238,
|
| 191 |
+
"loss": 0.3007,
|
| 192 |
+
"step": 650
|
| 193 |
+
},
|
| 194 |
+
{
|
| 195 |
+
"epoch": 0.54,
|
| 196 |
+
"grad_norm": 0.20756591856479645,
|
| 197 |
+
"learning_rate": 0.0009807589271758651,
|
| 198 |
+
"loss": 0.2873,
|
| 199 |
+
"step": 675
|
| 200 |
+
},
|
| 201 |
+
{
|
| 202 |
+
"epoch": 0.56,
|
| 203 |
+
"grad_norm": 0.2658780515193939,
|
| 204 |
+
"learning_rate": 0.0009774322723733215,
|
| 205 |
+
"loss": 0.2903,
|
| 206 |
+
"step": 700
|
| 207 |
+
},
|
| 208 |
+
{
|
| 209 |
+
"epoch": 0.58,
|
| 210 |
+
"grad_norm": 0.2340981364250183,
|
| 211 |
+
"learning_rate": 0.0009738470795728685,
|
| 212 |
+
"loss": 0.2951,
|
| 213 |
+
"step": 725
|
| 214 |
+
},
|
| 215 |
+
{
|
| 216 |
+
"epoch": 0.6,
|
| 217 |
+
"grad_norm": 0.23202940821647644,
|
| 218 |
+
"learning_rate": 0.000970005290219654,
|
| 219 |
+
"loss": 0.2894,
|
| 220 |
+
"step": 750
|
| 221 |
+
},
|
| 222 |
+
{
|
| 223 |
+
"epoch": 0.62,
|
| 224 |
+
"grad_norm": 0.22148098051548004,
|
| 225 |
+
"learning_rate": 0.0009659089847103863,
|
| 226 |
+
"loss": 0.2785,
|
| 227 |
+
"step": 775
|
| 228 |
+
},
|
| 229 |
+
{
|
| 230 |
+
"epoch": 0.64,
|
| 231 |
+
"grad_norm": 0.23035770654678345,
|
| 232 |
+
"learning_rate": 0.0009615603812667617,
|
| 233 |
+
"loss": 0.2833,
|
| 234 |
+
"step": 800
|
| 235 |
+
},
|
| 236 |
+
{
|
| 237 |
+
"epoch": 0.66,
|
| 238 |
+
"grad_norm": 0.20710399746894836,
|
| 239 |
+
"learning_rate": 0.0009569618347342592,
|
| 240 |
+
"loss": 0.274,
|
| 241 |
+
"step": 825
|
| 242 |
+
},
|
| 243 |
+
{
|
| 244 |
+
"epoch": 0.68,
|
| 245 |
+
"grad_norm": 0.21183089911937714,
|
| 246 |
+
"learning_rate": 0.0009521158353069494,
|
| 247 |
+
"loss": 0.2765,
|
| 248 |
+
"step": 850
|
| 249 |
+
},
|
| 250 |
+
{
|
| 251 |
+
"epoch": 0.7,
|
| 252 |
+
"grad_norm": 0.19836094975471497,
|
| 253 |
+
"learning_rate": 0.0009470250071790107,
|
| 254 |
+
"loss": 0.2846,
|
| 255 |
+
"step": 875
|
| 256 |
+
},
|
| 257 |
+
{
|
| 258 |
+
"epoch": 0.72,
|
| 259 |
+
"grad_norm": 0.19088484346866608,
|
| 260 |
+
"learning_rate": 0.0009416921071236822,
|
| 261 |
+
"loss": 0.2768,
|
| 262 |
+
"step": 900
|
| 263 |
+
},
|
| 264 |
+
{
|
| 265 |
+
"epoch": 0.74,
|
| 266 |
+
"grad_norm": 0.20932093262672424,
|
| 267 |
+
"learning_rate": 0.0009361200230004219,
|
| 268 |
+
"loss": 0.275,
|
| 269 |
+
"step": 925
|
| 270 |
+
},
|
| 271 |
+
{
|
| 272 |
+
"epoch": 0.76,
|
| 273 |
+
"grad_norm": 0.1993676722049713,
|
| 274 |
+
"learning_rate": 0.0009303117721910801,
|
| 275 |
+
"loss": 0.2717,
|
| 276 |
+
"step": 950
|
| 277 |
+
},
|
| 278 |
+
{
|
| 279 |
+
"epoch": 0.78,
|
| 280 |
+
"grad_norm": 0.19074216485023499,
|
| 281 |
+
"learning_rate": 0.0009242704999659339,
|
| 282 |
+
"loss": 0.2681,
|
| 283 |
+
"step": 975
|
| 284 |
+
},
|
| 285 |
+
{
|
| 286 |
+
"epoch": 0.8,
|
| 287 |
+
"grad_norm": 0.3126646876335144,
|
| 288 |
+
"learning_rate": 0.0009179994777804677,
|
| 289 |
+
"loss": 0.2749,
|
| 290 |
+
"step": 1000
|
| 291 |
+
},
|
| 292 |
+
{
|
| 293 |
+
"epoch": 0.82,
|
| 294 |
+
"grad_norm": 0.21068663895130157,
|
| 295 |
+
"learning_rate": 0.0009115021015038226,
|
| 296 |
+
"loss": 0.2737,
|
| 297 |
+
"step": 1025
|
| 298 |
+
},
|
| 299 |
+
{
|
| 300 |
+
"epoch": 0.84,
|
| 301 |
+
"grad_norm": 0.22428499162197113,
|
| 302 |
+
"learning_rate": 0.0009047818895798731,
|
| 303 |
+
"loss": 0.2631,
|
| 304 |
+
"step": 1050
|
| 305 |
+
},
|
| 306 |
+
{
|
| 307 |
+
"epoch": 0.86,
|
| 308 |
+
"grad_norm": 0.2032303512096405,
|
| 309 |
+
"learning_rate": 0.0008978424811219277,
|
| 310 |
+
"loss": 0.2573,
|
| 311 |
+
"step": 1075
|
| 312 |
+
},
|
| 313 |
+
{
|
| 314 |
+
"epoch": 0.88,
|
| 315 |
+
"grad_norm": 0.17513667047023773,
|
| 316 |
+
"learning_rate": 0.0008906876339420851,
|
| 317 |
+
"loss": 0.2591,
|
| 318 |
+
"step": 1100
|
| 319 |
+
},
|
| 320 |
+
{
|
| 321 |
+
"epoch": 0.9,
|
| 322 |
+
"grad_norm": 0.19266444444656372,
|
| 323 |
+
"learning_rate": 0.0008833212225163124,
|
| 324 |
+
"loss": 0.2688,
|
| 325 |
+
"step": 1125
|
| 326 |
+
},
|
| 327 |
+
{
|
| 328 |
+
"epoch": 0.92,
|
| 329 |
+
"grad_norm": 0.1570984125137329,
|
| 330 |
+
"learning_rate": 0.0008757472358863481,
|
| 331 |
+
"loss": 0.2592,
|
| 332 |
+
"step": 1150
|
| 333 |
+
},
|
| 334 |
+
{
|
| 335 |
+
"epoch": 0.94,
|
| 336 |
+
"grad_norm": 0.21406503021717072,
|
| 337 |
+
"learning_rate": 0.0008679697754995655,
|
| 338 |
+
"loss": 0.2634,
|
| 339 |
+
"step": 1175
|
| 340 |
+
},
|
| 341 |
+
{
|
| 342 |
+
"epoch": 0.96,
|
| 343 |
+
"grad_norm": 0.17088258266448975,
|
| 344 |
+
"learning_rate": 0.0008599930529879669,
|
| 345 |
+
"loss": 0.2606,
|
| 346 |
+
"step": 1200
|
| 347 |
+
},
|
| 348 |
+
{
|
| 349 |
+
"epoch": 0.98,
|
| 350 |
+
"grad_norm": 0.18541710078716278,
|
| 351 |
+
"learning_rate": 0.0008518213878875102,
|
| 352 |
+
"loss": 0.2643,
|
| 353 |
+
"step": 1225
|
| 354 |
+
},
|
| 355 |
+
{
|
| 356 |
+
"epoch": 1.0,
|
| 357 |
+
"grad_norm": 0.24920210242271423,
|
| 358 |
+
"learning_rate": 0.0008434592052990044,
|
| 359 |
+
"loss": 0.2529,
|
| 360 |
+
"step": 1250
|
| 361 |
+
},
|
| 362 |
+
{
|
| 363 |
+
"epoch": 1.02,
|
| 364 |
+
"grad_norm": 0.1960878223180771,
|
| 365 |
+
"learning_rate": 0.0008349110334918391,
|
| 366 |
+
"loss": 0.2325,
|
| 367 |
+
"step": 1275
|
| 368 |
+
},
|
| 369 |
+
{
|
| 370 |
+
"epoch": 1.04,
|
| 371 |
+
"grad_norm": 0.15122298896312714,
|
| 372 |
+
"learning_rate": 0.0008261815014518466,
|
| 373 |
+
"loss": 0.2407,
|
| 374 |
+
"step": 1300
|
| 375 |
+
},
|
| 376 |
+
{
|
| 377 |
+
"epoch": 1.06,
|
| 378 |
+
"grad_norm": 0.16863854229450226,
|
| 379 |
+
"learning_rate": 0.000817275336374625,
|
| 380 |
+
"loss": 0.236,
|
| 381 |
+
"step": 1325
|
| 382 |
+
},
|
| 383 |
+
{
|
| 384 |
+
"epoch": 1.08,
|
| 385 |
+
"grad_norm": 0.18454797565937042,
|
| 386 |
+
"learning_rate": 0.0008081973611056783,
|
| 387 |
+
"loss": 0.2401,
|
| 388 |
+
"step": 1350
|
| 389 |
+
},
|
| 390 |
+
{
|
| 391 |
+
"epoch": 1.1,
|
| 392 |
+
"grad_norm": 0.14638011157512665,
|
| 393 |
+
"learning_rate": 0.0007989524915287594,
|
| 394 |
+
"loss": 0.2445,
|
| 395 |
+
"step": 1375
|
| 396 |
+
},
|
| 397 |
+
{
|
| 398 |
+
"epoch": 1.12,
|
| 399 |
+
"grad_norm": 0.23536516726016998,
|
| 400 |
+
"learning_rate": 0.000789545733903834,
|
| 401 |
+
"loss": 0.2427,
|
| 402 |
+
"step": 1400
|
| 403 |
+
},
|
| 404 |
+
{
|
| 405 |
+
"epoch": 1.1400000000000001,
|
| 406 |
+
"grad_norm": 0.1823291927576065,
|
| 407 |
+
"learning_rate": 0.0007799821821561011,
|
| 408 |
+
"loss": 0.2342,
|
| 409 |
+
"step": 1425
|
| 410 |
+
},
|
| 411 |
+
{
|
| 412 |
+
"epoch": 1.16,
|
| 413 |
+
"grad_norm": 0.16896045207977295,
|
| 414 |
+
"learning_rate": 0.0007702670151175434,
|
| 415 |
+
"loss": 0.2422,
|
| 416 |
+
"step": 1450
|
| 417 |
+
},
|
| 418 |
+
{
|
| 419 |
+
"epoch": 1.18,
|
| 420 |
+
"grad_norm": 0.14874465763568878,
|
| 421 |
+
"learning_rate": 0.0007604054937224989,
|
| 422 |
+
"loss": 0.2367,
|
| 423 |
+
"step": 1475
|
| 424 |
+
},
|
| 425 |
+
{
|
| 426 |
+
"epoch": 1.2,
|
| 427 |
+
"grad_norm": 0.17434418201446533,
|
| 428 |
+
"learning_rate": 0.00075040295815877,
|
| 429 |
+
"loss": 0.2394,
|
| 430 |
+
"step": 1500
|
| 431 |
+
},
|
| 432 |
+
{
|
| 433 |
+
"epoch": 1.22,
|
| 434 |
+
"grad_norm": 0.1558532416820526,
|
| 435 |
+
"learning_rate": 0.0007402648249758203,
|
| 436 |
+
"loss": 0.2439,
|
| 437 |
+
"step": 1525
|
| 438 |
+
},
|
| 439 |
+
{
|
| 440 |
+
"epoch": 1.24,
|
| 441 |
+
"grad_norm": 0.14951705932617188,
|
| 442 |
+
"learning_rate": 0.0007299965841516163,
|
| 443 |
+
"loss": 0.2324,
|
| 444 |
+
"step": 1550
|
| 445 |
+
},
|
| 446 |
+
{
|
| 447 |
+
"epoch": 1.26,
|
| 448 |
+
"grad_norm": 0.17535166442394257,
|
| 449 |
+
"learning_rate": 0.0007196037961197066,
|
| 450 |
+
"loss": 0.2328,
|
| 451 |
+
"step": 1575
|
| 452 |
+
},
|
| 453 |
+
{
|
| 454 |
+
"epoch": 1.28,
|
| 455 |
+
"grad_norm": 0.14109523594379425,
|
| 456 |
+
"learning_rate": 0.0007090920887581507,
|
| 457 |
+
"loss": 0.2274,
|
| 458 |
+
"step": 1600
|
| 459 |
+
},
|
| 460 |
+
{
|
| 461 |
+
"epoch": 1.3,
|
| 462 |
+
"grad_norm": 0.16802997887134552,
|
| 463 |
+
"learning_rate": 0.0006984671543419228,
|
| 464 |
+
"loss": 0.2278,
|
| 465 |
+
"step": 1625
|
| 466 |
+
},
|
| 467 |
+
{
|
| 468 |
+
"epoch": 1.32,
|
| 469 |
+
"grad_norm": 0.16117294132709503,
|
| 470 |
+
"learning_rate": 0.0006877347464604445,
|
| 471 |
+
"loss": 0.2217,
|
| 472 |
+
"step": 1650
|
| 473 |
+
},
|
| 474 |
+
{
|
| 475 |
+
"epoch": 1.34,
|
| 476 |
+
"grad_norm": 0.13962742686271667,
|
| 477 |
+
"learning_rate": 0.0006769006769019148,
|
| 478 |
+
"loss": 0.2262,
|
| 479 |
+
"step": 1675
|
| 480 |
+
},
|
| 481 |
+
{
|
| 482 |
+
"epoch": 1.3599999999999999,
|
| 483 |
+
"grad_norm": 0.1441992223262787,
|
| 484 |
+
"learning_rate": 0.0006659708125061241,
|
| 485 |
+
"loss": 0.2255,
|
| 486 |
+
"step": 1700
|
| 487 |
+
},
|
| 488 |
+
{
|
| 489 |
+
"epoch": 1.38,
|
| 490 |
+
"grad_norm": 0.14966872334480286,
|
| 491 |
+
"learning_rate": 0.0006549510719874577,
|
| 492 |
+
"loss": 0.2267,
|
| 493 |
+
"step": 1725
|
| 494 |
+
},
|
| 495 |
+
{
|
| 496 |
+
"epoch": 1.4,
|
| 497 |
+
"grad_norm": 0.1714620292186737,
|
| 498 |
+
"learning_rate": 0.0006438474227298065,
|
| 499 |
+
"loss": 0.2267,
|
| 500 |
+
"step": 1750
|
| 501 |
+
},
|
| 502 |
+
{
|
| 503 |
+
"epoch": 1.42,
|
| 504 |
+
"grad_norm": 0.14379504323005676,
|
| 505 |
+
"learning_rate": 0.0006326658775551235,
|
| 506 |
+
"loss": 0.2296,
|
| 507 |
+
"step": 1775
|
| 508 |
+
},
|
| 509 |
+
{
|
| 510 |
+
"epoch": 1.44,
|
| 511 |
+
"grad_norm": 0.16740331053733826,
|
| 512 |
+
"learning_rate": 0.0006214124914673755,
|
| 513 |
+
"loss": 0.2148,
|
| 514 |
+
"step": 1800
|
| 515 |
+
},
|
| 516 |
+
{
|
| 517 |
+
"epoch": 1.46,
|
| 518 |
+
"grad_norm": 0.13457053899765015,
|
| 519 |
+
"learning_rate": 0.0006100933583736508,
|
| 520 |
+
"loss": 0.2218,
|
| 521 |
+
"step": 1825
|
| 522 |
+
},
|
| 523 |
+
{
|
| 524 |
+
"epoch": 1.48,
|
| 525 |
+
"grad_norm": 0.191158264875412,
|
| 526 |
+
"learning_rate": 0.0005987146077842015,
|
| 527 |
+
"loss": 0.2249,
|
| 528 |
+
"step": 1850
|
| 529 |
+
},
|
| 530 |
+
{
|
| 531 |
+
"epoch": 1.5,
|
| 532 |
+
"grad_norm": 0.14882108569145203,
|
| 533 |
+
"learning_rate": 0.000587282401493205,
|
| 534 |
+
"loss": 0.2261,
|
| 535 |
+
"step": 1875
|
| 536 |
+
},
|
| 537 |
+
{
|
| 538 |
+
"epoch": 1.52,
|
| 539 |
+
"grad_norm": 0.16314193606376648,
|
| 540 |
+
"learning_rate": 0.0005758029302420446,
|
| 541 |
+
"loss": 0.2235,
|
| 542 |
+
"step": 1900
|
| 543 |
+
},
|
| 544 |
+
{
|
| 545 |
+
"epoch": 1.54,
|
| 546 |
+
"grad_norm": 0.14912760257720947,
|
| 547 |
+
"learning_rate": 0.0005642824103669125,
|
| 548 |
+
"loss": 0.222,
|
| 549 |
+
"step": 1925
|
| 550 |
+
},
|
| 551 |
+
{
|
| 552 |
+
"epoch": 1.56,
|
| 553 |
+
"grad_norm": 0.15002597868442535,
|
| 554 |
+
"learning_rate": 0.000552727080432556,
|
| 555 |
+
"loss": 0.2117,
|
| 556 |
+
"step": 1950
|
| 557 |
+
},
|
| 558 |
+
{
|
| 559 |
+
"epoch": 1.58,
|
| 560 |
+
"grad_norm": 0.1659402698278427,
|
| 561 |
+
"learning_rate": 0.0005411431978539828,
|
| 562 |
+
"loss": 0.2265,
|
| 563 |
+
"step": 1975
|
| 564 |
+
},
|
| 565 |
+
{
|
| 566 |
+
"epoch": 1.6,
|
| 567 |
+
"grad_norm": 0.14878462255001068,
|
| 568 |
+
"learning_rate": 0.0005295370355079614,
|
| 569 |
+
"loss": 0.2254,
|
| 570 |
+
"step": 2000
|
| 571 |
+
},
|
| 572 |
+
{
|
| 573 |
+
"epoch": 1.62,
|
| 574 |
+
"grad_norm": 0.13876402378082275,
|
| 575 |
+
"learning_rate": 0.0005179148783361473,
|
| 576 |
+
"loss": 0.2101,
|
| 577 |
+
"step": 2025
|
| 578 |
+
},
|
| 579 |
+
{
|
| 580 |
+
"epoch": 1.6400000000000001,
|
| 581 |
+
"grad_norm": 0.13744057714939117,
|
| 582 |
+
"learning_rate": 0.0005062830199416764,
|
| 583 |
+
"loss": 0.219,
|
| 584 |
+
"step": 2050
|
| 585 |
+
},
|
| 586 |
+
{
|
| 587 |
+
"epoch": 1.6600000000000001,
|
| 588 |
+
"grad_norm": 0.13792552053928375,
|
| 589 |
+
"learning_rate": 0.0004946477591810677,
|
| 590 |
+
"loss": 0.2243,
|
| 591 |
+
"step": 2075
|
| 592 |
+
},
|
| 593 |
+
{
|
| 594 |
+
"epoch": 1.6800000000000002,
|
| 595 |
+
"grad_norm": 0.1432466059923172,
|
| 596 |
+
"learning_rate": 0.00048301539675328197,
|
| 597 |
+
"loss": 0.2211,
|
| 598 |
+
"step": 2100
|
| 599 |
+
},
|
| 600 |
+
{
|
| 601 |
+
"epoch": 1.7,
|
| 602 |
+
"grad_norm": 0.14318040013313293,
|
| 603 |
+
"learning_rate": 0.000471392231787781,
|
| 604 |
+
"loss": 0.2197,
|
| 605 |
+
"step": 2125
|
| 606 |
+
},
|
| 607 |
+
{
|
| 608 |
+
"epoch": 1.72,
|
| 609 |
+
"grad_norm": 0.14931629598140717,
|
| 610 |
+
"learning_rate": 0.0004597845584334386,
|
| 611 |
+
"loss": 0.2107,
|
| 612 |
+
"step": 2150
|
| 613 |
+
},
|
| 614 |
+
{
|
| 615 |
+
"epoch": 1.74,
|
| 616 |
+
"grad_norm": 0.12764166295528412,
|
| 617 |
+
"learning_rate": 0.00044819866245014836,
|
| 618 |
+
"loss": 0.228,
|
| 619 |
+
"step": 2175
|
| 620 |
+
},
|
| 621 |
+
{
|
| 622 |
+
"epoch": 1.76,
|
| 623 |
+
"grad_norm": 0.13491638004779816,
|
| 624 |
+
"learning_rate": 0.0004366408178049728,
|
| 625 |
+
"loss": 0.2184,
|
| 626 |
+
"step": 2200
|
| 627 |
+
},
|
| 628 |
+
{
|
| 629 |
+
"epoch": 1.78,
|
| 630 |
+
"grad_norm": 0.14066922664642334,
|
| 631 |
+
"learning_rate": 0.0004251172832746799,
|
| 632 |
+
"loss": 0.2109,
|
| 633 |
+
"step": 2225
|
| 634 |
+
},
|
| 635 |
+
{
|
| 636 |
+
"epoch": 1.8,
|
| 637 |
+
"grad_norm": 0.13541120290756226,
|
| 638 |
+
"learning_rate": 0.00041363429905650543,
|
| 639 |
+
"loss": 0.2197,
|
| 640 |
+
"step": 2250
|
| 641 |
+
},
|
| 642 |
+
{
|
| 643 |
+
"epoch": 1.8199999999999998,
|
| 644 |
+
"grad_norm": 0.1387101262807846,
|
| 645 |
+
"learning_rate": 0.00040219808338897594,
|
| 646 |
+
"loss": 0.2126,
|
| 647 |
+
"step": 2275
|
| 648 |
+
},
|
| 649 |
+
{
|
| 650 |
+
"epoch": 1.8399999999999999,
|
| 651 |
+
"grad_norm": 0.1422133594751358,
|
| 652 |
+
"learning_rate": 0.0003908148291846224,
|
| 653 |
+
"loss": 0.2156,
|
| 654 |
+
"step": 2300
|
| 655 |
+
},
|
| 656 |
+
{
|
| 657 |
+
"epoch": 1.8599999999999999,
|
| 658 |
+
"grad_norm": 0.13704848289489746,
|
| 659 |
+
"learning_rate": 0.00037949070067640927,
|
| 660 |
+
"loss": 0.2182,
|
| 661 |
+
"step": 2325
|
| 662 |
+
},
|
| 663 |
+
{
|
| 664 |
+
"epoch": 1.88,
|
| 665 |
+
"grad_norm": 0.1567024290561676,
|
| 666 |
+
"learning_rate": 0.00036823183007969373,
|
| 667 |
+
"loss": 0.2096,
|
| 668 |
+
"step": 2350
|
| 669 |
+
},
|
| 670 |
+
{
|
| 671 |
+
"epoch": 1.9,
|
| 672 |
+
"grad_norm": 0.16875536739826202,
|
| 673 |
+
"learning_rate": 0.00035704431427152237,
|
| 674 |
+
"loss": 0.2333,
|
| 675 |
+
"step": 2375
|
| 676 |
+
},
|
| 677 |
+
{
|
| 678 |
+
"epoch": 1.92,
|
| 679 |
+
"grad_norm": 0.15320144593715668,
|
| 680 |
+
"learning_rate": 0.00034593421148906525,
|
| 681 |
+
"loss": 0.2176,
|
| 682 |
+
"step": 2400
|
| 683 |
+
},
|
| 684 |
+
{
|
| 685 |
+
"epoch": 1.94,
|
| 686 |
+
"grad_norm": 0.16132648289203644,
|
| 687 |
+
"learning_rate": 0.0003349075380489731,
|
| 688 |
+
"loss": 0.2105,
|
| 689 |
+
"step": 2425
|
| 690 |
+
},
|
| 691 |
+
{
|
| 692 |
+
"epoch": 1.96,
|
| 693 |
+
"grad_norm": 0.1457502841949463,
|
| 694 |
+
"learning_rate": 0.0003239702650894364,
|
| 695 |
+
"loss": 0.2116,
|
| 696 |
+
"step": 2450
|
| 697 |
+
},
|
| 698 |
+
{
|
| 699 |
+
"epoch": 1.98,
|
| 700 |
+
"grad_norm": 0.13578177988529205,
|
| 701 |
+
"learning_rate": 0.00031312831533670773,
|
| 702 |
+
"loss": 0.2171,
|
| 703 |
+
"step": 2475
|
| 704 |
+
},
|
| 705 |
+
{
|
| 706 |
+
"epoch": 2.0,
|
| 707 |
+
"grad_norm": 0.1279817819595337,
|
| 708 |
+
"learning_rate": 0.0003023875598978419,
|
| 709 |
+
"loss": 0.2128,
|
| 710 |
+
"step": 2500
|
| 711 |
+
},
|
| 712 |
+
{
|
| 713 |
+
"epoch": 2.02,
|
| 714 |
+
"grad_norm": 0.1550670564174652,
|
| 715 |
+
"learning_rate": 0.0002917538150813876,
|
| 716 |
+
"loss": 0.1909,
|
| 717 |
+
"step": 2525
|
| 718 |
+
},
|
| 719 |
+
{
|
| 720 |
+
"epoch": 2.04,
|
| 721 |
+
"grad_norm": 0.1622520536184311,
|
| 722 |
+
"learning_rate": 0.00028123283924775356,
|
| 723 |
+
"loss": 0.1833,
|
| 724 |
+
"step": 2550
|
| 725 |
+
},
|
| 726 |
+
{
|
| 727 |
+
"epoch": 2.06,
|
| 728 |
+
"grad_norm": 0.1415659636259079,
|
| 729 |
+
"learning_rate": 0.00027083032969095505,
|
| 730 |
+
"loss": 0.1915,
|
| 731 |
+
"step": 2575
|
| 732 |
+
},
|
| 733 |
+
{
|
| 734 |
+
"epoch": 2.08,
|
| 735 |
+
"grad_norm": 0.15416212379932404,
|
| 736 |
+
"learning_rate": 0.0002605519195534288,
|
| 737 |
+
"loss": 0.1834,
|
| 738 |
+
"step": 2600
|
| 739 |
+
},
|
| 740 |
+
{
|
| 741 |
+
"epoch": 2.1,
|
| 742 |
+
"grad_norm": 0.14025457203388214,
|
| 743 |
+
"learning_rate": 0.00025040317477558616,
|
| 744 |
+
"loss": 0.1903,
|
| 745 |
+
"step": 2625
|
| 746 |
+
},
|
| 747 |
+
{
|
| 748 |
+
"epoch": 2.12,
|
| 749 |
+
"grad_norm": 0.14042459428310394,
|
| 750 |
+
"learning_rate": 0.00024038959108175928,
|
| 751 |
+
"loss": 0.184,
|
| 752 |
+
"step": 2650
|
| 753 |
+
},
|
| 754 |
+
{
|
| 755 |
+
"epoch": 2.14,
|
| 756 |
+
"grad_norm": 0.13581395149230957,
|
| 757 |
+
"learning_rate": 0.0002305165910041689,
|
| 758 |
+
"loss": 0.1909,
|
| 759 |
+
"step": 2675
|
| 760 |
+
},
|
| 761 |
+
{
|
| 762 |
+
"epoch": 2.16,
|
| 763 |
+
"grad_norm": 0.15179701149463654,
|
| 764 |
+
"learning_rate": 0.00022078952094652704,
|
| 765 |
+
"loss": 0.187,
|
| 766 |
+
"step": 2700
|
| 767 |
+
},
|
| 768 |
+
{
|
| 769 |
+
"epoch": 2.18,
|
| 770 |
+
"grad_norm": 0.1464652419090271,
|
| 771 |
+
"learning_rate": 0.0002112136482888663,
|
| 772 |
+
"loss": 0.1939,
|
| 773 |
+
"step": 2725
|
| 774 |
+
},
|
| 775 |
+
{
|
| 776 |
+
"epoch": 2.2,
|
| 777 |
+
"grad_norm": 0.15152765810489655,
|
| 778 |
+
"learning_rate": 0.00020179415853515908,
|
| 779 |
+
"loss": 0.1894,
|
| 780 |
+
"step": 2750
|
| 781 |
+
},
|
| 782 |
+
{
|
| 783 |
+
"epoch": 2.22,
|
| 784 |
+
"grad_norm": 0.13474231958389282,
|
| 785 |
+
"learning_rate": 0.00019253615250527738,
|
| 786 |
+
"loss": 0.1832,
|
| 787 |
+
"step": 2775
|
| 788 |
+
},
|
| 789 |
+
{
|
| 790 |
+
"epoch": 2.24,
|
| 791 |
+
"grad_norm": 0.1342306286096573,
|
| 792 |
+
"learning_rate": 0.00018344464357280722,
|
| 793 |
+
"loss": 0.1838,
|
| 794 |
+
"step": 2800
|
| 795 |
+
},
|
| 796 |
+
{
|
| 797 |
+
"epoch": 2.26,
|
| 798 |
+
"grad_norm": 0.13246561586856842,
|
| 799 |
+
"learning_rate": 0.00017452455495021963,
|
| 800 |
+
"loss": 0.1942,
|
| 801 |
+
"step": 2825
|
| 802 |
+
},
|
| 803 |
+
{
|
| 804 |
+
"epoch": 2.2800000000000002,
|
| 805 |
+
"grad_norm": 0.13951082527637482,
|
| 806 |
+
"learning_rate": 0.00016578071702286396,
|
| 807 |
+
"loss": 0.1816,
|
| 808 |
+
"step": 2850
|
| 809 |
+
},
|
| 810 |
+
{
|
| 811 |
+
"epoch": 2.3,
|
| 812 |
+
"grad_norm": 0.13456101715564728,
|
| 813 |
+
"learning_rate": 0.00015721786473322824,
|
| 814 |
+
"loss": 0.185,
|
| 815 |
+
"step": 2875
|
| 816 |
+
},
|
| 817 |
+
{
|
| 818 |
+
"epoch": 2.32,
|
| 819 |
+
"grad_norm": 0.13722851872444153,
|
| 820 |
+
"learning_rate": 0.0001488406350168854,
|
| 821 |
+
"loss": 0.193,
|
| 822 |
+
"step": 2900
|
| 823 |
+
},
|
| 824 |
+
{
|
| 825 |
+
"epoch": 2.34,
|
| 826 |
+
"grad_norm": 0.12870334088802338,
|
| 827 |
+
"learning_rate": 0.00014065356429151033,
|
| 828 |
+
"loss": 0.1819,
|
| 829 |
+
"step": 2925
|
| 830 |
+
},
|
| 831 |
+
{
|
| 832 |
+
"epoch": 2.36,
|
| 833 |
+
"grad_norm": 0.14238068461418152,
|
| 834 |
+
"learning_rate": 0.0001326610860003293,
|
| 835 |
+
"loss": 0.1884,
|
| 836 |
+
"step": 2950
|
| 837 |
+
},
|
| 838 |
+
{
|
| 839 |
+
"epoch": 2.38,
|
| 840 |
+
"grad_norm": 0.13999466598033905,
|
| 841 |
+
"learning_rate": 0.00012486752821133312,
|
| 842 |
+
"loss": 0.1822,
|
| 843 |
+
"step": 2975
|
| 844 |
+
},
|
| 845 |
+
{
|
| 846 |
+
"epoch": 2.4,
|
| 847 |
+
"grad_norm": 0.1360911726951599,
|
| 848 |
+
"learning_rate": 0.00011727711127355117,
|
| 849 |
+
"loss": 0.183,
|
| 850 |
+
"step": 3000
|
| 851 |
+
},
|
| 852 |
+
{
|
| 853 |
+
"epoch": 2.42,
|
| 854 |
+
"grad_norm": 0.14922741055488586,
|
| 855 |
+
"learning_rate": 0.00010989394553165833,
|
| 856 |
+
"loss": 0.1861,
|
| 857 |
+
"step": 3025
|
| 858 |
+
},
|
| 859 |
+
{
|
| 860 |
+
"epoch": 2.44,
|
| 861 |
+
"grad_norm": 0.1330823451280594,
|
| 862 |
+
"learning_rate": 0.00010272202910015082,
|
| 863 |
+
"loss": 0.1885,
|
| 864 |
+
"step": 3050
|
| 865 |
+
},
|
| 866 |
+
{
|
| 867 |
+
"epoch": 2.46,
|
| 868 |
+
"grad_norm": 0.14635007083415985,
|
| 869 |
+
"learning_rate": 9.576524569829692e-05,
|
| 870 |
+
"loss": 0.1871,
|
| 871 |
+
"step": 3075
|
| 872 |
+
},
|
| 873 |
+
{
|
| 874 |
+
"epoch": 2.48,
|
| 875 |
+
"grad_norm": 0.15063054859638214,
|
| 876 |
+
"learning_rate": 8.902736254703348e-05,
|
| 877 |
+
"loss": 0.1822,
|
| 878 |
+
"step": 3100
|
| 879 |
+
},
|
| 880 |
+
{
|
| 881 |
+
"epoch": 2.5,
|
| 882 |
+
"grad_norm": 0.15703986585140228,
|
| 883 |
+
"learning_rate": 8.251202832895066e-05,
|
| 884 |
+
"loss": 0.1895,
|
| 885 |
+
"step": 3125
|
| 886 |
+
},
|
| 887 |
+
{
|
| 888 |
+
"epoch": 2.52,
|
| 889 |
+
"grad_norm": 0.14821434020996094,
|
| 890 |
+
"learning_rate": 7.622277121246512e-05,
|
| 891 |
+
"loss": 0.1825,
|
| 892 |
+
"step": 3150
|
| 893 |
+
},
|
| 894 |
+
{
|
| 895 |
+
"epoch": 2.54,
|
| 896 |
+
"grad_norm": 0.14606979489326477,
|
| 897 |
+
"learning_rate": 7.01629969412545e-05,
|
| 898 |
+
"loss": 0.1865,
|
| 899 |
+
"step": 3175
|
| 900 |
+
},
|
| 901 |
+
{
|
| 902 |
+
"epoch": 2.56,
|
| 903 |
+
"grad_norm": 0.1357213705778122,
|
| 904 |
+
"learning_rate": 6.433598698998766e-05,
|
| 905 |
+
"loss": 0.1827,
|
| 906 |
+
"step": 3200
|
| 907 |
+
},
|
| 908 |
+
{
|
| 909 |
+
"epoch": 2.58,
|
| 910 |
+
"grad_norm": 0.13560768961906433,
|
| 911 |
+
"learning_rate": 5.874489678734812e-05,
|
| 912 |
+
"loss": 0.1791,
|
| 913 |
+
"step": 3225
|
| 914 |
+
},
|
| 915 |
+
{
|
| 916 |
+
"epoch": 2.6,
|
| 917 |
+
"grad_norm": 0.15358999371528625,
|
| 918 |
+
"learning_rate": 5.3392754007313305e-05,
|
| 919 |
+
"loss": 0.1802,
|
| 920 |
+
"step": 3250
|
| 921 |
+
},
|
| 922 |
+
{
|
| 923 |
+
"epoch": 2.62,
|
| 924 |
+
"grad_norm": 0.1459738314151764,
|
| 925 |
+
"learning_rate": 4.828245692961608e-05,
|
| 926 |
+
"loss": 0.1823,
|
| 927 |
+
"step": 3275
|
| 928 |
+
},
|
| 929 |
+
{
|
| 930 |
+
"epoch": 2.64,
|
| 931 |
+
"grad_norm": 0.13794459402561188,
|
| 932 |
+
"learning_rate": 4.341677287027529e-05,
|
| 933 |
+
"loss": 0.1812,
|
| 934 |
+
"step": 3300
|
| 935 |
+
},
|
| 936 |
+
{
|
| 937 |
+
"epoch": 2.66,
|
| 938 |
+
"grad_norm": 0.1369146853685379,
|
| 939 |
+
"learning_rate": 3.8798336683045054e-05,
|
| 940 |
+
"loss": 0.1845,
|
| 941 |
+
"step": 3325
|
| 942 |
+
},
|
| 943 |
+
{
|
| 944 |
+
"epoch": 2.68,
|
| 945 |
+
"grad_norm": 0.1478826403617859,
|
| 946 |
+
"learning_rate": 3.442964933259474e-05,
|
| 947 |
+
"loss": 0.1863,
|
| 948 |
+
"step": 3350
|
| 949 |
+
},
|
| 950 |
+
{
|
| 951 |
+
"epoch": 2.7,
|
| 952 |
+
"grad_norm": 0.13746634125709534,
|
| 953 |
+
"learning_rate": 3.0313076540192996e-05,
|
| 954 |
+
"loss": 0.183,
|
| 955 |
+
"step": 3375
|
| 956 |
+
},
|
| 957 |
+
{
|
| 958 |
+
"epoch": 2.7199999999999998,
|
| 959 |
+
"grad_norm": 0.1631135642528534,
|
| 960 |
+
"learning_rate": 2.6450847502627883e-05,
|
| 961 |
+
"loss": 0.1944,
|
| 962 |
+
"step": 3400
|
| 963 |
+
},
|
| 964 |
+
{
|
| 965 |
+
"epoch": 2.74,
|
| 966 |
+
"grad_norm": 0.11646343022584915,
|
| 967 |
+
"learning_rate": 2.2845053685056904e-05,
|
| 968 |
+
"loss": 0.185,
|
| 969 |
+
"step": 3425
|
| 970 |
+
},
|
| 971 |
+
{
|
| 972 |
+
"epoch": 2.76,
|
| 973 |
+
"grad_norm": 0.13773666322231293,
|
| 974 |
+
"learning_rate": 1.9497647688442477e-05,
|
| 975 |
+
"loss": 0.1759,
|
| 976 |
+
"step": 3450
|
| 977 |
+
},
|
| 978 |
+
{
|
| 979 |
+
"epoch": 2.7800000000000002,
|
| 980 |
+
"grad_norm": 0.16493980586528778,
|
| 981 |
+
"learning_rate": 1.6410442192183573e-05,
|
| 982 |
+
"loss": 0.1789,
|
| 983 |
+
"step": 3475
|
| 984 |
+
},
|
| 985 |
+
{
|
| 986 |
+
"epoch": 2.8,
|
| 987 |
+
"grad_norm": 0.14070625603199005,
|
| 988 |
+
"learning_rate": 1.3585108972518078e-05,
|
| 989 |
+
"loss": 0.1804,
|
| 990 |
+
"step": 3500
|
| 991 |
+
},
|
| 992 |
+
{
|
| 993 |
+
"epoch": 2.82,
|
| 994 |
+
"grad_norm": 0.142463818192482,
|
| 995 |
+
"learning_rate": 1.1023177997226297e-05,
|
| 996 |
+
"loss": 0.1834,
|
| 997 |
+
"step": 3525
|
| 998 |
+
},
|
| 999 |
+
{
|
| 1000 |
+
"epoch": 2.84,
|
| 1001 |
+
"grad_norm": 0.1492580622434616,
|
| 1002 |
+
"learning_rate": 8.72603659712662e-06,
|
| 1003 |
+
"loss": 0.1791,
|
| 1004 |
+
"step": 3550
|
| 1005 |
+
},
|
| 1006 |
+
{
|
| 1007 |
+
"epoch": 2.86,
|
| 1008 |
+
"grad_norm": 0.13910527527332306,
|
| 1009 |
+
"learning_rate": 6.694928714811255e-06,
|
| 1010 |
+
"loss": 0.1765,
|
| 1011 |
+
"step": 3575
|
| 1012 |
+
},
|
| 1013 |
+
{
|
| 1014 |
+
"epoch": 2.88,
|
| 1015 |
+
"grad_norm": 0.14240440726280212,
|
| 1016 |
+
"learning_rate": 4.93095423102935e-06,
|
| 1017 |
+
"loss": 0.1841,
|
| 1018 |
+
"step": 3600
|
| 1019 |
+
},
|
| 1020 |
+
{
|
| 1021 |
+
"epoch": 2.9,
|
| 1022 |
+
"grad_norm": 0.14672136306762695,
|
| 1023 |
+
"learning_rate": 3.435068369082306e-06,
|
| 1024 |
+
"loss": 0.1847,
|
| 1025 |
+
"step": 3625
|
| 1026 |
+
},
|
| 1027 |
+
{
|
| 1028 |
+
"epoch": 2.92,
|
| 1029 |
+
"grad_norm": 0.16310083866119385,
|
| 1030 |
+
"learning_rate": 2.2080811775535003e-06,
|
| 1031 |
+
"loss": 0.1822,
|
| 1032 |
+
"step": 3650
|
| 1033 |
+
},
|
| 1034 |
+
{
|
| 1035 |
+
"epoch": 2.94,
|
| 1036 |
+
"grad_norm": 0.14073088765144348,
|
| 1037 |
+
"learning_rate": 1.2506570916523408e-06,
|
| 1038 |
+
"loss": 0.1771,
|
| 1039 |
+
"step": 3675
|
| 1040 |
+
},
|
| 1041 |
+
{
|
| 1042 |
+
"epoch": 2.96,
|
| 1043 |
+
"grad_norm": 0.14259496331214905,
|
| 1044 |
+
"learning_rate": 5.633145734114664e-07,
|
| 1045 |
+
"loss": 0.1789,
|
| 1046 |
+
"step": 3700
|
| 1047 |
+
},
|
| 1048 |
+
{
|
| 1049 |
+
"epoch": 2.98,
|
| 1050 |
+
"grad_norm": 0.1306188851594925,
|
| 1051 |
+
"learning_rate": 1.4642583092999705e-07,
|
| 1052 |
+
"loss": 0.1778,
|
| 1053 |
+
"step": 3725
|
| 1054 |
+
},
|
| 1055 |
+
{
|
| 1056 |
+
"epoch": 3.0,
|
| 1057 |
+
"grad_norm": 0.12538310885429382,
|
| 1058 |
+
"learning_rate": 2.166168162065496e-10,
|
| 1059 |
+
"loss": 0.1828,
|
| 1060 |
+
"step": 3750
|
| 1061 |
+
},
|
| 1062 |
+
{
|
| 1063 |
+
"epoch": 3.0,
|
| 1064 |
+
"step": 3750,
|
| 1065 |
+
"total_flos": 2.43882352705536e+18,
|
| 1066 |
+
"train_loss": 0.24165712381998697,
|
| 1067 |
+
"train_runtime": 3457.8533,
|
| 1068 |
+
"train_samples_per_second": 34.704,
|
| 1069 |
+
"train_steps_per_second": 1.084
|
| 1070 |
+
}
|
| 1071 |
+
],
|
| 1072 |
+
"logging_steps": 25,
|
| 1073 |
+
"max_steps": 3750,
|
| 1074 |
+
"num_input_tokens_seen": 0,
|
| 1075 |
+
"num_train_epochs": 3,
|
| 1076 |
+
"save_steps": 0,
|
| 1077 |
+
"stateful_callbacks": {
|
| 1078 |
+
"TrainerControl": {
|
| 1079 |
+
"args": {
|
| 1080 |
+
"should_epoch_stop": false,
|
| 1081 |
+
"should_evaluate": false,
|
| 1082 |
+
"should_log": false,
|
| 1083 |
+
"should_save": true,
|
| 1084 |
+
"should_training_stop": true
|
| 1085 |
+
},
|
| 1086 |
+
"attributes": {}
|
| 1087 |
+
}
|
| 1088 |
+
},
|
| 1089 |
+
"total_flos": 2.43882352705536e+18,
|
| 1090 |
+
"train_batch_size": 32,
|
| 1091 |
+
"trial_name": null,
|
| 1092 |
+
"trial_params": null
|
| 1093 |
+
}
|
nl_tasks/exps/run_ex24/ft/adapter_config.json
ADDED
|
@@ -0,0 +1,18 @@
|
| 1 |
+
{
|
| 2 |
+
"T": 1.0,
|
| 3 |
+
"base_model_name_or_path": "meta-llama/Llama-2-7b-hf",
|
| 4 |
+
"bias": "none",
|
| 5 |
+
"inference_mode": false,
|
| 6 |
+
"layers_to_transform": null,
|
| 7 |
+
"modules_to_save": null,
|
| 8 |
+
"num_rotations": 1,
|
| 9 |
+
"peft_type": "ROTATION",
|
| 10 |
+
"r": 16,
|
| 11 |
+
"revision": null,
|
| 12 |
+
"target_modules": [
|
| 13 |
+
"v_proj",
|
| 14 |
+
"q_proj"
|
| 15 |
+
],
|
| 16 |
+
"target_modules_to_skip": null,
|
| 17 |
+
"task_type": "CAUSAL_LM"
|
| 18 |
+
}
|
nl_tasks/exps/run_ex24/ft/special_tokens_map.json
ADDED
|
@@ -0,0 +1,24 @@
|
| 1 |
+
{
|
| 2 |
+
"bos_token": {
|
| 3 |
+
"content": "<s>",
|
| 4 |
+
"lstrip": false,
|
| 5 |
+
"normalized": false,
|
| 6 |
+
"rstrip": false,
|
| 7 |
+
"single_word": false
|
| 8 |
+
},
|
| 9 |
+
"eos_token": {
|
| 10 |
+
"content": "</s>",
|
| 11 |
+
"lstrip": false,
|
| 12 |
+
"normalized": false,
|
| 13 |
+
"rstrip": false,
|
| 14 |
+
"single_word": false
|
| 15 |
+
},
|
| 16 |
+
"pad_token": "<unk>",
|
| 17 |
+
"unk_token": {
|
| 18 |
+
"content": "<unk>",
|
| 19 |
+
"lstrip": false,
|
| 20 |
+
"normalized": false,
|
| 21 |
+
"rstrip": false,
|
| 22 |
+
"single_word": false
|
| 23 |
+
}
|
| 24 |
+
}
|
nl_tasks/exps/run_ex24/ft/tokenizer.json
ADDED
|
The diff for this file is too large to render.
|
nl_tasks/exps/run_ex24/ft/tokenizer.model
ADDED
|
@@ -0,0 +1,3 @@
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
|
| 3 |
+
size 499723
|
nl_tasks/exps/run_ex24/ft/tokenizer_config.json
ADDED
|
@@ -0,0 +1,43 @@
|
| 1 |
+
{
|
| 2 |
+
"add_bos_token": true,
|
| 3 |
+
"add_eos_token": false,
|
| 4 |
+
"add_prefix_space": null,
|
| 5 |
+
"added_tokens_decoder": {
|
| 6 |
+
"0": {
|
| 7 |
+
"content": "<unk>",
|
| 8 |
+
"lstrip": false,
|
| 9 |
+
"normalized": false,
|
| 10 |
+
"rstrip": false,
|
| 11 |
+
"single_word": false,
|
| 12 |
+
"special": true
|
| 13 |
+
},
|
| 14 |
+
"1": {
|
| 15 |
+
"content": "<s>",
|
| 16 |
+
"lstrip": false,
|
| 17 |
+
"normalized": false,
|
| 18 |
+
"rstrip": false,
|
| 19 |
+
"single_word": false,
|
| 20 |
+
"special": true
|
| 21 |
+
},
|
| 22 |
+
"2": {
|
| 23 |
+
"content": "</s>",
|
| 24 |
+
"lstrip": false,
|
| 25 |
+
"normalized": false,
|
| 26 |
+
"rstrip": false,
|
| 27 |
+
"single_word": false,
|
| 28 |
+
"special": true
|
| 29 |
+
}
|
| 30 |
+
},
|
| 31 |
+
"bos_token": "<s>",
|
| 32 |
+
"clean_up_tokenization_spaces": false,
|
| 33 |
+
"eos_token": "</s>",
|
| 34 |
+
"extra_special_tokens": {},
|
| 35 |
+
"legacy": false,
|
| 36 |
+
"model_max_length": 512,
|
| 37 |
+
"pad_token": "<unk>",
|
| 38 |
+
"padding_side": "right",
|
| 39 |
+
"sp_model_kwargs": {},
|
| 40 |
+
"tokenizer_class": "LlamaTokenizer",
|
| 41 |
+
"unk_token": "<unk>",
|
| 42 |
+
"use_default_system_prompt": false
|
| 43 |
+
}
|
nl_tasks/exps/run_ex24/ft2/adapter_config.json
ADDED
|
@@ -0,0 +1,18 @@
|
| 1 |
+
{
|
| 2 |
+
"T": 1.0,
|
| 3 |
+
"base_model_name_or_path": "meta-llama/Llama-2-7b-hf",
|
| 4 |
+
"bias": "none",
|
| 5 |
+
"inference_mode": true,
|
| 6 |
+
"layers_to_transform": null,
|
| 7 |
+
"modules_to_save": null,
|
| 8 |
+
"num_rotations": 1,
|
| 9 |
+
"peft_type": "ROTATION",
|
| 10 |
+
"r": 16,
|
| 11 |
+
"revision": null,
|
| 12 |
+
"target_modules": [
|
| 13 |
+
"v_proj",
|
| 14 |
+
"q_proj"
|
| 15 |
+
],
|
| 16 |
+
"target_modules_to_skip": null,
|
| 17 |
+
"task_type": "CAUSAL_LM"
|
| 18 |
+
}
|
nl_tasks/exps/run_ex24/ft2/adapter_model.bin
ADDED
|
@@ -0,0 +1,3 @@
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:9286ac0906fe2d5e4381d3fac1b20723cd01c3f173e47f3c779884b82886b499
|
| 3 |
+
size 33602915
|
nl_tasks/exps/run_ex24/trainer_state.json
ADDED
|
@@ -0,0 +1,1093 @@
|
| 1 |
+
{
|
| 2 |
+
"best_global_step": null,
|
| 3 |
+
"best_metric": null,
|
| 4 |
+
"best_model_checkpoint": null,
|
| 5 |
+
"epoch": 3.0,
|
| 6 |
+
"eval_steps": 500,
|
| 7 |
+
"global_step": 3750,
|
| 8 |
+
"is_hyper_param_search": false,
|
| 9 |
+
"is_local_process_zero": true,
|
| 10 |
+
"is_world_process_zero": true,
|
| 11 |
+
"log_history": [
|
| 12 |
+
{
|
| 13 |
+
"epoch": 0.02,
|
| 14 |
+
"grad_norm": 0.20721325278282166,
|
| 15 |
+
"learning_rate": 0.00064,
|
| 16 |
+
"loss": 0.6429,
|
| 17 |
+
"step": 25
|
| 18 |
+
},
|
| 19 |
+
{
|
| 20 |
+
"epoch": 0.04,
|
| 21 |
+
"grad_norm": 0.2121923863887787,
|
| 22 |
+
"learning_rate": 0.0013066666666666665,
|
| 23 |
+
"loss": 0.3881,
|
| 24 |
+
"step": 50
|
| 25 |
+
},
|
| 26 |
+
{
|
| 27 |
+
"epoch": 0.06,
|
| 28 |
+
"grad_norm": 0.33458471298217773,
|
| 29 |
+
"learning_rate": 0.0019733333333333334,
|
| 30 |
+
"loss": 0.3796,
|
| 31 |
+
"step": 75
|
| 32 |
+
},
|
| 33 |
+
{
|
| 34 |
+
"epoch": 0.08,
|
| 35 |
+
"grad_norm": 0.4793953597545624,
|
| 36 |
+
"learning_rate": 0.00264,
|
| 37 |
+
"loss": 0.3743,
|
| 38 |
+
"step": 100
|
| 39 |
+
},
|
| 40 |
+
{
|
| 41 |
+
"epoch": 0.1,
|
| 42 |
+
"grad_norm": 1.7511122226715088,
|
| 43 |
+
"learning_rate": 0.0033066666666666665,
|
| 44 |
+
"loss": 0.3919,
|
| 45 |
+
"step": 125
|
| 46 |
+
},
|
| 47 |
+
{
|
| 48 |
+
"epoch": 0.12,
|
| 49 |
+
"grad_norm": 0.4728436768054962,
|
| 50 |
+
"learning_rate": 0.003973333333333333,
|
| 51 |
+
"loss": 0.4166,
|
| 52 |
+
"step": 150
|
| 53 |
+
},
|
| 54 |
+
{
|
| 55 |
+
"epoch": 0.14,
|
| 56 |
+
"grad_norm": 0.6393271088600159,
|
| 57 |
+
"learning_rate": 0.00464,
|
| 58 |
+
"loss": 0.3913,
|
| 59 |
+
"step": 175
|
| 60 |
+
},
|
| 61 |
+
{
|
| 62 |
+
"epoch": 0.16,
|
| 63 |
+
"grad_norm": 0.368111252784729,
|
| 64 |
+
"learning_rate": 0.005306666666666666,
|
| 65 |
+
"loss": 0.3718,
|
| 66 |
+
"step": 200
|
| 67 |
+
},
|
| 68 |
+
{
|
| 69 |
+
"epoch": 0.18,
|
| 70 |
+
"grad_norm": 0.33478349447250366,
|
| 71 |
+
"learning_rate": 0.005973333333333334,
|
| 72 |
+
"loss": 0.3573,
|
| 73 |
+
"step": 225
|
| 74 |
+
},
|
| 75 |
+
{
|
| 76 |
+
"epoch": 0.2,
|
| 77 |
+
"grad_norm": 0.1816096007823944,
|
| 78 |
+
"learning_rate": 0.00664,
|
| 79 |
+
"loss": 0.3395,
|
| 80 |
+
"step": 250
|
| 81 |
+
},
|
| 82 |
+
{
|
| 83 |
+
"epoch": 0.22,
|
| 84 |
+
"grad_norm": 0.1968374103307724,
|
| 85 |
+
"learning_rate": 0.007306666666666667,
|
| 86 |
+
"loss": 0.3327,
|
| 87 |
+
"step": 275
|
| 88 |
+
},
|
| 89 |
+
{
|
| 90 |
+
"epoch": 0.24,
|
| 91 |
+
"grad_norm": 0.22355352342128754,
|
| 92 |
+
"learning_rate": 0.007973333333333334,
|
| 93 |
+
"loss": 0.3403,
|
| 94 |
+
"step": 300
|
| 95 |
+
},
|
| 96 |
+
{
|
| 97 |
+
"epoch": 0.26,
|
| 98 |
+
"grad_norm": 0.09445228427648544,
|
| 99 |
+
"learning_rate": 0.00864,
|
| 100 |
+
"loss": 0.3281,
|
| 101 |
+
"step": 325
|
| 102 |
+
},
|
| 103 |
+
{
|
| 104 |
+
"epoch": 0.28,
|
| 105 |
+
"grad_norm": 0.11095510423183441,
|
| 106 |
+
"learning_rate": 0.009306666666666666,
|
| 107 |
+
"loss": 0.3263,
|
| 108 |
+
"step": 350
|
| 109 |
+
},
|
| 110 |
+
{
|
| 111 |
+
"epoch": 0.3,
|
| 112 |
+
"grad_norm": 0.12437857687473297,
|
| 113 |
+
"learning_rate": 0.009973333333333332,
|
| 114 |
+
"loss": 0.3186,
|
| 115 |
+
"step": 375
|
| 116 |
+
},
|
| 117 |
+
{
|
| 118 |
+
"epoch": 0.32,
|
| 119 |
+
"grad_norm": 0.12249485403299332,
|
| 120 |
+
"learning_rate": 0.009998752338940611,
|
| 121 |
+
"loss": 0.3178,
|
| 122 |
+
"step": 400
|
| 123 |
+
},
|
| 124 |
+
{
|
| 125 |
+
"epoch": 0.34,
|
| 126 |
+
"grad_norm": 0.09227700531482697,
|
| 127 |
+
"learning_rate": 0.009994799931474474,
|
| 128 |
+
"loss": 0.3047,
|
| 129 |
+
"step": 425
|
| 130 |
+
},
|
| 131 |
+
{
|
| 132 |
+
"epoch": 0.36,
|
| 133 |
+
"grad_norm": 0.08722732216119766,
|
| 134 |
+
"learning_rate": 0.009988142751731795,
|
| 135 |
+
"loss": 0.3051,
|
| 136 |
+
"step": 450
|
| 137 |
+
},
|
| 138 |
+
{
|
| 139 |
+
"epoch": 0.38,
|
| 140 |
+
"grad_norm": 0.0776078999042511,
|
| 141 |
+
"learning_rate": 0.009978784404692846,
|
| 142 |
+
"loss": 0.2982,
|
| 143 |
+
"step": 475
|
| 144 |
+
},
|
| 145 |
+
{
|
| 146 |
+
"epoch": 0.4,
|
| 147 |
+
"grad_norm": 0.07535282522439957,
|
| 148 |
+
"learning_rate": 0.009966729958067638,
|
| 149 |
+
"loss": 0.2927,
|
| 150 |
+
"step": 500
|
| 151 |
+
},
|
| 152 |
+
{
|
| 153 |
+
"epoch": 0.42,
|
| 154 |
+
"grad_norm": 0.0663604885339737,
|
| 155 |
+
"learning_rate": 0.009951985939551673,
|
| 156 |
+
"loss": 0.2934,
|
| 157 |
+
"step": 525
|
| 158 |
+
},
|
| 159 |
+
{
|
| 160 |
+
"epoch": 0.44,
|
| 161 |
+
"grad_norm": 0.0725964903831482,
|
| 162 |
+
"learning_rate": 0.009934560333291076,
|
| 163 |
+
"loss": 0.2794,
|
| 164 |
+
"step": 550
|
| 165 |
+
},
|
| 166 |
+
{
|
| 167 |
+
"epoch": 0.46,
|
| 168 |
+
"grad_norm": 0.07474663853645325,
|
| 169 |
+
"learning_rate": 0.009914462575559044,
|
| 170 |
+
"loss": 0.2972,
|
| 171 |
+
"step": 575
|
| 172 |
+
},
|
| 173 |
+
{
|
| 174 |
+
"epoch": 0.48,
|
| 175 |
+
"grad_norm": 0.06644977629184723,
|
| 176 |
+
"learning_rate": 0.009891703549645938,
|
| 177 |
+
"loss": 0.2811,
|
| 178 |
+
"step": 600
|
| 179 |
+
},
|
| 180 |
+
{
|
| 181 |
+
"epoch": 0.5,
|
| 182 |
+
"grad_norm": 0.055261533707380295,
|
| 183 |
+
"learning_rate": 0.009866295579965781,
|
| 184 |
+
"loss": 0.2946,
|
| 185 |
+
"step": 625
|
| 186 |
+
},
|
| 187 |
+
{
|
| 188 |
+
"epoch": 0.52,
|
| 189 |
+
"grad_norm": 0.05713541805744171,
|
| 190 |
+
"learning_rate": 0.009838252425382379,
|
| 191 |
+
"loss": 0.2857,
|
| 192 |
+
"step": 650
|
| 193 |
+
},
|
| 194 |
+
{
|
| 195 |
+
"epoch": 0.54,
|
| 196 |
+
"grad_norm": 0.049348220229148865,
|
| 197 |
+
"learning_rate": 0.00980758927175865,
|
| 198 |
+
"loss": 0.2728,
|
| 199 |
+
"step": 675
|
| 200 |
+
},
|
| 201 |
+
{
|
| 202 |
+
"epoch": 0.56,
|
| 203 |
+
"grad_norm": 0.08552377671003342,
|
| 204 |
+
"learning_rate": 0.009774322723733216,
|
| 205 |
+
"loss": 0.2747,
|
| 206 |
+
"step": 700
|
| 207 |
+
},
|
| 208 |
+
{
|
| 209 |
+
"epoch": 0.58,
|
| 210 |
+
"grad_norm": 0.06687381863594055,
|
| 211 |
+
"learning_rate": 0.009738470795728685,
|
| 212 |
+
"loss": 0.2803,
|
| 213 |
+
"step": 725
|
| 214 |
+
},
|
| 215 |
+
{
|
| 216 |
+
"epoch": 0.6,
|
| 217 |
+
"grad_norm": 0.06733611971139908,
|
| 218 |
+
"learning_rate": 0.00970005290219654,
|
| 219 |
+
"loss": 0.2782,
|
| 220 |
+
"step": 750
|
| 221 |
+
},
|
| 222 |
+
{
|
| 223 |
+
"epoch": 0.62,
|
| 224 |
+
"grad_norm": 0.0917917937040329,
|
| 225 |
+
"learning_rate": 0.009659089847103862,
|
| 226 |
+
"loss": 0.2664,
|
| 227 |
+
"step": 775
|
| 228 |
+
},
|
| 229 |
+
{
|
| 230 |
+
"epoch": 0.64,
|
| 231 |
+
"grad_norm": 0.05817510187625885,
|
| 232 |
+
"learning_rate": 0.009615603812667618,
|
| 233 |
+
"loss": 0.273,
|
| 234 |
+
"step": 800
|
| 235 |
+
},
|
| 236 |
+
{
|
| 237 |
+
"epoch": 0.66,
|
| 238 |
+
"grad_norm": 0.057370614260435104,
|
| 239 |
+
"learning_rate": 0.009569618347342592,
|
| 240 |
+
"loss": 0.265,
|
| 241 |
+
"step": 825
|
| 242 |
+
},
|
| 243 |
+
{
|
| 244 |
+
"epoch": 0.68,
|
| 245 |
+
"grad_norm": 0.05295673757791519,
|
| 246 |
+
"learning_rate": 0.009521158353069494,
|
| 247 |
+
"loss": 0.2661,
|
| 248 |
+
"step": 850
|
| 249 |
+
},
|
| 250 |
+
{
|
| 251 |
+
"epoch": 0.7,
|
| 252 |
+
"grad_norm": 0.04886120930314064,
|
| 253 |
+
"learning_rate": 0.009470250071790108,
|
| 254 |
+
"loss": 0.274,
|
| 255 |
+
"step": 875
|
| 256 |
+
},
|
| 257 |
+
{
|
| 258 |
+
"epoch": 0.72,
|
| 259 |
+
"grad_norm": 0.053231801837682724,
|
| 260 |
+
"learning_rate": 0.009416921071236822,
|
| 261 |
+
"loss": 0.2685,
|
| 262 |
+
"step": 900
|
| 263 |
+
},
|
| 264 |
+
{
|
| 265 |
+
"epoch": 0.74,
|
| 266 |
+
"grad_norm": 0.056384552270174026,
|
| 267 |
+
"learning_rate": 0.009361200230004219,
|
| 268 |
+
"loss": 0.2658,
|
| 269 |
+
"step": 925
|
| 270 |
+
},
|
| 271 |
+
{
|
| 272 |
+
"epoch": 0.76,
|
| 273 |
+
"grad_norm": 0.05400422215461731,
|
| 274 |
+
"learning_rate": 0.009303117721910802,
|
| 275 |
+
"loss": 0.2637,
|
| 276 |
+
"step": 950
|
| 277 |
+
},
|
| 278 |
+
{
|
| 279 |
+
"epoch": 0.78,
|
| 280 |
+
"grad_norm": 0.04524515941739082,
|
| 281 |
+
"learning_rate": 0.009242704999659339,
|
| 282 |
+
"loss": 0.2593,
|
| 283 |
+
"step": 975
|
| 284 |
+
},
|
| 285 |
+
{
|
| 286 |
+
"epoch": 0.8,
|
| 287 |
+
"grad_norm": 0.06007376313209534,
|
| 288 |
+
"learning_rate": 0.009179994777804677,
|
| 289 |
+
"loss": 0.267,
|
| 290 |
+
"step": 1000
|
| 291 |
+
},
|
| 292 |
+
{
|
| 293 |
+
"epoch": 0.82,
|
| 294 |
+
"grad_norm": 0.05714306980371475,
|
| 295 |
+
"learning_rate": 0.009115021015038227,
|
| 296 |
+
"loss": 0.2662,
|
| 297 |
+
"step": 1025
|
| 298 |
+
},
|
| 299 |
+
{
|
| 300 |
+
"epoch": 0.84,
|
| 301 |
+
"grad_norm": 0.0461350679397583,
|
| 302 |
+
"learning_rate": 0.009047818895798731,
|
| 303 |
+
"loss": 0.2573,
|
| 304 |
+
"step": 1050
|
| 305 |
+
},
|
| 306 |
+
{
|
| 307 |
+
"epoch": 0.86,
|
| 308 |
+
"grad_norm": 0.046251099556684494,
|
| 309 |
+
"learning_rate": 0.008978424811219276,
|
| 310 |
+
"loss": 0.2522,
|
| 311 |
+
"step": 1075
|
| 312 |
+
},
|
| 313 |
+
{
|
| 314 |
+
"epoch": 0.88,
|
| 315 |
+
"grad_norm": 0.051277272403240204,
|
| 316 |
+
"learning_rate": 0.00890687633942085,
|
| 317 |
+
"loss": 0.2537,
|
| 318 |
+
"step": 1100
|
| 319 |
+
},
|
| 320 |
+
{
|
| 321 |
+
"epoch": 0.9,
|
| 322 |
+
"grad_norm": 0.05851563438773155,
|
| 323 |
+
"learning_rate": 0.008833212225163124,
|
| 324 |
+
"loss": 0.2625,
|
| 325 |
+
"step": 1125
|
| 326 |
+
},
|
| 327 |
+
{
|
| 328 |
+
"epoch": 0.92,
|
| 329 |
+
"grad_norm": 0.045936476439237595,
|
| 330 |
+
"learning_rate": 0.008757472358863481,
|
| 331 |
+
"loss": 0.2557,
|
| 332 |
+
"step": 1150
|
| 333 |
+
},
|
| 334 |
+
{
|
| 335 |
+
"epoch": 0.94,
|
| 336 |
+
"grad_norm": 0.047913141548633575,
|
| 337 |
+
"learning_rate": 0.008679697754995655,
|
| 338 |
+
"loss": 0.2603,
|
| 339 |
+
"step": 1175
|
| 340 |
+
},
|
| 341 |
+
{
|
| 342 |
+
"epoch": 0.96,
|
| 343 |
+
"grad_norm": 0.041925642639398575,
|
| 344 |
+
"learning_rate": 0.008599930529879668,
|
| 345 |
+
"loss": 0.2584,
|
| 346 |
+
"step": 1200
|
| 347 |
+
},
|
| 348 |
+
{
|
| 349 |
+
"epoch": 0.98,
|
| 350 |
+
"grad_norm": 0.0477525033056736,
|
| 351 |
+
"learning_rate": 0.008518213878875101,
|
| 352 |
+
"loss": 0.2607,
|
| 353 |
+
"step": 1225
|
| 354 |
+
},
|
| 355 |
+
{
|
| 356 |
+
"epoch": 1.0,
|
| 357 |
+
"grad_norm": 0.04758713021874428,
|
| 358 |
+
"learning_rate": 0.008434592052990044,
|
| 359 |
+
"loss": 0.2498,
|
| 360 |
+
"step": 1250
|
| 361 |
+
},
|
| 362 |
+
{
|
| 363 |
+
"epoch": 1.02,
|
| 364 |
+
"grad_norm": 0.046754106879234314,
|
| 365 |
+
"learning_rate": 0.00834911033491839,
|
| 366 |
+
"loss": 0.226,
|
| 367 |
+
"step": 1275
|
| 368 |
+
},
|
| 369 |
+
{
|
| 370 |
+
"epoch": 1.04,
|
| 371 |
+
"grad_norm": 0.03786053881049156,
|
| 372 |
+
"learning_rate": 0.008261815014518465,
|
| 373 |
+
"loss": 0.2346,
|
| 374 |
+
"step": 1300
|
| 375 |
+
},
|
| 376 |
+
{
|
| 377 |
+
"epoch": 1.06,
|
| 378 |
+
"grad_norm": 0.04742316156625748,
|
| 379 |
+
"learning_rate": 0.00817275336374625,
|
| 380 |
+
"loss": 0.2312,
|
| 381 |
+
"step": 1325
|
| 382 |
+
},
|
| 383 |
+
{
|
| 384 |
+
"epoch": 1.08,
|
| 385 |
+
"grad_norm": 0.04774940013885498,
|
| 386 |
+
"learning_rate": 0.008081973611056783,
|
| 387 |
+
"loss": 0.236,
|
| 388 |
+
"step": 1350
|
| 389 |
+
},
|
| 390 |
+
{
|
| 391 |
+
"epoch": 1.1,
|
| 392 |
+
"grad_norm": 0.035334907472133636,
|
| 393 |
+
"learning_rate": 0.007989524915287595,
|
| 394 |
+
"loss": 0.2396,
|
| 395 |
+
"step": 1375
|
| 396 |
+
},
|
| 397 |
+
{
|
| 398 |
+
"epoch": 1.12,
|
| 399 |
+
"grad_norm": 0.04027274250984192,
|
| 400 |
+
"learning_rate": 0.00789545733903834,
|
| 401 |
+
"loss": 0.2381,
|
| 402 |
+
"step": 1400
|
| 403 |
+
},
|
| 404 |
+
{
|
| 405 |
+
"epoch": 1.1400000000000001,
|
| 406 |
+
"grad_norm": 0.03744512051343918,
|
| 407 |
+
"learning_rate": 0.00779982182156101,
|
| 408 |
+
"loss": 0.2301,
|
| 409 |
+
"step": 1425
|
| 410 |
+
},
|
| 411 |
+
{
|
| 412 |
+
"epoch": 1.16,
|
| 413 |
+
"grad_norm": 0.06715697795152664,
|
| 414 |
+
"learning_rate": 0.007702670151175435,
|
| 415 |
+
"loss": 0.237,
|
| 416 |
+
"step": 1450
|
| 417 |
+
},
|
| 418 |
+
{
|
| 419 |
+
"epoch": 1.18,
|
| 420 |
+
"grad_norm": 0.0376882441341877,
|
| 421 |
+
"learning_rate": 0.007604054937224989,
|
| 422 |
+
"loss": 0.2345,
|
| 423 |
+
"step": 1475
|
| 424 |
+
},
|
| 425 |
+
{
|
| 426 |
+
"epoch": 1.2,
|
| 427 |
+
"grad_norm": 0.04710308834910393,
|
| 428 |
+
"learning_rate": 0.0075040295815877005,
|
| 429 |
+
"loss": 0.2365,
|
| 430 |
+
"step": 1500
|
| 431 |
+
},
|
| 432 |
+
{
|
| 433 |
+
"epoch": 1.22,
|
| 434 |
+
"grad_norm": 0.03648149594664574,
|
| 435 |
+
"learning_rate": 0.007402648249758204,
|
| 436 |
+
"loss": 0.2401,
|
| 437 |
+
"step": 1525
|
| 438 |
+
},
|
| 439 |
+
{
|
| 440 |
+
"epoch": 1.24,
|
| 441 |
+
"grad_norm": 0.03415144234895706,
|
| 442 |
+
"learning_rate": 0.007299965841516163,
|
| 443 |
+
"loss": 0.2313,
|
| 444 |
+
"step": 1550
|
| 445 |
+
},
|
| 446 |
+
{
|
| 447 |
+
"epoch": 1.26,
|
| 448 |
+
"grad_norm": 0.039939895272254944,
|
| 449 |
+
"learning_rate": 0.007196037961197066,
|
| 450 |
+
"loss": 0.2303,
|
| 451 |
+
"step": 1575
|
| 452 |
+
},
|
| 453 |
+
{
|
| 454 |
+
"epoch": 1.28,
|
| 455 |
+
"grad_norm": 0.0336175300180912,
|
| 456 |
+
"learning_rate": 0.007090920887581507,
|
| 457 |
+
"loss": 0.2253,
|
| 458 |
+
"step": 1600
|
| 459 |
+
},
|
| 460 |
+
{
|
| 461 |
+
"epoch": 1.3,
|
| 462 |
+
"grad_norm": 0.03801807761192322,
|
| 463 |
+
"learning_rate": 0.0069846715434192285,
|
| 464 |
+
"loss": 0.2265,
|
| 465 |
+
"step": 1625
|
| 466 |
+
},
|
| 467 |
+
{
|
| 468 |
+
"epoch": 1.32,
|
| 469 |
+
"grad_norm": 0.03818347677588463,
|
| 470 |
+
"learning_rate": 0.006877347464604445,
|
| 471 |
+
"loss": 0.2201,
|
| 472 |
+
"step": 1650
|
| 473 |
+
},
|
| 474 |
+
{
|
| 475 |
+
"epoch": 1.34,
|
| 476 |
+
"grad_norm": 0.03448287397623062,
|
| 477 |
+
"learning_rate": 0.006769006769019147,
|
| 478 |
+
"loss": 0.2251,
|
| 479 |
+
"step": 1675
|
| 480 |
+
},
|
| 481 |
+
{
|
| 482 |
+
"epoch": 1.3599999999999999,
|
| 483 |
+
"grad_norm": 0.035138051956892014,
|
| 484 |
+
"learning_rate": 0.006659708125061241,
|
| 485 |
+
"loss": 0.2244,
|
| 486 |
+
"step": 1700
|
| 487 |
+
},
|
| 488 |
+
{
|
| 489 |
+
"epoch": 1.38,
|
| 490 |
+
"grad_norm": 0.03661292791366577,
|
| 491 |
+
"learning_rate": 0.006549510719874577,
|
| 492 |
+
"loss": 0.2252,
|
| 493 |
+
"step": 1725
|
| 494 |
+
},
|
| 495 |
+
{
|
| 496 |
+
"epoch": 1.4,
|
| 497 |
+
"grad_norm": 0.038888540118932724,
|
| 498 |
+
"learning_rate": 0.0064384742272980645,
|
| 499 |
+
"loss": 0.2252,
|
| 500 |
+
"step": 1750
|
| 501 |
+
},
|
| 502 |
+
{
|
| 503 |
+
"epoch": 1.42,
|
| 504 |
+
"grad_norm": 0.03597492724657059,
|
| 505 |
+
"learning_rate": 0.006326658775551235,
|
| 506 |
+
"loss": 0.2276,
|
| 507 |
+
"step": 1775
|
| 508 |
+
},
|
| 509 |
+
{
|
| 510 |
+
"epoch": 1.44,
|
| 511 |
+
"grad_norm": 0.04128337651491165,
|
| 512 |
+
"learning_rate": 0.006214124914673755,
|
| 513 |
+
"loss": 0.2139,
|
| 514 |
+
"step": 1800
|
| 515 |
+
},
|
| 516 |
+
{
|
| 517 |
+
"epoch": 1.46,
|
| 518 |
+
"grad_norm": 0.03295692428946495,
|
| 519 |
+
"learning_rate": 0.006100933583736507,
|
| 520 |
+
"loss": 0.2216,
|
| 521 |
+
"step": 1825
|
| 522 |
+
},
|
| 523 |
+
{
|
| 524 |
+
"epoch": 1.48,
|
| 525 |
+
"grad_norm": 0.04477379098534584,
|
| 526 |
+
"learning_rate": 0.005987146077842015,
|
| 527 |
+
"loss": 0.2247,
|
| 528 |
+
"step": 1850
|
| 529 |
+
},
|
| 530 |
+
{
|
| 531 |
+
"epoch": 1.5,
|
| 532 |
+
"grad_norm": 0.0380830354988575,
|
| 533 |
+
"learning_rate": 0.00587282401493205,
|
| 534 |
+
"loss": 0.2254,
|
| 535 |
+
"step": 1875
|
| 536 |
+
},
|
| 537 |
+
{
|
| 538 |
+
"epoch": 1.52,
|
| 539 |
+
"grad_norm": 0.03974383324384689,
|
| 540 |
+
"learning_rate": 0.005758029302420446,
|
| 541 |
+
"loss": 0.224,
|
| 542 |
+
"step": 1900
|
| 543 |
+
},
|
| 544 |
+
{
|
| 545 |
+
"epoch": 1.54,
|
| 546 |
+
"grad_norm": 0.03354157134890556,
|
| 547 |
+
"learning_rate": 0.005642824103669126,
|
| 548 |
+
"loss": 0.2209,
|
| 549 |
+
"step": 1925
|
| 550 |
+
},
|
| 551 |
+
{
|
| 552 |
+
"epoch": 1.56,
|
| 553 |
+
"grad_norm": 0.03308559209108353,
|
| 554 |
+
"learning_rate": 0.00552727080432556,
|
| 555 |
+
"loss": 0.2112,
|
| 556 |
+
"step": 1950
|
| 557 |
+
},
|
| 558 |
+
{
|
| 559 |
+
"epoch": 1.58,
|
| 560 |
+
"grad_norm": 0.04159720614552498,
|
| 561 |
+
"learning_rate": 0.005411431978539828,
|
| 562 |
+
"loss": 0.226,
|
| 563 |
+
"step": 1975
|
| 564 |
+
},
|
| 565 |
+
{
|
| 566 |
+
"epoch": 1.6,
|
| 567 |
+
"grad_norm": 0.034576594829559326,
|
| 568 |
+
"learning_rate": 0.005295370355079614,
|
| 569 |
+
"loss": 0.2247,
|
| 570 |
+
"step": 2000
|
| 571 |
+
},
|
| 572 |
+
{
|
| 573 |
+
"epoch": 1.62,
|
| 574 |
+
"grad_norm": 0.0362069196999073,
|
| 575 |
+
"learning_rate": 0.005179148783361473,
|
| 576 |
+
"loss": 0.2097,
|
| 577 |
+
"step": 2025
|
| 578 |
+
},
|
| 579 |
+
{
|
| 580 |
+
"epoch": 1.6400000000000001,
|
| 581 |
+
"grad_norm": 0.04194594547152519,
|
| 582 |
+
"learning_rate": 0.005062830199416763,
|
| 583 |
+
"loss": 0.2194,
|
| 584 |
+
"step": 2050
|
| 585 |
+
},
|
| 586 |
+
{
|
| 587 |
+
"epoch": 1.6600000000000001,
|
| 588 |
+
"grad_norm": 0.032347146421670914,
|
| 589 |
+
"learning_rate": 0.004946477591810677,
|
| 590 |
+
"loss": 0.2242,
|
| 591 |
+
"step": 2075
|
| 592 |
+
},
|
| 593 |
+
{
|
| 594 |
+
"epoch": 1.6800000000000002,
|
| 595 |
+
"grad_norm": 0.033743757754564285,
|
| 596 |
+
"learning_rate": 0.00483015396753282,
|
| 597 |
+
"loss": 0.2209,
|
| 598 |
+
"step": 2100
|
| 599 |
+
},
|
| 600 |
+
{
|
| 601 |
+
"epoch": 1.7,
|
| 602 |
+
"grad_norm": 0.03348388895392418,
|
| 603 |
+
"learning_rate": 0.00471392231787781,
|
| 604 |
+
"loss": 0.2188,
|
| 605 |
+
"step": 2125
|
| 606 |
+
},
|
| 607 |
+
{
|
| 608 |
+
"epoch": 1.72,
|
| 609 |
+
"grad_norm": 0.0340593196451664,
|
| 610 |
+
"learning_rate": 0.004597845584334386,
|
| 611 |
+
"loss": 0.2115,
|
| 612 |
+
"step": 2150
|
| 613 |
+
},
|
| 614 |
+
{
|
| 615 |
+
"epoch": 1.74,
|
| 616 |
+
"grad_norm": 0.030615000054240227,
|
| 617 |
+
"learning_rate": 0.004481986624501484,
|
| 618 |
+
"loss": 0.2279,
|
| 619 |
+
"step": 2175
|
| 620 |
+
},
|
| 621 |
+
{
|
| 622 |
+
"epoch": 1.76,
|
| 623 |
+
"grad_norm": 0.033475857228040695,
|
| 624 |
+
"learning_rate": 0.004366408178049728,
|
| 625 |
+
"loss": 0.2186,
|
| 626 |
+
"step": 2200
|
| 627 |
+
},
|
| 628 |
+
{
|
| 629 |
+
"epoch": 1.78,
|
| 630 |
+
"grad_norm": 0.03454643115401268,
|
| 631 |
+
"learning_rate": 0.004251172832746799,
|
| 632 |
+
"loss": 0.2099,
|
| 633 |
+
"step": 2225
|
| 634 |
+
},
|
| 635 |
+
{
|
| 636 |
+
"epoch": 1.8,
|
| 637 |
+
"grad_norm": 0.03211412578821182,
|
| 638 |
+
"learning_rate": 0.004136342990565055,
|
| 639 |
+
"loss": 0.2206,
|
| 640 |
+
"step": 2250
|
| 641 |
+
},
|
| 642 |
+
{
|
| 643 |
+
"epoch": 1.8199999999999998,
|
| 644 |
+
"grad_norm": 0.03475036099553108,
|
| 645 |
+
"learning_rate": 0.00402198083388976,
|
| 646 |
+
"loss": 0.2119,
|
| 647 |
+
"step": 2275
|
| 648 |
+
},
|
| 649 |
+
{
|
| 650 |
+
"epoch": 1.8399999999999999,
|
| 651 |
+
"grad_norm": 0.04023045673966408,
|
| 652 |
+
"learning_rate": 0.0039081482918462244,
|
| 653 |
+
"loss": 0.215,
|
| 654 |
+
"step": 2300
|
| 655 |
+
},
|
| 656 |
+
{
|
| 657 |
+
"epoch": 1.8599999999999999,
|
| 658 |
+
"grad_norm": 0.03013662062585354,
|
| 659 |
+
"learning_rate": 0.003794907006764093,
|
| 660 |
+
"loss": 0.2189,
|
| 661 |
+
"step": 2325
|
| 662 |
+
},
|
| 663 |
+
{
|
| 664 |
+
"epoch": 1.88,
|
| 665 |
+
"grad_norm": 0.03530523553490639,
|
| 666 |
+
"learning_rate": 0.0036823183007969372,
|
| 667 |
+
"loss": 0.2099,
|
| 668 |
+
"step": 2350
|
| 669 |
+
},
|
| 670 |
+
{
|
| 671 |
+
"epoch": 1.9,
|
| 672 |
+
"grad_norm": 0.038400132209062576,
|
| 673 |
+
"learning_rate": 0.003570443142715224,
|
| 674 |
+
"loss": 0.234,
|
| 675 |
+
"step": 2375
|
| 676 |
+
},
|
| 677 |
+
{
|
| 678 |
+
"epoch": 1.92,
|
| 679 |
+
"grad_norm": 0.03556523099541664,
|
| 680 |
+
"learning_rate": 0.0034593421148906523,
|
| 681 |
+
"loss": 0.2187,
|
| 682 |
+
"step": 2400
|
| 683 |
+
},
|
| 684 |
+
{
|
| 685 |
+
"epoch": 1.94,
|
| 686 |
+
"grad_norm": 0.04098186269402504,
|
| 687 |
+
"learning_rate": 0.0033490753804897313,
|
| 688 |
+
"loss": 0.21,
|
| 689 |
+
"step": 2425
|
| 690 |
+
},
|
| 691 |
+
{
|
| 692 |
+
"epoch": 1.96,
|
| 693 |
+
"grad_norm": 0.036452557891607285,
|
| 694 |
+
"learning_rate": 0.0032397026508943637,
|
| 695 |
+
"loss": 0.2118,
|
| 696 |
+
"step": 2450
|
| 697 |
+
},
|
| 698 |
+
{
|
| 699 |
+
"epoch": 1.98,
|
| 700 |
+
"grad_norm": 0.03421608358621597,
|
| 701 |
+
"learning_rate": 0.003131283153367077,
|
| 702 |
+
"loss": 0.2171,
|
| 703 |
+
"step": 2475
|
| 704 |
+
},
|
| 705 |
+
{
|
| 706 |
+
"epoch": 2.0,
|
| 707 |
+
"grad_norm": 0.030221326276659966,
|
| 708 |
+
"learning_rate": 0.003023875598978419,
|
| 709 |
+
"loss": 0.2125,
|
| 710 |
+
"step": 2500
|
| 711 |
+
},
|
| 712 |
+
{
|
| 713 |
+
"epoch": 2.02,
|
| 714 |
+
"grad_norm": 0.035226210951805115,
|
| 715 |
+
"learning_rate": 0.002917538150813876,
|
| 716 |
+
"loss": 0.1886,
|
| 717 |
+
"step": 2525
|
| 718 |
+
},
|
| 719 |
+
{
|
| 720 |
+
"epoch": 2.04,
|
| 721 |
+
"grad_norm": 0.03686540946364403,
|
| 722 |
+
"learning_rate": 0.0028123283924775355,
|
| 723 |
+
"loss": 0.1823,
|
| 724 |
+
"step": 2550
|
| 725 |
+
},
|
| 726 |
+
{
|
| 727 |
+
"epoch": 2.06,
|
| 728 |
+
"grad_norm": 0.03392050787806511,
|
| 729 |
+
"learning_rate": 0.0027083032969095508,
|
| 730 |
+
"loss": 0.1899,
|
| 731 |
+
"step": 2575
|
| 732 |
+
},
|
| 733 |
+
{
|
| 734 |
+
"epoch": 2.08,
|
| 735 |
+
"grad_norm": 0.03575480356812477,
|
| 736 |
+
"learning_rate": 0.002605519195534288,
|
| 737 |
+
"loss": 0.1812,
|
| 738 |
+
"step": 2600
|
| 739 |
+
},
|
| 740 |
+
{
|
| 741 |
+
"epoch": 2.1,
|
| 742 |
+
"grad_norm": 0.031089287251234055,
|
| 743 |
+
"learning_rate": 0.0025040317477558614,
|
| 744 |
+
"loss": 0.1897,
|
| 745 |
+
"step": 2625
|
| 746 |
+
},
|
| 747 |
+
{
|
| 748 |
+
"epoch": 2.12,
|
| 749 |
+
"grad_norm": 0.031031444668769836,
|
| 750 |
+
"learning_rate": 0.002403895910817593,
|
| 751 |
+
"loss": 0.1816,
|
| 752 |
+
"step": 2650
|
| 753 |
+
},
|
| 754 |
+
{
|
| 755 |
+
"epoch": 2.14,
|
| 756 |
+
"grad_norm": 0.03138287365436554,
|
| 757 |
+
"learning_rate": 0.002305165910041689,
|
| 758 |
+
"loss": 0.1891,
|
| 759 |
+
"step": 2675
|
| 760 |
+
},
|
| 761 |
+
{
|
| 762 |
+
"epoch": 2.16,
|
| 763 |
+
"grad_norm": 0.035979412496089935,
|
| 764 |
+
"learning_rate": 0.0022078952094652703,
|
| 765 |
+
"loss": 0.1854,
|
| 766 |
+
"step": 2700
|
| 767 |
+
},
|
| 768 |
+
{
|
| 769 |
+
"epoch": 2.18,
|
| 770 |
+
"grad_norm": 0.03277301788330078,
|
| 771 |
+
"learning_rate": 0.0021121364828886632,
|
| 772 |
+
"loss": 0.1917,
|
| 773 |
+
"step": 2725
|
| 774 |
+
},
|
| 775 |
+
{
|
| 776 |
+
"epoch": 2.2,
|
| 777 |
+
"grad_norm": 0.03231624513864517,
|
| 778 |
+
"learning_rate": 0.002017941585351591,
|
| 779 |
+
"loss": 0.1872,
|
| 780 |
+
"step": 2750
|
| 781 |
+
},
|
| 782 |
+
{
|
| 783 |
+
"epoch": 2.22,
|
| 784 |
+
"grad_norm": 0.03247809037566185,
|
| 785 |
+
"learning_rate": 0.001925361525052774,
|
| 786 |
+
"loss": 0.1817,
|
| 787 |
+
"step": 2775
|
| 788 |
+
},
|
| 789 |
+
{
|
| 790 |
+
"epoch": 2.24,
|
| 791 |
+
"grad_norm": 0.030497411265969276,
|
| 792 |
+
"learning_rate": 0.001834446435728072,
|
| 793 |
+
"loss": 0.1821,
|
| 794 |
+
"step": 2800
|
| 795 |
+
},
|
| 796 |
+
{
|
| 797 |
+
"epoch": 2.26,
|
| 798 |
+
"grad_norm": 0.03446466475725174,
|
| 799 |
+
"learning_rate": 0.0017452455495021962,
|
| 800 |
+
"loss": 0.1925,
|
| 801 |
+
"step": 2825
|
| 802 |
+
},
|
| 803 |
+
{
|
| 804 |
+
"epoch": 2.2800000000000002,
|
| 805 |
+
"grad_norm": 0.03429936617612839,
|
| 806 |
+
"learning_rate": 0.0016578071702286396,
|
| 807 |
+
"loss": 0.1798,
|
| 808 |
+
"step": 2850
|
| 809 |
+
},
|
| 810 |
+
{
|
| 811 |
+
"epoch": 2.3,
|
| 812 |
+
"grad_norm": 0.030741166323423386,
|
| 813 |
+
"learning_rate": 0.0015721786473322824,
|
| 814 |
+
"loss": 0.1842,
|
| 815 |
+
"step": 2875
|
| 816 |
+
},
|
| 817 |
+
{
|
| 818 |
+
"epoch": 2.32,
|
| 819 |
+
"grad_norm": 0.02960963360965252,
|
| 820 |
+
"learning_rate": 0.0014884063501688538,
|
| 821 |
+
"loss": 0.1913,
|
| 822 |
+
"step": 2900
|
| 823 |
+
},
|
| 824 |
+
{
|
| 825 |
+
"epoch": 2.34,
|
| 826 |
+
"grad_norm": 0.026998884975910187,
|
| 827 |
+
"learning_rate": 0.0014065356429151033,
|
| 828 |
+
"loss": 0.18,
|
| 829 |
+
"step": 2925
|
| 830 |
+
},
|
| 831 |
+
{
|
| 832 |
+
"epoch": 2.36,
|
| 833 |
+
"grad_norm": 0.04103350639343262,
|
| 834 |
+
"learning_rate": 0.0013266108600032927,
|
| 835 |
+
"loss": 0.1867,
|
| 836 |
+
"step": 2950
|
| 837 |
+
},
|
| 838 |
+
{
|
| 839 |
+
"epoch": 2.38,
|
| 840 |
+
"grad_norm": 0.03400329872965813,
|
| 841 |
+
"learning_rate": 0.0012486752821133313,
|
| 842 |
+
"loss": 0.1798,
|
| 843 |
+
"step": 2975
|
| 844 |
+
},
|
| 845 |
+
{
|
| 846 |
+
"epoch": 2.4,
|
| 847 |
+
"grad_norm": 0.03053884394466877,
|
| 848 |
+
"learning_rate": 0.0011727711127355117,
|
| 849 |
+
"loss": 0.181,
|
| 850 |
+
"step": 3000
|
| 851 |
+
},
|
| 852 |
+
{
|
| 853 |
+
"epoch": 2.42,
|
| 854 |
+
"grad_norm": 0.03675390034914017,
|
| 855 |
+
"learning_rate": 0.0010989394553165832,
|
| 856 |
+
"loss": 0.182,
|
| 857 |
+
"step": 3025
|
| 858 |
+
},
|
| 859 |
+
{
|
| 860 |
+
"epoch": 2.44,
|
| 861 |
+
"grad_norm": 0.030522828921675682,
|
| 862 |
+
"learning_rate": 0.0010272202910015082,
|
| 863 |
+
"loss": 0.1862,
|
| 864 |
+
"step": 3050
|
| 865 |
+
},
|
| 866 |
+
{
|
| 867 |
+
"epoch": 2.46,
|
| 868 |
+
"grad_norm": 0.03357331454753876,
|
| 869 |
+
"learning_rate": 0.0009576524569829692,
|
| 870 |
+
"loss": 0.1853,
|
| 871 |
+
"step": 3075
|
| 872 |
+
},
|
| 873 |
+
{
|
| 874 |
+
"epoch": 2.48,
|
| 875 |
+
"grad_norm": 0.037533897906541824,
|
| 876 |
+
"learning_rate": 0.0008902736254703347,
|
| 877 |
+
"loss": 0.1795,
|
| 878 |
+
"step": 3100
|
| 879 |
+
},
|
| 880 |
+
{
|
| 881 |
+
"epoch": 2.5,
|
| 882 |
+
"grad_norm": 0.03461192548274994,
|
| 883 |
+
"learning_rate": 0.0008251202832895066,
|
| 884 |
+
"loss": 0.1873,
|
| 885 |
+
"step": 3125
|
| 886 |
+
},
|
| 887 |
+
{
|
| 888 |
+
"epoch": 2.52,
|
| 889 |
+
"grad_norm": 0.03553595393896103,
|
| 890 |
+
"learning_rate": 0.0007622277121246512,
|
| 891 |
+
"loss": 0.1789,
|
| 892 |
+
"step": 3150
|
| 893 |
+
},
|
| 894 |
+
{
|
| 895 |
+
"epoch": 2.54,
|
| 896 |
+
"grad_norm": 0.03394347429275513,
|
| 897 |
+
"learning_rate": 0.000701629969412545,
|
| 898 |
+
"loss": 0.185,
|
| 899 |
+
"step": 3175
|
| 900 |
+
},
|
| 901 |
+
{
|
| 902 |
+
"epoch": 2.56,
|
| 903 |
+
"grad_norm": 0.0283980555832386,
|
| 904 |
+
"learning_rate": 0.0006433598698998766,
|
| 905 |
+
"loss": 0.1805,
|
| 906 |
+
"step": 3200
|
| 907 |
+
},
|
| 908 |
+
{
|
| 909 |
+
"epoch": 2.58,
|
| 910 |
+
"grad_norm": 0.03025851771235466,
|
| 911 |
+
"learning_rate": 0.0005874489678734812,
|
| 912 |
+
"loss": 0.1762,
|
| 913 |
+
"step": 3225
|
| 914 |
+
},
|
| 915 |
+
{
|
| 916 |
+
"epoch": 2.6,
|
| 917 |
+
"grad_norm": 0.03542542830109596,
|
| 918 |
+
"learning_rate": 0.000533927540073133,
|
| 919 |
+
"loss": 0.1782,
|
| 920 |
+
"step": 3250
|
| 921 |
+
},
|
| 922 |
+
{
|
| 923 |
+
"epoch": 2.62,
|
| 924 |
+
"grad_norm": 0.03365590050816536,
|
| 925 |
+
"learning_rate": 0.0004828245692961608,
|
| 926 |
+
"loss": 0.1795,
|
| 927 |
+
"step": 3275
|
| 928 |
+
},
|
| 929 |
+
{
|
| 930 |
+
"epoch": 2.64,
|
| 931 |
+
"grad_norm": 0.0319739505648613,
|
| 932 |
+
"learning_rate": 0.0004341677287027529,
|
| 933 |
+
"loss": 0.1788,
|
| 934 |
+
"step": 3300
|
| 935 |
+
},
|
| 936 |
+
{
|
| 937 |
+
"epoch": 2.66,
|
| 938 |
+
"grad_norm": 0.030165359377861023,
|
| 939 |
+
"learning_rate": 0.00038798336683045057,
|
| 940 |
+
"loss": 0.1814,
|
| 941 |
+
"step": 3325
|
| 942 |
+
},
|
| 943 |
+
{
|
| 944 |
+
"epoch": 2.68,
|
| 945 |
+
"grad_norm": 0.03518758341670036,
|
| 946 |
+
"learning_rate": 0.0003442964933259474,
|
| 947 |
+
"loss": 0.1836,
|
| 948 |
+
"step": 3350
|
| 949 |
+
},
|
| 950 |
+
{
|
| 951 |
+
"epoch": 2.7,
|
| 952 |
+
"grad_norm": 0.036399878561496735,
|
| 953 |
+
"learning_rate": 0.00030313076540192997,
|
| 954 |
+
"loss": 0.1793,
|
| 955 |
+
"step": 3375
|
| 956 |
+
},
|
| 957 |
+
{
|
| 958 |
+
"epoch": 2.7199999999999998,
|
| 959 |
+
"grad_norm": 0.04823492839932442,
|
| 960 |
+
"learning_rate": 0.00026450847502627883,
|
| 961 |
+
"loss": 0.1904,
|
| 962 |
+
"step": 3400
|
| 963 |
+
},
|
| 964 |
+
{
|
| 965 |
+
"epoch": 2.74,
|
| 966 |
+
"grad_norm": 0.02590683102607727,
|
| 967 |
+
"learning_rate": 0.00022845053685056904,
|
| 968 |
+
"loss": 0.1816,
|
| 969 |
+
"step": 3425
|
| 970 |
+
},
|
| 971 |
+
{
|
| 972 |
+
"epoch": 2.76,
|
| 973 |
+
"grad_norm": 0.030667992308735847,
|
| 974 |
+
"learning_rate": 0.00019497647688442477,
|
| 975 |
+
"loss": 0.1726,
|
| 976 |
+
"step": 3450
|
| 977 |
+
},
|
| 978 |
+
{
|
| 979 |
+
"epoch": 2.7800000000000002,
|
| 980 |
+
"grad_norm": 0.036195989698171616,
|
| 981 |
+
"learning_rate": 0.00016410442192183573,
|
| 982 |
+
"loss": 0.1759,
|
| 983 |
+
"step": 3475
|
| 984 |
+
},
|
| 985 |
+
{
|
| 986 |
+
"epoch": 2.8,
|
| 987 |
+
"grad_norm": 0.033511728048324585,
|
| 988 |
+
"learning_rate": 0.0001358510897251808,
|
| 989 |
+
"loss": 0.1769,
|
| 990 |
+
"step": 3500
|
| 991 |
+
},
|
| 992 |
+
{
|
| 993 |
+
"epoch": 2.82,
|
| 994 |
+
"grad_norm": 0.03249663487076759,
|
| 995 |
+
"learning_rate": 0.00011023177997226297,
|
| 996 |
+
"loss": 0.1799,
|
| 997 |
+
"step": 3525
|
| 998 |
+
},
|
| 999 |
+
{
|
| 1000 |
+
"epoch": 2.84,
|
| 1001 |
+
"grad_norm": 0.035352785140275955,
|
| 1002 |
+
"learning_rate": 8.726036597126619e-05,
|
| 1003 |
+
"loss": 0.176,
|
| 1004 |
+
"step": 3550
|
| 1005 |
+
},
|
| 1006 |
+
{
|
| 1007 |
+
"epoch": 2.86,
|
| 1008 |
+
"grad_norm": 0.03339748457074165,
|
| 1009 |
+
"learning_rate": 6.694928714811255e-05,
|
| 1010 |
+
"loss": 0.1732,
|
| 1011 |
+
"step": 3575
|
| 1012 |
+
},
|
| 1013 |
+
{
|
| 1014 |
+
"epoch": 2.88,
|
| 1015 |
+
"grad_norm": 0.03594556823372841,
|
| 1016 |
+
"learning_rate": 4.93095423102935e-05,
|
| 1017 |
+
"loss": 0.1812,
|
| 1018 |
+
"step": 3600
|
| 1019 |
+
},
|
| 1020 |
+
{
|
| 1021 |
+
"epoch": 2.9,
|
| 1022 |
+
"grad_norm": 0.03444835543632507,
|
| 1023 |
+
"learning_rate": 3.435068369082306e-05,
|
| 1024 |
+
"loss": 0.1807,
|
| 1025 |
+
"step": 3625
|
| 1026 |
+
},
|
| 1027 |
+
{
|
| 1028 |
+
"epoch": 2.92,
|
| 1029 |
+
"grad_norm": 0.04031510651111603,
|
| 1030 |
+
"learning_rate": 2.2080811775535005e-05,
|
| 1031 |
+
"loss": 0.1779,
|
| 1032 |
+
"step": 3650
|
| 1033 |
+
},
|
| 1034 |
+
{
|
| 1035 |
+
"epoch": 2.94,
|
| 1036 |
+
"grad_norm": 0.03477194532752037,
|
| 1037 |
+
"learning_rate": 1.2506570916523408e-05,
|
| 1038 |
+
"loss": 0.173,
|
| 1039 |
+
"step": 3675
|
| 1040 |
+
},
|
| 1041 |
+
{
|
| 1042 |
+
"epoch": 2.96,
|
| 1043 |
+
"grad_norm": 0.03274468705058098,
|
| 1044 |
+
"learning_rate": 5.633145734114664e-06,
|
| 1045 |
+
"loss": 0.1752,
|
| 1046 |
+
"step": 3700
|
| 1047 |
+
},
|
| 1048 |
+
{
|
| 1049 |
+
"epoch": 2.98,
|
| 1050 |
+
"grad_norm": 0.02915850654244423,
|
| 1051 |
+
"learning_rate": 1.4642583092999705e-06,
|
| 1052 |
+
"loss": 0.1739,
|
| 1053 |
+
"step": 3725
|
| 1054 |
+
},
|
| 1055 |
+
{
|
| 1056 |
+
"epoch": 3.0,
|
| 1057 |
+
"grad_norm": 0.027722839266061783,
|
| 1058 |
+
"learning_rate": 2.166168162065496e-09,
|
| 1059 |
+
"loss": 0.1796,
|
| 1060 |
+
"step": 3750
|
| 1061 |
+
},
|
| 1062 |
+
{
|
| 1063 |
+
"epoch": 3.0,
|
| 1064 |
+
"step": 3750,
|
| 1065 |
+
"total_flos": 2.43882352705536e+18,
|
| 1066 |
+
"train_loss": 0.23716249809265136,
|
| 1067 |
+
"train_runtime": 3463.2844,
|
| 1068 |
+
"train_samples_per_second": 34.649,
|
| 1069 |
+
"train_steps_per_second": 1.083
|
| 1070 |
+
}
|
| 1071 |
+
],
|
| 1072 |
+
"logging_steps": 25,
|
| 1073 |
+
"max_steps": 3750,
|
| 1074 |
+
"num_input_tokens_seen": 0,
|
| 1075 |
+
"num_train_epochs": 3,
|
| 1076 |
+
"save_steps": 0,
|
| 1077 |
+
"stateful_callbacks": {
|
| 1078 |
+
"TrainerControl": {
|
| 1079 |
+
"args": {
|
| 1080 |
+
"should_epoch_stop": false,
|
| 1081 |
+
"should_evaluate": false,
|
| 1082 |
+
"should_log": false,
|
| 1083 |
+
"should_save": true,
|
| 1084 |
+
"should_training_stop": true
|
| 1085 |
+
},
|
| 1086 |
+
"attributes": {}
|
| 1087 |
+
}
|
| 1088 |
+
},
|
| 1089 |
+
"total_flos": 2.43882352705536e+18,
|
| 1090 |
+
"train_batch_size": 32,
|
| 1091 |
+
"trial_name": null,
|
| 1092 |
+
"trial_params": null
|
| 1093 |
+
}
|
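Note: the summary block that closes the trainer_state.json above (3750 steps, train_runtime ≈ 3463.28 s, train_batch_size 32) lets the reported throughput fields be re-derived directly. The sketch below is only a sanity check, assuming the file sits at the path used in this upload (per the file order, run_ex24's state file); it uses nothing beyond the Python standard library and is not part of the upload itself.

import json

# Path as it appears in this upload; adjust for a local checkout.
STATE_PATH = "nl_tasks/exps/run_ex24/trainer_state.json"

with open(STATE_PATH) as f:
    state = json.load(f)

summary = state["log_history"][-1]        # last entry carries the run-level stats
steps = summary["step"]                   # 3750
runtime = summary["train_runtime"]        # ~3463.28 s
batch = state["train_batch_size"]         # 32

print("steps/s  :", steps / runtime)          # ~1.083  -> train_steps_per_second
print("samples/s:", steps * batch / runtime)  # ~34.65  -> train_samples_per_second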
nl_tasks/exps/run_ex25/ft/adapter_config.json
ADDED
|
@@ -0,0 +1,18 @@
|
| 1 |
+
{
|
| 2 |
+
"T": 1.0,
|
| 3 |
+
"base_model_name_or_path": "meta-llama/Llama-2-7b-hf",
|
| 4 |
+
"bias": "none",
|
| 5 |
+
"inference_mode": false,
|
| 6 |
+
"layers_to_transform": null,
|
| 7 |
+
"modules_to_save": null,
|
| 8 |
+
"num_rotations": 1,
|
| 9 |
+
"peft_type": "ROTATION",
|
| 10 |
+
"r": 16,
|
| 11 |
+
"revision": null,
|
| 12 |
+
"target_modules": [
|
| 13 |
+
"q_proj",
|
| 14 |
+
"v_proj"
|
| 15 |
+
],
|
| 16 |
+
"target_modules_to_skip": null,
|
| 17 |
+
"task_type": "CAUSAL_LM"
|
| 18 |
+
}
|
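The adapter_config.json above declares a custom peft_type of "ROTATION" (r = 16, one rotation, applied to q_proj/v_proj of Llama-2-7b). Since ROTATION is not one of the stock PEFT adapter types, the sketch below only inspects the saved fields with the standard json module rather than going through peft's loaders; the path is the one used in this upload and is otherwise an assumption.

import json

CFG_PATH = "nl_tasks/exps/run_ex25/ft/adapter_config.json"  # as stored in this repo

with open(CFG_PATH) as f:
    cfg = json.load(f)

# Inspect the custom adapter settings without assuming any peft-side support.
print(cfg["peft_type"], "r =", cfg["r"], "targets:", cfg["target_modules"])
assert cfg["base_model_name_or_path"] == "meta-llama/Llama-2-7b-hf"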
nl_tasks/exps/run_ex25/ft/special_tokens_map.json
ADDED
|
@@ -0,0 +1,24 @@
|
| 1 |
+
{
|
| 2 |
+
"bos_token": {
|
| 3 |
+
"content": "<s>",
|
| 4 |
+
"lstrip": false,
|
| 5 |
+
"normalized": false,
|
| 6 |
+
"rstrip": false,
|
| 7 |
+
"single_word": false
|
| 8 |
+
},
|
| 9 |
+
"eos_token": {
|
| 10 |
+
"content": "</s>",
|
| 11 |
+
"lstrip": false,
|
| 12 |
+
"normalized": false,
|
| 13 |
+
"rstrip": false,
|
| 14 |
+
"single_word": false
|
| 15 |
+
},
|
| 16 |
+
"pad_token": "<unk>",
|
| 17 |
+
"unk_token": {
|
| 18 |
+
"content": "<unk>",
|
| 19 |
+
"lstrip": false,
|
| 20 |
+
"normalized": false,
|
| 21 |
+
"rstrip": false,
|
| 22 |
+
"single_word": false
|
| 23 |
+
}
|
| 24 |
+
}
|
nl_tasks/exps/run_ex25/ft/tokenizer.json
ADDED
|
The diff for this file is too large to render.
See raw diff
|
nl_tasks/exps/run_ex25/ft/tokenizer.model
ADDED
|
@@ -0,0 +1,3 @@
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
|
| 3 |
+
size 499723
|
nl_tasks/exps/run_ex25/ft/tokenizer_config.json
ADDED
|
@@ -0,0 +1,43 @@
|
| 1 |
+
{
|
| 2 |
+
"add_bos_token": true,
|
| 3 |
+
"add_eos_token": false,
|
| 4 |
+
"add_prefix_space": null,
|
| 5 |
+
"added_tokens_decoder": {
|
| 6 |
+
"0": {
|
| 7 |
+
"content": "<unk>",
|
| 8 |
+
"lstrip": false,
|
| 9 |
+
"normalized": false,
|
| 10 |
+
"rstrip": false,
|
| 11 |
+
"single_word": false,
|
| 12 |
+
"special": true
|
| 13 |
+
},
|
| 14 |
+
"1": {
|
| 15 |
+
"content": "<s>",
|
| 16 |
+
"lstrip": false,
|
| 17 |
+
"normalized": false,
|
| 18 |
+
"rstrip": false,
|
| 19 |
+
"single_word": false,
|
| 20 |
+
"special": true
|
| 21 |
+
},
|
| 22 |
+
"2": {
|
| 23 |
+
"content": "</s>",
|
| 24 |
+
"lstrip": false,
|
| 25 |
+
"normalized": false,
|
| 26 |
+
"rstrip": false,
|
| 27 |
+
"single_word": false,
|
| 28 |
+
"special": true
|
| 29 |
+
}
|
| 30 |
+
},
|
| 31 |
+
"bos_token": "<s>",
|
| 32 |
+
"clean_up_tokenization_spaces": false,
|
| 33 |
+
"eos_token": "</s>",
|
| 34 |
+
"extra_special_tokens": {},
|
| 35 |
+
"legacy": false,
|
| 36 |
+
"model_max_length": 512,
|
| 37 |
+
"pad_token": "<unk>",
|
| 38 |
+
"padding_side": "right",
|
| 39 |
+
"sp_model_kwargs": {},
|
| 40 |
+
"tokenizer_class": "LlamaTokenizer",
|
| 41 |
+
"unk_token": "<unk>",
|
| 42 |
+
"use_default_system_prompt": false
|
| 43 |
+
}
|
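The four files above (special_tokens_map.json, tokenizer.json, tokenizer.model, tokenizer_config.json) are a standard LlamaTokenizer save with model_max_length 512 and <unk> used as the padding token. Assuming they sit together in the directory below, they can be loaded with the regular transformers API; the snippet is a minimal sketch, not part of the upload.

from transformers import AutoTokenizer

TOK_DIR = "nl_tasks/exps/run_ex25/ft"  # directory holding the tokenizer files above

tok = AutoTokenizer.from_pretrained(TOK_DIR)

# Values pinned by tokenizer_config.json: max length 512, right padding with <unk>.
print(tok.model_max_length, tok.pad_token, tok.padding_side)
print(tok("Hello world")["input_ids"])  # add_bos_token=true prepends <s> (id 1)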
nl_tasks/exps/run_ex25/ft2/adapter_config.json
ADDED
|
@@ -0,0 +1,18 @@
|
| 1 |
+
{
|
| 2 |
+
"T": 1.0,
|
| 3 |
+
"base_model_name_or_path": "meta-llama/Llama-2-7b-hf",
|
| 4 |
+
"bias": "none",
|
| 5 |
+
"inference_mode": true,
|
| 6 |
+
"layers_to_transform": null,
|
| 7 |
+
"modules_to_save": null,
|
| 8 |
+
"num_rotations": 1,
|
| 9 |
+
"peft_type": "ROTATION",
|
| 10 |
+
"r": 16,
|
| 11 |
+
"revision": null,
|
| 12 |
+
"target_modules": [
|
| 13 |
+
"q_proj",
|
| 14 |
+
"v_proj"
|
| 15 |
+
],
|
| 16 |
+
"target_modules_to_skip": null,
|
| 17 |
+
"task_type": "CAUSAL_LM"
|
| 18 |
+
}
|
nl_tasks/exps/run_ex25/ft2/adapter_model.bin
ADDED
|
@@ -0,0 +1,3 @@
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:e54f96c611761c8f54fb1b97d9d3282a5a1f6499b2f14ce38d4a7c4d7f849202
|
| 3 |
+
size 33602915
|
nl_tasks/exps/run_ex25/trainer_state.json
ADDED
|
@@ -0,0 +1,1093 @@
|
| 1 |
+
{
|
| 2 |
+
"best_global_step": null,
|
| 3 |
+
"best_metric": null,
|
| 4 |
+
"best_model_checkpoint": null,
|
| 5 |
+
"epoch": 3.0,
|
| 6 |
+
"eval_steps": 500,
|
| 7 |
+
"global_step": 3750,
|
| 8 |
+
"is_hyper_param_search": false,
|
| 9 |
+
"is_local_process_zero": true,
|
| 10 |
+
"is_world_process_zero": true,
|
| 11 |
+
"log_history": [
|
| 12 |
+
{
|
| 13 |
+
"epoch": 0.02,
|
| 14 |
+
"grad_norm": 0.2934044301509857,
|
| 15 |
+
"learning_rate": 0.000128,
|
| 16 |
+
"loss": 0.8066,
|
| 17 |
+
"step": 25
|
| 18 |
+
},
|
| 19 |
+
{
|
| 20 |
+
"epoch": 0.04,
|
| 21 |
+
"grad_norm": 0.20665736496448517,
|
| 22 |
+
"learning_rate": 0.0002613333333333333,
|
| 23 |
+
"loss": 0.4446,
|
| 24 |
+
"step": 50
|
| 25 |
+
},
|
| 26 |
+
{
|
| 27 |
+
"epoch": 0.06,
|
| 28 |
+
"grad_norm": 0.17562589049339294,
|
| 29 |
+
"learning_rate": 0.0003946666666666667,
|
| 30 |
+
"loss": 0.3942,
|
| 31 |
+
"step": 75
|
| 32 |
+
},
|
| 33 |
+
{
|
| 34 |
+
"epoch": 0.08,
|
| 35 |
+
"grad_norm": 0.1656724214553833,
|
| 36 |
+
"learning_rate": 0.000528,
|
| 37 |
+
"loss": 0.362,
|
| 38 |
+
"step": 100
|
| 39 |
+
},
|
| 40 |
+
{
|
| 41 |
+
"epoch": 0.1,
|
| 42 |
+
"grad_norm": 0.17268958687782288,
|
| 43 |
+
"learning_rate": 0.0006613333333333333,
|
| 44 |
+
"loss": 0.3454,
|
| 45 |
+
"step": 125
|
| 46 |
+
},
|
| 47 |
+
{
|
| 48 |
+
"epoch": 0.12,
|
| 49 |
+
"grad_norm": 0.219743013381958,
|
| 50 |
+
"learning_rate": 0.0007946666666666666,
|
| 51 |
+
"loss": 0.3382,
|
| 52 |
+
"step": 150
|
| 53 |
+
},
|
| 54 |
+
{
|
| 55 |
+
"epoch": 0.14,
|
| 56 |
+
"grad_norm": 0.1991826742887497,
|
| 57 |
+
"learning_rate": 0.0009280000000000001,
|
| 58 |
+
"loss": 0.3339,
|
| 59 |
+
"step": 175
|
| 60 |
+
},
|
| 61 |
+
{
|
| 62 |
+
"epoch": 0.16,
|
| 63 |
+
"grad_norm": 0.23559680581092834,
|
| 64 |
+
"learning_rate": 0.0010613333333333332,
|
| 65 |
+
"loss": 0.3343,
|
| 66 |
+
"step": 200
|
| 67 |
+
},
|
| 68 |
+
{
|
| 69 |
+
"epoch": 0.18,
|
| 70 |
+
"grad_norm": 0.23135684430599213,
|
| 71 |
+
"learning_rate": 0.0011946666666666668,
|
| 72 |
+
"loss": 0.3329,
|
| 73 |
+
"step": 225
|
| 74 |
+
},
|
| 75 |
+
{
|
| 76 |
+
"epoch": 0.2,
|
| 77 |
+
"grad_norm": 0.24547122418880463,
|
| 78 |
+
"learning_rate": 0.0013280000000000002,
|
| 79 |
+
"loss": 0.3294,
|
| 80 |
+
"step": 250
|
| 81 |
+
},
|
| 82 |
+
{
|
| 83 |
+
"epoch": 0.22,
|
| 84 |
+
"grad_norm": 0.43551498651504517,
|
| 85 |
+
"learning_rate": 0.0014613333333333334,
|
| 86 |
+
"loss": 0.3412,
|
| 87 |
+
"step": 275
|
| 88 |
+
},
|
| 89 |
+
{
|
| 90 |
+
"epoch": 0.24,
|
| 91 |
+
"grad_norm": 0.3377213478088379,
|
| 92 |
+
"learning_rate": 0.0015946666666666668,
|
| 93 |
+
"loss": 0.7195,
|
| 94 |
+
"step": 300
|
| 95 |
+
},
|
| 96 |
+
{
|
| 97 |
+
"epoch": 0.26,
|
| 98 |
+
"grad_norm": 0.24051505327224731,
|
| 99 |
+
"learning_rate": 0.001728,
|
| 100 |
+
"loss": 0.3476,
|
| 101 |
+
"step": 325
|
| 102 |
+
},
|
| 103 |
+
{
|
| 104 |
+
"epoch": 0.28,
|
| 105 |
+
"grad_norm": 0.25818943977355957,
|
| 106 |
+
"learning_rate": 0.0018613333333333333,
|
| 107 |
+
"loss": 0.3497,
|
| 108 |
+
"step": 350
|
| 109 |
+
},
|
| 110 |
+
{
|
| 111 |
+
"epoch": 0.3,
|
| 112 |
+
"grad_norm": 0.2813394367694855,
|
| 113 |
+
"learning_rate": 0.0019946666666666667,
|
| 114 |
+
"loss": 0.3444,
|
| 115 |
+
"step": 375
|
| 116 |
+
},
|
| 117 |
+
{
|
| 118 |
+
"epoch": 0.32,
|
| 119 |
+
"grad_norm": 0.294381320476532,
|
| 120 |
+
"learning_rate": 0.0019997504677881223,
|
| 121 |
+
"loss": 0.3383,
|
| 122 |
+
"step": 400
|
| 123 |
+
},
|
| 124 |
+
{
|
| 125 |
+
"epoch": 0.34,
|
| 126 |
+
"grad_norm": 0.2145279496908188,
|
| 127 |
+
"learning_rate": 0.001998959986294895,
|
| 128 |
+
"loss": 0.322,
|
| 129 |
+
"step": 425
|
| 130 |
+
},
|
| 131 |
+
{
|
| 132 |
+
"epoch": 0.36,
|
| 133 |
+
"grad_norm": 0.23977665603160858,
|
| 134 |
+
"learning_rate": 0.001997628550346359,
|
| 135 |
+
"loss": 0.3223,
|
| 136 |
+
"step": 450
|
| 137 |
+
},
|
| 138 |
+
{
|
| 139 |
+
"epoch": 0.38,
|
| 140 |
+
"grad_norm": 0.19997288286685944,
|
| 141 |
+
"learning_rate": 0.0019957568809385695,
|
| 142 |
+
"loss": 0.3121,
|
| 143 |
+
"step": 475
|
| 144 |
+
},
|
| 145 |
+
{
|
| 146 |
+
"epoch": 0.4,
|
| 147 |
+
"grad_norm": 0.19473682343959808,
|
| 148 |
+
"learning_rate": 0.0019933459916135275,
|
| 149 |
+
"loss": 0.3085,
|
| 150 |
+
"step": 500
|
| 151 |
+
},
|
| 152 |
+
{
|
| 153 |
+
"epoch": 0.42,
|
| 154 |
+
"grad_norm": 0.16213826835155487,
|
| 155 |
+
"learning_rate": 0.0019903971879103345,
|
| 156 |
+
"loss": 0.3046,
|
| 157 |
+
"step": 525
|
| 158 |
+
},
|
| 159 |
+
{
|
| 160 |
+
"epoch": 0.44,
|
| 161 |
+
"grad_norm": 0.2148275524377823,
|
| 162 |
+
"learning_rate": 0.0019869120666582154,
|
| 163 |
+
"loss": 0.289,
|
| 164 |
+
"step": 550
|
| 165 |
+
},
|
| 166 |
+
{
|
| 167 |
+
"epoch": 0.46,
|
| 168 |
+
"grad_norm": 0.19214589893817902,
|
| 169 |
+
"learning_rate": 0.001982892515111809,
|
| 170 |
+
"loss": 0.3065,
|
| 171 |
+
"step": 575
|
| 172 |
+
},
|
| 173 |
+
{
|
| 174 |
+
"epoch": 0.48,
|
| 175 |
+
"grad_norm": 0.21339517831802368,
|
| 176 |
+
"learning_rate": 0.001978340709929188,
|
| 177 |
+
"loss": 0.291,
|
| 178 |
+
"step": 600
|
| 179 |
+
},
|
| 180 |
+
{
|
| 181 |
+
"epoch": 0.5,
|
| 182 |
+
"grad_norm": 0.17655597627162933,
|
| 183 |
+
"learning_rate": 0.0019732591159931563,
|
| 184 |
+
"loss": 0.3024,
|
| 185 |
+
"step": 625
|
| 186 |
+
},
|
| 187 |
+
{
|
| 188 |
+
"epoch": 0.52,
|
| 189 |
+
"grad_norm": 0.19270546734333038,
|
| 190 |
+
"learning_rate": 0.001967650485076476,
|
| 191 |
+
"loss": 0.2943,
|
| 192 |
+
"step": 650
|
| 193 |
+
},
|
| 194 |
+
{
|
| 195 |
+
"epoch": 0.54,
|
| 196 |
+
"grad_norm": 0.12036184221506119,
|
| 197 |
+
"learning_rate": 0.0019615178543517303,
|
| 198 |
+
"loss": 0.2795,
|
| 199 |
+
"step": 675
|
| 200 |
+
},
|
| 201 |
+
{
|
| 202 |
+
"epoch": 0.56,
|
| 203 |
+
"grad_norm": 0.21230359375476837,
|
| 204 |
+
"learning_rate": 0.001954864544746643,
|
| 205 |
+
"loss": 0.2803,
|
| 206 |
+
"step": 700
|
| 207 |
+
},
|
| 208 |
+
{
|
| 209 |
+
"epoch": 0.58,
|
| 210 |
+
"grad_norm": 0.20738306641578674,
|
| 211 |
+
"learning_rate": 0.001947694159145737,
|
| 212 |
+
"loss": 0.2865,
|
| 213 |
+
"step": 725
|
| 214 |
+
},
|
| 215 |
+
{
|
| 216 |
+
"epoch": 0.6,
|
| 217 |
+
"grad_norm": 0.16709482669830322,
|
| 218 |
+
"learning_rate": 0.001940010580439308,
|
| 219 |
+
"loss": 0.2818,
|
| 220 |
+
"step": 750
|
| 221 |
+
},
|
| 222 |
+
{
|
| 223 |
+
"epoch": 0.62,
|
| 224 |
+
"grad_norm": 0.17816157639026642,
|
| 225 |
+
"learning_rate": 0.0019318179694207726,
|
| 226 |
+
"loss": 0.2694,
|
| 227 |
+
"step": 775
|
| 228 |
+
},
|
| 229 |
+
{
|
| 230 |
+
"epoch": 0.64,
|
| 231 |
+
"grad_norm": 0.5133901834487915,
|
| 232 |
+
"learning_rate": 0.0019231207625335235,
|
| 233 |
+
"loss": 0.2757,
|
| 234 |
+
"step": 800
|
| 235 |
+
},
|
| 236 |
+
{
|
| 237 |
+
"epoch": 0.66,
|
| 238 |
+
"grad_norm": 0.14037635922431946,
|
| 239 |
+
"learning_rate": 0.0019139236694685185,
|
| 240 |
+
"loss": 0.2692,
|
| 241 |
+
"step": 825
|
| 242 |
+
},
|
| 243 |
+
{
|
| 244 |
+
"epoch": 0.68,
|
| 245 |
+
"grad_norm": 0.16915568709373474,
|
| 246 |
+
"learning_rate": 0.0019042316706138988,
|
| 247 |
+
"loss": 0.2692,
|
| 248 |
+
"step": 850
|
| 249 |
+
},
|
| 250 |
+
{
|
| 251 |
+
"epoch": 0.7,
|
| 252 |
+
"grad_norm": 0.1274840533733368,
|
| 253 |
+
"learning_rate": 0.0018940500143580215,
|
| 254 |
+
"loss": 0.2774,
|
| 255 |
+
"step": 875
|
| 256 |
+
},
|
| 257 |
+
{
|
| 258 |
+
"epoch": 0.72,
|
| 259 |
+
"grad_norm": 0.14953529834747314,
|
| 260 |
+
"learning_rate": 0.0018833842142473643,
|
| 261 |
+
"loss": 0.2709,
|
| 262 |
+
"step": 900
|
| 263 |
+
},
|
| 264 |
+
{
|
| 265 |
+
"epoch": 0.74,
|
| 266 |
+
"grad_norm": 0.17669950425624847,
|
| 267 |
+
"learning_rate": 0.0018722400460008438,
|
| 268 |
+
"loss": 0.2674,
|
| 269 |
+
"step": 925
|
| 270 |
+
},
|
| 271 |
+
{
|
| 272 |
+
"epoch": 0.76,
|
| 273 |
+
"grad_norm": 0.137393981218338,
|
| 274 |
+
"learning_rate": 0.0018606235443821603,
|
| 275 |
+
"loss": 0.2647,
|
| 276 |
+
"step": 950
|
| 277 |
+
},
|
| 278 |
+
{
|
| 279 |
+
"epoch": 0.78,
|
| 280 |
+
"grad_norm": 0.1340445578098297,
|
| 281 |
+
"learning_rate": 0.0018485409999318679,
|
| 282 |
+
"loss": 0.2614,
|
| 283 |
+
"step": 975
|
| 284 |
+
},
|
| 285 |
+
{
|
| 286 |
+
"epoch": 0.8,
|
| 287 |
+
"grad_norm": 0.14075660705566406,
|
| 288 |
+
"learning_rate": 0.0018359989555609354,
|
| 289 |
+
"loss": 0.2679,
|
| 290 |
+
"step": 1000
|
| 291 |
+
},
|
| 292 |
+
{
|
| 293 |
+
"epoch": 0.82,
|
| 294 |
+
"grad_norm": 0.20850388705730438,
|
| 295 |
+
"learning_rate": 0.0018230042030076452,
|
| 296 |
+
"loss": 0.2681,
|
| 297 |
+
"step": 1025
|
| 298 |
+
},
|
| 299 |
+
{
|
| 300 |
+
"epoch": 0.84,
|
| 301 |
+
"grad_norm": 0.21324606239795685,
|
| 302 |
+
"learning_rate": 0.0018095637791597462,
|
| 303 |
+
"loss": 0.2577,
|
| 304 |
+
"step": 1050
|
| 305 |
+
},
|
| 306 |
+
{
|
| 307 |
+
"epoch": 0.86,
|
| 308 |
+
"grad_norm": 0.13143779337406158,
|
| 309 |
+
"learning_rate": 0.0017956849622438554,
|
| 310 |
+
"loss": 0.2522,
|
| 311 |
+
"step": 1075
|
| 312 |
+
},
|
| 313 |
+
{
|
| 314 |
+
"epoch": 0.88,
|
| 315 |
+
"grad_norm": 0.12891457974910736,
|
| 316 |
+
"learning_rate": 0.0017813752678841701,
|
| 317 |
+
"loss": 0.2538,
|
| 318 |
+
"step": 1100
|
| 319 |
+
},
|
| 320 |
+
{
|
| 321 |
+
"epoch": 0.9,
|
| 322 |
+
"grad_norm": 0.13101314008235931,
|
| 323 |
+
"learning_rate": 0.0017666424450326248,
|
| 324 |
+
"loss": 0.263,
|
| 325 |
+
"step": 1125
|
| 326 |
+
},
|
| 327 |
+
{
|
| 328 |
+
"epoch": 0.92,
|
| 329 |
+
"grad_norm": 0.12617208063602448,
|
| 330 |
+
"learning_rate": 0.0017514944717726961,
|
| 331 |
+
"loss": 0.2556,
|
| 332 |
+
"step": 1150
|
| 333 |
+
},
|
| 334 |
+
{
|
| 335 |
+
"epoch": 0.94,
|
| 336 |
+
"grad_norm": 0.12779447436332703,
|
| 337 |
+
"learning_rate": 0.001735939550999131,
|
| 338 |
+
"loss": 0.2578,
|
| 339 |
+
"step": 1175
|
| 340 |
+
},
|
| 341 |
+
{
|
| 342 |
+
"epoch": 0.96,
|
| 343 |
+
"grad_norm": 0.10568287968635559,
|
| 344 |
+
"learning_rate": 0.0017199861059759337,
|
| 345 |
+
"loss": 0.2586,
|
| 346 |
+
"step": 1200
|
| 347 |
+
},
|
| 348 |
+
{
|
| 349 |
+
"epoch": 0.98,
|
| 350 |
+
"grad_norm": 0.12661688029766083,
|
| 351 |
+
"learning_rate": 0.0017036427757750204,
|
| 352 |
+
"loss": 0.2604,
|
| 353 |
+
"step": 1225
|
| 354 |
+
},
|
| 355 |
+
{
|
| 356 |
+
"epoch": 1.0,
|
| 357 |
+
"grad_norm": 0.12812583148479462,
|
| 358 |
+
"learning_rate": 0.0016869184105980089,
|
| 359 |
+
"loss": 0.2493,
|
| 360 |
+
"step": 1250
|
| 361 |
+
},
|
| 362 |
+
{
|
| 363 |
+
"epoch": 1.02,
|
| 364 |
+
"grad_norm": 0.11161061376333237,
|
| 365 |
+
"learning_rate": 0.0016698220669836782,
|
| 366 |
+
"loss": 0.2259,
|
| 367 |
+
"step": 1275
|
| 368 |
+
},
|
| 369 |
+
{
|
| 370 |
+
"epoch": 1.04,
|
| 371 |
+
"grad_norm": 0.09668291360139847,
|
| 372 |
+
"learning_rate": 0.0016523630029036931,
|
| 373 |
+
"loss": 0.235,
|
| 374 |
+
"step": 1300
|
| 375 |
+
},
|
| 376 |
+
{
|
| 377 |
+
"epoch": 1.06,
|
| 378 |
+
"grad_norm": 0.13240110874176025,
|
| 379 |
+
"learning_rate": 0.00163455067274925,
|
| 380 |
+
"loss": 0.2315,
|
| 381 |
+
"step": 1325
|
| 382 |
+
},
|
| 383 |
+
{
|
| 384 |
+
"epoch": 1.08,
|
| 385 |
+
"grad_norm": 0.11607251316308975,
|
| 386 |
+
"learning_rate": 0.0016163947222113566,
|
| 387 |
+
"loss": 0.2352,
|
| 388 |
+
"step": 1350
|
| 389 |
+
},
|
| 390 |
+
{
|
| 391 |
+
"epoch": 1.1,
|
| 392 |
+
"grad_norm": 0.08608829230070114,
|
| 393 |
+
"learning_rate": 0.0015979049830575189,
|
| 394 |
+
"loss": 0.2397,
|
| 395 |
+
"step": 1375
|
| 396 |
+
},
|
| 397 |
+
{
|
| 398 |
+
"epoch": 1.12,
|
| 399 |
+
"grad_norm": 0.10748381912708282,
|
| 400 |
+
"learning_rate": 0.001579091467807668,
|
| 401 |
+
"loss": 0.238,
|
| 402 |
+
"step": 1400
|
| 403 |
+
},
|
| 404 |
+
{
|
| 405 |
+
"epoch": 1.1400000000000001,
|
| 406 |
+
"grad_norm": 0.11148440837860107,
|
| 407 |
+
"learning_rate": 0.0015599643643122022,
|
| 408 |
+
"loss": 0.2302,
|
| 409 |
+
"step": 1425
|
| 410 |
+
},
|
| 411 |
+
{
|
| 412 |
+
"epoch": 1.16,
|
| 413 |
+
"grad_norm": 0.10197737067937851,
|
| 414 |
+
"learning_rate": 0.0015405340302350869,
|
| 415 |
+
"loss": 0.2375,
|
| 416 |
+
"step": 1450
|
| 417 |
+
},
|
| 418 |
+
{
|
| 419 |
+
"epoch": 1.18,
|
| 420 |
+
"grad_norm": 0.09502054750919342,
|
| 421 |
+
"learning_rate": 0.0015208109874449978,
|
| 422 |
+
"loss": 0.2336,
|
| 423 |
+
"step": 1475
|
| 424 |
+
},
|
| 425 |
+
{
|
| 426 |
+
"epoch": 1.2,
|
| 427 |
+
"grad_norm": 0.13474340736865997,
|
| 428 |
+
"learning_rate": 0.00150080591631754,
|
| 429 |
+
"loss": 0.2351,
|
| 430 |
+
"step": 1500
|
| 431 |
+
},
|
| 432 |
+
{
|
| 433 |
+
"epoch": 1.22,
|
| 434 |
+
"grad_norm": 0.10017239302396774,
|
| 435 |
+
"learning_rate": 0.0014805296499516406,
|
| 436 |
+
"loss": 0.2395,
|
| 437 |
+
"step": 1525
|
| 438 |
+
},
|
| 439 |
+
{
|
| 440 |
+
"epoch": 1.24,
|
| 441 |
+
"grad_norm": 0.0989546924829483,
|
| 442 |
+
"learning_rate": 0.0014599931683032326,
|
| 443 |
+
"loss": 0.2301,
|
| 444 |
+
"step": 1550
|
| 445 |
+
},
|
| 446 |
+
{
|
| 447 |
+
"epoch": 1.26,
|
| 448 |
+
"grad_norm": 0.09925360232591629,
|
| 449 |
+
"learning_rate": 0.0014392075922394132,
|
| 450 |
+
"loss": 0.2296,
|
| 451 |
+
"step": 1575
|
| 452 |
+
},
|
| 453 |
+
{
|
| 454 |
+
"epoch": 1.28,
|
| 455 |
+
"grad_norm": 0.09363091737031937,
|
| 456 |
+
"learning_rate": 0.0014181841775163014,
|
| 457 |
+
"loss": 0.225,
|
| 458 |
+
"step": 1600
|
| 459 |
+
},
|
| 460 |
+
{
|
| 461 |
+
"epoch": 1.3,
|
| 462 |
+
"grad_norm": 0.09964188933372498,
|
| 463 |
+
"learning_rate": 0.0013969343086838457,
|
| 464 |
+
"loss": 0.2249,
|
| 465 |
+
"step": 1625
|
| 466 |
+
},
|
| 467 |
+
{
|
| 468 |
+
"epoch": 1.32,
|
| 469 |
+
"grad_norm": 0.09907900542020798,
|
| 470 |
+
"learning_rate": 0.001375469492920889,
|
| 471 |
+
"loss": 0.2189,
|
| 472 |
+
"step": 1650
|
| 473 |
+
},
|
| 474 |
+
{
|
| 475 |
+
"epoch": 1.34,
|
| 476 |
+
"grad_norm": 0.0817643478512764,
|
| 477 |
+
"learning_rate": 0.0013538013538038295,
|
| 478 |
+
"loss": 0.2247,
|
| 479 |
+
"step": 1675
|
| 480 |
+
},
|
| 481 |
+
{
|
| 482 |
+
"epoch": 1.3599999999999999,
|
| 483 |
+
"grad_norm": 0.09736189246177673,
|
| 484 |
+
"learning_rate": 0.0013319416250122482,
|
| 485 |
+
"loss": 0.2231,
|
| 486 |
+
"step": 1700
|
| 487 |
+
},
|
| 488 |
+
{
|
| 489 |
+
"epoch": 1.38,
|
| 490 |
+
"grad_norm": 0.09870969504117966,
|
| 491 |
+
"learning_rate": 0.0013099021439749155,
|
| 492 |
+
"loss": 0.2238,
|
| 493 |
+
"step": 1725
|
| 494 |
+
},
|
| 495 |
+
{
|
| 496 |
+
"epoch": 1.4,
|
| 497 |
+
"grad_norm": 0.12818622589111328,
|
| 498 |
+
"learning_rate": 0.001287694845459613,
|
| 499 |
+
"loss": 0.2251,
|
| 500 |
+
"step": 1750
|
| 501 |
+
},
|
| 502 |
+
{
|
| 503 |
+
"epoch": 1.42,
|
| 504 |
+
"grad_norm": 0.09466996788978577,
|
| 505 |
+
"learning_rate": 0.001265331755110247,
|
| 506 |
+
"loss": 0.2252,
|
| 507 |
+
"step": 1775
|
| 508 |
+
},
|
| 509 |
+
{
|
| 510 |
+
"epoch": 1.44,
|
| 511 |
+
"grad_norm": 0.11681800335645676,
|
| 512 |
+
"learning_rate": 0.001242824982934751,
|
| 513 |
+
"loss": 0.2126,
|
| 514 |
+
"step": 1800
|
| 515 |
+
},
|
| 516 |
+
{
|
| 517 |
+
"epoch": 1.46,
|
| 518 |
+
"grad_norm": 0.08363161236047745,
|
| 519 |
+
"learning_rate": 0.0012201867167473015,
|
| 520 |
+
"loss": 0.2192,
|
| 521 |
+
"step": 1825
|
| 522 |
+
},
|
| 523 |
+
{
|
| 524 |
+
"epoch": 1.48,
|
| 525 |
+
"grad_norm": 0.10942152142524719,
|
| 526 |
+
"learning_rate": 0.001197429215568403,
|
| 527 |
+
"loss": 0.224,
|
| 528 |
+
"step": 1850
|
| 529 |
+
},
|
| 530 |
+
{
|
| 531 |
+
"epoch": 1.5,
|
| 532 |
+
"grad_norm": 0.08799609541893005,
|
| 533 |
+
"learning_rate": 0.00117456480298641,
|
| 534 |
+
"loss": 0.2239,
|
| 535 |
+
"step": 1875
|
| 536 |
+
},
|
| 537 |
+
{
|
| 538 |
+
"epoch": 1.52,
|
| 539 |
+
"grad_norm": 0.10763717442750931,
|
| 540 |
+
"learning_rate": 0.0011516058604840891,
|
| 541 |
+
"loss": 0.2209,
|
| 542 |
+
"step": 1900
|
| 543 |
+
},
|
| 544 |
+
{
|
| 545 |
+
"epoch": 1.54,
|
| 546 |
+
"grad_norm": 0.0964960902929306,
|
| 547 |
+
"learning_rate": 0.001128564820733825,
|
| 548 |
+
"loss": 0.2199,
|
| 549 |
+
"step": 1925
|
| 550 |
+
},
|
| 551 |
+
{
|
| 552 |
+
"epoch": 1.56,
|
| 553 |
+
"grad_norm": 0.08642950654029846,
|
| 554 |
+
"learning_rate": 0.001105454160865112,
|
| 555 |
+
"loss": 0.2101,
|
| 556 |
+
"step": 1950
|
| 557 |
+
},
|
| 558 |
+
{
|
| 559 |
+
"epoch": 1.58,
|
| 560 |
+
"grad_norm": 0.10640878975391388,
|
| 561 |
+
"learning_rate": 0.0010822863957079655,
|
| 562 |
+
"loss": 0.2234,
|
| 563 |
+
"step": 1975
|
| 564 |
+
},
|
| 565 |
+
{
|
| 566 |
+
"epoch": 1.6,
|
| 567 |
+
"grad_norm": 0.09686343371868134,
|
| 568 |
+
"learning_rate": 0.0010590740710159229,
|
| 569 |
+
"loss": 0.2231,
|
| 570 |
+
"step": 2000
|
| 571 |
+
},
|
| 572 |
+
{
|
| 573 |
+
"epoch": 1.62,
|
| 574 |
+
"grad_norm": 0.0885419249534607,
|
| 575 |
+
"learning_rate": 0.0010358297566722945,
|
| 576 |
+
"loss": 0.2086,
|
| 577 |
+
"step": 2025
|
| 578 |
+
},
|
| 579 |
+
{
|
| 580 |
+
"epoch": 1.6400000000000001,
|
| 581 |
+
"grad_norm": 0.09378661215305328,
|
| 582 |
+
"learning_rate": 0.0010125660398833528,
|
| 583 |
+
"loss": 0.2177,
|
| 584 |
+
"step": 2050
|
| 585 |
+
},
|
| 586 |
+
{
|
| 587 |
+
"epoch": 1.6600000000000001,
|
| 588 |
+
"grad_norm": 0.0935426726937294,
|
| 589 |
+
"learning_rate": 0.0009892955183621354,
|
| 590 |
+
"loss": 0.2227,
|
| 591 |
+
"step": 2075
|
| 592 |
+
},
|
| 593 |
+
{
|
| 594 |
+
"epoch": 1.6800000000000002,
|
| 595 |
+
"grad_norm": 0.09251715987920761,
|
| 596 |
+
"learning_rate": 0.0009660307935065639,
|
| 597 |
+
"loss": 0.2185,
|
| 598 |
+
"step": 2100
|
| 599 |
+
},
|
| 600 |
+
{
|
| 601 |
+
"epoch": 1.7,
|
| 602 |
+
"grad_norm": 0.09296725690364838,
|
| 603 |
+
"learning_rate": 0.000942784463575562,
|
| 604 |
+
"loss": 0.2172,
|
| 605 |
+
"step": 2125
|
| 606 |
+
},
|
| 607 |
+
{
|
| 608 |
+
"epoch": 1.72,
|
| 609 |
+
"grad_norm": 0.09848319739103317,
|
| 610 |
+
"learning_rate": 0.0009195691168668773,
|
| 611 |
+
"loss": 0.21,
|
| 612 |
+
"step": 2150
|
| 613 |
+
},
|
| 614 |
+
{
|
| 615 |
+
"epoch": 1.74,
|
| 616 |
+
"grad_norm": 0.07870358228683472,
|
| 617 |
+
"learning_rate": 0.0008963973249002967,
|
| 618 |
+
"loss": 0.2269,
|
| 619 |
+
"step": 2175
|
| 620 |
+
},
|
| 621 |
+
{
|
| 622 |
+
"epoch": 1.76,
|
| 623 |
+
"grad_norm": 0.0861450806260109,
|
| 624 |
+
"learning_rate": 0.0008732816356099456,
|
| 625 |
+
"loss": 0.2176,
|
| 626 |
+
"step": 2200
|
| 627 |
+
},
|
| 628 |
+
{
|
| 629 |
+
"epoch": 1.78,
|
| 630 |
+
"grad_norm": 0.09164830297231674,
|
| 631 |
+
"learning_rate": 0.0008502345665493598,
|
| 632 |
+
"loss": 0.2092,
|
| 633 |
+
"step": 2225
|
| 634 |
+
},
|
| 635 |
+
{
|
| 636 |
+
"epoch": 1.8,
|
| 637 |
+
"grad_norm": 0.08600718528032303,
|
| 638 |
+
"learning_rate": 0.0008272685981130109,
|
| 639 |
+
"loss": 0.219,
|
| 640 |
+
"step": 2250
|
| 641 |
+
},
|
| 642 |
+
{
|
| 643 |
+
"epoch": 1.8199999999999998,
|
| 644 |
+
"grad_norm": 0.08620688319206238,
|
| 645 |
+
"learning_rate": 0.0008043961667779519,
|
| 646 |
+
"loss": 0.2103,
|
| 647 |
+
"step": 2275
|
| 648 |
+
},
|
| 649 |
+
{
|
| 650 |
+
"epoch": 1.8399999999999999,
|
| 651 |
+
"grad_norm": 0.09346953779459,
|
| 652 |
+
"learning_rate": 0.0007816296583692448,
|
| 653 |
+
"loss": 0.2135,
|
| 654 |
+
"step": 2300
|
| 655 |
+
},
|
| 656 |
+
{
|
| 657 |
+
"epoch": 1.8599999999999999,
|
| 658 |
+
"grad_norm": 0.08523987978696823,
|
| 659 |
+
"learning_rate": 0.0007589814013528185,
|
| 660 |
+
"loss": 0.2169,
|
| 661 |
+
"step": 2325
|
| 662 |
+
},
|
| 663 |
+
{
|
| 664 |
+
"epoch": 1.88,
|
| 665 |
+
"grad_norm": 0.3015783429145813,
|
| 666 |
+
"learning_rate": 0.0007364636601593875,
|
| 667 |
+
"loss": 0.2085,
|
| 668 |
+
"step": 2350
|
| 669 |
+
},
|
| 670 |
+
{
|
| 671 |
+
"epoch": 1.9,
|
| 672 |
+
"grad_norm": 0.09597095847129822,
|
| 673 |
+
"learning_rate": 0.0007140886285430447,
|
| 674 |
+
"loss": 0.2321,
|
| 675 |
+
"step": 2375
|
| 676 |
+
},
|
| 677 |
+
{
|
| 678 |
+
"epoch": 1.92,
|
| 679 |
+
"grad_norm": 0.08514101058244705,
|
| 680 |
+
"learning_rate": 0.0006918684229781305,
|
| 681 |
+
"loss": 0.2164,
|
| 682 |
+
"step": 2400
|
| 683 |
+
},
|
| 684 |
+
{
|
| 685 |
+
"epoch": 1.94,
|
| 686 |
+
"grad_norm": 0.1003098413348198,
|
| 687 |
+
"learning_rate": 0.0006698150760979462,
|
| 688 |
+
"loss": 0.2087,
|
| 689 |
+
"step": 2425
|
| 690 |
+
},
|
| 691 |
+
{
|
| 692 |
+
"epoch": 1.96,
|
| 693 |
+
"grad_norm": 0.08927905559539795,
|
| 694 |
+
"learning_rate": 0.0006479405301788728,
|
| 695 |
+
"loss": 0.2099,
|
| 696 |
+
"step": 2450
|
| 697 |
+
},
|
| 698 |
+
{
|
| 699 |
+
"epoch": 1.98,
|
| 700 |
+
"grad_norm": 0.09423171728849411,
|
| 701 |
+
"learning_rate": 0.0006262566306734155,
|
| 702 |
+
"loss": 0.2154,
|
| 703 |
+
"step": 2475
|
| 704 |
+
},
|
| 705 |
+
{
|
| 706 |
+
"epoch": 2.0,
|
| 707 |
+
"grad_norm": 0.08049587905406952,
|
| 708 |
+
"learning_rate": 0.0006047751197956838,
|
| 709 |
+
"loss": 0.2115,
|
| 710 |
+
"step": 2500
|
| 711 |
+
},
|
| 712 |
+
{
|
| 713 |
+
"epoch": 2.02,
|
| 714 |
+
"grad_norm": 0.08967506885528564,
|
| 715 |
+
"learning_rate": 0.0005835076301627752,
|
| 716 |
+
"loss": 0.1896,
|
| 717 |
+
"step": 2525
|
| 718 |
+
},
|
| 719 |
+
{
|
| 720 |
+
"epoch": 2.04,
|
| 721 |
+
"grad_norm": 0.10433576256036758,
|
| 722 |
+
"learning_rate": 0.0005624656784955071,
|
| 723 |
+
"loss": 0.1821,
|
| 724 |
+
"step": 2550
|
| 725 |
+
},
|
| 726 |
+
{
|
| 727 |
+
"epoch": 2.06,
|
| 728 |
+
"grad_norm": 0.087051622569561,
|
| 729 |
+
"learning_rate": 0.0005416606593819101,
|
| 730 |
+
"loss": 0.1907,
|
| 731 |
+
"step": 2575
|
| 732 |
+
},
|
| 733 |
+
{
|
| 734 |
+
"epoch": 2.08,
|
| 735 |
+
"grad_norm": 0.10010000318288803,
|
| 736 |
+
"learning_rate": 0.0005211038391068576,
|
| 737 |
+
"loss": 0.181,
|
| 738 |
+
"step": 2600
|
| 739 |
+
},
|
| 740 |
+
{
|
| 741 |
+
"epoch": 2.1,
|
| 742 |
+
"grad_norm": 0.08194483816623688,
|
| 743 |
+
"learning_rate": 0.0005008063495511723,
|
| 744 |
+
"loss": 0.1898,
|
| 745 |
+
"step": 2625
|
| 746 |
+
},
|
| 747 |
+
{
|
| 748 |
+
"epoch": 2.12,
|
| 749 |
+
"grad_norm": 0.08231155574321747,
|
| 750 |
+
"learning_rate": 0.00048077918216351856,
|
| 751 |
+
"loss": 0.1825,
|
| 752 |
+
"step": 2650
|
| 753 |
+
},
|
| 754 |
+
{
|
| 755 |
+
"epoch": 2.14,
|
| 756 |
+
"grad_norm": 0.08368801325559616,
|
| 757 |
+
"learning_rate": 0.0004610331820083378,
|
| 758 |
+
"loss": 0.1903,
|
| 759 |
+
"step": 2675
|
| 760 |
+
},
|
| 761 |
+
{
|
| 762 |
+
"epoch": 2.16,
|
| 763 |
+
"grad_norm": 0.09819929301738739,
|
| 764 |
+
"learning_rate": 0.00044157904189305407,
|
| 765 |
+
"loss": 0.186,
|
| 766 |
+
"step": 2700
|
| 767 |
+
},
|
| 768 |
+
{
|
| 769 |
+
"epoch": 2.18,
|
| 770 |
+
"grad_norm": 0.09869074076414108,
|
| 771 |
+
"learning_rate": 0.0004224272965777326,
|
| 772 |
+
"loss": 0.1913,
|
| 773 |
+
"step": 2725
|
| 774 |
+
},
|
| 775 |
+
{
|
| 776 |
+
"epoch": 2.2,
|
| 777 |
+
"grad_norm": 0.08644938468933105,
|
| 778 |
+
"learning_rate": 0.00040358831707031816,
|
| 779 |
+
"loss": 0.1877,
|
| 780 |
+
"step": 2750
|
| 781 |
+
},
|
| 782 |
+
{
|
| 783 |
+
"epoch": 2.22,
|
| 784 |
+
"grad_norm": 0.08564961701631546,
|
| 785 |
+
"learning_rate": 0.00038507230501055477,
|
| 786 |
+
"loss": 0.1818,
|
| 787 |
+
"step": 2775
|
| 788 |
+
},
|
| 789 |
+
{
|
| 790 |
+
"epoch": 2.24,
|
| 791 |
+
"grad_norm": 0.08038187026977539,
|
| 792 |
+
"learning_rate": 0.00036688928714561443,
|
| 793 |
+
"loss": 0.1822,
|
| 794 |
+
"step": 2800
|
| 795 |
+
},
|
| 796 |
+
{
|
| 797 |
+
"epoch": 2.26,
|
| 798 |
+
"grad_norm": 0.08794587105512619,
|
| 799 |
+
"learning_rate": 0.00034904910990043927,
|
| 800 |
+
"loss": 0.1922,
|
| 801 |
+
"step": 2825
|
| 802 |
+
},
|
| 803 |
+
{
|
| 804 |
+
"epoch": 2.2800000000000002,
|
| 805 |
+
"grad_norm": 0.08452560752630234,
|
| 806 |
+
"learning_rate": 0.0003315614340457279,
|
| 807 |
+
"loss": 0.1798,
|
| 808 |
+
"step": 2850
|
| 809 |
+
},
|
| 810 |
+
{
|
| 811 |
+
"epoch": 2.3,
|
| 812 |
+
"grad_norm": 0.08093718439340591,
|
| 813 |
+
"learning_rate": 0.0003144357294664565,
|
| 814 |
+
"loss": 0.1838,
|
| 815 |
+
"step": 2875
|
| 816 |
+
},
|
| 817 |
+
{
|
| 818 |
+
"epoch": 2.32,
|
| 819 |
+
"grad_norm": 0.08113941550254822,
|
| 820 |
+
"learning_rate": 0.0002976812700337708,
|
| 821 |
+
"loss": 0.1912,
|
| 822 |
+
"step": 2900
|
| 823 |
+
},
|
| 824 |
+
{
|
| 825 |
+
"epoch": 2.34,
|
| 826 |
+
"grad_norm": 0.07102052122354507,
|
| 827 |
+
"learning_rate": 0.00028130712858302065,
|
| 828 |
+
"loss": 0.1807,
|
| 829 |
+
"step": 2925
|
| 830 |
+
},
|
| 831 |
+
{
|
| 832 |
+
"epoch": 2.36,
|
| 833 |
+
"grad_norm": 0.10384222865104675,
|
| 834 |
+
"learning_rate": 0.0002653221720006586,
|
| 835 |
+
"loss": 0.187,
|
| 836 |
+
"step": 2950
|
| 837 |
+
},
|
| 838 |
+
{
|
| 839 |
+
"epoch": 2.38,
|
| 840 |
+
"grad_norm": 0.0866876021027565,
|
| 841 |
+
"learning_rate": 0.00024973505642266624,
|
| 842 |
+
"loss": 0.1802,
|
| 843 |
+
"step": 2975
|
| 844 |
+
},
|
| 845 |
+
{
|
| 846 |
+
"epoch": 2.4,
|
| 847 |
+
"grad_norm": 0.08205445855855942,
|
| 848 |
+
"learning_rate": 0.00023455422254710235,
|
| 849 |
+
"loss": 0.1822,
|
| 850 |
+
"step": 3000
|
| 851 |
+
},
|
| 852 |
+
{
|
| 853 |
+
"epoch": 2.42,
|
| 854 |
+
"grad_norm": 0.08937009423971176,
|
| 855 |
+
"learning_rate": 0.00021978789106331665,
|
| 856 |
+
"loss": 0.1834,
|
| 857 |
+
"step": 3025
|
| 858 |
+
},
|
| 859 |
+
{
|
| 860 |
+
"epoch": 2.44,
|
| 861 |
+
"grad_norm": 0.08640176802873611,
|
| 862 |
+
"learning_rate": 0.00020544405820030164,
|
| 863 |
+
"loss": 0.1867,
|
| 864 |
+
"step": 3050
|
| 865 |
+
},
|
| 866 |
+
{
|
| 867 |
+
"epoch": 2.46,
|
| 868 |
+
"grad_norm": 0.08852454274892807,
|
| 869 |
+
"learning_rate": 0.00019153049139659383,
|
| 870 |
+
"loss": 0.1865,
|
| 871 |
+
"step": 3075
|
| 872 |
+
},
|
| 873 |
+
{
|
| 874 |
+
"epoch": 2.48,
|
| 875 |
+
"grad_norm": 0.09717807173728943,
|
| 876 |
+
"learning_rate": 0.00017805472509406696,
|
| 877 |
+
"loss": 0.1803,
|
| 878 |
+
"step": 3100
|
| 879 |
+
},
|
| 880 |
+
{
|
| 881 |
+
"epoch": 2.5,
|
| 882 |
+
"grad_norm": 0.09453548491001129,
|
| 883 |
+
"learning_rate": 0.00016502405665790132,
|
| 884 |
+
"loss": 0.1888,
|
| 885 |
+
"step": 3125
|
| 886 |
+
},
|
| 887 |
+
{
|
| 888 |
+
"epoch": 2.52,
|
| 889 |
+
"grad_norm": 0.08937527984380722,
|
| 890 |
+
"learning_rate": 0.00015244554242493024,
|
| 891 |
+
"loss": 0.1801,
|
| 892 |
+
"step": 3150
|
| 893 |
+
},
|
| 894 |
+
{
|
| 895 |
+
"epoch": 2.54,
|
| 896 |
+
"grad_norm": 0.09483719617128372,
|
| 897 |
+
"learning_rate": 0.000140325993882509,
|
| 898 |
+
"loss": 0.1845,
|
| 899 |
+
"step": 3175
|
| 900 |
+
},
|
| 901 |
+
{
|
| 902 |
+
"epoch": 2.56,
|
| 903 |
+
"grad_norm": 0.07975872606039047,
|
| 904 |
+
"learning_rate": 0.00012867197397997533,
|
| 905 |
+
"loss": 0.1804,
|
| 906 |
+
"step": 3200
|
| 907 |
+
},
|
| 908 |
+
{
|
| 909 |
+
"epoch": 2.58,
|
| 910 |
+
"grad_norm": 0.08552587777376175,
|
| 911 |
+
"learning_rate": 0.00011748979357469624,
|
| 912 |
+
"loss": 0.1769,
|
| 913 |
+
"step": 3225
|
| 914 |
+
},
|
| 915 |
+
{
|
| 916 |
+
"epoch": 2.6,
|
| 917 |
+
"grad_norm": 0.09845510870218277,
|
| 918 |
+
"learning_rate": 0.00010678550801462661,
|
| 919 |
+
"loss": 0.1791,
|
| 920 |
+
"step": 3250
|
| 921 |
+
},
|
| 922 |
+
{
|
| 923 |
+
"epoch": 2.62,
|
| 924 |
+
"grad_norm": 0.08743355423212051,
|
| 925 |
+
"learning_rate": 9.656491385923216e-05,
|
| 926 |
+
"loss": 0.1807,
|
| 927 |
+
"step": 3275
|
| 928 |
+
},
|
| 929 |
+
{
|
| 930 |
+
"epoch": 2.64,
|
| 931 |
+
"grad_norm": 0.08147986233234406,
|
| 932 |
+
"learning_rate": 8.683354574055058e-05,
|
| 933 |
+
"loss": 0.18,
|
| 934 |
+
"step": 3300
|
| 935 |
+
},
|
| 936 |
+
{
|
| 937 |
+
"epoch": 2.66,
|
| 938 |
+
"grad_norm": 0.0853717103600502,
|
| 939 |
+
"learning_rate": 7.759667336609011e-05,
|
| 940 |
+
"loss": 0.1828,
|
| 941 |
+
"step": 3325
|
| 942 |
+
},
|
| 943 |
+
{
|
| 944 |
+
"epoch": 2.68,
|
| 945 |
+
"grad_norm": 0.09387767314910889,
|
| 946 |
+
"learning_rate": 6.885929866518948e-05,
|
| 947 |
+
"loss": 0.1853,
|
| 948 |
+
"step": 3350
|
| 949 |
+
},
|
| 950 |
+
{
|
| 951 |
+
"epoch": 2.7,
|
| 952 |
+
"grad_norm": 0.08295658230781555,
|
| 953 |
+
"learning_rate": 6.062615308038599e-05,
|
| 954 |
+
"loss": 0.1807,
|
| 955 |
+
"step": 3375
|
| 956 |
+
},
|
| 957 |
+
{
|
| 958 |
+
"epoch": 2.7199999999999998,
|
| 959 |
+
"grad_norm": 0.11373701691627502,
|
| 960 |
+
"learning_rate": 5.290169500525577e-05,
|
| 961 |
+
"loss": 0.192,
|
| 962 |
+
"step": 3400
|
| 963 |
+
},
|
| 964 |
+
{
|
| 965 |
+
"epoch": 2.74,
|
| 966 |
+
"grad_norm": 0.0731569230556488,
|
| 967 |
+
"learning_rate": 4.569010737011381e-05,
|
| 968 |
+
"loss": 0.1833,
|
| 969 |
+
"step": 3425
|
| 970 |
+
},
|
| 971 |
+
{
|
| 972 |
+
"epoch": 2.76,
|
| 973 |
+
"grad_norm": 0.08211272954940796,
|
| 974 |
+
"learning_rate": 3.8995295376884954e-05,
|
| 975 |
+
"loss": 0.1728,
|
| 976 |
+
"step": 3450
|
| 977 |
+
},
|
| 978 |
+
{
|
| 979 |
+
"epoch": 2.7800000000000002,
|
| 980 |
+
"grad_norm": 0.09878759831190109,
|
| 981 |
+
"learning_rate": 3.2820884384367145e-05,
|
| 982 |
+
"loss": 0.1777,
|
| 983 |
+
"step": 3475
|
| 984 |
+
},
|
| 985 |
+
{
|
| 986 |
+
"epoch": 2.8,
|
| 987 |
+
"grad_norm": 0.09063831716775894,
|
| 988 |
+
"learning_rate": 2.7170217945036156e-05,
|
| 989 |
+
"loss": 0.179,
|
| 990 |
+
"step": 3500
|
| 991 |
+
},
|
| 992 |
+
{
|
| 993 |
+
"epoch": 2.82,
|
| 994 |
+
"grad_norm": 0.0808299332857132,
|
| 995 |
+
"learning_rate": 2.2046355994452594e-05,
|
| 996 |
+
"loss": 0.181,
|
| 997 |
+
"step": 3525
|
| 998 |
+
},
|
| 999 |
+
{
|
| 1000 |
+
"epoch": 2.84,
|
| 1001 |
+
"grad_norm": 0.09642569720745087,
|
| 1002 |
+
"learning_rate": 1.745207319425324e-05,
|
| 1003 |
+
"loss": 0.177,
|
| 1004 |
+
"step": 3550
|
| 1005 |
+
},
|
| 1006 |
+
{
|
| 1007 |
+
"epoch": 2.86,
|
| 1008 |
+
"grad_norm": 0.09298644214868546,
|
| 1009 |
+
"learning_rate": 1.338985742962251e-05,
|
| 1010 |
+
"loss": 0.1752,
|
| 1011 |
+
"step": 3575
|
| 1012 |
+
},
|
| 1013 |
+
{
|
| 1014 |
+
"epoch": 2.88,
|
| 1015 |
+
"grad_norm": 0.08527287095785141,
|
| 1016 |
+
"learning_rate": 9.8619084620587e-06,
|
| 1017 |
+
"loss": 0.1824,
|
| 1018 |
+
"step": 3600
|
| 1019 |
+
},
|
| 1020 |
+
{
|
| 1021 |
+
"epoch": 2.9,
|
| 1022 |
+
"grad_norm": 0.09812653809785843,
|
| 1023 |
+
"learning_rate": 6.870136738164612e-06,
|
| 1024 |
+
"loss": 0.1835,
|
| 1025 |
+
"step": 3625
|
| 1026 |
+
},
|
| 1027 |
+
{
|
| 1028 |
+
"epoch": 2.92,
|
| 1029 |
+
"grad_norm": 0.10046068578958511,
|
| 1030 |
+
"learning_rate": 4.416162355107001e-06,
|
| 1031 |
+
"loss": 0.1799,
|
| 1032 |
+
"step": 3650
|
| 1033 |
+
},
|
| 1034 |
+
{
|
| 1035 |
+
"epoch": 2.94,
|
| 1036 |
+
"grad_norm": 0.08379827439785004,
|
| 1037 |
+
"learning_rate": 2.5013141833046815e-06,
|
| 1038 |
+
"loss": 0.1742,
|
| 1039 |
+
"step": 3675
|
| 1040 |
+
},
|
| 1041 |
+
{
|
| 1042 |
+
"epoch": 2.96,
|
| 1043 |
+
"grad_norm": 0.10984520614147186,
|
| 1044 |
+
"learning_rate": 1.1266291468229327e-06,
|
| 1045 |
+
"loss": 0.1771,
|
| 1046 |
+
"step": 3700
|
| 1047 |
+
},
|
| 1048 |
+
{
|
| 1049 |
+
"epoch": 2.98,
|
| 1050 |
+
"grad_norm": 0.07823975384235382,
|
| 1051 |
+
"learning_rate": 2.928516618599941e-07,
|
| 1052 |
+
"loss": 0.1748,
|
| 1053 |
+
"step": 3725
|
| 1054 |
+
},
|
| 1055 |
+
{
|
| 1056 |
+
"epoch": 3.0,
|
| 1057 |
+
"grad_norm": 0.07705021649599075,
|
| 1058 |
+
"learning_rate": 4.332336324130992e-10,
|
| 1059 |
+
"loss": 0.1822,
|
| 1060 |
+
"step": 3750
|
| 1061 |
+
},
|
| 1062 |
+
{
|
| 1063 |
+
"epoch": 3.0,
|
| 1064 |
+
"step": 3750,
|
| 1065 |
+
"total_flos": 2.43882352705536e+18,
|
| 1066 |
+
"train_loss": 0.24114867858886718,
|
| 1067 |
+
"train_runtime": 3465.2523,
|
| 1068 |
+
"train_samples_per_second": 34.63,
|
| 1069 |
+
"train_steps_per_second": 1.082
|
| 1070 |
+
}
|
| 1071 |
+
],
|
| 1072 |
+
"logging_steps": 25,
|
| 1073 |
+
"max_steps": 3750,
|
| 1074 |
+
"num_input_tokens_seen": 0,
|
| 1075 |
+
"num_train_epochs": 3,
|
| 1076 |
+
"save_steps": 0,
|
| 1077 |
+
"stateful_callbacks": {
|
| 1078 |
+
"TrainerControl": {
|
| 1079 |
+
"args": {
|
| 1080 |
+
"should_epoch_stop": false,
|
| 1081 |
+
"should_evaluate": false,
|
| 1082 |
+
"should_log": false,
|
| 1083 |
+
"should_save": true,
|
| 1084 |
+
"should_training_stop": true
|
| 1085 |
+
},
|
| 1086 |
+
"attributes": {}
|
| 1087 |
+
}
|
| 1088 |
+
},
|
| 1089 |
+
"total_flos": 2.43882352705536e+18,
|
| 1090 |
+
"train_batch_size": 32,
|
| 1091 |
+
"trial_name": null,
|
| 1092 |
+
"trial_params": null
|
| 1093 |
+
}
|
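As with the other runs, the log_history in the trainer_state.json above holds one entry every 25 optimizer steps (loss, learning_rate, grad_norm) plus a final run-level summary. A minimal sketch for pulling out the loss curve, assuming the path used in this upload:

import json

STATE_PATH = "nl_tasks/exps/run_ex25/trainer_state.json"  # path as stored in this repo

with open(STATE_PATH) as f:
    history = json.load(f)["log_history"]

# The closing summary entry has no "loss" key, so filtering on it keeps only the
# per-25-step logging entries.
curve = [(e["step"], e["loss"]) for e in history if "loss" in e]
print(curve[0], curve[-1])  # (25, 0.8066) ... (3750, 0.1822) for this run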
nl_tasks/exps/run_ex26/ft/adapter_config.json
ADDED
|
@@ -0,0 +1,18 @@
|
| 1 |
+
{
|
| 2 |
+
"T": 1.0,
|
| 3 |
+
"base_model_name_or_path": "meta-llama/Llama-2-7b-hf",
|
| 4 |
+
"bias": "none",
|
| 5 |
+
"inference_mode": false,
|
| 6 |
+
"layers_to_transform": null,
|
| 7 |
+
"modules_to_save": null,
|
| 8 |
+
"num_rotations": 1,
|
| 9 |
+
"peft_type": "ROTATION",
|
| 10 |
+
"r": 16,
|
| 11 |
+
"revision": null,
|
| 12 |
+
"target_modules": [
|
| 13 |
+
"v_proj",
|
| 14 |
+
"q_proj"
|
| 15 |
+
],
|
| 16 |
+
"target_modules_to_skip": null,
|
| 17 |
+
"task_type": "CAUSAL_LM"
|
| 18 |
+
}
|
nl_tasks/exps/run_ex26/ft/special_tokens_map.json
ADDED
|
@@ -0,0 +1,24 @@
|
| 1 |
+
{
|
| 2 |
+
"bos_token": {
|
| 3 |
+
"content": "<s>",
|
| 4 |
+
"lstrip": false,
|
| 5 |
+
"normalized": false,
|
| 6 |
+
"rstrip": false,
|
| 7 |
+
"single_word": false
|
| 8 |
+
},
|
| 9 |
+
"eos_token": {
|
| 10 |
+
"content": "</s>",
|
| 11 |
+
"lstrip": false,
|
| 12 |
+
"normalized": false,
|
| 13 |
+
"rstrip": false,
|
| 14 |
+
"single_word": false
|
| 15 |
+
},
|
| 16 |
+
"pad_token": "<unk>",
|
| 17 |
+
"unk_token": {
|
| 18 |
+
"content": "<unk>",
|
| 19 |
+
"lstrip": false,
|
| 20 |
+
"normalized": false,
|
| 21 |
+
"rstrip": false,
|
| 22 |
+
"single_word": false
|
| 23 |
+
}
|
| 24 |
+
}
|
nl_tasks/exps/run_ex26/ft/tokenizer.json
ADDED
|
The diff for this file is too large to render.
See raw diff
|
nl_tasks/exps/run_ex26/ft/tokenizer.model
ADDED
|
@@ -0,0 +1,3 @@
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
|
| 3 |
+
size 499723
|
nl_tasks/exps/run_ex26/ft/tokenizer_config.json
ADDED
|
@@ -0,0 +1,43 @@
|
| 1 |
+
{
|
| 2 |
+
"add_bos_token": true,
|
| 3 |
+
"add_eos_token": false,
|
| 4 |
+
"add_prefix_space": null,
|
| 5 |
+
"added_tokens_decoder": {
|
| 6 |
+
"0": {
|
| 7 |
+
"content": "<unk>",
|
| 8 |
+
"lstrip": false,
|
| 9 |
+
"normalized": false,
|
| 10 |
+
"rstrip": false,
|
| 11 |
+
"single_word": false,
|
| 12 |
+
"special": true
|
| 13 |
+
},
|
| 14 |
+
"1": {
|
| 15 |
+
"content": "<s>",
|
| 16 |
+
"lstrip": false,
|
| 17 |
+
"normalized": false,
|
| 18 |
+
"rstrip": false,
|
| 19 |
+
"single_word": false,
|
| 20 |
+
"special": true
|
| 21 |
+
},
|
| 22 |
+
"2": {
|
| 23 |
+
"content": "</s>",
|
| 24 |
+
"lstrip": false,
|
| 25 |
+
"normalized": false,
|
| 26 |
+
"rstrip": false,
|
| 27 |
+
"single_word": false,
|
| 28 |
+
"special": true
|
| 29 |
+
}
|
| 30 |
+
},
|
| 31 |
+
"bos_token": "<s>",
|
| 32 |
+
"clean_up_tokenization_spaces": false,
|
| 33 |
+
"eos_token": "</s>",
|
| 34 |
+
"extra_special_tokens": {},
|
| 35 |
+
"legacy": false,
|
| 36 |
+
"model_max_length": 512,
|
| 37 |
+
"pad_token": "<unk>",
|
| 38 |
+
"padding_side": "right",
|
| 39 |
+
"sp_model_kwargs": {},
|
| 40 |
+
"tokenizer_class": "LlamaTokenizer",
|
| 41 |
+
"unk_token": "<unk>",
|
| 42 |
+
"use_default_system_prompt": false
|
| 43 |
+
}
|
nl_tasks/exps/run_ex26/ft2/adapter_config.json
ADDED
|
@@ -0,0 +1,18 @@
|
| 1 |
+
{
|
| 2 |
+
"T": 1.0,
|
| 3 |
+
"base_model_name_or_path": "meta-llama/Llama-2-7b-hf",
|
| 4 |
+
"bias": "none",
|
| 5 |
+
"inference_mode": true,
|
| 6 |
+
"layers_to_transform": null,
|
| 7 |
+
"modules_to_save": null,
|
| 8 |
+
"num_rotations": 1,
|
| 9 |
+
"peft_type": "ROTATION",
|
| 10 |
+
"r": 16,
|
| 11 |
+
"revision": null,
|
| 12 |
+
"target_modules": [
|
| 13 |
+
"v_proj",
|
| 14 |
+
"q_proj"
|
| 15 |
+
],
|
| 16 |
+
"target_modules_to_skip": null,
|
| 17 |
+
"task_type": "CAUSAL_LM"
|
| 18 |
+
}
|
nl_tasks/exps/run_ex26/ft2/adapter_model.bin
ADDED
|
@@ -0,0 +1,3 @@
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:93bc9ae21a36df9047de9bf8857c7f1171e5589422f1934b10a15b42ec3323e2
|
| 3 |
+
size 33602915
|
nl_tasks/exps/run_ex26/trainer_state.json
ADDED
|
@@ -0,0 +1,1093 @@
+{
+  "best_global_step": null,
+  "best_metric": null,
+  "best_model_checkpoint": null,
+  "epoch": 3.0,
+  "eval_steps": 500,
+  "global_step": 3750,
+  "is_hyper_param_search": false,
+  "is_local_process_zero": true,
+  "is_world_process_zero": true,
+  "log_history": [
+    {"epoch": 0.02, "grad_norm": 0.308439701795578, "learning_rate": 0.00032, "loss": 0.7082, "step": 25},
+    {"epoch": 0.04, "grad_norm": 0.4491778016090393, "learning_rate": 0.0006533333333333332, "loss": 0.4048, "step": 50},
+    {"epoch": 0.06, "grad_norm": 0.205202117562294, "learning_rate": 0.0009866666666666667, "loss": 0.3748, "step": 75},
+    {"epoch": 0.08, "grad_norm": 0.20341382920742035, "learning_rate": 0.00132, "loss": 0.3515, "step": 100},
+    {"epoch": 0.1, "grad_norm": 0.38580241799354553, "learning_rate": 0.0016533333333333333, "loss": 0.3459, "step": 125},
+    {"epoch": 0.12, "grad_norm": 1.0493764877319336, "learning_rate": 0.0019866666666666665, "loss": 0.3969, "step": 150},
+    {"epoch": 0.14, "grad_norm": 0.38665246963500977, "learning_rate": 0.00232, "loss": 0.4444, "step": 175},
+    {"epoch": 0.16, "grad_norm": 0.3482854664325714, "learning_rate": 0.002653333333333333, "loss": 0.3755, "step": 200},
+    {"epoch": 0.18, "grad_norm": 0.33611181378364563, "learning_rate": 0.002986666666666667, "loss": 0.3769, "step": 225},
+    {"epoch": 0.2, "grad_norm": 0.2831563949584961, "learning_rate": 0.00332, "loss": 0.3664, "step": 250},
+    {"epoch": 0.22, "grad_norm": 0.48948103189468384, "learning_rate": 0.0036533333333333335, "loss": 0.4322, "step": 275},
+    {"epoch": 0.24, "grad_norm": 0.44456496834754944, "learning_rate": 0.003986666666666667, "loss": 0.359, "step": 300},
+    {"epoch": 0.26, "grad_norm": 0.23869431018829346, "learning_rate": 0.00432, "loss": 0.3418, "step": 325},
+    {"epoch": 0.28, "grad_norm": 0.18535733222961426, "learning_rate": 0.004653333333333333, "loss": 0.3417, "step": 350},
+    {"epoch": 0.3, "grad_norm": 0.18292084336280823, "learning_rate": 0.004986666666666666, "loss": 0.3265, "step": 375},
+    {"epoch": 0.32, "grad_norm": 0.14866161346435547, "learning_rate": 0.004999376169470306, "loss": 0.3215, "step": 400},
+    {"epoch": 0.34, "grad_norm": 0.12140727043151855, "learning_rate": 0.004997399965737237, "loss": 0.3041, "step": 425},
+    {"epoch": 0.36, "grad_norm": 0.12332931160926819, "learning_rate": 0.0049940713758658975, "loss": 0.3055, "step": 450},
+    {"epoch": 0.38, "grad_norm": 0.13055121898651123, "learning_rate": 0.004989392202346423, "loss": 0.2984, "step": 475},
+    {"epoch": 0.4, "grad_norm": 0.12080452591180801, "learning_rate": 0.004983364979033819, "loss": 0.2944, "step": 500},
+    {"epoch": 0.42, "grad_norm": 0.094502754509449, "learning_rate": 0.004975992969775836, "loss": 0.2931, "step": 525},
+    {"epoch": 0.44, "grad_norm": 0.13612881302833557, "learning_rate": 0.004967280166645538, "loss": 0.2814, "step": 550},
+    {"epoch": 0.46, "grad_norm": 0.1155838817358017, "learning_rate": 0.004957231287779522, "loss": 0.3145, "step": 575},
+    {"epoch": 0.48, "grad_norm": 0.1393253207206726, "learning_rate": 0.004945851774822969, "loss": 0.2847, "step": 600},
+    {"epoch": 0.5, "grad_norm": 0.09930860251188278, "learning_rate": 0.0049331477899828904, "loss": 0.2955, "step": 625},
+    {"epoch": 0.52, "grad_norm": 0.08679531514644623, "learning_rate": 0.0049191262126911895, "loss": 0.2871, "step": 650},
+    {"epoch": 0.54, "grad_norm": 0.06685009598731995, "learning_rate": 0.004903794635879325, "loss": 0.2745, "step": 675},
+    {"epoch": 0.56, "grad_norm": 0.09721576422452927, "learning_rate": 0.004887161361866608, "loss": 0.2748, "step": 700},
+    {"epoch": 0.58, "grad_norm": 0.11243274807929993, "learning_rate": 0.004869235397864343, "loss": 0.2826, "step": 725},
+    {"epoch": 0.6, "grad_norm": 0.10606742650270462, "learning_rate": 0.00485002645109827, "loss": 0.2782, "step": 750},
+    {"epoch": 0.62, "grad_norm": 0.09463471174240112, "learning_rate": 0.004829544923551931, "loss": 0.268, "step": 775},
+    {"epoch": 0.64, "grad_norm": 0.08187498897314072, "learning_rate": 0.004807801906333809, "loss": 0.2718, "step": 800},
+    {"epoch": 0.66, "grad_norm": 0.07924027740955353, "learning_rate": 0.004784809173671296, "loss": 0.2642, "step": 825},
+    {"epoch": 0.68, "grad_norm": 0.08812413364648819, "learning_rate": 0.004760579176534747, "loss": 0.2668, "step": 850},
+    {"epoch": 0.7, "grad_norm": 0.06769583374261856, "learning_rate": 0.004735125035895054, "loss": 0.2744, "step": 875},
+    {"epoch": 0.72, "grad_norm": 0.07282879948616028, "learning_rate": 0.004708460535618411, "loss": 0.2693, "step": 900},
+    {"epoch": 0.74, "grad_norm": 0.07845976203680038, "learning_rate": 0.0046806001150021094, "loss": 0.264, "step": 925},
+    {"epoch": 0.76, "grad_norm": 0.07892794162034988, "learning_rate": 0.004651558860955401, "loss": 0.2638, "step": 950},
+    {"epoch": 0.78, "grad_norm": 0.06784877926111221, "learning_rate": 0.004621352499829669, "loss": 0.2594, "step": 975},
+    {"epoch": 0.8, "grad_norm": 0.09675692021846771, "learning_rate": 0.0045899973889023385, "loss": 0.2654, "step": 1000},
+    {"epoch": 0.82, "grad_norm": 0.08054903894662857, "learning_rate": 0.004557510507519113, "loss": 0.2652, "step": 1025},
+    {"epoch": 0.84, "grad_norm": 0.07471887022256851, "learning_rate": 0.004523909447899366, "loss": 0.2558, "step": 1050},
+    {"epoch": 0.86, "grad_norm": 0.07312784343957901, "learning_rate": 0.004489212405609638, "loss": 0.2532, "step": 1075},
+    {"epoch": 0.88, "grad_norm": 0.08276063948869705, "learning_rate": 0.004453438169710425, "loss": 0.2523, "step": 1100},
+    {"epoch": 0.9, "grad_norm": 0.07462652772665024, "learning_rate": 0.004416606112581562, "loss": 0.2619, "step": 1125},
+    {"epoch": 0.92, "grad_norm": 0.06282167881727219, "learning_rate": 0.0043787361794317406, "loss": 0.2544, "step": 1150},
+    {"epoch": 0.94, "grad_norm": 0.06843234598636627, "learning_rate": 0.004339848877497827, "loss": 0.2573, "step": 1175},
+    {"epoch": 0.96, "grad_norm": 0.05832453444600105, "learning_rate": 0.004299965264939834, "loss": 0.2563, "step": 1200},
+    {"epoch": 0.98, "grad_norm": 0.07380504906177521, "learning_rate": 0.004259106939437551, "loss": 0.2604, "step": 1225},
+    {"epoch": 1.0, "grad_norm": 0.0749950185418129, "learning_rate": 0.004217296026495022, "loss": 0.2478, "step": 1250},
+    {"epoch": 1.02, "grad_norm": 0.0739532932639122, "learning_rate": 0.004174555167459195, "loss": 0.2262, "step": 1275},
+    {"epoch": 1.04, "grad_norm": 0.055929724127054214, "learning_rate": 0.0041309075072592325, "loss": 0.2335, "step": 1300},
+    {"epoch": 1.06, "grad_norm": 0.06944834440946579, "learning_rate": 0.004086376681873125, "loss": 0.23, "step": 1325},
+    {"epoch": 1.08, "grad_norm": 0.06275296211242676, "learning_rate": 0.004040986805528391, "loss": 0.237, "step": 1350},
+    {"epoch": 1.1, "grad_norm": 0.052883587777614594, "learning_rate": 0.003994762457643798, "loss": 0.2398, "step": 1375},
+    {"epoch": 1.12, "grad_norm": 0.06906875967979431, "learning_rate": 0.00394772866951917, "loss": 0.2365, "step": 1400},
+    {"epoch": 1.1400000000000001, "grad_norm": 0.07487814128398895, "learning_rate": 0.003899910910780505, "loss": 0.2292, "step": 1425},
+    {"epoch": 1.16, "grad_norm": 0.060511570423841476, "learning_rate": 0.0038513350755877175, "loss": 0.2362, "step": 1450},
+    {"epoch": 1.18, "grad_norm": 0.07363221794366837, "learning_rate": 0.0038020274686124947, "loss": 0.2331, "step": 1475},
+    {"epoch": 1.2, "grad_norm": 0.06942912936210632, "learning_rate": 0.0037520147907938502, "loss": 0.2346, "step": 1500},
+    {"epoch": 1.22, "grad_norm": 0.05655023828148842, "learning_rate": 0.003701324124879102, "loss": 0.2382, "step": 1525},
+    {"epoch": 1.24, "grad_norm": 0.05511761084198952, "learning_rate": 0.0036499829207580813, "loss": 0.2301, "step": 1550},
+    {"epoch": 1.26, "grad_norm": 0.05750414729118347, "learning_rate": 0.003598018980598533, "loss": 0.2287, "step": 1575},
+    {"epoch": 1.28, "grad_norm": 0.05184759199619293, "learning_rate": 0.0035454604437907536, "loss": 0.2248, "step": 1600},
+    {"epoch": 1.3, "grad_norm": 0.05860360339283943, "learning_rate": 0.0034923357717096142, "loss": 0.2265, "step": 1625},
+    {"epoch": 1.32, "grad_norm": 0.059097886085510254, "learning_rate": 0.0034386737323022225, "loss": 0.2201, "step": 1650},
+    {"epoch": 1.34, "grad_norm": 0.04928320646286011, "learning_rate": 0.0033845033845095736, "loss": 0.2244, "step": 1675},
+    {"epoch": 1.3599999999999999, "grad_norm": 0.05335696414113045, "learning_rate": 0.0033298540625306206, "loss": 0.2238, "step": 1700},
+    {"epoch": 1.38, "grad_norm": 0.057368285953998566, "learning_rate": 0.0032747553599372886, "loss": 0.2242, "step": 1725},
+    {"epoch": 1.4, "grad_norm": 0.06468666344881058, "learning_rate": 0.0032192371136490323, "loss": 0.2243, "step": 1750},
+    {"epoch": 1.42, "grad_norm": 0.057294175028800964, "learning_rate": 0.0031633293877756175, "loss": 0.2262, "step": 1775},
+    {"epoch": 1.44, "grad_norm": 0.05811116099357605, "learning_rate": 0.0031070624573368774, "loss": 0.2128, "step": 1800},
+    {"epoch": 1.46, "grad_norm": 0.04844126105308533, "learning_rate": 0.0030504667918682536, "loss": 0.2191, "step": 1825},
+    {"epoch": 1.48, "grad_norm": 0.06403350085020065, "learning_rate": 0.0029935730389210077, "loss": 0.2238, "step": 1850},
+    {"epoch": 1.5, "grad_norm": 0.059780120849609375, "learning_rate": 0.002936412007466025, "loss": 0.2242, "step": 1875},
+    {"epoch": 1.52, "grad_norm": 0.06413201242685318, "learning_rate": 0.002879014651210223, "loss": 0.2215, "step": 1900},
+    {"epoch": 1.54, "grad_norm": 0.056221649050712585, "learning_rate": 0.002821412051834563, "loss": 0.2206, "step": 1925},
+    {"epoch": 1.56, "grad_norm": 0.05171119421720505, "learning_rate": 0.00276363540216278, "loss": 0.21, "step": 1950},
+    {"epoch": 1.58, "grad_norm": 0.05907101929187775, "learning_rate": 0.002705715989269914, "loss": 0.2226, "step": 1975},
+    {"epoch": 1.6, "grad_norm": 0.055108439177274704, "learning_rate": 0.002647685177539807, "loss": 0.2226, "step": 2000},
+    {"epoch": 1.62, "grad_norm": 0.05309450253844261, "learning_rate": 0.0025895743916807363, "loss": 0.2087, "step": 2025},
+    {"epoch": 1.6400000000000001, "grad_norm": 0.05426125228404999, "learning_rate": 0.0025314150997083817, "loss": 0.2185, "step": 2050},
+    {"epoch": 1.6600000000000001, "grad_norm": 0.0457584485411644, "learning_rate": 0.0024732387959053386, "loss": 0.2227, "step": 2075},
+    {"epoch": 1.6800000000000002, "grad_norm": 0.05757022649049759, "learning_rate": 0.00241507698376641, "loss": 0.2193, "step": 2100},
+    {"epoch": 1.7, "grad_norm": 0.05329664424061775, "learning_rate": 0.002356961158938905, "loss": 0.2184, "step": 2125},
+    {"epoch": 1.72, "grad_norm": 0.05725402012467384, "learning_rate": 0.002298922792167193, "loss": 0.2099, "step": 2150},
+    {"epoch": 1.74, "grad_norm": 0.04788336530327797, "learning_rate": 0.002240993312250742, "loss": 0.226, "step": 2175},
+    {"epoch": 1.76, "grad_norm": 0.04869900271296501, "learning_rate": 0.002183204089024864, "loss": 0.2172, "step": 2200},
+    {"epoch": 1.78, "grad_norm": 0.05592982470989227, "learning_rate": 0.0021255864163733994, "loss": 0.2084, "step": 2225},
+    {"epoch": 1.8, "grad_norm": 0.049687620252370834, "learning_rate": 0.0020681714952825274, "loss": 0.2188, "step": 2250},
+    {"epoch": 1.8199999999999998, "grad_norm": 0.04963243752717972, "learning_rate": 0.00201099041694488, "loss": 0.2109, "step": 2275},
+    {"epoch": 1.8399999999999999, "grad_norm": 0.05767963454127312, "learning_rate": 0.0019540741459231122, "loss": 0.2138, "step": 2300},
+    {"epoch": 1.8599999999999999, "grad_norm": 0.04665152728557587, "learning_rate": 0.0018974535033820464, "loss": 0.2168, "step": 2325},
+    {"epoch": 1.88, "grad_norm": 0.052285660058259964, "learning_rate": 0.0018411591503984686, "loss": 0.2088, "step": 2350},
+    {"epoch": 1.9, "grad_norm": 0.056856900453567505, "learning_rate": 0.001785221571357612, "loss": 0.232, "step": 2375},
+    {"epoch": 1.92, "grad_norm": 0.05064110830426216, "learning_rate": 0.0017296710574453262, "loss": 0.2169, "step": 2400},
+    {"epoch": 1.94, "grad_norm": 0.056195858865976334, "learning_rate": 0.0016745376902448656, "loss": 0.2089, "step": 2425},
+    {"epoch": 1.96, "grad_norm": 0.0578073114156723, "learning_rate": 0.0016198513254471819, "loss": 0.2105, "step": 2450},
+    {"epoch": 1.98, "grad_norm": 0.04981910437345505, "learning_rate": 0.0015656415766835386, "loss": 0.2156, "step": 2475},
+    {"epoch": 2.0, "grad_norm": 0.0438181608915329, "learning_rate": 0.0015119377994892095, "loss": 0.2116, "step": 2500},
+    {"epoch": 2.02, "grad_norm": 0.05480600520968437, "learning_rate": 0.001458769075406938, "loss": 0.1884, "step": 2525},
+    {"epoch": 2.04, "grad_norm": 0.05522935092449188, "learning_rate": 0.0014061641962387678, "loss": 0.1811, "step": 2550},
+    {"epoch": 2.06, "grad_norm": 0.057728227227926254, "learning_rate": 0.0013541516484547754, "loss": 0.1887, "step": 2575},
+    {"epoch": 2.08, "grad_norm": 0.056935157626867294, "learning_rate": 0.001302759597767144, "loss": 0.1803, "step": 2600},
+    {"epoch": 2.1, "grad_norm": 0.04977869242429733, "learning_rate": 0.0012520158738779307, "loss": 0.1889, "step": 2625},
+    {"epoch": 2.12, "grad_norm": 0.050365712493658066, "learning_rate": 0.0012019479554087964, "loss": 0.1809, "step": 2650},
+    {"epoch": 2.14, "grad_norm": 0.04901596158742905, "learning_rate": 0.0011525829550208444, "loss": 0.1895, "step": 2675},
+    {"epoch": 2.16, "grad_norm": 0.05424964427947998, "learning_rate": 0.0011039476047326351, "loss": 0.1852, "step": 2700},
+    {"epoch": 2.18, "grad_norm": 0.052948519587516785, "learning_rate": 0.0010560682414443316, "loss": 0.1908, "step": 2725},
+    {"epoch": 2.2, "grad_norm": 0.05285203084349632, "learning_rate": 0.0010089707926757955, "loss": 0.1861, "step": 2750},
+    {"epoch": 2.22, "grad_norm": 0.047558143734931946, "learning_rate": 0.000962680762526387, "loss": 0.1811, "step": 2775},
+    {"epoch": 2.24, "grad_norm": 0.050169140100479126, "learning_rate": 0.000917223217864036, "loss": 0.1824, "step": 2800},
+    {"epoch": 2.26, "grad_norm": 0.04792075231671333, "learning_rate": 0.0008726227747510981, "loss": 0.1917, "step": 2825},
+    {"epoch": 2.2800000000000002, "grad_norm": 0.048594892024993896, "learning_rate": 0.0008289035851143198, "loss": 0.1794, "step": 2850},
+    {"epoch": 2.3, "grad_norm": 0.04949982091784477, "learning_rate": 0.0007860893236661412, "loss": 0.1831, "step": 2875},
+    {"epoch": 2.32, "grad_norm": 0.052018143236637115, "learning_rate": 0.0007442031750844269, "loss": 0.1894, "step": 2900},
+    {"epoch": 2.34, "grad_norm": 0.04490148648619652, "learning_rate": 0.0007032678214575517, "loss": 0.1788, "step": 2925},
+    {"epoch": 2.36, "grad_norm": 0.05236239731311798, "learning_rate": 0.0006633054300016464, "loss": 0.1864, "step": 2950},
+    {"epoch": 2.38, "grad_norm": 0.047926224768161774, "learning_rate": 0.0006243376410566656, "loss": 0.1798, "step": 2975},
+    {"epoch": 2.4, "grad_norm": 0.05037904903292656, "learning_rate": 0.0005863855563677558, "loss": 0.181, "step": 3000},
+    {"epoch": 2.42, "grad_norm": 0.05419327691197395, "learning_rate": 0.0005494697276582916, "loss": 0.1822, "step": 3025},
+    {"epoch": 2.44, "grad_norm": 0.05214129015803337, "learning_rate": 0.0005136101455007541, "loss": 0.1854, "step": 3050},
+    {"epoch": 2.46, "grad_norm": 0.0533323772251606, "learning_rate": 0.0004788262284914846, "loss": 0.1855, "step": 3075},
+    {"epoch": 2.48, "grad_norm": 0.05604487657546997, "learning_rate": 0.00044513681273516736, "loss": 0.1793, "step": 3100},
+    {"epoch": 2.5, "grad_norm": 0.05135011672973633, "learning_rate": 0.0004125601416447533, "loss": 0.1876, "step": 3125},
+    {"epoch": 2.52, "grad_norm": 0.0539252869784832, "learning_rate": 0.0003811138560623256, "loss": 0.1782, "step": 3150},
+    {"epoch": 2.54, "grad_norm": 0.05228358134627342, "learning_rate": 0.0003508149847062725, "loss": 0.1831, "step": 3175},
+    {"epoch": 2.56, "grad_norm": 0.046433333307504654, "learning_rate": 0.0003216799349499383, "loss": 0.1793, "step": 3200},
+    {"epoch": 2.58, "grad_norm": 0.04742318019270897, "learning_rate": 0.0002937244839367406, "loss": 0.1761, "step": 3225},
+    {"epoch": 2.6, "grad_norm": 0.05307656154036522, "learning_rate": 0.0002669637700365665, "loss": 0.177, "step": 3250},
+    {"epoch": 2.62, "grad_norm": 0.055305834859609604, "learning_rate": 0.0002414122846480804, "loss": 0.1793, "step": 3275},
+    {"epoch": 2.64, "grad_norm": 0.04545231908559799, "learning_rate": 0.00021708386435137644, "loss": 0.178, "step": 3300},
+    {"epoch": 2.66, "grad_norm": 0.04912173002958298, "learning_rate": 0.00019399168341522528, "loss": 0.1804, "step": 3325},
+    {"epoch": 2.68, "grad_norm": 0.0657554641366005, "learning_rate": 0.0001721482466629737, "loss": 0.1834, "step": 3350},
+    {"epoch": 2.7, "grad_norm": 0.047934047877788544, "learning_rate": 0.00015156538270096498, "loss": 0.1805, "step": 3375},
+    {"epoch": 2.7199999999999998, "grad_norm": 0.06532304733991623, "learning_rate": 0.00013225423751313942, "loss": 0.1908, "step": 3400},
+    {"epoch": 2.74, "grad_norm": 0.03938894718885422, "learning_rate": 0.00011422526842528452, "loss": 0.1817, "step": 3425},
+    {"epoch": 2.76, "grad_norm": 0.046688247472047806, "learning_rate": 9.748823844221238e-05, "loss": 0.1727, "step": 3450},
+    {"epoch": 2.7800000000000002, "grad_norm": 0.05760841444134712, "learning_rate": 8.205221096091786e-05, "loss": 0.1756, "step": 3475},
+    {"epoch": 2.8, "grad_norm": 0.0510096400976181, "learning_rate": 6.79255448625904e-05, "loss": 0.1767, "step": 3500},
+    {"epoch": 2.82, "grad_norm": 0.049026183784008026, "learning_rate": 5.511588998613149e-05, "loss": 0.1789, "step": 3525},
+    {"epoch": 2.84, "grad_norm": 0.05264630913734436, "learning_rate": 4.3630182985633096e-05, "loss": 0.1751, "step": 3550},
+    {"epoch": 2.86, "grad_norm": 0.05136013776063919, "learning_rate": 3.3474643574056276e-05, "loss": 0.1726, "step": 3575},
+    {"epoch": 2.88, "grad_norm": 0.049536991864442825, "learning_rate": 2.465477115514675e-05, "loss": 0.1811, "step": 3600},
+    {"epoch": 2.9, "grad_norm": 0.05471661314368248, "learning_rate": 1.717534184541153e-05, "loss": 0.1807, "step": 3625},
+    {"epoch": 2.92, "grad_norm": 0.059411708265542984, "learning_rate": 1.1040405887767503e-05, "loss": 0.1778, "step": 3650},
+    {"epoch": 2.94, "grad_norm": 0.048990510404109955, "learning_rate": 6.253285458261704e-06, "loss": 0.1737, "step": 3675},
+    {"epoch": 2.96, "grad_norm": 0.053671903908252716, "learning_rate": 2.816572867057332e-06, "loss": 0.1762, "step": 3700},
+    {"epoch": 2.98, "grad_norm": 0.043852001428604126, "learning_rate": 7.321291546499853e-07, "loss": 0.1743, "step": 3725},
+    {"epoch": 3.0, "grad_norm": 0.043689530342817307, "learning_rate": 1.083084081032748e-09, "loss": 0.1797, "step": 3750},
+    {"epoch": 3.0, "step": 3750, "total_flos": 2.43882352705536e+18, "train_loss": 0.23841547063191731, "train_runtime": 3464.8361, "train_samples_per_second": 34.634, "train_steps_per_second": 1.082}
+  ],
+  "logging_steps": 25,
+  "max_steps": 3750,
+  "num_input_tokens_seen": 0,
+  "num_train_epochs": 3,
+  "save_steps": 0,
+  "stateful_callbacks": {
+    "TrainerControl": {
+      "args": {
+        "should_epoch_stop": false,
+        "should_evaluate": false,
+        "should_log": false,
+        "should_save": true,
+        "should_training_stop": true
+      },
+      "attributes": {}
+    }
+  },
+  "total_flos": 2.43882352705536e+18,
+  "train_batch_size": 32,
+  "trial_name": null,
+  "trial_params": null
+}
nl_tasks/exps/run_ex27/ft/adapter_config.json
ADDED
|
@@ -0,0 +1,18 @@
+{
+  "T": 1.0,
+  "base_model_name_or_path": "meta-llama/Llama-2-7b-hf",
+  "bias": "none",
+  "inference_mode": false,
+  "layers_to_transform": null,
+  "modules_to_save": null,
+  "num_rotations": 1,
+  "peft_type": "ROTATION",
+  "r": 16,
+  "revision": null,
+  "target_modules": [
+    "q_proj",
+    "v_proj"
+  ],
+  "target_modules_to_skip": null,
+  "task_type": "CAUSAL_LM"
+}
nl_tasks/exps/run_ex27/ft/special_tokens_map.json
ADDED
|
@@ -0,0 +1,24 @@
+{
+  "bos_token": {
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": "<unk>",
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
nl_tasks/exps/run_ex27/ft/tokenizer.json
ADDED
|
The diff for this file is too large to render.
nl_tasks/exps/run_ex27/ft/tokenizer.model
ADDED
|
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
nl_tasks/exps/run_ex27/ft/tokenizer_config.json
ADDED
|
@@ -0,0 +1,43 @@
+{
+  "add_bos_token": true,
+  "add_eos_token": false,
+  "add_prefix_space": null,
+  "added_tokens_decoder": {
+    "0": {
+      "content": "<unk>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "bos_token": "<s>",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "</s>",
+  "extra_special_tokens": {},
+  "legacy": false,
+  "model_max_length": 512,
+  "pad_token": "<unk>",
+  "padding_side": "right",
+  "sp_model_kwargs": {},
+  "tokenizer_class": "LlamaTokenizer",
+  "unk_token": "<unk>",
+  "use_default_system_prompt": false
+}
nl_tasks/exps/run_ex27/ft2/adapter_config.json
ADDED
|
@@ -0,0 +1,18 @@
+{
+  "T": 1.0,
+  "base_model_name_or_path": "meta-llama/Llama-2-7b-hf",
+  "bias": "none",
+  "inference_mode": true,
+  "layers_to_transform": null,
+  "modules_to_save": null,
+  "num_rotations": 1,
+  "peft_type": "ROTATION",
+  "r": 16,
+  "revision": null,
+  "target_modules": [
+    "q_proj",
+    "v_proj"
+  ],
+  "target_modules_to_skip": null,
+  "task_type": "CAUSAL_LM"
+}
nl_tasks/exps/run_ex27/ft2/adapter_model.bin
ADDED
|
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ad569dbd90021db6f9222a57761dec497150748c0f5dbedf597ea06cec130bba
+size 33602915
nl_tasks/exps/run_ex27/trainer_state.json
ADDED
|
@@ -0,0 +1,1093 @@
+{
+  "best_global_step": null,
+  "best_metric": null,
+  "best_model_checkpoint": null,
+  "epoch": 3.0,
+  "eval_steps": 500,
+  "global_step": 3750,
+  "is_hyper_param_search": false,
+  "is_local_process_zero": true,
+  "is_world_process_zero": true,
+  "log_history": [
+    {"epoch": 0.02, "grad_norm": 0.25781920552253723, "learning_rate": 0.000512, "loss": 0.6638, "step": 25},
+    {"epoch": 0.04, "grad_norm": 0.19873183965682983, "learning_rate": 0.0010453333333333332, "loss": 0.3921, "step": 50},
+    {"epoch": 0.06, "grad_norm": 0.22360016405582428, "learning_rate": 0.0015786666666666668, "loss": 0.3705, "step": 75},
+    {"epoch": 0.08, "grad_norm": 0.268097460269928, "learning_rate": 0.002112, "loss": 0.3635, "step": 100},
+    {"epoch": 0.1, "grad_norm": 0.74379962682724, "learning_rate": 0.0026453333333333333, "loss": 0.3711, "step": 125},
+    {"epoch": 0.12, "grad_norm": 0.6203864216804504, "learning_rate": 0.0031786666666666664, "loss": 0.4046, "step": 150},
+    {"epoch": 0.14, "grad_norm": 0.435539186000824, "learning_rate": 0.0037120000000000005, "loss": 0.5032, "step": 175},
+    {"epoch": 0.16, "grad_norm": 0.23134496808052063, "learning_rate": 0.004245333333333333, "loss": 0.3804, "step": 200},
+    {"epoch": 0.18, "grad_norm": 0.25716540217399597, "learning_rate": 0.004778666666666667, "loss": 0.3628, "step": 225},
+    {"epoch": 0.2, "grad_norm": 0.17056621611118317, "learning_rate": 0.005312000000000001, "loss": 0.3466, "step": 250},
+    {"epoch": 0.22, "grad_norm": 0.1669658124446869, "learning_rate": 0.0058453333333333335, "loss": 0.333, "step": 275},
+    {"epoch": 0.24, "grad_norm": 0.1642168164253235, "learning_rate": 0.006378666666666667, "loss": 0.3424, "step": 300},
+    {"epoch": 0.26, "grad_norm": 0.12736473977565765, "learning_rate": 0.006912, "loss": 0.3298, "step": 325},
+    {"epoch": 0.28, "grad_norm": 0.128292977809906, "learning_rate": 0.007445333333333333, "loss": 0.3299, "step": 350},
+    {"epoch": 0.3, "grad_norm": 0.12831421196460724, "learning_rate": 0.007978666666666667, "loss": 0.3203, "step": 375},
+    {"epoch": 0.32, "grad_norm": 0.12132629007101059, "learning_rate": 0.007999001871152489, "loss": 0.318, "step": 400},
+    {"epoch": 0.34, "grad_norm": 0.10372071713209152, "learning_rate": 0.00799583994517958, "loss": 0.3041, "step": 425},
+    {"epoch": 0.36, "grad_norm": 0.08704401552677155, "learning_rate": 0.007990514201385437, "loss": 0.3061, "step": 450},
+    {"epoch": 0.38, "grad_norm": 0.09257054328918457, "learning_rate": 0.007983027523754278, "loss": 0.2988, "step": 475},
+    {"epoch": 0.4, "grad_norm": 0.09537217020988464, "learning_rate": 0.00797338396645411, "loss": 0.2937, "step": 500},
+    {"epoch": 0.42, "grad_norm": 0.07897094637155533, "learning_rate": 0.007961588751641338, "loss": 0.2943, "step": 525},
+    {"epoch": 0.44, "grad_norm": 0.0941765084862709, "learning_rate": 0.007947648266632861, "loss": 0.2815, "step": 550},
+    {"epoch": 0.46, "grad_norm": 0.08107373118400574, "learning_rate": 0.007931570060447236, "loss": 0.2995, "step": 575},
+    {"epoch": 0.48, "grad_norm": 0.08375738561153412, "learning_rate": 0.007913362839716751, "loss": 0.2843, "step": 600},
+    {"epoch": 0.5, "grad_norm": 0.07901784777641296, "learning_rate": 0.007893036463972625, "loss": 0.2987, "step": 625},
+    {"epoch": 0.52, "grad_norm": 0.07139074057340622, "learning_rate": 0.007870601940305904, "loss": 0.2893, "step": 650},
+    {"epoch": 0.54, "grad_norm": 0.0510406494140625, "learning_rate": 0.007846071417406921, "loss": 0.2759, "step": 675},
+    {"epoch": 0.56, "grad_norm": 0.06341639906167984, "learning_rate": 0.007819458178986572, "loss": 0.2768, "step": 700},
+    {"epoch": 0.58, "grad_norm": 0.07159160822629929, "learning_rate": 0.007790776636582948, "loss": 0.2831, "step": 725},
+    {"epoch": 0.6, "grad_norm": 0.09310116618871689, "learning_rate": 0.007760042321757232, "loss": 0.2787, "step": 750},
+    {"epoch": 0.62, "grad_norm": 0.06396382302045822, "learning_rate": 0.00772727187768309, "loss": 0.2665, "step": 775},
+    {"epoch": 0.64, "grad_norm": 0.07068858295679092, "learning_rate": 0.007692483050134094, "loss": 0.2731, "step": 800},
+    {"epoch": 0.66, "grad_norm": 0.06336300075054169, "learning_rate": 0.007655694677874074, "loss": 0.2652, "step": 825},
+    {"epoch": 0.68, "grad_norm": 0.06757763773202896, "learning_rate": 0.007616926682455595, "loss": 0.2659, "step": 850},
+    {"epoch": 0.7, "grad_norm": 0.050533879548311234, "learning_rate": 0.007576200057432086, "loss": 0.2748, "step": 875},
+    {"epoch": 0.72, "grad_norm": 0.052018292248249054, "learning_rate": 0.007533536856989457, "loss": 0.2689, "step": 900},
+    {"epoch": 0.74, "grad_norm": 0.06127012521028519, "learning_rate": 0.007488960184003375, "loss": 0.2666, "step": 925},
+    {"epoch": 0.76, "grad_norm": 0.05782606080174446, "learning_rate": 0.007442494177528641, "loss": 0.2643, "step": 950},
+    {"epoch": 0.78, "grad_norm": 0.04911736026406288, "learning_rate": 0.007394163999727471, "loss": 0.2597, "step": 975},
+    {"epoch": 0.8, "grad_norm": 0.07270637154579163, "learning_rate": 0.007343995822243742, "loss": 0.2688, "step": 1000},
+    {"epoch": 0.82, "grad_norm": 0.0592631995677948, "learning_rate": 0.007292016812030581, "loss": 0.2664, "step": 1025},
+    {"epoch": 0.84, "grad_norm": 0.05052737891674042, "learning_rate": 0.007238255116638985, "loss": 0.2569, "step": 1050},
+    {"epoch": 0.86, "grad_norm": 0.05483546853065491, "learning_rate": 0.007182739848975422, "loss": 0.2545, "step": 1075},
+    {"epoch": 0.88, "grad_norm": 0.05168973654508591, "learning_rate": 0.0071255010715366805, "loss": 0.2542, "step": 1100},
+    {"epoch": 0.9, "grad_norm": 0.056612785905599594, "learning_rate": 0.007066569780130499, "loss": 0.2633, "step": 1125},
+    {"epoch": 0.92, "grad_norm": 0.04777904227375984, "learning_rate": 0.0070059778870907845, "loss": 0.2562, "step": 1150},
+    {"epoch": 0.94, "grad_norm": 0.052652597427368164, "learning_rate": 0.006943758203996524, "loss": 0.2589, "step": 1175},
+    {"epoch": 0.96, "grad_norm": 0.04456433653831482, "learning_rate": 0.006879944423903735, "loss": 0.2576, "step": 1200},
+    {"epoch": 0.98, "grad_norm": 0.05583874136209488, "learning_rate": 0.006814571103100082, "loss": 0.2603, "step": 1225},
+    {"epoch": 1.0, "grad_norm": 0.05586734786629677, "learning_rate": 0.0067476736423920354, "loss": 0.2496, "step": 1250},
+    {"epoch": 1.02, "grad_norm": 0.05297844856977463, "learning_rate": 0.006679288267934713, "loss": 0.2265, "step": 1275},
+    {"epoch": 1.04, "grad_norm": 0.04152635857462883, "learning_rate": 0.0066094520116147725, "loss": 0.235, "step": 1300},
+    {"epoch": 1.06, "grad_norm": 0.052927907556295395, "learning_rate": 0.006538202690997, "loss": 0.2311, "step": 1325},
+    {"epoch": 1.08, "grad_norm": 0.051669735461473465, "learning_rate": 0.0064655788888454266, "loss": 0.2364, "step": 1350},
+    {"epoch": 1.1, "grad_norm": 0.040959082543849945, "learning_rate": 0.0063916199322300755, "loss": 0.2401, "step": 1375},
+    {"epoch": 1.12, "grad_norm": 0.04207254573702812, "learning_rate": 0.006316365871230672, "loss": 0.2383, "step": 1400
|
| 403 |
+
},
|
| 404 |
+
{
|
| 405 |
+
"epoch": 1.1400000000000001,
|
| 406 |
+
"grad_norm": 0.048728130757808685,
|
| 407 |
+
"learning_rate": 0.006239857457248809,
|
| 408 |
+
"loss": 0.2308,
|
| 409 |
+
"step": 1425
|
| 410 |
+
},
|
| 411 |
+
{
|
| 412 |
+
"epoch": 1.16,
|
| 413 |
+
"grad_norm": 0.043461356312036514,
|
| 414 |
+
"learning_rate": 0.0061621361209403475,
|
| 415 |
+
"loss": 0.2383,
|
| 416 |
+
"step": 1450
|
| 417 |
+
},
|
| 418 |
+
{
|
| 419 |
+
"epoch": 1.18,
|
| 420 |
+
"grad_norm": 0.04390329867601395,
|
| 421 |
+
"learning_rate": 0.006083243949779991,
|
| 422 |
+
"loss": 0.2349,
|
| 423 |
+
"step": 1475
|
| 424 |
+
},
|
| 425 |
+
{
|
| 426 |
+
"epoch": 1.2,
|
| 427 |
+
"grad_norm": 0.07007128745317459,
|
| 428 |
+
"learning_rate": 0.00600322366527016,
|
| 429 |
+
"loss": 0.2378,
|
| 430 |
+
"step": 1500
|
| 431 |
+
},
|
| 432 |
+
{
|
| 433 |
+
"epoch": 1.22,
|
| 434 |
+
"grad_norm": 0.046169713139534,
|
| 435 |
+
"learning_rate": 0.005922118599806563,
|
| 436 |
+
"loss": 0.2425,
|
| 437 |
+
"step": 1525
|
| 438 |
+
},
|
| 439 |
+
{
|
| 440 |
+
"epoch": 1.24,
|
| 441 |
+
"grad_norm": 0.04116000607609749,
|
| 442 |
+
"learning_rate": 0.00583997267321293,
|
| 443 |
+
"loss": 0.232,
|
| 444 |
+
"step": 1550
|
| 445 |
+
},
|
| 446 |
+
{
|
| 447 |
+
"epoch": 1.26,
|
| 448 |
+
"grad_norm": 0.04283928498625755,
|
| 449 |
+
"learning_rate": 0.005756830368957653,
|
| 450 |
+
"loss": 0.2316,
|
| 451 |
+
"step": 1575
|
| 452 |
+
},
|
| 453 |
+
{
|
| 454 |
+
"epoch": 1.28,
|
| 455 |
+
"grad_norm": 0.04096667468547821,
|
| 456 |
+
"learning_rate": 0.005672736710065206,
|
| 457 |
+
"loss": 0.2268,
|
| 458 |
+
"step": 1600
|
| 459 |
+
},
|
| 460 |
+
{
|
| 461 |
+
"epoch": 1.3,
|
| 462 |
+
"grad_norm": 0.045082151889801025,
|
| 463 |
+
"learning_rate": 0.005587737234735383,
|
| 464 |
+
"loss": 0.2276,
|
| 465 |
+
"step": 1625
|
| 466 |
+
},
|
| 467 |
+
{
|
| 468 |
+
"epoch": 1.32,
|
| 469 |
+
"grad_norm": 0.0428253673017025,
|
| 470 |
+
"learning_rate": 0.005501877971683556,
|
| 471 |
+
"loss": 0.2211,
|
| 472 |
+
"step": 1650
|
| 473 |
+
},
|
| 474 |
+
{
|
| 475 |
+
"epoch": 1.34,
|
| 476 |
+
"grad_norm": 0.037039726972579956,
|
| 477 |
+
"learning_rate": 0.005415205415215318,
|
| 478 |
+
"loss": 0.2269,
|
| 479 |
+
"step": 1675
|
| 480 |
+
},
|
| 481 |
+
{
|
| 482 |
+
"epoch": 1.3599999999999999,
|
| 483 |
+
"grad_norm": 0.04006600379943848,
|
| 484 |
+
"learning_rate": 0.005327766500048993,
|
| 485 |
+
"loss": 0.2251,
|
| 486 |
+
"step": 1700
|
| 487 |
+
},
|
| 488 |
+
{
|
| 489 |
+
"epoch": 1.38,
|
| 490 |
+
"grad_norm": 0.039758071303367615,
|
| 491 |
+
"learning_rate": 0.005239608575899662,
|
| 492 |
+
"loss": 0.2254,
|
| 493 |
+
"step": 1725
|
| 494 |
+
},
|
| 495 |
+
{
|
| 496 |
+
"epoch": 1.4,
|
| 497 |
+
"grad_norm": 0.04773048684000969,
|
| 498 |
+
"learning_rate": 0.005150779381838452,
|
| 499 |
+
"loss": 0.2259,
|
| 500 |
+
"step": 1750
|
| 501 |
+
},
|
| 502 |
+
{
|
| 503 |
+
"epoch": 1.42,
|
| 504 |
+
"grad_norm": 0.03774907439947128,
|
| 505 |
+
"learning_rate": 0.005061327020440988,
|
| 506 |
+
"loss": 0.2277,
|
| 507 |
+
"step": 1775
|
| 508 |
+
},
|
| 509 |
+
{
|
| 510 |
+
"epoch": 1.44,
|
| 511 |
+
"grad_norm": 0.040254510939121246,
|
| 512 |
+
"learning_rate": 0.004971299931739004,
|
| 513 |
+
"loss": 0.2145,
|
| 514 |
+
"step": 1800
|
| 515 |
+
},
|
| 516 |
+
{
|
| 517 |
+
"epoch": 1.46,
|
| 518 |
+
"grad_norm": 0.03746659308671951,
|
| 519 |
+
"learning_rate": 0.004880746866989206,
|
| 520 |
+
"loss": 0.2214,
|
| 521 |
+
"step": 1825
|
| 522 |
+
},
|
| 523 |
+
{
|
| 524 |
+
"epoch": 1.48,
|
| 525 |
+
"grad_norm": 0.04677619785070419,
|
| 526 |
+
"learning_rate": 0.004789716862273612,
|
| 527 |
+
"loss": 0.2251,
|
| 528 |
+
"step": 1850
|
| 529 |
+
},
|
| 530 |
+
{
|
| 531 |
+
"epoch": 1.5,
|
| 532 |
+
"grad_norm": 0.03931615129113197,
|
| 533 |
+
"learning_rate": 0.00469825921194564,
|
| 534 |
+
"loss": 0.2254,
|
| 535 |
+
"step": 1875
|
| 536 |
+
},
|
| 537 |
+
{
|
| 538 |
+
"epoch": 1.52,
|
| 539 |
+
"grad_norm": 0.05020896717905998,
|
| 540 |
+
"learning_rate": 0.0046064234419363565,
|
| 541 |
+
"loss": 0.2236,
|
| 542 |
+
"step": 1900
|
| 543 |
+
},
|
| 544 |
+
{
|
| 545 |
+
"epoch": 1.54,
|
| 546 |
+
"grad_norm": 0.04491092264652252,
|
| 547 |
+
"learning_rate": 0.0045142592829353,
|
| 548 |
+
"loss": 0.2218,
|
| 549 |
+
"step": 1925
|
| 550 |
+
},
|
| 551 |
+
{
|
| 552 |
+
"epoch": 1.56,
|
| 553 |
+
"grad_norm": 0.03652544692158699,
|
| 554 |
+
"learning_rate": 0.004421816643460448,
|
| 555 |
+
"loss": 0.2115,
|
| 556 |
+
"step": 1950
|
| 557 |
+
},
|
| 558 |
+
{
|
| 559 |
+
"epoch": 1.58,
|
| 560 |
+
"grad_norm": 0.046602796763181686,
|
| 561 |
+
"learning_rate": 0.004329145582831862,
|
| 562 |
+
"loss": 0.2262,
|
| 563 |
+
"step": 1975
|
| 564 |
+
},
|
| 565 |
+
{
|
| 566 |
+
"epoch": 1.6,
|
| 567 |
+
"grad_norm": 0.03735148161649704,
|
| 568 |
+
"learning_rate": 0.0042362962840636915,
|
| 569 |
+
"loss": 0.2255,
|
| 570 |
+
"step": 2000
|
| 571 |
+
},
|
| 572 |
+
{
|
| 573 |
+
"epoch": 1.62,
|
| 574 |
+
"grad_norm": 0.03894055634737015,
|
| 575 |
+
"learning_rate": 0.004143319026689178,
|
| 576 |
+
"loss": 0.2106,
|
| 577 |
+
"step": 2025
|
| 578 |
+
},
|
| 579 |
+
{
|
| 580 |
+
"epoch": 1.6400000000000001,
|
| 581 |
+
"grad_norm": 0.03984660282731056,
|
| 582 |
+
"learning_rate": 0.004050264159533411,
|
| 583 |
+
"loss": 0.2199,
|
| 584 |
+
"step": 2050
|
| 585 |
+
},
|
| 586 |
+
{
|
| 587 |
+
"epoch": 1.6600000000000001,
|
| 588 |
+
"grad_norm": 0.03481967747211456,
|
| 589 |
+
"learning_rate": 0.003957182073448542,
|
| 590 |
+
"loss": 0.2248,
|
| 591 |
+
"step": 2075
|
| 592 |
+
},
|
| 593 |
+
{
|
| 594 |
+
"epoch": 1.6800000000000002,
|
| 595 |
+
"grad_norm": 0.045008812099695206,
|
| 596 |
+
"learning_rate": 0.0038641231740262558,
|
| 597 |
+
"loss": 0.2215,
|
| 598 |
+
"step": 2100
|
| 599 |
+
},
|
| 600 |
+
{
|
| 601 |
+
"epoch": 1.7,
|
| 602 |
+
"grad_norm": 0.03925010934472084,
|
| 603 |
+
"learning_rate": 0.003771137854302248,
|
| 604 |
+
"loss": 0.2198,
|
| 605 |
+
"step": 2125
|
| 606 |
+
},
|
| 607 |
+
{
|
| 608 |
+
"epoch": 1.72,
|
| 609 |
+
"grad_norm": 0.040725499391555786,
|
| 610 |
+
"learning_rate": 0.003678276467467509,
|
| 611 |
+
"loss": 0.2112,
|
| 612 |
+
"step": 2150
|
| 613 |
+
},
|
| 614 |
+
{
|
| 615 |
+
"epoch": 1.74,
|
| 616 |
+
"grad_norm": 0.03515653312206268,
|
| 617 |
+
"learning_rate": 0.003585589299601187,
|
| 618 |
+
"loss": 0.228,
|
| 619 |
+
"step": 2175
|
| 620 |
+
},
|
| 621 |
+
{
|
| 622 |
+
"epoch": 1.76,
|
| 623 |
+
"grad_norm": 0.036875877529382706,
|
| 624 |
+
"learning_rate": 0.0034931265424397824,
|
| 625 |
+
"loss": 0.2197,
|
| 626 |
+
"step": 2200
|
| 627 |
+
},
|
| 628 |
+
{
|
| 629 |
+
"epoch": 1.78,
|
| 630 |
+
"grad_norm": 0.040114812552928925,
|
| 631 |
+
"learning_rate": 0.003400938266197439,
|
| 632 |
+
"loss": 0.2107,
|
| 633 |
+
"step": 2225
|
| 634 |
+
},
|
| 635 |
+
{
|
| 636 |
+
"epoch": 1.8,
|
| 637 |
+
"grad_norm": 0.037918511778116226,
|
| 638 |
+
"learning_rate": 0.0033090743924520435,
|
| 639 |
+
"loss": 0.2205,
|
| 640 |
+
"step": 2250
|
| 641 |
+
},
|
| 642 |
+
{
|
| 643 |
+
"epoch": 1.8199999999999998,
|
| 644 |
+
"grad_norm": 0.03853275254368782,
|
| 645 |
+
"learning_rate": 0.0032175846671118075,
|
| 646 |
+
"loss": 0.2123,
|
| 647 |
+
"step": 2275
|
| 648 |
+
},
|
| 649 |
+
{
|
| 650 |
+
"epoch": 1.8399999999999999,
|
| 651 |
+
"grad_norm": 0.038103483617305756,
|
| 652 |
+
"learning_rate": 0.003126518633476979,
|
| 653 |
+
"loss": 0.2161,
|
| 654 |
+
"step": 2300
|
| 655 |
+
},
|
| 656 |
+
{
|
| 657 |
+
"epoch": 1.8599999999999999,
|
| 658 |
+
"grad_norm": 0.03494865819811821,
|
| 659 |
+
"learning_rate": 0.003035925605411274,
|
| 660 |
+
"loss": 0.2189,
|
| 661 |
+
"step": 2325
|
| 662 |
+
},
|
| 663 |
+
{
|
| 664 |
+
"epoch": 1.88,
|
| 665 |
+
"grad_norm": 0.03664889186620712,
|
| 666 |
+
"learning_rate": 0.00294585464063755,
|
| 667 |
+
"loss": 0.2108,
|
| 668 |
+
"step": 2350
|
| 669 |
+
},
|
| 670 |
+
{
|
| 671 |
+
"epoch": 1.9,
|
| 672 |
+
"grad_norm": 0.04148728772997856,
|
| 673 |
+
"learning_rate": 0.002856354514172179,
|
| 674 |
+
"loss": 0.2338,
|
| 675 |
+
"step": 2375
|
| 676 |
+
},
|
| 677 |
+
{
|
| 678 |
+
"epoch": 1.92,
|
| 679 |
+
"grad_norm": 0.04232242703437805,
|
| 680 |
+
"learning_rate": 0.002767473691912522,
|
| 681 |
+
"loss": 0.2186,
|
| 682 |
+
"step": 2400
|
| 683 |
+
},
|
| 684 |
+
{
|
| 685 |
+
"epoch": 1.94,
|
| 686 |
+
"grad_norm": 0.04087619483470917,
|
| 687 |
+
"learning_rate": 0.002679260304391785,
|
| 688 |
+
"loss": 0.2111,
|
| 689 |
+
"step": 2425
|
| 690 |
+
},
|
| 691 |
+
{
|
| 692 |
+
"epoch": 1.96,
|
| 693 |
+
"grad_norm": 0.03856271877884865,
|
| 694 |
+
"learning_rate": 0.002591762120715491,
|
| 695 |
+
"loss": 0.2123,
|
| 696 |
+
"step": 2450
|
| 697 |
+
},
|
| 698 |
+
{
|
| 699 |
+
"epoch": 1.98,
|
| 700 |
+
"grad_norm": 0.04001619666814804,
|
| 701 |
+
"learning_rate": 0.002505026522693662,
|
| 702 |
+
"loss": 0.2177,
|
| 703 |
+
"step": 2475
|
| 704 |
+
},
|
| 705 |
+
{
|
| 706 |
+
"epoch": 2.0,
|
| 707 |
+
"grad_norm": 0.03274156525731087,
|
| 708 |
+
"learning_rate": 0.002419100479182735,
|
| 709 |
+
"loss": 0.2137,
|
| 710 |
+
"step": 2500
|
| 711 |
+
},
|
| 712 |
+
{
|
| 713 |
+
"epoch": 2.02,
|
| 714 |
+
"grad_norm": 0.04001650959253311,
|
| 715 |
+
"learning_rate": 0.0023340305206511008,
|
| 716 |
+
"loss": 0.1889,
|
| 717 |
+
"step": 2525
|
| 718 |
+
},
|
| 719 |
+
{
|
| 720 |
+
"epoch": 2.04,
|
| 721 |
+
"grad_norm": 0.04055812954902649,
|
| 722 |
+
"learning_rate": 0.0022498627139820285,
|
| 723 |
+
"loss": 0.1836,
|
| 724 |
+
"step": 2550
|
| 725 |
+
},
|
| 726 |
+
{
|
| 727 |
+
"epoch": 2.06,
|
| 728 |
+
"grad_norm": 0.03762733191251755,
|
| 729 |
+
"learning_rate": 0.0021666426375276404,
|
| 730 |
+
"loss": 0.1909,
|
| 731 |
+
"step": 2575
|
| 732 |
+
},
|
| 733 |
+
{
|
| 734 |
+
"epoch": 2.08,
|
| 735 |
+
"grad_norm": 0.039257150143384933,
|
| 736 |
+
"learning_rate": 0.0020844153564274306,
|
| 737 |
+
"loss": 0.1822,
|
| 738 |
+
"step": 2600
|
| 739 |
+
},
|
| 740 |
+
{
|
| 741 |
+
"epoch": 2.1,
|
| 742 |
+
"grad_norm": 0.0384918637573719,
|
| 743 |
+
"learning_rate": 0.0020032253982046892,
|
| 744 |
+
"loss": 0.1897,
|
| 745 |
+
"step": 2625
|
| 746 |
+
},
|
| 747 |
+
{
|
| 748 |
+
"epoch": 2.12,
|
| 749 |
+
"grad_norm": 0.03713955730199814,
|
| 750 |
+
"learning_rate": 0.0019231167286540742,
|
| 751 |
+
"loss": 0.1836,
|
| 752 |
+
"step": 2650
|
| 753 |
+
},
|
| 754 |
+
{
|
| 755 |
+
"epoch": 2.14,
|
| 756 |
+
"grad_norm": 0.034854769706726074,
|
| 757 |
+
"learning_rate": 0.0018441327280333511,
|
| 758 |
+
"loss": 0.1903,
|
| 759 |
+
"step": 2675
|
| 760 |
+
},
|
| 761 |
+
{
|
| 762 |
+
"epoch": 2.16,
|
| 763 |
+
"grad_norm": 0.0406007245182991,
|
| 764 |
+
"learning_rate": 0.0017663161675722163,
|
| 765 |
+
"loss": 0.1867,
|
| 766 |
+
"step": 2700
|
| 767 |
+
},
|
| 768 |
+
{
|
| 769 |
+
"epoch": 2.18,
|
| 770 |
+
"grad_norm": 0.03872333839535713,
|
| 771 |
+
"learning_rate": 0.0016897091863109304,
|
| 772 |
+
"loss": 0.1921,
|
| 773 |
+
"step": 2725
|
| 774 |
+
},
|
| 775 |
+
{
|
| 776 |
+
"epoch": 2.2,
|
| 777 |
+
"grad_norm": 0.036862798035144806,
|
| 778 |
+
"learning_rate": 0.0016143532682812726,
|
| 779 |
+
"loss": 0.1891,
|
| 780 |
+
"step": 2750
|
| 781 |
+
},
|
| 782 |
+
{
|
| 783 |
+
"epoch": 2.22,
|
| 784 |
+
"grad_norm": 0.03593632951378822,
|
| 785 |
+
"learning_rate": 0.001540289220042219,
|
| 786 |
+
"loss": 0.1831,
|
| 787 |
+
"step": 2775
|
| 788 |
+
},
|
| 789 |
+
{
|
| 790 |
+
"epoch": 2.24,
|
| 791 |
+
"grad_norm": 0.034924060106277466,
|
| 792 |
+
"learning_rate": 0.0014675571485824577,
|
| 793 |
+
"loss": 0.1831,
|
| 794 |
+
"step": 2800
|
| 795 |
+
},
|
| 796 |
+
{
|
| 797 |
+
"epoch": 2.26,
|
| 798 |
+
"grad_norm": 0.03330395743250847,
|
| 799 |
+
"learning_rate": 0.001396196439601757,
|
| 800 |
+
"loss": 0.1928,
|
| 801 |
+
"step": 2825
|
| 802 |
+
},
|
| 803 |
+
{
|
| 804 |
+
"epoch": 2.2800000000000002,
|
| 805 |
+
"grad_norm": 0.033493608236312866,
|
| 806 |
+
"learning_rate": 0.0013262457361829117,
|
| 807 |
+
"loss": 0.1812,
|
| 808 |
+
"step": 2850
|
| 809 |
+
},
|
| 810 |
+
{
|
| 811 |
+
"epoch": 2.3,
|
| 812 |
+
"grad_norm": 0.033618029206991196,
|
| 813 |
+
"learning_rate": 0.001257742917865826,
|
| 814 |
+
"loss": 0.1854,
|
| 815 |
+
"step": 2875
|
| 816 |
+
},
|
| 817 |
+
{
|
| 818 |
+
"epoch": 2.32,
|
| 819 |
+
"grad_norm": 0.03629603609442711,
|
| 820 |
+
"learning_rate": 0.0011907250801350831,
|
| 821 |
+
"loss": 0.191,
|
| 822 |
+
"step": 2900
|
| 823 |
+
},
|
| 824 |
+
{
|
| 825 |
+
"epoch": 2.34,
|
| 826 |
+
"grad_norm": 0.030817655846476555,
|
| 827 |
+
"learning_rate": 0.0011252285143320826,
|
| 828 |
+
"loss": 0.1809,
|
| 829 |
+
"step": 2925
|
| 830 |
+
},
|
| 831 |
+
{
|
| 832 |
+
"epoch": 2.36,
|
| 833 |
+
"grad_norm": 0.039837613701820374,
|
| 834 |
+
"learning_rate": 0.0010612886880026343,
|
| 835 |
+
"loss": 0.1881,
|
| 836 |
+
"step": 2950
|
| 837 |
+
},
|
| 838 |
+
{
|
| 839 |
+
"epoch": 2.38,
|
| 840 |
+
"grad_norm": 0.034760717302560806,
|
| 841 |
+
"learning_rate": 0.000998940225690665,
|
| 842 |
+
"loss": 0.1811,
|
| 843 |
+
"step": 2975
|
| 844 |
+
},
|
| 845 |
+
{
|
| 846 |
+
"epoch": 2.4,
|
| 847 |
+
"grad_norm": 0.03357568383216858,
|
| 848 |
+
"learning_rate": 0.0009382168901884094,
|
| 849 |
+
"loss": 0.1824,
|
| 850 |
+
"step": 3000
|
| 851 |
+
},
|
| 852 |
+
{
|
| 853 |
+
"epoch": 2.42,
|
| 854 |
+
"grad_norm": 0.04034045711159706,
|
| 855 |
+
"learning_rate": 0.0008791515642532666,
|
| 856 |
+
"loss": 0.1842,
|
| 857 |
+
"step": 3025
|
| 858 |
+
},
|
| 859 |
+
{
|
| 860 |
+
"epoch": 2.44,
|
| 861 |
+
"grad_norm": 0.033517543226480484,
|
| 862 |
+
"learning_rate": 0.0008217762328012066,
|
| 863 |
+
"loss": 0.187,
|
| 864 |
+
"step": 3050
|
| 865 |
+
},
|
| 866 |
+
{
|
| 867 |
+
"epoch": 2.46,
|
| 868 |
+
"grad_norm": 0.03797078877687454,
|
| 869 |
+
"learning_rate": 0.0007661219655863753,
|
| 870 |
+
"loss": 0.1866,
|
| 871 |
+
"step": 3075
|
| 872 |
+
},
|
| 873 |
+
{
|
| 874 |
+
"epoch": 2.48,
|
| 875 |
+
"grad_norm": 0.03740476444363594,
|
| 876 |
+
"learning_rate": 0.0007122189003762678,
|
| 877 |
+
"loss": 0.1801,
|
| 878 |
+
"step": 3100
|
| 879 |
+
},
|
| 880 |
+
{
|
| 881 |
+
"epoch": 2.5,
|
| 882 |
+
"grad_norm": 0.04116297513246536,
|
| 883 |
+
"learning_rate": 0.0006600962266316053,
|
| 884 |
+
"loss": 0.1885,
|
| 885 |
+
"step": 3125
|
| 886 |
+
},
|
| 887 |
+
{
|
| 888 |
+
"epoch": 2.52,
|
| 889 |
+
"grad_norm": 0.0396578311920166,
|
| 890 |
+
"learning_rate": 0.000609782169699721,
|
| 891 |
+
"loss": 0.18,
|
| 892 |
+
"step": 3150
|
| 893 |
+
},
|
| 894 |
+
{
|
| 895 |
+
"epoch": 2.54,
|
| 896 |
+
"grad_norm": 0.04095142334699631,
|
| 897 |
+
"learning_rate": 0.000561303975530036,
|
| 898 |
+
"loss": 0.1843,
|
| 899 |
+
"step": 3175
|
| 900 |
+
},
|
| 901 |
+
{
|
| 902 |
+
"epoch": 2.56,
|
| 903 |
+
"grad_norm": 0.035744231194257736,
|
| 904 |
+
"learning_rate": 0.0005146878959199013,
|
| 905 |
+
"loss": 0.1806,
|
| 906 |
+
"step": 3200
|
| 907 |
+
},
|
| 908 |
+
{
|
| 909 |
+
"epoch": 2.58,
|
| 910 |
+
"grad_norm": 0.034600187093019485,
|
| 911 |
+
"learning_rate": 0.000469959174298785,
|
| 912 |
+
"loss": 0.1769,
|
| 913 |
+
"step": 3225
|
| 914 |
+
},
|
| 915 |
+
{
|
| 916 |
+
"epoch": 2.6,
|
| 917 |
+
"grad_norm": 0.04248780012130737,
|
| 918 |
+
"learning_rate": 0.00042714203205850644,
|
| 919 |
+
"loss": 0.1778,
|
| 920 |
+
"step": 3250
|
| 921 |
+
},
|
| 922 |
+
{
|
| 923 |
+
"epoch": 2.62,
|
| 924 |
+
"grad_norm": 0.03829626366496086,
|
| 925 |
+
"learning_rate": 0.00038625965543692865,
|
| 926 |
+
"loss": 0.1809,
|
| 927 |
+
"step": 3275
|
| 928 |
+
},
|
| 929 |
+
{
|
| 930 |
+
"epoch": 2.64,
|
| 931 |
+
"grad_norm": 0.03590141609311104,
|
| 932 |
+
"learning_rate": 0.00034733418296220233,
|
| 933 |
+
"loss": 0.1798,
|
| 934 |
+
"step": 3300
|
| 935 |
+
},
|
| 936 |
+
{
|
| 937 |
+
"epoch": 2.66,
|
| 938 |
+
"grad_norm": 0.04315745085477829,
|
| 939 |
+
"learning_rate": 0.00031038669346436043,
|
| 940 |
+
"loss": 0.1818,
|
| 941 |
+
"step": 3325
|
| 942 |
+
},
|
| 943 |
+
{
|
| 944 |
+
"epoch": 2.68,
|
| 945 |
+
"grad_norm": 0.040417563170194626,
|
| 946 |
+
"learning_rate": 0.0002754371946607579,
|
| 947 |
+
"loss": 0.1848,
|
| 948 |
+
"step": 3350
|
| 949 |
+
},
|
| 950 |
+
{
|
| 951 |
+
"epoch": 2.7,
|
| 952 |
+
"grad_norm": 0.039845407009124756,
|
| 953 |
+
"learning_rate": 0.00024250461232154397,
|
| 954 |
+
"loss": 0.1805,
|
| 955 |
+
"step": 3375
|
| 956 |
+
},
|
| 957 |
+
{
|
| 958 |
+
"epoch": 2.7199999999999998,
|
| 959 |
+
"grad_norm": 0.04121808707714081,
|
| 960 |
+
"learning_rate": 0.00021160678002102307,
|
| 961 |
+
"loss": 0.1923,
|
| 962 |
+
"step": 3400
|
| 963 |
+
},
|
| 964 |
+
{
|
| 965 |
+
"epoch": 2.74,
|
| 966 |
+
"grad_norm": 0.02966867946088314,
|
| 967 |
+
"learning_rate": 0.00018276042948045523,
|
| 968 |
+
"loss": 0.182,
|
| 969 |
+
"step": 3425
|
| 970 |
+
},
|
| 971 |
+
{
|
| 972 |
+
"epoch": 2.76,
|
| 973 |
+
"grad_norm": 0.032377708703279495,
|
| 974 |
+
"learning_rate": 0.00015598118150753982,
|
| 975 |
+
"loss": 0.1735,
|
| 976 |
+
"step": 3450
|
| 977 |
+
},
|
| 978 |
+
{
|
| 979 |
+
"epoch": 2.7800000000000002,
|
| 980 |
+
"grad_norm": 0.041362907737493515,
|
| 981 |
+
"learning_rate": 0.00013128353753746858,
|
| 982 |
+
"loss": 0.1779,
|
| 983 |
+
"step": 3475
|
| 984 |
+
},
|
| 985 |
+
{
|
| 986 |
+
"epoch": 2.8,
|
| 987 |
+
"grad_norm": 0.034777916967868805,
|
| 988 |
+
"learning_rate": 0.00010868087178014462,
|
| 989 |
+
"loss": 0.1782,
|
| 990 |
+
"step": 3500
|
| 991 |
+
},
|
| 992 |
+
{
|
| 993 |
+
"epoch": 2.82,
|
| 994 |
+
"grad_norm": 0.037447091192007065,
|
| 995 |
+
"learning_rate": 8.818542397781038e-05,
|
| 996 |
+
"loss": 0.1799,
|
| 997 |
+
"step": 3525
|
| 998 |
+
},
|
| 999 |
+
{
|
| 1000 |
+
"epoch": 2.84,
|
| 1001 |
+
"grad_norm": 0.03927723690867424,
|
| 1002 |
+
"learning_rate": 6.980829277701295e-05,
|
| 1003 |
+
"loss": 0.1771,
|
| 1004 |
+
"step": 3550
|
| 1005 |
+
},
|
| 1006 |
+
{
|
| 1007 |
+
"epoch": 2.86,
|
| 1008 |
+
"grad_norm": 0.037519097328186035,
|
| 1009 |
+
"learning_rate": 5.355942971849004e-05,
|
| 1010 |
+
"loss": 0.1731,
|
| 1011 |
+
"step": 3575
|
| 1012 |
+
},
|
| 1013 |
+
{
|
| 1014 |
+
"epoch": 2.88,
|
| 1015 |
+
"grad_norm": 0.03824010491371155,
|
| 1016 |
+
"learning_rate": 3.94476338482348e-05,
|
| 1017 |
+
"loss": 0.182,
|
| 1018 |
+
"step": 3600
|
| 1019 |
+
},
|
| 1020 |
+
{
|
| 1021 |
+
"epoch": 2.9,
|
| 1022 |
+
"grad_norm": 0.03913429006934166,
|
| 1023 |
+
"learning_rate": 2.748054695265845e-05,
|
| 1024 |
+
"loss": 0.1824,
|
| 1025 |
+
"step": 3625
|
| 1026 |
+
},
|
| 1027 |
+
{
|
| 1028 |
+
"epoch": 2.92,
|
| 1029 |
+
"grad_norm": 0.04404062405228615,
|
| 1030 |
+
"learning_rate": 1.7664649420428003e-05,
|
| 1031 |
+
"loss": 0.1789,
|
| 1032 |
+
"step": 3650
|
| 1033 |
+
},
|
| 1034 |
+
{
|
| 1035 |
+
"epoch": 2.94,
|
| 1036 |
+
"grad_norm": 0.0337517075240612,
|
| 1037 |
+
"learning_rate": 1.0005256733218726e-05,
|
| 1038 |
+
"loss": 0.1739,
|
| 1039 |
+
"step": 3675
|
| 1040 |
+
},
|
| 1041 |
+
{
|
| 1042 |
+
"epoch": 2.96,
|
| 1043 |
+
"grad_norm": 0.03655869513750076,
|
| 1044 |
+
"learning_rate": 4.506516587291731e-06,
|
| 1045 |
+
"loss": 0.1764,
|
| 1046 |
+
"step": 3700
|
| 1047 |
+
},
|
| 1048 |
+
{
|
| 1049 |
+
"epoch": 2.98,
|
| 1050 |
+
"grad_norm": 0.032187193632125854,
|
| 1051 |
+
"learning_rate": 1.1714066474399764e-06,
|
| 1052 |
+
"loss": 0.1753,
|
| 1053 |
+
"step": 3725
|
| 1054 |
+
},
|
| 1055 |
+
{
|
| 1056 |
+
"epoch": 3.0,
|
| 1057 |
+
"grad_norm": 0.03194441273808479,
|
| 1058 |
+
"learning_rate": 1.7329345296523968e-09,
|
| 1059 |
+
"loss": 0.1811,
|
| 1060 |
+
"step": 3750
|
| 1061 |
+
},
|
| 1062 |
+
{
|
| 1063 |
+
"epoch": 3.0,
|
| 1064 |
+
"step": 3750,
|
| 1065 |
+
"total_flos": 2.43882352705536e+18,
|
| 1066 |
+
"train_loss": 0.238677699025472,
|
| 1067 |
+
"train_runtime": 3462.4523,
|
| 1068 |
+
"train_samples_per_second": 34.658,
|
| 1069 |
+
"train_steps_per_second": 1.083
|
| 1070 |
+
}
|
| 1071 |
+
],
|
| 1072 |
+
"logging_steps": 25,
|
| 1073 |
+
"max_steps": 3750,
|
| 1074 |
+
"num_input_tokens_seen": 0,
|
| 1075 |
+
"num_train_epochs": 3,
|
| 1076 |
+
"save_steps": 0,
|
| 1077 |
+
"stateful_callbacks": {
|
| 1078 |
+
"TrainerControl": {
|
| 1079 |
+
"args": {
|
| 1080 |
+
"should_epoch_stop": false,
|
| 1081 |
+
"should_evaluate": false,
|
| 1082 |
+
"should_log": false,
|
| 1083 |
+
"should_save": true,
|
| 1084 |
+
"should_training_stop": true
|
| 1085 |
+
},
|
| 1086 |
+
"attributes": {}
|
| 1087 |
+
}
|
| 1088 |
+
},
|
| 1089 |
+
"total_flos": 2.43882352705536e+18,
|
| 1090 |
+
"train_batch_size": 32,
|
| 1091 |
+
"trial_name": null,
|
| 1092 |
+
"trial_params": null
|
| 1093 |
+
}
|
nl_tasks/exps/run_ex28/trainer_state.json
ADDED
|
@@ -0,0 +1,1093 @@
| 1 |
+
{
|
| 2 |
+
"best_global_step": null,
|
| 3 |
+
"best_metric": null,
|
| 4 |
+
"best_model_checkpoint": null,
|
| 5 |
+
"epoch": 3.0,
|
| 6 |
+
"eval_steps": 500,
|
| 7 |
+
"global_step": 3750,
|
| 8 |
+
"is_hyper_param_search": false,
|
| 9 |
+
"is_local_process_zero": true,
|
| 10 |
+
"is_world_process_zero": true,
|
| 11 |
+
"log_history": [
|
| 12 |
+
{
|
| 13 |
+
"epoch": 0.02,
|
| 14 |
+
"grad_norm": 0.3354637026786804,
|
| 15 |
+
"learning_rate": 0.00128,
|
| 16 |
+
"loss": 0.5949,
|
| 17 |
+
"step": 25
|
| 18 |
+
},
|
| 19 |
+
{
|
| 20 |
+
"epoch": 0.04,
|
| 21 |
+
"grad_norm": 24.028608322143555,
|
| 22 |
+
"learning_rate": 0.002613333333333333,
|
| 23 |
+
"loss": 0.3875,
|
| 24 |
+
"step": 50
|
| 25 |
+
},
|
| 26 |
+
{
|
| 27 |
+
"epoch": 0.06,
|
| 28 |
+
"grad_norm": 0.8168126344680786,
|
| 29 |
+
"learning_rate": 0.003946666666666667,
|
| 30 |
+
"loss": 0.4411,
|
| 31 |
+
"step": 75
|
| 32 |
+
},
|
| 33 |
+
{
|
| 34 |
+
"epoch": 0.08,
|
| 35 |
+
"grad_norm": 0.4146294891834259,
|
| 36 |
+
"learning_rate": 0.00528,
|
| 37 |
+
"loss": 0.4422,
|
| 38 |
+
"step": 100
|
| 39 |
+
},
|
| 40 |
+
{
|
| 41 |
+
"epoch": 0.1,
|
| 42 |
+
"grad_norm": 0.3548749089241028,
|
| 43 |
+
"learning_rate": 0.006613333333333333,
|
| 44 |
+
"loss": 0.4081,
|
| 45 |
+
"step": 125
|
| 46 |
+
},
|
| 47 |
+
{
|
| 48 |
+
"epoch": 0.12,
|
| 49 |
+
"grad_norm": 0.46628203988075256,
|
| 50 |
+
"learning_rate": 0.007946666666666666,
|
| 51 |
+
"loss": 0.3749,
|
| 52 |
+
"step": 150
|
| 53 |
+
},
|
| 54 |
+
{
|
| 55 |
+
"epoch": 0.14,
|
| 56 |
+
"grad_norm": 3.237746477127075,
|
| 57 |
+
"learning_rate": 0.00928,
|
| 58 |
+
"loss": 0.3762,
|
| 59 |
+
"step": 175
|
| 60 |
+
},
|
| 61 |
+
{
|
| 62 |
+
"epoch": 0.16,
|
| 63 |
+
"grad_norm": 0.14871637523174286,
|
| 64 |
+
"learning_rate": 0.010613333333333332,
|
| 65 |
+
"loss": 0.3558,
|
| 66 |
+
"step": 200
|
| 67 |
+
},
|
| 68 |
+
{
|
| 69 |
+
"epoch": 0.18,
|
| 70 |
+
"grad_norm": 0.14990335702896118,
|
| 71 |
+
"learning_rate": 0.011946666666666668,
|
| 72 |
+
"loss": 0.3415,
|
| 73 |
+
"step": 225
|
| 74 |
+
},
|
| 75 |
+
{
|
| 76 |
+
"epoch": 0.2,
|
| 77 |
+
"grad_norm": 0.1260153353214264,
|
| 78 |
+
"learning_rate": 0.01328,
|
| 79 |
+
"loss": 0.3336,
|
| 80 |
+
"step": 250
|
| 81 |
+
},
|
| 82 |
+
{
|
| 83 |
+
"epoch": 0.22,
|
| 84 |
+
"grad_norm": 0.12185318768024445,
|
| 85 |
+
"learning_rate": 0.014613333333333334,
|
| 86 |
+
"loss": 0.3263,
|
| 87 |
+
"step": 275
|
| 88 |
+
},
|
| 89 |
+
{
|
| 90 |
+
"epoch": 0.24,
|
| 91 |
+
"grad_norm": 0.12580621242523193,
|
| 92 |
+
"learning_rate": 0.015946666666666668,
|
| 93 |
+
"loss": 0.3427,
|
| 94 |
+
"step": 300
|
| 95 |
+
},
|
| 96 |
+
{
|
| 97 |
+
"epoch": 0.26,
|
| 98 |
+
"grad_norm": 0.09728040546178818,
|
| 99 |
+
"learning_rate": 0.01728,
|
| 100 |
+
"loss": 0.3286,
|
| 101 |
+
"step": 325
|
| 102 |
+
},
|
| 103 |
+
{
|
| 104 |
+
"epoch": 0.28,
|
| 105 |
+
"grad_norm": 0.07153791934251785,
|
| 106 |
+
"learning_rate": 0.018613333333333332,
|
| 107 |
+
"loss": 0.3346,
|
| 108 |
+
"step": 350
|
| 109 |
+
},
|
| 110 |
+
{
|
| 111 |
+
"epoch": 0.3,
|
| 112 |
+
"grad_norm": 0.07144256681203842,
|
| 113 |
+
"learning_rate": 0.019946666666666665,
|
| 114 |
+
"loss": 0.3233,
|
| 115 |
+
"step": 375
|
| 116 |
+
},
|
| 117 |
+
{
|
| 118 |
+
"epoch": 0.32,
|
| 119 |
+
"grad_norm": 0.08008267730474472,
|
| 120 |
+
"learning_rate": 0.019997504677881223,
|
| 121 |
+
"loss": 0.3181,
|
| 122 |
+
"step": 400
|
| 123 |
+
},
|
| 124 |
+
{
|
| 125 |
+
"epoch": 0.34,
|
| 126 |
+
"grad_norm": 0.04643591120839119,
|
| 127 |
+
"learning_rate": 0.01998959986294895,
|
| 128 |
+
"loss": 0.304,
|
| 129 |
+
"step": 425
|
| 130 |
+
},
|
| 131 |
+
{
|
| 132 |
+
"epoch": 0.36,
|
| 133 |
+
"grad_norm": 0.05323820933699608,
|
| 134 |
+
"learning_rate": 0.01997628550346359,
|
| 135 |
+
"loss": 0.3053,
|
| 136 |
+
"step": 450
|
| 137 |
+
},
|
| 138 |
+
{
|
| 139 |
+
"epoch": 0.38,
|
| 140 |
+
"grad_norm": 0.049062035977840424,
|
| 141 |
+
"learning_rate": 0.019957568809385692,
|
| 142 |
+
"loss": 0.2969,
|
| 143 |
+
"step": 475
|
| 144 |
+
},
|
| 145 |
+
{
|
| 146 |
+
"epoch": 0.4,
|
| 147 |
+
"grad_norm": 0.06179765611886978,
|
| 148 |
+
"learning_rate": 0.019933459916135276,
|
| 149 |
+
"loss": 0.2954,
|
| 150 |
+
"step": 500
|
| 151 |
+
},
|
| 152 |
+
{
|
| 153 |
+
"epoch": 0.42,
|
| 154 |
+
"grad_norm": 0.04942788556218147,
|
| 155 |
+
"learning_rate": 0.019903971879103345,
|
| 156 |
+
"loss": 0.2957,
|
| 157 |
+
"step": 525
|
| 158 |
+
},
|
| 159 |
+
{
|
| 160 |
+
"epoch": 0.44,
|
| 161 |
+
"grad_norm": 0.05358466878533363,
|
| 162 |
+
"learning_rate": 0.01986912066658215,
|
| 163 |
+
"loss": 0.2813,
|
| 164 |
+
"step": 550
|
| 165 |
+
},
|
| 166 |
+
{
|
| 167 |
+
"epoch": 0.46,
|
| 168 |
+
"grad_norm": 0.046258535236120224,
|
| 169 |
+
"learning_rate": 0.019828925151118088,
|
| 170 |
+
"loss": 0.2995,
|
| 171 |
+
"step": 575
|
| 172 |
+
},
|
| 173 |
+
{
|
| 174 |
+
"epoch": 0.48,
|
| 175 |
+
"grad_norm": 0.04525147005915642,
|
| 176 |
+
"learning_rate": 0.019783407099291876,
|
| 177 |
+
"loss": 0.2839,
|
| 178 |
+
"step": 600
|
| 179 |
+
},
|
| 180 |
+
{
|
| 181 |
+
"epoch": 0.5,
|
| 182 |
+
"grad_norm": 0.04446304962038994,
|
| 183 |
+
"learning_rate": 0.019732591159931562,
|
| 184 |
+
"loss": 0.2961,
|
| 185 |
+
"step": 625
|
| 186 |
+
},
|
| 187 |
+
{
|
| 188 |
+
"epoch": 0.52,
|
| 189 |
+
"grad_norm": 0.04161718487739563,
|
| 190 |
+
"learning_rate": 0.019676504850764758,
|
| 191 |
+
"loss": 0.2866,
|
| 192 |
+
"step": 650
|
| 193 |
+
},
|
| 194 |
+
{
|
| 195 |
+
"epoch": 0.54,
|
| 196 |
+
"grad_norm": 0.03767893463373184,
|
| 197 |
+
"learning_rate": 0.0196151785435173,
|
| 198 |
+
"loss": 0.2761,
|
| 199 |
+
"step": 675
|
| 200 |
+
},
|
| 201 |
+
{
|
| 202 |
+
"epoch": 0.56,
|
| 203 |
+
"grad_norm": 0.04273810610175133,
|
| 204 |
+
"learning_rate": 0.019548645447466432,
|
| 205 |
+
"loss": 0.2767,
|
| 206 |
+
"step": 700
|
| 207 |
+
},
|
| 208 |
+
{
|
| 209 |
+
"epoch": 0.58,
|
| 210 |
+
"grad_norm": 0.0471462719142437,
|
| 211 |
+
"learning_rate": 0.01947694159145737,
|
| 212 |
+
"loss": 0.283,
|
| 213 |
+
"step": 725
|
| 214 |
+
},
|
| 215 |
+
{
|
| 216 |
+
"epoch": 0.6,
|
| 217 |
+
"grad_norm": 0.03773315250873566,
|
| 218 |
+
"learning_rate": 0.01940010580439308,
|
| 219 |
+
"loss": 0.2799,
|
| 220 |
+
"step": 750
|
| 221 |
+
},
|
| 222 |
+
{
|
| 223 |
+
"epoch": 0.62,
|
| 224 |
+
"grad_norm": 0.04052406921982765,
|
| 225 |
+
"learning_rate": 0.019318179694207724,
|
| 226 |
+
"loss": 0.2679,
|
| 227 |
+
"step": 775
|
| 228 |
+
},
|
| 229 |
+
{
|
| 230 |
+
"epoch": 0.64,
|
| 231 |
+
"grad_norm": 0.04028647765517235,
|
| 232 |
+
"learning_rate": 0.019231207625335235,
|
| 233 |
+
"loss": 0.2734,
|
| 234 |
+
"step": 800
|
| 235 |
+
},
|
| 236 |
+
{
|
| 237 |
+
"epoch": 0.66,
|
| 238 |
+
"grad_norm": 0.03707930073142052,
|
| 239 |
+
"learning_rate": 0.019139236694685185,
|
| 240 |
+
"loss": 0.2665,
|
| 241 |
+
"step": 825
|
| 242 |
+
},
|
| 243 |
+
{
|
| 244 |
+
"epoch": 0.68,
|
| 245 |
+
"grad_norm": 0.0416373685002327,
|
| 246 |
+
"learning_rate": 0.019042316706138988,
|
| 247 |
+
"loss": 0.2673,
|
| 248 |
+
"step": 850
|
| 249 |
+
},
|
| 250 |
+
{
|
| 251 |
+
"epoch": 0.7,
|
| 252 |
+
"grad_norm": 0.03203510865569115,
|
| 253 |
+
"learning_rate": 0.018940500143580216,
|
| 254 |
+
"loss": 0.2757,
|
| 255 |
+
"step": 875
|
| 256 |
+
},
|
| 257 |
+
{
|
| 258 |
+
"epoch": 0.72,
|
| 259 |
+
"grad_norm": 0.031195444986224174,
|
| 260 |
+
"learning_rate": 0.018833842142473644,
|
| 261 |
+
"loss": 0.2698,
|
| 262 |
+
"step": 900
|
| 263 |
+
},
|
| 264 |
+
{
|
| 265 |
+
"epoch": 0.74,
|
| 266 |
+
"grad_norm": 0.03926091641187668,
|
| 267 |
+
"learning_rate": 0.018722400460008438,
|
| 268 |
+
"loss": 0.2676,
|
| 269 |
+
"step": 925
|
| 270 |
+
},
|
| 271 |
+
{
|
| 272 |
+
"epoch": 0.76,
|
| 273 |
+
"grad_norm": 0.03447200357913971,
|
| 274 |
+
"learning_rate": 0.018606235443821603,
|
| 275 |
+
"loss": 0.2646,
|
| 276 |
+
"step": 950
|
| 277 |
+
},
|
| 278 |
+
{
|
| 279 |
+
"epoch": 0.78,
|
| 280 |
+
"grad_norm": 0.04425395652651787,
|
| 281 |
+
"learning_rate": 0.018485409999318678,
|
| 282 |
+
"loss": 0.2613,
|
| 283 |
+
"step": 975
|
| 284 |
+
},
|
| 285 |
+
{
|
| 286 |
+
"epoch": 0.8,
|
| 287 |
+
"grad_norm": 0.04347499832510948,
|
| 288 |
+
"learning_rate": 0.018359989555609354,
|
| 289 |
+
"loss": 0.2694,
|
| 290 |
+
"step": 1000
|
| 291 |
+
},
|
| 292 |
+
{
|
| 293 |
+
"epoch": 0.82,
|
| 294 |
+
"grad_norm": 0.039355143904685974,
|
| 295 |
+
"learning_rate": 0.018230042030076454,
|
| 296 |
+
"loss": 0.2672,
|
| 297 |
+
"step": 1025
|
| 298 |
+
},
|
| 299 |
+
{
|
| 300 |
+
"epoch": 0.84,
|
| 301 |
+
"grad_norm": 0.036026645451784134,
|
| 302 |
+
"learning_rate": 0.018095637791597462,
|
| 303 |
+
"loss": 0.2584,
|
| 304 |
+
"step": 1050
|
| 305 |
+
},
|
| 306 |
+
{
|
| 307 |
+
"epoch": 0.86,
|
| 308 |
+
"grad_norm": 0.03510862588882446,
|
| 309 |
+
"learning_rate": 0.017956849622438553,
|
| 310 |
+
"loss": 0.2547,
|
| 311 |
+
"step": 1075
|
| 312 |
+
},
|
| 313 |
+
{
|
| 314 |
+
"epoch": 0.88,
|
| 315 |
+
"grad_norm": 0.033159829676151276,
|
| 316 |
+
"learning_rate": 0.0178137526788417,
|
| 317 |
+
"loss": 0.255,
|
| 318 |
+
"step": 1100
|
| 319 |
+
},
|
| 320 |
+
{
|
| 321 |
+
"epoch": 0.9,
|
| 322 |
+
"grad_norm": 0.03295517340302467,
|
| 323 |
+
"learning_rate": 0.01766642445032625,
|
| 324 |
+
"loss": 0.2646,
|
| 325 |
+
"step": 1125
|
| 326 |
+
},
|
| 327 |
+
{
|
| 328 |
+
"epoch": 0.92,
|
| 329 |
+
"grad_norm": 0.029308080673217773,
|
| 330 |
+
"learning_rate": 0.017514944717726962,
|
| 331 |
+
"loss": 0.2576,
|
| 332 |
+
"step": 1150
|
| 333 |
+
},
|
| 334 |
+
{
|
| 335 |
+
"epoch": 0.94,
|
| 336 |
+
"grad_norm": 0.036943402141332626,
|
| 337 |
+
"learning_rate": 0.01735939550999131,
|
| 338 |
+
"loss": 0.2605,
|
| 339 |
+
"step": 1175
|
| 340 |
+
},
|
| 341 |
+
{
|
| 342 |
+
"epoch": 0.96,
|
| 343 |
+
"grad_norm": 0.02846529334783554,
|
| 344 |
+
"learning_rate": 0.017199861059759337,
|
| 345 |
+
"loss": 0.2594,
|
| 346 |
+
"step": 1200
|
| 347 |
+
},
|
| 348 |
+
{
|
| 349 |
+
"epoch": 0.98,
|
| 350 |
+
"grad_norm": 0.04283558577299118,
|
| 351 |
+
"learning_rate": 0.017036427757750203,
|
| 352 |
+
"loss": 0.2628,
|
| 353 |
+
"step": 1225
|
| 354 |
+
},
|
| 355 |
+
{
|
| 356 |
+
"epoch": 1.0,
|
| 357 |
+
"grad_norm": 0.03367648273706436,
|
| 358 |
+
"learning_rate": 0.016869184105980088,
|
| 359 |
+
"loss": 0.2529,
|
| 360 |
+
"step": 1250
|
| 361 |
+
},
|
| 362 |
+
{
|
| 363 |
+
"epoch": 1.02,
|
| 364 |
+
"grad_norm": 0.03456483781337738,
|
| 365 |
+
"learning_rate": 0.01669822066983678,
|
| 366 |
+
"loss": 0.2288,
|
| 367 |
+
"step": 1275
|
| 368 |
+
},
|
| 369 |
+
{
|
| 370 |
+
"epoch": 1.04,
|
| 371 |
+
"grad_norm": 0.024872206151485443,
|
| 372 |
+
"learning_rate": 0.01652363002903693,
|
| 373 |
+
"loss": 0.2375,
|
| 374 |
+
"step": 1300
|
| 375 |
+
},
|
| 376 |
+
{
|
| 377 |
+
"epoch": 1.06,
|
| 378 |
+
"grad_norm": 0.030464885756373405,
|
| 379 |
+
"learning_rate": 0.0163455067274925,
|
| 380 |
+
"loss": 0.2332,
|
| 381 |
+
"step": 1325
|
| 382 |
+
},
|
| 383 |
+
{
|
| 384 |
+
"epoch": 1.08,
|
| 385 |
+
"grad_norm": 0.031713806092739105,
|
| 386 |
+
"learning_rate": 0.016163947222113565,
|
| 387 |
+
"loss": 0.2398,
|
| 388 |
+
"step": 1350
|
| 389 |
+
},
|
| 390 |
+
{
|
| 391 |
+
"epoch": 1.1,
|
| 392 |
+
"grad_norm": 0.025640636682510376,
|
| 393 |
+
"learning_rate": 0.01597904983057519,
|
| 394 |
+
"loss": 0.2435,
|
| 395 |
+
"step": 1375
|
| 396 |
+
},
|
| 397 |
+
{
|
| 398 |
+
"epoch": 1.12,
|
| 399 |
+
"grad_norm": 0.02801099419593811,
|
| 400 |
+
"learning_rate": 0.01579091467807668,
|
| 401 |
+
"loss": 0.2423,
|
| 402 |
+
"step": 1400
|
| 403 |
+
},
|
| 404 |
+
{
|
| 405 |
+
"epoch": 1.1400000000000001,
|
| 406 |
+
"grad_norm": 0.03283185139298439,
|
| 407 |
+
"learning_rate": 0.01559964364312202,
|
| 408 |
+
"loss": 0.2334,
|
| 409 |
+
"step": 1425
|
| 410 |
+
},
|
| 411 |
+
{
|
| 412 |
+
"epoch": 1.16,
|
| 413 |
+
"grad_norm": 0.0458955243229866,
|
| 414 |
+
"learning_rate": 0.01540534030235087,
|
| 415 |
+
"loss": 0.2426,
|
| 416 |
+
"step": 1450
|
| 417 |
+
},
|
| 418 |
+
{
|
| 419 |
+
"epoch": 1.18,
|
| 420 |
+
"grad_norm": 0.030243318527936935,
|
| 421 |
+
"learning_rate": 0.015208109874449979,
|
| 422 |
+
"loss": 0.2383,
|
| 423 |
+
"step": 1475
|
| 424 |
+
},
|
| 425 |
+
{
|
| 426 |
+
"epoch": 1.2,
|
| 427 |
+
"grad_norm": 0.029557039961218834,
|
| 428 |
+
"learning_rate": 0.015008059163175401,
|
| 429 |
+
"loss": 0.2388,
|
| 430 |
+
"step": 1500
|
| 431 |
+
},
|
| 432 |
+
{
|
| 433 |
+
"epoch": 1.22,
|
| 434 |
+
"grad_norm": 0.028723224997520447,
|
| 435 |
+
"learning_rate": 0.014805296499516408,
|
| 436 |
+
"loss": 0.2434,
|
| 437 |
+
"step": 1525
|
| 438 |
+
},
|
| 439 |
+
{
|
| 440 |
+
"epoch": 1.24,
|
| 441 |
+
"grad_norm": 0.02488754130899906,
|
| 442 |
+
"learning_rate": 0.014599931683032325,
|
| 443 |
+
"loss": 0.2348,
|
| 444 |
+
"step": 1550
|
| 445 |
+
},
|
| 446 |
+
{
|
| 447 |
+
"epoch": 1.26,
|
| 448 |
+
"grad_norm": 0.025774620473384857,
|
| 449 |
+
"learning_rate": 0.014392075922394132,
|
| 450 |
+
"loss": 0.2327,
|
| 451 |
+
"step": 1575
|
| 452 |
+
},
|
| 453 |
+
{
|
| 454 |
+
"epoch": 1.28,
|
| 455 |
+
"grad_norm": 0.027344953268766403,
|
| 456 |
+
"learning_rate": 0.014181841775163014,
|
| 457 |
+
"loss": 0.2298,
|
| 458 |
+
"step": 1600
|
| 459 |
+
},
|
| 460 |
+
{
|
| 461 |
+
"epoch": 1.3,
|
| 462 |
+
"grad_norm": 0.02487209439277649,
|
| 463 |
+
"learning_rate": 0.013969343086838457,
|
| 464 |
+
"loss": 0.2299,
|
| 465 |
+
"step": 1625
|
| 466 |
+
},
|
| 467 |
+
{
|
| 468 |
+
"epoch": 1.32,
|
| 469 |
+
"grad_norm": 0.02767680585384369,
|
| 470 |
+
"learning_rate": 0.01375469492920889,
|
| 471 |
+
"loss": 0.2232,
|
| 472 |
+
"step": 1650
|
| 473 |
+
},
|
| 474 |
+
{
|
| 475 |
+
"epoch": 1.34,
|
| 476 |
+
"grad_norm": 0.023056739941239357,
|
| 477 |
+
"learning_rate": 0.013538013538038294,
|
| 478 |
+
"loss": 0.2287,
|
| 479 |
+
"step": 1675
|
| 480 |
+
},
|
| 481 |
+
{
|
| 482 |
+
"epoch": 1.3599999999999999,
|
| 483 |
+
"grad_norm": 0.027000591158866882,
|
| 484 |
+
"learning_rate": 0.013319416250122482,
|
| 485 |
+
"loss": 0.2288,
|
| 486 |
+
"step": 1700
|
| 487 |
+
},
|
| 488 |
+
{
|
| 489 |
+
"epoch": 1.38,
|
| 490 |
+
"grad_norm": 0.025275854393839836,
|
| 491 |
+
"learning_rate": 0.013099021439749154,
|
| 492 |
+
"loss": 0.2282,
|
| 493 |
+
"step": 1725
|
| 494 |
+
},
|
| 495 |
+
{
|
| 496 |
+
"epoch": 1.4,
|
| 497 |
+
"grad_norm": 0.028009621426463127,
|
| 498 |
+
"learning_rate": 0.012876948454596129,
|
| 499 |
+
"loss": 0.2288,
|
| 500 |
+
"step": 1750
|
| 501 |
+
},
|
| 502 |
+
{
|
| 503 |
+
"epoch": 1.42,
|
| 504 |
+
"grad_norm": 0.023568755015730858,
|
| 505 |
+
"learning_rate": 0.01265331755110247,
|
| 506 |
+
"loss": 0.2302,
|
| 507 |
+
"step": 1775
|
| 508 |
+
},
|
| 509 |
+
{
|
| 510 |
+
"epoch": 1.44,
|
| 511 |
+
"grad_norm": 0.029848851263523102,
|
| 512 |
+
"learning_rate": 0.01242824982934751,
|
| 513 |
+
"loss": 0.2176,
|
| 514 |
+
"step": 1800
|
| 515 |
+
},
|
| 516 |
+
{
|
| 517 |
+
"epoch": 1.46,
|
| 518 |
+
"grad_norm": 0.02378924749791622,
|
| 519 |
+
"learning_rate": 0.012201867167473015,
|
| 520 |
+
"loss": 0.2242,
|
| 521 |
+
"step": 1825
|
| 522 |
+
},
|
| 523 |
+
{
|
| 524 |
+
"epoch": 1.48,
|
| 525 |
+
"grad_norm": 0.030996032059192657,
|
| 526 |
+
"learning_rate": 0.01197429215568403,
|
| 527 |
+
"loss": 0.2275,
|
| 528 |
+
"step": 1850
|
| 529 |
+
},
|
| 530 |
+
{
|
| 531 |
+
"epoch": 1.5,
|
| 532 |
+
"grad_norm": 0.02629832550883293,
|
| 533 |
+
"learning_rate": 0.0117456480298641,
|
| 534 |
+
"loss": 0.2295,
|
| 535 |
+
"step": 1875
|
| 536 |
+
},
|
| 537 |
+
{
|
| 538 |
+
"epoch": 1.52,
|
| 539 |
+
"grad_norm": 0.02670891210436821,
|
| 540 |
+
"learning_rate": 0.011516058604840892,
|
| 541 |
+
"loss": 0.2272,
|
| 542 |
+
"step": 1900
|
| 543 |
+
},
|
| 544 |
+
{
|
| 545 |
+
"epoch": 1.54,
|
| 546 |
+
"grad_norm": 0.027848055586218834,
|
| 547 |
+
"learning_rate": 0.011285648207338251,
|
| 548 |
+
"loss": 0.225,
|
| 549 |
+
"step": 1925
|
| 550 |
+
},
|
| 551 |
+
{
|
| 552 |
+
"epoch": 1.56,
|
| 553 |
+
"grad_norm": 0.025103619322180748,
|
| 554 |
+
"learning_rate": 0.01105454160865112,
|
| 555 |
+
"loss": 0.2148,
|
| 556 |
+
"step": 1950
|
| 557 |
+
},
|
| 558 |
+
{
|
| 559 |
+
"epoch": 1.58,
|
| 560 |
+
"grad_norm": 0.02855285070836544,
|
| 561 |
+
"learning_rate": 0.010822863957079655,
|
| 562 |
+
"loss": 0.2292,
|
| 563 |
+
"step": 1975
|
| 564 |
+
},
|
| 565 |
+
{
|
| 566 |
+
"epoch": 1.6,
|
| 567 |
+
"grad_norm": 0.024564214050769806,
|
| 568 |
+
"learning_rate": 0.010590740710159228,
|
| 569 |
+
"loss": 0.2288,
|
| 570 |
+
"step": 2000
|
| 571 |
+
},
|
| 572 |
+
{
|
| 573 |
+
"epoch": 1.62,
|
| 574 |
+
"grad_norm": 0.029064735397696495,
|
| 575 |
+
"learning_rate": 0.010358297566722945,
|
| 576 |
+
"loss": 0.2128,
|
| 577 |
+
"step": 2025
|
| 578 |
+
},
|
| 579 |
+
{
|
| 580 |
+
"epoch": 1.6400000000000001,
|
| 581 |
+
"grad_norm": 0.024909919127821922,
|
| 582 |
+
"learning_rate": 0.010125660398833527,
|
| 583 |
+
"loss": 0.2235,
|
| 584 |
+
"step": 2050
|
| 585 |
+
},
|
| 586 |
+
{
|
| 587 |
+
"epoch": 1.6600000000000001,
|
| 588 |
+
"grad_norm": 0.022246485576033592,
|
| 589 |
+
"learning_rate": 0.009892955183621354,
|
| 590 |
+
"loss": 0.2283,
|
| 591 |
+
"step": 2075
|
| 592 |
+
},
|
| 593 |
+
{
|
| 594 |
+
"epoch": 1.6800000000000002,
|
| 595 |
+
"grad_norm": 0.024962959811091423,
|
| 596 |
+
"learning_rate": 0.00966030793506564,
|
| 597 |
+
"loss": 0.2243,
|
| 598 |
+
"step": 2100
|
| 599 |
+
},
|
| 600 |
+
{
|
| 601 |
+
"epoch": 1.7,
|
| 602 |
+
"grad_norm": 0.023296242579817772,
|
| 603 |
+
"learning_rate": 0.00942784463575562,
|
| 604 |
+
"loss": 0.2225,
|
| 605 |
+
"step": 2125
|
| 606 |
+
},
|
| 607 |
+
{
|
| 608 |
+
"epoch": 1.72,
|
| 609 |
+
"grad_norm": 0.026053672656416893,
|
| 610 |
+
"learning_rate": 0.009195691168668772,
|
| 611 |
+
"loss": 0.214,
|
| 612 |
+
"step": 2150
|
| 613 |
+
},
|
| 614 |
+
{
|
| 615 |
+
"epoch": 1.74,
|
| 616 |
+
"grad_norm": 0.022729799151420593,
|
| 617 |
+
"learning_rate": 0.008963973249002968,
|
| 618 |
+
"loss": 0.2309,
|
| 619 |
+
"step": 2175
|
| 620 |
+
},
|
| 621 |
+
{
|
| 622 |
+
"epoch": 1.76,
|
| 623 |
+
"grad_norm": 0.023254524916410446,
|
| 624 |
+
"learning_rate": 0.008732816356099456,
|
| 625 |
+
"loss": 0.2217,
|
| 626 |
+
"step": 2200
|
| 627 |
+
},
|
| 628 |
+
{
|
| 629 |
+
"epoch": 1.78,
|
| 630 |
+
"grad_norm": 0.026419617235660553,
|
| 631 |
+
"learning_rate": 0.008502345665493597,
|
| 632 |
+
"loss": 0.215,
|
| 633 |
+
"step": 2225
|
| 634 |
+
},
|
| 635 |
+
{
|
| 636 |
+
"epoch": 1.8,
|
| 637 |
+
"grad_norm": 0.02386222593486309,
|
| 638 |
+
"learning_rate": 0.00827268598113011,
|
| 639 |
+
"loss": 0.225,
|
| 640 |
+
"step": 2250
|
| 641 |
+
},
|
| 642 |
+
{
|
| 643 |
+
"epoch": 1.8199999999999998,
|
| 644 |
+
"grad_norm": 0.023376762866973877,
|
| 645 |
+
"learning_rate": 0.00804396166777952,
|
| 646 |
+
"loss": 0.2156,
|
| 647 |
+
"step": 2275
|
| 648 |
+
},
|
| 649 |
+
{
|
| 650 |
+
"epoch": 1.8399999999999999,
|
| 651 |
+
"grad_norm": 0.025676891207695007,
|
| 652 |
+
"learning_rate": 0.007816296583692449,
|
| 653 |
+
"loss": 0.2188,
|
| 654 |
+
"step": 2300
|
| 655 |
+
},
|
| 656 |
+
{
|
| 657 |
+
"epoch": 1.8599999999999999,
|
| 658 |
+
"grad_norm": 0.021596932783722878,
|
| 659 |
+
"learning_rate": 0.007589814013528186,
|
| 660 |
+
"loss": 0.2217,
|
| 661 |
+
"step": 2325
|
| 662 |
+
},
|
| 663 |
+
{
|
| 664 |
+
"epoch": 1.88,
|
| 665 |
+
"grad_norm": 0.02387806959450245,
|
| 666 |
+
"learning_rate": 0.0073646366015938745,
|
| 667 |
+
"loss": 0.2134,
|
| 668 |
+
"step": 2350
|
| 669 |
+
},
|
| 670 |
+
{
|
| 671 |
+
"epoch": 1.9,
|
| 672 |
+
"grad_norm": 0.02620845101773739,
|
| 673 |
+
"learning_rate": 0.007140886285430448,
|
| 674 |
+
"loss": 0.2373,
|
| 675 |
+
"step": 2375
|
| 676 |
+
},
|
| 677 |
+
{
|
| 678 |
+
"epoch": 1.92,
|
| 679 |
+
"grad_norm": 0.02476939558982849,
|
| 680 |
+
"learning_rate": 0.006918684229781305,
|
| 681 |
+
"loss": 0.2217,
|
| 682 |
+
"step": 2400
|
| 683 |
+
},
|
| 684 |
+
{
|
| 685 |
+
"epoch": 1.94,
|
| 686 |
+
"grad_norm": 0.026036998257040977,
|
| 687 |
+
"learning_rate": 0.0066981507609794625,
|
| 688 |
+
"loss": 0.2138,
|
| 689 |
+
"step": 2425
|
| 690 |
+
},
|
| 691 |
+
{
|
| 692 |
+
"epoch": 1.96,
|
| 693 |
+
"grad_norm": 0.023386968299746513,
|
| 694 |
+
"learning_rate": 0.006479405301788727,
|
| 695 |
+
"loss": 0.2157,
|
| 696 |
+
"step": 2450
|
| 697 |
+
},
|
| 698 |
+
{
|
| 699 |
+
"epoch": 1.98,
|
| 700 |
+
"grad_norm": 0.02384364791214466,
|
| 701 |
+
"learning_rate": 0.006262566306734154,
|
| 702 |
+
"loss": 0.2194,
|
| 703 |
+
"step": 2475
|
| 704 |
+
},
|
| 705 |
+
{
|
| 706 |
+
"epoch": 2.0,
|
| 707 |
+
"grad_norm": 0.02077922783792019,
|
| 708 |
+
"learning_rate": 0.006047751197956838,
|
| 709 |
+
"loss": 0.2168,
|
| 710 |
+
"step": 2500
|
| 711 |
+
},
|
| 712 |
+
{
|
| 713 |
+
"epoch": 2.02,
|
| 714 |
+
"grad_norm": 0.025426547974348068,
|
| 715 |
+
"learning_rate": 0.005835076301627752,
|
| 716 |
+
"loss": 0.1909,
|
| 717 |
+
"step": 2525
|
| 718 |
+
},
|
| 719 |
+
{
|
| 720 |
+
"epoch": 2.04,
|
| 721 |
+
"grad_norm": 0.025073401629924774,
|
| 722 |
+
"learning_rate": 0.005624656784955071,
|
| 723 |
+
"loss": 0.1848,
|
| 724 |
+
"step": 2550
|
| 725 |
+
},
|
| 726 |
+
{
|
| 727 |
+
"epoch": 2.06,
|
| 728 |
+
"grad_norm": 0.026666883379220963,
|
| 729 |
+
"learning_rate": 0.0054166065938191016,
|
| 730 |
+
"loss": 0.1931,
|
| 731 |
+
"step": 2575
|
| 732 |
+
},
|
| 733 |
+
{
|
| 734 |
+
"epoch": 2.08,
|
| 735 |
+
"grad_norm": 0.025660671293735504,
|
| 736 |
+
"learning_rate": 0.005211038391068576,
|
| 737 |
+
"loss": 0.1838,
|
| 738 |
+
"step": 2600
|
| 739 |
+
},
|
| 740 |
+
{
|
| 741 |
+
"epoch": 2.1,
|
| 742 |
+
"grad_norm": 0.02300131320953369,
|
| 743 |
+
"learning_rate": 0.005008063495511723,
|
| 744 |
+
"loss": 0.1932,
|
| 745 |
+
"step": 2625
|
| 746 |
+
},
|
| 747 |
+
{
|
| 748 |
+
"epoch": 2.12,
|
| 749 |
+
"grad_norm": 0.021838432177901268,
|
| 750 |
+
"learning_rate": 0.004807791821635186,
|
| 751 |
+
"loss": 0.1856,
|
| 752 |
+
"step": 2650
|
| 753 |
+
},
|
| 754 |
+
{
|
| 755 |
+
"epoch": 2.14,
|
| 756 |
+
"grad_norm": 0.02283412404358387,
|
| 757 |
+
"learning_rate": 0.004610331820083378,
|
| 758 |
+
"loss": 0.192,
|
| 759 |
+
"step": 2675
|
| 760 |
+
},
|
| 761 |
+
{
|
| 762 |
+
"epoch": 2.16,
|
| 763 |
+
"grad_norm": 0.024570999667048454,
|
| 764 |
+
"learning_rate": 0.004415790418930541,
|
| 765 |
+
"loss": 0.1874,
|
| 766 | +      "step": 2700
| 767 | +    },
| 768 | +    {
| 769 | +      "epoch": 2.18,
| 770 | +      "grad_norm": 0.02300398424267769,
| 771 | +      "learning_rate": 0.0042242729657773264,
| 772 | +      "loss": 0.1953,
| 773 | +      "step": 2725
| 774 | +    },
| 775 | +    {
| 776 | +      "epoch": 2.2,
| 777 | +      "grad_norm": 0.02675442397594452,
| 778 | +      "learning_rate": 0.004035883170703182,
| 779 | +      "loss": 0.1894,
| 780 | +      "step": 2750
| 781 | +    },
| 782 | +    {
| 783 | +      "epoch": 2.22,
| 784 | +      "grad_norm": 0.023160850629210472,
| 785 | +      "learning_rate": 0.003850723050105548,
| 786 | +      "loss": 0.1844,
| 787 | +      "step": 2775
| 788 | +    },
| 789 | +    {
| 790 | +      "epoch": 2.24,
| 791 | +      "grad_norm": 0.022288281470537186,
| 792 | +      "learning_rate": 0.003668892871456144,
| 793 | +      "loss": 0.1848,
| 794 | +      "step": 2800
| 795 | +    },
| 796 | +    {
| 797 | +      "epoch": 2.26,
| 798 | +      "grad_norm": 0.02427896484732628,
| 799 | +      "learning_rate": 0.0034904910990043924,
| 800 | +      "loss": 0.1943,
| 801 | +      "step": 2825
| 802 | +    },
| 803 | +    {
| 804 | +      "epoch": 2.2800000000000002,
| 805 | +      "grad_norm": 0.027167946100234985,
| 806 | +      "learning_rate": 0.0033156143404572793,
| 807 | +      "loss": 0.1838,
| 808 | +      "step": 2850
| 809 | +    },
| 810 | +    {
| 811 | +      "epoch": 2.3,
| 812 | +      "grad_norm": 0.022306451573967934,
| 813 | +      "learning_rate": 0.003144357294664565,
| 814 | +      "loss": 0.1877,
| 815 | +      "step": 2875
| 816 | +    },
| 817 | +    {
| 818 | +      "epoch": 2.32,
| 819 | +      "grad_norm": 0.02225615084171295,
| 820 | +      "learning_rate": 0.0029768127003377077,
| 821 | +      "loss": 0.1933,
| 822 | +      "step": 2900
| 823 | +    },
| 824 | +    {
| 825 | +      "epoch": 2.34,
| 826 | +      "grad_norm": 0.02079549804329872,
| 827 | +      "learning_rate": 0.0028130712858302066,
| 828 | +      "loss": 0.1821,
| 829 | +      "step": 2925
| 830 | +    },
| 831 | +    {
| 832 | +      "epoch": 2.36,
| 833 | +      "grad_norm": 0.026661023497581482,
| 834 | +      "learning_rate": 0.0026532217200065855,
| 835 | +      "loss": 0.1904,
| 836 | +      "step": 2950
| 837 | +    },
| 838 | +    {
| 839 | +      "epoch": 2.38,
| 840 | +      "grad_norm": 0.02208436280488968,
| 841 | +      "learning_rate": 0.0024973505642266625,
| 842 | +      "loss": 0.1821,
| 843 | +      "step": 2975
| 844 | +    },
| 845 | +    {
| 846 | +      "epoch": 2.4,
| 847 | +      "grad_norm": 0.022112136706709862,
| 848 | +      "learning_rate": 0.0023455422254710233,
| 849 | +      "loss": 0.1835,
| 850 | +      "step": 3000
| 851 | +    },
| 852 | +    {
| 853 | +      "epoch": 2.42,
| 854 | +      "grad_norm": 0.027875550091266632,
| 855 | +      "learning_rate": 0.0021978789106331664,
| 856 | +      "loss": 0.1859,
| 857 | +      "step": 3025
| 858 | +    },
| 859 | +    {
| 860 | +      "epoch": 2.44,
| 861 | +      "grad_norm": 0.021113188937306404,
| 862 | +      "learning_rate": 0.0020544405820030164,
| 863 | +      "loss": 0.1883,
| 864 | +      "step": 3050
| 865 | +    },
| 866 | +    {
| 867 | +      "epoch": 2.46,
| 868 | +      "grad_norm": 0.023992350324988365,
| 869 | +      "learning_rate": 0.0019153049139659384,
| 870 | +      "loss": 0.1877,
| 871 | +      "step": 3075
| 872 | +    },
| 873 | +    {
| 874 | +      "epoch": 2.48,
| 875 | +      "grad_norm": 0.024912724271416664,
| 876 | +      "learning_rate": 0.0017805472509406694,
| 877 | +      "loss": 0.182,
| 878 | +      "step": 3100
| 879 | +    },
| 880 | +    {
| 881 | +      "epoch": 2.5,
| 882 | +      "grad_norm": 0.025338461622595787,
| 883 | +      "learning_rate": 0.0016502405665790132,
| 884 | +      "loss": 0.1901,
| 885 | +      "step": 3125
| 886 | +    },
| 887 | +    {
| 888 | +      "epoch": 2.52,
| 889 | +      "grad_norm": 0.025324387475848198,
| 890 | +      "learning_rate": 0.0015244554242493024,
| 891 | +      "loss": 0.1816,
| 892 | +      "step": 3150
| 893 | +    },
| 894 | +    {
| 895 | +      "epoch": 2.54,
| 896 | +      "grad_norm": 0.025014016777276993,
| 897 | +      "learning_rate": 0.00140325993882509,
| 898 | +      "loss": 0.1859,
| 899 | +      "step": 3175
| 900 | +    },
| 901 | +    {
| 902 | +      "epoch": 2.56,
| 903 | +      "grad_norm": 0.020721474662423134,
| 904 | +      "learning_rate": 0.0012867197397997531,
| 905 | +      "loss": 0.1819,
| 906 | +      "step": 3200
| 907 | +    },
| 908 | +    {
| 909 | +      "epoch": 2.58,
| 910 | +      "grad_norm": 0.022899964824318886,
| 911 | +      "learning_rate": 0.0011748979357469624,
| 912 | +      "loss": 0.1777,
| 913 | +      "step": 3225
| 914 | +    },
| 915 | +    {
| 916 | +      "epoch": 2.6,
| 917 | +      "grad_norm": 0.025203322991728783,
| 918 | +      "learning_rate": 0.001067855080146266,
| 919 | +      "loss": 0.1796,
| 920 | +      "step": 3250
| 921 | +    },
| 922 | +    {
| 923 | +      "epoch": 2.62,
| 924 | +      "grad_norm": 0.02526050992310047,
| 925 | +      "learning_rate": 0.0009656491385923216,
| 926 | +      "loss": 0.1811,
| 927 | +      "step": 3275
| 928 | +    },
| 929 | +    {
| 930 | +      "epoch": 2.64,
| 931 | +      "grad_norm": 0.02366676740348339,
| 932 | +      "learning_rate": 0.0008683354574055058,
| 933 | +      "loss": 0.1805,
| 934 | +      "step": 3300
| 935 | +    },
| 936 | +    {
| 937 | +      "epoch": 2.66,
| 938 | +      "grad_norm": 0.02300945669412613,
| 939 | +      "learning_rate": 0.0007759667336609011,
| 940 | +      "loss": 0.1833,
| 941 | +      "step": 3325
| 942 | +    },
| 943 | +    {
| 944 | +      "epoch": 2.68,
| 945 | +      "grad_norm": 0.022977150976657867,
| 946 | +      "learning_rate": 0.0006885929866518948,
| 947 | +      "loss": 0.1852,
| 948 | +      "step": 3350
| 949 | +    },
| 950 | +    {
| 951 | +      "epoch": 2.7,
| 952 | +      "grad_norm": 0.021491436287760735,
| 953 | +      "learning_rate": 0.0006062615308038599,
| 954 | +      "loss": 0.1818,
| 955 | +      "step": 3375
| 956 | +    },
| 957 | +    {
| 958 | +      "epoch": 2.7199999999999998,
| 959 | +      "grad_norm": 0.028023768216371536,
| 960 | +      "learning_rate": 0.0005290169500525577,
| 961 | +      "loss": 0.1925,
| 962 | +      "step": 3400
| 963 | +    },
| 964 | +    {
| 965 | +      "epoch": 2.74,
| 966 | +      "grad_norm": 0.020721763372421265,
| 967 | +      "learning_rate": 0.0004569010737011381,
| 968 | +      "loss": 0.1822,
| 969 | +      "step": 3425
| 970 | +    },
| 971 | +    {
| 972 | +      "epoch": 2.76,
| 973 | +      "grad_norm": 0.021237611770629883,
| 974 | +      "learning_rate": 0.00038995295376884954,
| 975 | +      "loss": 0.1742,
| 976 | +      "step": 3450
| 977 | +    },
| 978 | +    {
| 979 | +      "epoch": 2.7800000000000002,
| 980 | +      "grad_norm": 0.027307681739330292,
| 981 | +      "learning_rate": 0.00032820884384367145,
| 982 | +      "loss": 0.1784,
| 983 | +      "step": 3475
| 984 | +    },
| 985 | +    {
| 986 | +      "epoch": 2.8,
| 987 | +      "grad_norm": 0.027936382219195366,
| 988 | +      "learning_rate": 0.0002717021794503616,
| 989 | +      "loss": 0.1798,
| 990 | +      "step": 3500
| 991 | +    },
| 992 | +    {
| 993 | +      "epoch": 2.82,
| 994 | +      "grad_norm": 0.022965189069509506,
| 995 | +      "learning_rate": 0.00022046355994452595,
| 996 | +      "loss": 0.1811,
| 997 | +      "step": 3525
| 998 | +    },
| 999 | +    {
| 1000 | +      "epoch": 2.84,
| 1001 | +      "grad_norm": 0.028826095163822174,
| 1002 | +      "learning_rate": 0.00017452073194253238,
| 1003 | +      "loss": 0.1772,
| 1004 | +      "step": 3550
| 1005 | +    },
| 1006 | +    {
| 1007 | +      "epoch": 2.86,
| 1008 | +      "grad_norm": 0.025759048759937286,
| 1009 | +      "learning_rate": 0.0001338985742962251,
| 1010 | +      "loss": 0.1745,
| 1011 | +      "step": 3575
| 1012 | +    },
| 1013 | +    {
| 1014 | +      "epoch": 2.88,
| 1015 | +      "grad_norm": 0.023553024977445602,
| 1016 | +      "learning_rate": 9.8619084620587e-05,
| 1017 | +      "loss": 0.1828,
| 1018 | +      "step": 3600
| 1019 | +    },
| 1020 | +    {
| 1021 | +      "epoch": 2.9,
| 1022 | +      "grad_norm": 0.026909027248620987,
| 1023 | +      "learning_rate": 6.870136738164612e-05,
| 1024 | +      "loss": 0.1831,
| 1025 | +      "step": 3625
| 1026 | +    },
| 1027 | +    {
| 1028 | +      "epoch": 2.92,
| 1029 | +      "grad_norm": 0.027830779552459717,
| 1030 | +      "learning_rate": 4.416162355107001e-05,
| 1031 | +      "loss": 0.1796,
| 1032 | +      "step": 3650
| 1033 | +    },
| 1034 | +    {
| 1035 | +      "epoch": 2.94,
| 1036 | +      "grad_norm": 0.02305370755493641,
| 1037 | +      "learning_rate": 2.5013141833046815e-05,
| 1038 | +      "loss": 0.1748,
| 1039 | +      "step": 3675
| 1040 | +    },
| 1041 | +    {
| 1042 | +      "epoch": 2.96,
| 1043 | +      "grad_norm": 0.02431248314678669,
| 1044 | +      "learning_rate": 1.1266291468229328e-05,
| 1045 | +      "loss": 0.1767,
| 1046 | +      "step": 3700
| 1047 | +    },
| 1048 | +    {
| 1049 | +      "epoch": 2.98,
| 1050 | +      "grad_norm": 0.023445066064596176,
| 1051 | +      "learning_rate": 2.928516618599941e-06,
| 1052 | +      "loss": 0.1756,
| 1053 | +      "step": 3725
| 1054 | +    },
| 1055 | +    {
| 1056 | +      "epoch": 3.0,
| 1057 | +      "grad_norm": 0.020654326304793358,
| 1058 | +      "learning_rate": 4.332336324130992e-09,
| 1059 | +      "loss": 0.1814,
| 1060 | +      "step": 3750
| 1061 | +    },
| 1062 | +    {
| 1063 | +      "epoch": 3.0,
| 1064 | +      "step": 3750,
| 1065 | +      "total_flos": 2.43882352705536e+18,
| 1066 | +      "train_loss": 0.23951186167399088,
| 1067 | +      "train_runtime": 3462.7911,
| 1068 | +      "train_samples_per_second": 34.654,
| 1069 | +      "train_steps_per_second": 1.083
| 1070 | +    }
| 1071 | +  ],
| 1072 | +  "logging_steps": 25,
| 1073 | +  "max_steps": 3750,
| 1074 | +  "num_input_tokens_seen": 0,
| 1075 | +  "num_train_epochs": 3,
| 1076 | +  "save_steps": 0,
| 1077 | +  "stateful_callbacks": {
| 1078 | +    "TrainerControl": {
| 1079 | +      "args": {
| 1080 | +        "should_epoch_stop": false,
| 1081 | +        "should_evaluate": false,
| 1082 | +        "should_log": false,
| 1083 | +        "should_save": true,
| 1084 | +        "should_training_stop": true
| 1085 | +      },
| 1086 | +      "attributes": {}
| 1087 | +    }
| 1088 | +  },
| 1089 | +  "total_flos": 2.43882352705536e+18,
| 1090 | +  "train_batch_size": 32,
| 1091 | +  "trial_name": null,
| 1092 | +  "trial_params": null
| 1093 | + }