Upload folder using huggingface_hub
- .gitattributes +1 -0
- .ipynb_checkpoints/README-checkpoint.md +69 -0
- README.md +69 -0
- all_results.json +9 -0
- chat_template.jinja +154 -0
- config.json +113 -0
- generation_config.json +10 -0
- model.safetensors +3 -0
- processor_config.json +60 -0
- tokenizer.json +3 -0
- tokenizer_config.json +33 -0
- train_results.json +9 -0
- trainer_log.jsonl +175 -0
- trainer_state.json +1261 -0
- training_args.bin +3 -0
- training_loss.png +0 -0
.gitattributes
CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
.ipynb_checkpoints/README-checkpoint.md
ADDED
@@ -0,0 +1,69 @@
(Jupyter checkpoint copy; contents identical to README.md below.)
README.md
ADDED
@@ -0,0 +1,69 @@
+---
+library_name: transformers
+license: other
+base_model: Qwen3.5-9B
+tags:
+- llama-factory
+- full
+- generated_from_trainer
+model-index:
+- name: qwen35_caption_galore
+  results: []
+---
+
+<!-- This model card has been generated automatically according to the information the Trainer had access to. You
+should probably proofread and complete it, then remove this comment. -->
+
+# qwen35_caption_galore
+
+This model is a fine-tuned version of [/workspace/models/Qwen3.5-9B](https://huggingface.co//workspace/models/Qwen3.5-9B) on the my_caption dataset.
+
+## Model description
+
+More information needed
+
+## Intended uses & limitations
+
+More information needed
+
+## Training and evaluation data
+
+More information needed
+
+## Training procedure
+
+### Training hyperparameters
+
+The following hyperparameters were used during training:
+- family_to_muon_lr = {
+      "language": _fallback(getattr(training_args, "language_muon_lr", 3e-5), language_lr),
+      "vision": _fallback(getattr(training_args, "vision_muon_lr", 3e-5), vision_lr),
+      "merger": _fallback(getattr(training_args, "merger_muon_lr", 5e-5), merger_lr),
+  }
+
+  family_to_adamw_lr = {
+      "language": _fallback(getattr(training_args, "language_adamw_lr", 3e-5), language_lr),
+      "vision": _fallback(getattr(training_args, "vision_adamw_lr", 3e-6), vision_lr),
+      "merger": _fallback(getattr(training_args, "merger_adamw_lr", 5e-5), merger_lr),
+  }
+- train_batch_size: 3
+- eval_batch_size: 8
+- seed: 42
+- distributed_type: multi-GPU
+- gradient_accumulation_steps: 40
+- total_train_batch_size: 120
+- optimizer: Use OptimizerNames.ADAMW_TORCH_FUSED with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
+- lr_scheduler_type: cosine_with_min_lr
+- lr_scheduler_warmup_steps: 0.05
+- num_epochs: 3
+
+### Training results
+
+
+
+### Framework versions
+
+- Transformers 5.5.3
+- Pytorch 2.11.0+cu130
+- Datasets 4.0.0
+- Tokenizers 0.22.2
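The hyperparameter list in the model card pastes a raw fragment from the training script, splitting the Muon and AdamW learning rates per parameter family (language, vision, merger). The `_fallback` helper itself is not part of this commit, so the following is a hypothetical reconstruction of what it presumably does:

```python
# Hypothetical reconstruction -- _fallback is not included in this commit.
# Assumption: return the per-family override when it is set, otherwise fall
# back to the shared learning rate for that parameter family.
def _fallback(value, fallback):
    return value if value is not None else fallback

print(_fallback(3e-6, 3e-5))   # per-family override wins -> 3e-06
print(_fallback(None, 3e-5))   # unset override falls back -> 3e-05
```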
all_results.json
ADDED
@@ -0,0 +1,9 @@
+{
+    "effective_tokens_per_sec": 7728.355210837788,
+    "epoch": 3.0,
+    "total_flos": 2.7756656592022405e+18,
+    "train_loss": 1.0638441426315526,
+    "train_runtime": 6969.7468,
+    "train_samples_per_second": 5.992,
+    "train_steps_per_second": 0.05
+}
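The throughput numbers above are internally consistent, which a quick back-of-envelope check confirms (plain arithmetic on the reported values, nothing model-specific):

```python
# Sanity-check the reported rates against the runtime.
train_runtime = 6969.7468            # seconds
print(round(train_runtime * 0.05))   # ~348 optimizer steps (matches total_steps in trainer_log.jsonl)
print(round(train_runtime * 5.992))  # ~41.8k samples, close to 348 steps * total_train_batch_size 120 = 41,760
print(round(train_runtime * 7728.355210837788))  # ~53.9M effective tokens over 3 epochs
```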
chat_template.jinja
ADDED
@@ -0,0 +1,154 @@
+{%- set image_count = namespace(value=0) %}
+{%- set video_count = namespace(value=0) %}
+{%- macro render_content(content, do_vision_count, is_system_content=false) %}
+{%- if content is string %}
+{{- content }}
+{%- elif content is iterable and content is not mapping %}
+{%- for item in content %}
+{%- if 'image' in item or 'image_url' in item or item.type == 'image' %}
+{%- if is_system_content %}
+{{- raise_exception('System message cannot contain images.') }}
+{%- endif %}
+{%- if do_vision_count %}
+{%- set image_count.value = image_count.value + 1 %}
+{%- endif %}
+{%- if add_vision_id %}
+{{- 'Picture ' ~ image_count.value ~ ': ' }}
+{%- endif %}
+{{- '<|vision_start|><|image_pad|><|vision_end|>' }}
+{%- elif 'video' in item or item.type == 'video' %}
+{%- if is_system_content %}
+{{- raise_exception('System message cannot contain videos.') }}
+{%- endif %}
+{%- if do_vision_count %}
+{%- set video_count.value = video_count.value + 1 %}
+{%- endif %}
+{%- if add_vision_id %}
+{{- 'Video ' ~ video_count.value ~ ': ' }}
+{%- endif %}
+{{- '<|vision_start|><|video_pad|><|vision_end|>' }}
+{%- elif 'text' in item %}
+{{- item.text }}
+{%- else %}
+{{- raise_exception('Unexpected item type in content.') }}
+{%- endif %}
+{%- endfor %}
+{%- elif content is none or content is undefined %}
+{{- '' }}
+{%- else %}
+{{- raise_exception('Unexpected content type.') }}
+{%- endif %}
+{%- endmacro %}
+{%- if not messages %}
+{{- raise_exception('No messages provided.') }}
+{%- endif %}
+{%- if tools and tools is iterable and tools is not mapping %}
+{{- '<|im_start|>system\n' }}
+{{- "# Tools\n\nYou have access to the following functions:\n\n<tools>" }}
+{%- for tool in tools %}
+{{- "\n" }}
+{{- tool | tojson }}
+{%- endfor %}
+{{- "\n</tools>" }}
+{{- '\n\nIf you choose to call a function ONLY reply in the following format with NO suffix:\n\n<tool_call>\n<function=example_function_name>\n<parameter=example_parameter_1>\nvalue_1\n</parameter>\n<parameter=example_parameter_2>\nThis is the value for the second parameter\nthat can span\nmultiple lines\n</parameter>\n</function>\n</tool_call>\n\n<IMPORTANT>\nReminder:\n- Function calls MUST follow the specified format: an inner <function=...></function> block must be nested within <tool_call></tool_call> XML tags\n- Required parameters MUST be specified\n- You may provide optional reasoning for your function call in natural language BEFORE the function call, but NOT after\n- If there is no function call available, answer the question like normal with your current knowledge and do not tell the user about function calls\n</IMPORTANT>' }}
+{%- if messages[0].role == 'system' %}
+{%- set content = render_content(messages[0].content, false, true)|trim %}
+{%- if content %}
+{{- '\n\n' + content }}
+{%- endif %}
+{%- endif %}
+{{- '<|im_end|>\n' }}
+{%- else %}
+{%- if messages[0].role == 'system' %}
+{%- set content = render_content(messages[0].content, false, true)|trim %}
+{{- '<|im_start|>system\n' + content + '<|im_end|>\n' }}
+{%- endif %}
+{%- endif %}
+{%- set ns = namespace(multi_step_tool=true, last_query_index=messages|length - 1) %}
+{%- for message in messages[::-1] %}
+{%- set index = (messages|length - 1) - loop.index0 %}
+{%- if ns.multi_step_tool and message.role == "user" %}
+{%- set content = render_content(message.content, false)|trim %}
+{%- if not(content.startswith('<tool_response>') and content.endswith('</tool_response>')) %}
+{%- set ns.multi_step_tool = false %}
+{%- set ns.last_query_index = index %}
+{%- endif %}
+{%- endif %}
+{%- endfor %}
+{%- if ns.multi_step_tool %}
+{{- raise_exception('No user query found in messages.') }}
+{%- endif %}
+{%- for message in messages %}
+{%- set content = render_content(message.content, true)|trim %}
+{%- if message.role == "system" %}
+{%- if not loop.first %}
+{{- raise_exception('System message must be at the beginning.') }}
+{%- endif %}
+{%- elif message.role == "user" %}
+{{- '<|im_start|>' + message.role + '\n' + content + '<|im_end|>' + '\n' }}
+{%- elif message.role == "assistant" %}
+{%- set reasoning_content = '' %}
+{%- if message.reasoning_content is string %}
+{%- set reasoning_content = message.reasoning_content %}
+{%- else %}
+{%- if '</think>' in content %}
+{%- set reasoning_content = content.split('</think>')[0].rstrip('\n').split('<think>')[-1].lstrip('\n') %}
+{%- set content = content.split('</think>')[-1].lstrip('\n') %}
+{%- endif %}
+{%- endif %}
+{%- set reasoning_content = reasoning_content|trim %}
+{%- if loop.index0 > ns.last_query_index %}
+{{- '<|im_start|>' + message.role + '\n<think>\n' + reasoning_content + '\n</think>\n\n' + content }}
+{%- else %}
+{{- '<|im_start|>' + message.role + '\n' + content }}
+{%- endif %}
+{%- if message.tool_calls and message.tool_calls is iterable and message.tool_calls is not mapping %}
+{%- for tool_call in message.tool_calls %}
+{%- if tool_call.function is defined %}
+{%- set tool_call = tool_call.function %}
+{%- endif %}
+{%- if loop.first %}
+{%- if content|trim %}
+{{- '\n\n<tool_call>\n<function=' + tool_call.name + '>\n' }}
+{%- else %}
+{{- '<tool_call>\n<function=' + tool_call.name + '>\n' }}
+{%- endif %}
+{%- else %}
+{{- '\n<tool_call>\n<function=' + tool_call.name + '>\n' }}
+{%- endif %}
+{%- if tool_call.arguments is defined %}
+{%- for args_name, args_value in tool_call.arguments|items %}
+{{- '<parameter=' + args_name + '>\n' }}
+{%- set args_value = args_value | tojson | safe if args_value is mapping or (args_value is sequence and args_value is not string) else args_value | string %}
+{{- args_value }}
+{{- '\n</parameter>\n' }}
+{%- endfor %}
+{%- endif %}
+{{- '</function>\n</tool_call>' }}
+{%- endfor %}
+{%- endif %}
+{{- '<|im_end|>\n' }}
+{%- elif message.role == "tool" %}
+{%- if loop.previtem and loop.previtem.role != "tool" %}
+{{- '<|im_start|>user' }}
+{%- endif %}
+{{- '\n<tool_response>\n' }}
+{{- content }}
+{{- '\n</tool_response>' }}
+{%- if not loop.last and loop.nextitem.role != "tool" %}
+{{- '<|im_end|>\n' }}
+{%- elif loop.last %}
+{{- '<|im_end|>\n' }}
+{%- endif %}
+{%- else %}
+{{- raise_exception('Unexpected message role.') }}
+{%- endif %}
+{%- endfor %}
+{%- if add_generation_prompt %}
+{{- '<|im_start|>assistant\n' }}
+{%- if enable_thinking is defined and enable_thinking is false %}
+{{- '<think>\n\n</think>\n\n' }}
+{%- else %}
+{{- '<think>\n' }}
+{%- endif %}
+{%- endif %}
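The template is the usual ChatML-style Qwen format: vision placeholders (`<|vision_start|><|image_pad|><|vision_end|>`), an XML-ish `<function=...>` tool-calling protocol, and a `<think>` block opened automatically when a generation prompt is added. A minimal sketch of rendering it through the standard `apply_chat_template` API (the model path is a placeholder):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("path/to/qwen35_caption_galore")  # placeholder path

messages = [
    {"role": "user", "content": [
        {"type": "image"},  # rendered as <|vision_start|><|image_pad|><|vision_end|>
        {"type": "text", "text": "Describe this image."},
    ]},
]

# tokenize=False returns the rendered string; add_generation_prompt appends
# '<|im_start|>assistant\n<think>\n' per the template's final branch.
prompt = tok.apply_chat_template(messages, add_generation_prompt=True, tokenize=False)
print(prompt)
```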
config.json
ADDED
@@ -0,0 +1,113 @@
+{
+  "architectures": [
+    "Qwen3_5ForConditionalGeneration"
+  ],
+  "dtype": "bfloat16",
+  "eos_token_id": 248046,
+  "hidden_size": 4096,
+  "image_token_id": 248056,
+  "model_type": "qwen3_5",
+  "pad_token_id": 248044,
+  "text_config": {
+    "attention_bias": false,
+    "attention_dropout": 0.0,
+    "attn_output_gate": true,
+    "bos_token_id": null,
+    "dtype": "bfloat16",
+    "eos_token_id": 248044,
+    "full_attention_interval": 4,
+    "head_dim": 256,
+    "hidden_act": "silu",
+    "hidden_size": 4096,
+    "initializer_range": 0.02,
+    "intermediate_size": 12288,
+    "layer_types": [
+      "linear_attention",
+      "linear_attention",
+      "linear_attention",
+      "full_attention",
+      "linear_attention",
+      "linear_attention",
+      "linear_attention",
+      "full_attention",
+      "linear_attention",
+      "linear_attention",
+      "linear_attention",
+      "full_attention",
+      "linear_attention",
+      "linear_attention",
+      "linear_attention",
+      "full_attention",
+      "linear_attention",
+      "linear_attention",
+      "linear_attention",
+      "full_attention",
+      "linear_attention",
+      "linear_attention",
+      "linear_attention",
+      "full_attention",
+      "linear_attention",
+      "linear_attention",
+      "linear_attention",
+      "full_attention",
+      "linear_attention",
+      "linear_attention",
+      "linear_attention",
+      "full_attention"
+    ],
+    "linear_conv_kernel_dim": 4,
+    "linear_key_head_dim": 128,
+    "linear_num_key_heads": 16,
+    "linear_num_value_heads": 32,
+    "linear_value_head_dim": 128,
+    "mamba_ssm_dtype": "float32",
+    "max_position_embeddings": 262144,
+    "mlp_only_layers": [],
+    "model_type": "qwen3_5_text",
+    "mtp_num_hidden_layers": 1,
+    "mtp_use_dedicated_embeddings": false,
+    "num_attention_heads": 16,
+    "num_hidden_layers": 32,
+    "num_key_value_heads": 4,
+    "pad_token_id": null,
+    "partial_rotary_factor": 0.25,
+    "rms_norm_eps": 1e-06,
+    "rope_parameters": {
+      "mrope_interleaved": true,
+      "mrope_section": [
+        11,
+        11,
+        10
+      ],
+      "partial_rotary_factor": 0.25,
+      "rope_theta": 10000000,
+      "rope_type": "default"
+    },
+    "tie_word_embeddings": false,
+    "use_cache": false,
+    "vocab_size": 248320
+  },
+  "tie_word_embeddings": false,
+  "transformers_version": "5.5.3",
+  "use_cache": false,
+  "video_token_id": 248057,
+  "vision_config": {
+    "deepstack_visual_indexes": [],
+    "depth": 27,
+    "dtype": "bfloat16",
+    "hidden_act": "gelu_pytorch_tanh",
+    "hidden_size": 1152,
+    "in_channels": 3,
+    "initializer_range": 0.02,
+    "intermediate_size": 4304,
+    "model_type": "qwen3_5",
+    "num_heads": 16,
+    "num_position_embeddings": 2304,
+    "out_hidden_size": 4096,
+    "patch_size": 16,
+    "spatial_merge_size": 2,
+    "temporal_patch_size": 2
+  },
+  "vision_end_token_id": 248054,
+  "vision_start_token_id": 248053
+}
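In `text_config`, the 32-entry `layer_types` list is just the expansion of `full_attention_interval: 4`: every fourth layer uses full attention and the rest use linear attention. A one-liner reproducing the pattern (a sketch of the relationship, not code from this repo):

```python
# Expand full_attention_interval into the layer_types list shown above.
interval, n_layers = 4, 32
layer_types = [
    "full_attention" if (i + 1) % interval == 0 else "linear_attention"
    for i in range(n_layers)
]
assert layer_types.count("full_attention") == 8  # layers 4, 8, ..., 32
```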
generation_config.json
ADDED
@@ -0,0 +1,10 @@
+{
+  "_from_model_config": true,
+  "eos_token_id": [
+    248046,
+    248044
+  ],
+  "pad_token_id": 248044,
+  "transformers_version": "5.5.3",
+  "use_cache": true
+}
model.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7039309823637bff46982be586f10006c36d61a47dbdce733fcda9a97b886cc1
+size 20859273368
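The LFS pointer reports the checkpoint at 20,859,273,368 bytes (~19.4 GiB). At 2 bytes per bfloat16 parameter that implies roughly 10.4B parameters, plausible for a 9B-class text model plus the vision tower and MTP head, though the exact split is not recoverable from the pointer alone:

```python
# Rough parameter count implied by the shard size (bf16 = 2 bytes/param).
size_bytes = 20_859_273_368
print(size_bytes / 2 / 1e9)  # ~10.43 billion parameters, approximate
```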
processor_config.json
ADDED
@@ -0,0 +1,60 @@
+{
+  "image_processor": {
+    "do_convert_rgb": true,
+    "do_normalize": true,
+    "do_rescale": true,
+    "do_resize": true,
+    "image_mean": [
+      0.5,
+      0.5,
+      0.5
+    ],
+    "image_processor_type": "Qwen2VLImageProcessor",
+    "image_std": [
+      0.5,
+      0.5,
+      0.5
+    ],
+    "merge_size": 2,
+    "patch_size": 16,
+    "resample": 3,
+    "rescale_factor": 0.00392156862745098,
+    "size": {
+      "longest_edge": 16777216,
+      "shortest_edge": 65536
+    },
+    "temporal_patch_size": 2
+  },
+  "processor_class": "Qwen3VLProcessor",
+  "video_processor": {
+    "do_convert_rgb": true,
+    "do_normalize": true,
+    "do_rescale": true,
+    "do_resize": true,
+    "do_sample_frames": true,
+    "fps": 2,
+    "image_mean": [
+      0.5,
+      0.5,
+      0.5
+    ],
+    "image_std": [
+      0.5,
+      0.5,
+      0.5
+    ],
+    "max_frames": 768,
+    "merge_size": 2,
+    "min_frames": 4,
+    "patch_size": 16,
+    "resample": 3,
+    "rescale_factor": 0.00392156862745098,
+    "return_metadata": false,
+    "size": {
+      "longest_edge": 25165824,
+      "shortest_edge": 4096
+    },
+    "temporal_patch_size": 2,
+    "video_processor_type": "Qwen3VLVideoProcessor"
+  }
+}
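Following the Qwen2-VL convention (an assumption here, since the processing code is not part of this commit), `shortest_edge`/`longest_edge` bound the total pixel area (65536 = 256², 16777216 = 4096²), and each image token covers a `patch_size * merge_size` square of pixels after the 2×2 spatial merge:

```python
# Sketch of the image token budget implied by patch_size=16, merge_size=2
# (assumed interpretation of the image_processor fields above).
patch_size, merge_size = 16, 2

def image_tokens(height: int, width: int) -> int:
    # 16x16 patches, then a 2x2 spatial merge -> one token per 32x32 pixel block.
    return (height // patch_size) * (width // patch_size) // (merge_size ** 2)

print(image_tokens(1024, 1024))  # -> 1024 tokens for a 1024x1024 image
```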
tokenizer.json
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:87a7830d63fcf43bf241c3c5242e96e62dd3fdc29224ca26fed8ea333db72de4
+size 19989343
tokenizer_config.json
ADDED
@@ -0,0 +1,33 @@
+{
+  "add_prefix_space": false,
+  "audio_bos_token": "<|audio_start|>",
+  "audio_eos_token": "<|audio_end|>",
+  "audio_token": "<|audio_pad|>",
+  "backend": "tokenizers",
+  "bos_token": null,
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "<|im_end|>",
+  "errors": "replace",
+  "image_token": "<|image_pad|>",
+  "is_local": true,
+  "model_max_length": 262144,
+  "model_specific_special_tokens": {
+    "audio_bos_token": "<|audio_start|>",
+    "audio_eos_token": "<|audio_end|>",
+    "audio_token": "<|audio_pad|>",
+    "image_token": "<|image_pad|>",
+    "video_token": "<|video_pad|>",
+    "vision_bos_token": "<|vision_start|>",
+    "vision_eos_token": "<|vision_end|>"
+  },
+  "pad_token": "<|endoftext|>",
+  "padding_side": "right",
+  "pretokenize_regex": "(?i:'s|'t|'re|'ve|'m|'ll|'d)|[^\\r\\n\\p{L}\\p{N}]?[\\p{L}\\p{M}]+|\\p{N}| ?[^\\s\\p{L}\\p{M}\\p{N}]+[\\r\\n]*|\\s*[\\r\\n]+|\\s+(?!\\S)|\\s+",
+  "processor_class": "Qwen3VLProcessor",
+  "split_special_tokens": false,
+  "tokenizer_class": "TokenizersBackend",
+  "unk_token": null,
+  "video_token": "<|video_pad|>",
+  "vision_bos_token": "<|vision_start|>",
+  "vision_eos_token": "<|vision_end|>"
+}
train_results.json
ADDED
@@ -0,0 +1,9 @@
+{
+    "effective_tokens_per_sec": 7728.355210837788,
+    "epoch": 3.0,
+    "total_flos": 2.7756656592022405e+18,
+    "train_loss": 1.0638441426315526,
+    "train_runtime": 6969.7468,
+    "train_samples_per_second": 5.992,
+    "train_steps_per_second": 0.05
+}
trainer_log.jsonl
ADDED
@@ -0,0 +1,175 @@
+{"current_steps": 2, "total_steps": 348, "loss": 2.133296012878418, "lr": 1.6666666666666667e-06, "epoch": 0.017241379310344827, "percentage": 0.57, "elapsed_time": "0:01:09", "remaining_time": "3:20:34"}
+{"current_steps": 4, "total_steps": 348, "loss": 1.8569927215576172, "lr": 4.9999999999999996e-06, "epoch": 0.034482758620689655, "percentage": 1.15, "elapsed_time": "0:01:59", "remaining_time": "2:51:25"}
+{"current_steps": 6, "total_steps": 348, "loss": 1.9297298192977905, "lr": 8.333333333333334e-06, "epoch": 0.05172413793103448, "percentage": 1.72, "elapsed_time": "0:02:47", "remaining_time": "2:39:33"}
+{"current_steps": 8, "total_steps": 348, "loss": 4.868577480316162, "lr": 1.1666666666666668e-05, "epoch": 0.06896551724137931, "percentage": 2.3, "elapsed_time": "0:03:23", "remaining_time": "2:23:55"}
+{"current_steps": 10, "total_steps": 348, "loss": 2.009166955947876, "lr": 1.5e-05, "epoch": 0.08620689655172414, "percentage": 2.87, "elapsed_time": "0:04:10", "remaining_time": "2:21:04"}
+{"current_steps": 12, "total_steps": 348, "loss": 2.0880112648010254, "lr": 1.8333333333333336e-05, "epoch": 0.10344827586206896, "percentage": 3.45, "elapsed_time": "0:04:44", "remaining_time": "2:12:52"}
+{"current_steps": 14, "total_steps": 348, "loss": 2.3606438636779785, "lr": 2.1666666666666667e-05, "epoch": 0.1206896551724138, "percentage": 4.02, "elapsed_time": "0:05:17", "remaining_time": "2:06:16"}
+{"current_steps": 16, "total_steps": 348, "loss": 1.9572478532791138, "lr": 2.5e-05, "epoch": 0.13793103448275862, "percentage": 4.6, "elapsed_time": "0:06:04", "remaining_time": "2:06:10"}
+{"current_steps": 18, "total_steps": 348, "loss": 1.6358482837677002, "lr": 2.8333333333333332e-05, "epoch": 0.15517241379310345, "percentage": 5.17, "elapsed_time": "0:06:42", "remaining_time": "2:02:50"}
+{"current_steps": 20, "total_steps": 348, "loss": 1.5971399545669556, "lr": 2.999373614050377e-05, "epoch": 0.1724137931034483, "percentage": 5.75, "elapsed_time": "0:07:27", "remaining_time": "2:02:22"}
+{"current_steps": 22, "total_steps": 348, "loss": 1.9199273586273193, "lr": 2.9943660135527193e-05, "epoch": 0.1896551724137931, "percentage": 6.32, "elapsed_time": "0:08:01", "remaining_time": "1:58:57"}
+{"current_steps": 24, "total_steps": 348, "loss": 1.5466594696044922, "lr": 2.984369396041239e-05, "epoch": 0.20689655172413793, "percentage": 6.9, "elapsed_time": "0:08:50", "remaining_time": "1:59:18"}
+{"current_steps": 26, "total_steps": 348, "loss": 1.039365530014038, "lr": 2.9694208595192654e-05, "epoch": 0.22413793103448276, "percentage": 7.47, "elapsed_time": "0:09:19", "remaining_time": "1:55:26"}
+{"current_steps": 28, "total_steps": 348, "loss": 1.457193374633789, "lr": 2.949575878836866e-05, "epoch": 0.2413793103448276, "percentage": 8.05, "elapsed_time": "0:10:00", "remaining_time": "1:54:17"}
+{"current_steps": 30, "total_steps": 348, "loss": 1.247235894203186, "lr": 2.9249080998205997e-05, "epoch": 0.25862068965517243, "percentage": 8.62, "elapsed_time": "0:10:36", "remaining_time": "1:52:22"}
+{"current_steps": 32, "total_steps": 348, "loss": 1.132712721824646, "lr": 2.895509065969754e-05, "epoch": 0.27586206896551724, "percentage": 9.2, "elapsed_time": "0:11:21", "remaining_time": "1:52:12"}
+{"current_steps": 34, "total_steps": 348, "loss": 1.1004750728607178, "lr": 2.861487878733334e-05, "epoch": 0.29310344827586204, "percentage": 9.77, "elapsed_time": "0:11:57", "remaining_time": "1:50:21"}
+{"current_steps": 36, "total_steps": 348, "loss": 1.2603535652160645, "lr": 2.8229707926285193e-05, "epoch": 0.3103448275862069, "percentage": 10.34, "elapsed_time": "0:12:30", "remaining_time": "1:48:24"}
+{"current_steps": 38, "total_steps": 348, "loss": 1.564131259918213, "lr": 2.7801007467031303e-05, "epoch": 0.3275862068965517, "percentage": 10.92, "elapsed_time": "0:13:17", "remaining_time": "1:48:22"}
+{"current_steps": 40, "total_steps": 348, "loss": 1.3439611196517944, "lr": 2.7330368340808766e-05, "epoch": 0.3448275862068966, "percentage": 11.49, "elapsed_time": "0:14:09", "remaining_time": "1:49:03"}
+{"current_steps": 42, "total_steps": 348, "loss": 1.653893232345581, "lr": 2.68195371155793e-05, "epoch": 0.3620689655172414, "percentage": 12.07, "elapsed_time": "0:14:50", "remaining_time": "1:48:06"}
+{"current_steps": 44, "total_steps": 348, "loss": 1.345064401626587, "lr": 2.6270409514418447e-05, "epoch": 0.3793103448275862, "percentage": 12.64, "elapsed_time": "0:15:37", "remaining_time": "1:47:58"}
+{"current_steps": 46, "total_steps": 348, "loss": 1.3374089002609253, "lr": 2.568502338038184e-05, "epoch": 0.39655172413793105, "percentage": 13.22, "elapsed_time": "0:16:26", "remaining_time": "1:47:56"}
+{"current_steps": 48, "total_steps": 348, "loss": 1.3249835968017578, "lr": 2.5065551113956345e-05, "epoch": 0.41379310344827586, "percentage": 13.79, "elapsed_time": "0:17:09", "remaining_time": "1:47:13"}
+{"current_steps": 50, "total_steps": 348, "loss": 1.143474817276001, "lr": 2.441429161116107e-05, "epoch": 0.43103448275862066, "percentage": 14.37, "elapsed_time": "0:17:55", "remaining_time": "1:46:52"}
+{"current_steps": 52, "total_steps": 348, "loss": 0.91184401512146, "lr": 2.3733661732216455e-05, "epoch": 0.4482758620689655, "percentage": 14.94, "elapsed_time": "0:18:27", "remaining_time": "1:45:06"}
+{"current_steps": 54, "total_steps": 348, "loss": 1.1794605255126953, "lr": 2.3026187332441612e-05, "epoch": 0.46551724137931033, "percentage": 15.52, "elapsed_time": "0:19:01", "remaining_time": "1:43:34"}
+{"current_steps": 56, "total_steps": 348, "loss": 1.2965553998947144, "lr": 2.2294493888664832e-05, "epoch": 0.4827586206896552, "percentage": 16.09, "elapsed_time": "0:19:49", "remaining_time": "1:43:22"}
+{"current_steps": 58, "total_steps": 348, "loss": 1.4213244915008545, "lr": 2.1541296755933106e-05, "epoch": 0.5, "percentage": 16.67, "elapsed_time": "0:20:37", "remaining_time": "1:43:08"}
+{"current_steps": 60, "total_steps": 348, "loss": 1.25790274143219, "lr": 2.0769391090678594e-05, "epoch": 0.5172413793103449, "percentage": 17.24, "elapsed_time": "0:21:17", "remaining_time": "1:42:10"}
+{"current_steps": 62, "total_steps": 348, "loss": 1.2669916152954102, "lr": 1.998164147773771e-05, "epoch": 0.5344827586206896, "percentage": 17.82, "elapsed_time": "0:22:01", "remaining_time": "1:41:37"}
+{"current_steps": 64, "total_steps": 348, "loss": 1.0608755350112915, "lr": 1.9180971299717562e-05, "epoch": 0.5517241379310345, "percentage": 18.39, "elapsed_time": "0:22:29", "remaining_time": "1:39:49"}
+{"current_steps": 66, "total_steps": 348, "loss": 1.2603998184204102, "lr": 1.8370351888160553e-05, "epoch": 0.5689655172413793, "percentage": 18.97, "elapsed_time": "0:23:11", "remaining_time": "1:39:06"}
+{"current_steps": 68, "total_steps": 348, "loss": 0.9561605453491211, "lr": 1.755279149676784e-05, "epoch": 0.5862068965517241, "percentage": 19.54, "elapsed_time": "0:23:46", "remaining_time": "1:37:52"}
+{"current_steps": 70, "total_steps": 348, "loss": 1.0841741561889648, "lr": 1.6731324137602643e-05, "epoch": 0.603448275862069, "percentage": 20.11, "elapsed_time": "0:24:22", "remaining_time": "1:36:47"}
+{"current_steps": 72, "total_steps": 348, "loss": 0.8117428421974182, "lr": 1.590899832170276e-05, "epoch": 0.6206896551724138, "percentage": 20.69, "elapsed_time": "0:24:57", "remaining_time": "1:35:40"}
+{"current_steps": 74, "total_steps": 348, "loss": 0.8893211483955383, "lr": 1.5088865745886681e-05, "epoch": 0.6379310344827587, "percentage": 21.26, "elapsed_time": "0:25:25", "remaining_time": "1:34:08"}
+{"current_steps": 76, "total_steps": 348, "loss": 1.256456732749939, "lr": 1.4273969967736997e-05, "epoch": 0.6551724137931034, "percentage": 21.84, "elapsed_time": "0:26:15", "remaining_time": "1:33:58"}
+{"current_steps": 78, "total_steps": 348, "loss": 1.0219424962997437, "lr": 1.3467335110789045e-05, "epoch": 0.6724137931034483, "percentage": 22.41, "elapsed_time": "0:27:01", "remaining_time": "1:33:32"}
+{"current_steps": 80, "total_steps": 348, "loss": 1.3244811296463013, "lr": 1.267195464184032e-05, "epoch": 0.6896551724137931, "percentage": 22.99, "elapsed_time": "0:27:48", "remaining_time": "1:33:07"}
+{"current_steps": 82, "total_steps": 348, "loss": 1.2907791137695312, "lr": 1.1890780262028785e-05, "epoch": 0.7068965517241379, "percentage": 23.56, "elapsed_time": "0:28:30", "remaining_time": "1:32:27"}
+{"current_steps": 84, "total_steps": 348, "loss": 1.0599552392959595, "lr": 1.1126710952905864e-05, "epoch": 0.7241379310344828, "percentage": 24.14, "elapsed_time": "0:29:17", "remaining_time": "1:32:02"}
+{"current_steps": 86, "total_steps": 348, "loss": 0.931882381439209, "lr": 1.0382582218154716e-05, "epoch": 0.7413793103448276, "percentage": 24.71, "elapsed_time": "0:29:45", "remaining_time": "1:30:39"}
+{"current_steps": 88, "total_steps": 348, "loss": 0.7623745203018188, "lr": 9.661155560878463e-06, "epoch": 0.7586206896551724, "percentage": 25.29, "elapsed_time": "0:30:14", "remaining_time": "1:29:20"}
+{"current_steps": 90, "total_steps": 348, "loss": 0.8441999554634094, "lr": 8.965108235508627e-06, "epoch": 0.7758620689655172, "percentage": 25.86, "elapsed_time": "0:30:39", "remaining_time": "1:27:53"}
+{"current_steps": 92, "total_steps": 348, "loss": 0.8531256318092346, "lr": 8.29702331236512e-06, "epoch": 0.7931034482758621, "percentage": 26.44, "elapsed_time": "0:31:10", "remaining_time": "1:26:43"}
+{"current_steps": 94, "total_steps": 348, "loss": 0.8273341655731201, "lr": 7.659380091738652e-06, "epoch": 0.8103448275862069, "percentage": 27.01, "elapsed_time": "0:31:44", "remaining_time": "1:25:46"}
+{"current_steps": 96, "total_steps": 348, "loss": 0.4431428015232086, "lr": 7.054544903069566e-06, "epoch": 0.8275862068965517, "percentage": 27.59, "elapsed_time": "0:32:13", "remaining_time": "1:24:35"}
+{"current_steps": 98, "total_steps": 348, "loss": 1.2755130529403687, "lr": 6.484762323367704e-06, "epoch": 0.8448275862068966, "percentage": 28.16, "elapsed_time": "0:33:02", "remaining_time": "1:24:18"}
+{"current_steps": 100, "total_steps": 348, "loss": 1.144418716430664, "lr": 5.9521468474623304e-06, "epoch": 0.8620689655172413, "percentage": 28.74, "elapsed_time": "0:33:30", "remaining_time": "1:23:05"}
+{"current_steps": 102, "total_steps": 348, "loss": 1.2321348190307617, "lr": 5.458675040994306e-06, "epoch": 0.8793103448275862, "percentage": 29.31, "elapsed_time": "0:34:18", "remaining_time": "1:22:44"}
+{"current_steps": 104, "total_steps": 348, "loss": 0.9103032946586609, "lr": 5.006178205271082e-06, "epoch": 0.896551724137931, "percentage": 29.89, "elapsed_time": "0:34:53", "remaining_time": "1:21:51"}
+{"current_steps": 106, "total_steps": 348, "loss": 1.2454521656036377, "lr": 4.596335581205745e-06, "epoch": 0.9137931034482759, "percentage": 30.46, "elapsed_time": "0:35:35", "remaining_time": "1:21:16"}
+{"current_steps": 108, "total_steps": 348, "loss": 1.1100295782089233, "lr": 4.230668117560634e-06, "epoch": 0.9310344827586207, "percentage": 31.03, "elapsed_time": "0:36:05", "remaining_time": "1:20:11"}
+{"current_steps": 110, "total_steps": 348, "loss": 1.0047061443328857, "lr": 3.910532826621947e-06, "epoch": 0.9482758620689655, "percentage": 31.61, "elapsed_time": "0:36:41", "remaining_time": "1:19:23"}
+{"current_steps": 112, "total_steps": 348, "loss": 0.7593088150024414, "lr": 3.6371177482516813e-06, "epoch": 0.9655172413793104, "percentage": 32.18, "elapsed_time": "0:37:18", "remaining_time": "1:18:36"}
+{"current_steps": 114, "total_steps": 348, "loss": 0.8851341605186462, "lr": 3.411437541005667e-06, "epoch": 0.9827586206896551, "percentage": 32.76, "elapsed_time": "0:37:43", "remaining_time": "1:17:26"}
+{"current_steps": 116, "total_steps": 348, "loss": 1.245699405670166, "lr": 3.234329716679273e-06, "epoch": 1.0, "percentage": 33.33, "elapsed_time": "0:38:31", "remaining_time": "1:17:03"}
+{"current_steps": 118, "total_steps": 348, "loss": 1.221221923828125, "lr": 3.1064515322545505e-06, "epoch": 1.0172413793103448, "percentage": 33.91, "elapsed_time": "0:39:35", "remaining_time": "1:17:10"}
+{"current_steps": 120, "total_steps": 348, "loss": 0.7024672627449036, "lr": 3.0282775507830497e-06, "epoch": 1.0344827586206897, "percentage": 34.48, "elapsed_time": "0:40:00", "remaining_time": "1:16:00"}
+{"current_steps": 122, "total_steps": 348, "loss": 1.2402740716934204, "lr": 3.000097880255948e-06, "epoch": 1.0517241379310345, "percentage": 35.06, "elapsed_time": "0:40:49", "remaining_time": "1:15:37"}
+{"current_steps": 124, "total_steps": 348, "loss": 1.1297236680984497, "lr": 3.0220170969971693e-06, "epoch": 1.0689655172413792, "percentage": 35.63, "elapsed_time": "0:41:33", "remaining_time": "1:15:05"}
+{"current_steps": 126, "total_steps": 348, "loss": 1.0468320846557617, "lr": 3.093953857574826e-06, "epoch": 1.0862068965517242, "percentage": 36.21, "elapsed_time": "0:42:14", "remaining_time": "1:14:25"}
+{"current_steps": 128, "total_steps": 348, "loss": 1.190972089767456, "lr": 3.2156412006712305e-06, "epoch": 1.103448275862069, "percentage": 36.78, "elapsed_time": "0:43:03", "remaining_time": "1:14:00"}
+{"current_steps": 130, "total_steps": 348, "loss": 0.83074551820755, "lr": 3.386627537791162e-06, "epoch": 1.1206896551724137, "percentage": 37.36, "elapsed_time": "0:43:33", "remaining_time": "1:13:03"}
+{"current_steps": 132, "total_steps": 348, "loss": 1.2269715070724487, "lr": 3.606278329131906e-06, "epoch": 1.1379310344827587, "percentage": 37.93, "elapsed_time": "0:44:16", "remaining_time": "1:12:26"}
+{"current_steps": 134, "total_steps": 348, "loss": 0.5150075554847717, "lr": 3.873778438395723e-06, "epoch": 1.1551724137931034, "percentage": 38.51, "elapsed_time": "0:44:40", "remaining_time": "1:11:21"}
+{"current_steps": 136, "total_steps": 348, "loss": 0.47654953598976135, "lr": 4.1881351578059545e-06, "epoch": 1.1724137931034484, "percentage": 39.08, "elapsed_time": "0:45:02", "remaining_time": "1:10:12"}
+{"current_steps": 138, "total_steps": 348, "loss": 1.2832177877426147, "lr": 4.5481818921007985e-06, "epoch": 1.1896551724137931, "percentage": 39.66, "elapsed_time": "0:45:49", "remaining_time": "1:09:44"}
+{"current_steps": 140, "total_steps": 348, "loss": 1.1527303457260132, "lr": 4.952582487833158e-06, "epoch": 1.206896551724138, "percentage": 40.23, "elapsed_time": "0:46:37", "remaining_time": "1:09:16"}
+{"current_steps": 142, "total_steps": 348, "loss": 1.4890146255493164, "lr": 5.3998361919103724e-06, "epoch": 1.2241379310344827, "percentage": 40.8, "elapsed_time": "0:47:24", "remaining_time": "1:08:46"}
+{"current_steps": 144, "total_steps": 348, "loss": 1.1686463356018066, "lr": 5.888283220972366e-06, "epoch": 1.2413793103448276, "percentage": 41.38, "elapsed_time": "0:48:13", "remaining_time": "1:08:18"}
+{"current_steps": 146, "total_steps": 348, "loss": 1.1420594453811646, "lr": 6.416110920939995e-06, "epoch": 1.2586206896551724, "percentage": 41.95, "elapsed_time": "0:48:57", "remaining_time": "1:07:44"}
+{"current_steps": 148, "total_steps": 348, "loss": 0.8097314834594727, "lr": 6.981360493875075e-06, "epoch": 1.2758620689655173, "percentage": 42.53, "elapsed_time": "0:49:35", "remaining_time": "1:07:00"}
+{"current_steps": 150, "total_steps": 348, "loss": 1.1563327312469482, "lr": 7.581934267188444e-06, "epoch": 1.293103448275862, "percentage": 43.1, "elapsed_time": "0:50:26", "remaining_time": "1:06:35"}
+{"current_steps": 152, "total_steps": 348, "loss": 0.7376953959465027, "lr": 8.215603478219558e-06, "epoch": 1.3103448275862069, "percentage": 43.68, "elapsed_time": "0:51:03", "remaining_time": "1:05:50"}
+{"current_steps": 154, "total_steps": 348, "loss": 0.8840894103050232, "lr": 8.880016545298615e-06, "epoch": 1.3275862068965516, "percentage": 44.25, "elapsed_time": "0:51:33", "remaining_time": "1:04:57"}
+{"current_steps": 156, "total_steps": 348, "loss": 1.3034617900848389, "lr": 9.572707794596881e-06, "epoch": 1.3448275862068966, "percentage": 44.83, "elapsed_time": "0:52:14", "remaining_time": "1:04:17"}
+{"current_steps": 158, "total_steps": 348, "loss": 1.2110811471939087, "lr": 1.0291106610379316e-05, "epoch": 1.3620689655172413, "percentage": 45.4, "elapsed_time": "0:53:04", "remaining_time": "1:03:49"}
+{"current_steps": 160, "total_steps": 348, "loss": 1.174965262413025, "lr": 1.1032546974702523e-05, "epoch": 1.3793103448275863, "percentage": 45.98, "elapsed_time": "0:53:53", "remaining_time": "1:03:19"}
+{"current_steps": 162, "total_steps": 348, "loss": 0.9785462021827698, "lr": 1.1794277361155493e-05, "epoch": 1.396551724137931, "percentage": 46.55, "elapsed_time": "0:54:29", "remaining_time": "1:02:34"}
+{"current_steps": 164, "total_steps": 348, "loss": 0.7486613988876343, "lr": 1.2573470945927114e-05, "epoch": 1.4137931034482758, "percentage": 47.13, "elapsed_time": "0:55:05", "remaining_time": "1:01:48"}
+{"current_steps": 166, "total_steps": 348, "loss": 1.2081069946289062, "lr": 1.3367236098306466e-05, "epoch": 1.4310344827586206, "percentage": 47.7, "elapsed_time": "0:55:56", "remaining_time": "1:01:19"}
+{"current_steps": 168, "total_steps": 348, "loss": 1.2631800174713135, "lr": 1.4172627111685208e-05, "epoch": 1.4482758620689655, "percentage": 48.28, "elapsed_time": "0:56:44", "remaining_time": "1:00:47"}
+{"current_steps": 170, "total_steps": 348, "loss": 0.8129775524139404, "lr": 1.4986655135238434e-05, "epoch": 1.4655172413793103, "percentage": 48.85, "elapsed_time": "0:57:20", "remaining_time": "1:00:02"}
+{"current_steps": 172, "total_steps": 348, "loss": 1.2445049285888672, "lr": 1.5806299265716396e-05, "epoch": 1.4827586206896552, "percentage": 49.43, "elapsed_time": "0:58:03", "remaining_time": "0:59:24"}
+{"current_steps": 174, "total_steps": 348, "loss": 0.7052794694900513, "lr": 1.6628517758184285e-05, "epoch": 1.5, "percentage": 50.0, "elapsed_time": "0:58:34", "remaining_time": "0:58:34"}
+{"current_steps": 176, "total_steps": 348, "loss": 1.4724665880203247, "lr": 1.7450259314106843e-05, "epoch": 1.5172413793103448, "percentage": 50.57, "elapsed_time": "0:59:24", "remaining_time": "0:58:03"}
+{"current_steps": 178, "total_steps": 348, "loss": 0.32006025314331055, "lr": 1.826847440488684e-05, "epoch": 1.5344827586206895, "percentage": 51.15, "elapsed_time": "0:59:50", "remaining_time": "0:57:09"}
+{"current_steps": 180, "total_steps": 348, "loss": 0.29676616191864014, "lr": 1.9080126588835274e-05, "epoch": 1.5517241379310345, "percentage": 51.72, "elapsed_time": "1:00:13", "remaining_time": "0:56:12"}
+{"current_steps": 182, "total_steps": 348, "loss": 1.1562625169754028, "lr": 1.9882203779575198e-05, "epoch": 1.5689655172413794, "percentage": 52.3, "elapsed_time": "1:00:59", "remaining_time": "0:55:38"}
+{"current_steps": 184, "total_steps": 348, "loss": 0.9202221632003784, "lr": 2.0671729424061787e-05, "epoch": 1.5862068965517242, "percentage": 52.87, "elapsed_time": "1:01:47", "remaining_time": "0:55:04"}
+{"current_steps": 186, "total_steps": 348, "loss": 1.2155585289001465, "lr": 2.144577354873615e-05, "epoch": 1.603448275862069, "percentage": 53.45, "elapsed_time": "1:02:35", "remaining_time": "0:54:31"}
+{"current_steps": 188, "total_steps": 348, "loss": 1.167516827583313, "lr": 2.220146363282005e-05, "epoch": 1.6206896551724137, "percentage": 54.02, "elapsed_time": "1:03:24", "remaining_time": "0:53:57"}
+{"current_steps": 190, "total_steps": 348, "loss": 1.1323333978652954, "lr": 2.2935995268400318e-05, "epoch": 1.6379310344827587, "percentage": 54.6, "elapsed_time": "1:04:11", "remaining_time": "0:53:23"}
+{"current_steps": 192, "total_steps": 348, "loss": 0.9196767807006836, "lr": 2.3646642567742345e-05, "epoch": 1.6551724137931034, "percentage": 55.17, "elapsed_time": "1:04:47", "remaining_time": "0:52:38"}
+{"current_steps": 194, "total_steps": 348, "loss": 0.9174919724464417, "lr": 2.433076827921117e-05, "epoch": 1.6724137931034484, "percentage": 55.75, "elapsed_time": "1:05:16", "remaining_time": "0:51:48"}
+{"current_steps": 196, "total_steps": 348, "loss": 1.3593827486038208, "lr": 2.4985833574258856e-05, "epoch": 1.6896551724137931, "percentage": 56.32, "elapsed_time": "1:05:48", "remaining_time": "0:51:02"}
+{"current_steps": 198, "total_steps": 348, "loss": 0.9581778645515442, "lr": 2.560940746915833e-05, "epoch": 1.706896551724138, "percentage": 56.9, "elapsed_time": "1:06:19", "remaining_time": "0:50:14"}
+{"current_steps": 200, "total_steps": 348, "loss": 0.6433994174003601, "lr": 2.619917584651918e-05, "epoch": 1.7241379310344827, "percentage": 57.47, "elapsed_time": "1:06:53", "remaining_time": "0:49:30"}
+{"current_steps": 202, "total_steps": 348, "loss": 1.1657538414001465, "lr": 2.6752950043105974e-05, "epoch": 1.7413793103448276, "percentage": 58.05, "elapsed_time": "1:07:35", "remaining_time": "0:48:51"}
+{"current_steps": 204, "total_steps": 348, "loss": 1.1429758071899414, "lr": 2.7268674972089096e-05, "epoch": 1.7586206896551724, "percentage": 58.62, "elapsed_time": "1:08:25", "remaining_time": "0:48:17"}
+{"current_steps": 206, "total_steps": 348, "loss": 1.2312031984329224, "lr": 2.7744436749586343e-05, "epoch": 1.7758620689655173, "percentage": 59.2, "elapsed_time": "1:09:11", "remaining_time": "0:47:41"}
+{"current_steps": 208, "total_steps": 348, "loss": 1.1482793092727661, "lr": 2.8178469797192326e-05, "epoch": 1.793103448275862, "percentage": 59.77, "elapsed_time": "1:09:53", "remaining_time": "0:47:02"}
+{"current_steps": 210, "total_steps": 348, "loss": 1.1373822689056396, "lr": 2.856916339413811e-05, "epoch": 1.8103448275862069, "percentage": 60.34, "elapsed_time": "1:10:40", "remaining_time": "0:46:26"}
+{"current_steps": 212, "total_steps": 348, "loss": 0.8796662092208862, "lr": 2.8915067654765562e-05, "epoch": 1.8275862068965516, "percentage": 60.92, "elapsed_time": "1:11:26", "remaining_time": "0:45:50"}
+{"current_steps": 214, "total_steps": 348, "loss": 1.0384447574615479, "lr": 2.9214898909133586e-05, "epoch": 1.8448275862068966, "percentage": 61.49, "elapsed_time": "1:12:08", "remaining_time": "0:45:10"}
+{"current_steps": 216, "total_steps": 348, "loss": 0.5491471290588379, "lr": 2.946754446678865e-05, "epoch": 1.8620689655172413, "percentage": 62.07, "elapsed_time": "1:12:36", "remaining_time": "0:44:22"}
+{"current_steps": 218, "total_steps": 348, "loss": 0.8381883502006531, "lr": 2.967206674602094e-05, "epoch": 1.8793103448275863, "percentage": 62.64, "elapsed_time": "1:13:05", "remaining_time": "0:43:35"}
+{"current_steps": 220, "total_steps": 348, "loss": 0.8339143991470337, "lr": 2.98277067532822e-05, "epoch": 1.896551724137931, "percentage": 63.22, "elapsed_time": "1:13:28", "remaining_time": "0:42:45"}
+{"current_steps": 222, "total_steps": 348, "loss": 0.8806865215301514, "lr": 2.993388689985295e-05, "epoch": 1.9137931034482758, "percentage": 63.79, "elapsed_time": "1:14:03", "remaining_time": "0:42:02"}
+{"current_steps": 224, "total_steps": 348, "loss": 1.1154509782791138, "lr": 2.9990213145306393e-05, "epoch": 1.9310344827586206, "percentage": 64.37, "elapsed_time": "1:14:42", "remaining_time": "0:41:21"}
+{"current_steps": 226, "total_steps": 348, "loss": 0.8699172735214233, "lr": 2.999647645981429e-05, "epoch": 1.9482758620689655, "percentage": 64.94, "elapsed_time": "1:15:18", "remaining_time": "0:40:39"}
+{"current_steps": 228, "total_steps": 348, "loss": 1.0112342834472656, "lr": 2.995265359986831e-05, "epoch": 1.9655172413793105, "percentage": 65.52, "elapsed_time": "1:15:52", "remaining_time": "0:39:56"}
+{"current_steps": 230, "total_steps": 348, "loss": 1.221873164176941, "lr": 2.9858907194537958e-05, "epoch": 1.9827586206896552, "percentage": 66.09, "elapsed_time": "1:16:34", "remaining_time": "0:39:17"}
+{"current_steps": 232, "total_steps": 348, "loss": 0.8529492616653442, "lr": 2.9715585141945073e-05, "epoch": 2.0, "percentage": 66.67, "elapsed_time": "1:17:12", "remaining_time": "0:38:36"}
+{"current_steps": 234, "total_steps": 348, "loss": 1.0910695791244507, "lr": 2.9523219318194555e-05, "epoch": 2.0172413793103448, "percentage": 67.24, "elapsed_time": "1:18:29", "remaining_time": "0:38:14"}
+{"current_steps": 236, "total_steps": 348, "loss": 0.39936938881874084, "lr": 2.9282523603552557e-05, "epoch": 2.0344827586206895, "percentage": 67.82, "elapsed_time": "1:18:51", "remaining_time": "0:37:25"}
+{"current_steps": 238, "total_steps": 348, "loss": 0.6455128788948059, "lr": 2.899439123319714e-05, "epoch": 2.0517241379310347, "percentage": 68.39, "elapsed_time": "1:19:30", "remaining_time": "0:36:44"}
+{"current_steps": 240, "total_steps": 348, "loss": 1.0364433526992798, "lr": 2.86598914823729e-05, "epoch": 2.0689655172413794, "percentage": 68.97, "elapsed_time": "1:20:19", "remaining_time": "0:36:08"}
+{"current_steps": 242, "total_steps": 348, "loss": 0.6382094025611877, "lr": 2.8280265698251005e-05, "epoch": 2.086206896551724, "percentage": 69.54, "elapsed_time": "1:20:54", "remaining_time": "0:35:26"}
+{"current_steps": 244, "total_steps": 348, "loss": 0.6532158255577087, "lr": 2.7856922693220947e-05, "epoch": 2.103448275862069, "percentage": 70.11, "elapsed_time": "1:21:21", "remaining_time": "0:34:40"}
+{"current_steps": 246, "total_steps": 348, "loss": 0.8184542655944824, "lr": 2.739143351670935e-05, "epoch": 2.1206896551724137, "percentage": 70.69, "elapsed_time": "1:22:01", "remaining_time": "0:34:00"}
+{"current_steps": 248, "total_steps": 348, "loss": 0.675823986530304, "lr": 2.688552562492825e-05, "epoch": 2.1379310344827585, "percentage": 71.26, "elapsed_time": "1:22:38", "remaining_time": "0:33:19"}
+{"current_steps": 250, "total_steps": 348, "loss": 0.7770854830741882, "lr": 2.6341076470189065e-05, "epoch": 2.1551724137931036, "percentage": 71.84, "elapsed_time": "1:23:08", "remaining_time": "0:32:35"}
+{"current_steps": 252, "total_steps": 348, "loss": 0.713956356048584, "lr": 2.5760106533572648e-05, "epoch": 2.1724137931034484, "percentage": 72.41, "elapsed_time": "1:23:40", "remaining_time": "0:31:52"}
+{"current_steps": 254, "total_steps": 348, "loss": 0.4886062741279602, "lr": 2.5144771826811748e-05, "epoch": 2.189655172413793, "percentage": 72.99, "elapsed_time": "1:24:13", "remaining_time": "0:31:10"}
+{"current_steps": 256, "total_steps": 348, "loss": 1.1009202003479004, "lr": 2.4497355891211547e-05, "epoch": 2.206896551724138, "percentage": 73.56, "elapsed_time": "1:25:03", "remaining_time": "0:30:33"}
+{"current_steps": 258, "total_steps": 348, "loss": 1.0635813474655151, "lr": 2.3820261323300925e-05, "epoch": 2.2241379310344827, "percentage": 74.14, "elapsed_time": "1:25:49", "remaining_time": "0:29:56"}
+{"current_steps": 260, "total_steps": 348, "loss": 0.9566066265106201, "lr": 2.311600085866322e-05, "epoch": 2.2413793103448274, "percentage": 74.71, "elapsed_time": "1:26:33", "remaining_time": "0:29:17"}
+{"current_steps": 262, "total_steps": 348, "loss": 0.6884672045707703, "lr": 2.238718804703476e-05, "epoch": 2.2586206896551726, "percentage": 75.29, "elapsed_time": "1:27:07", "remaining_time": "0:28:35"}
+{"current_steps": 264, "total_steps": 348, "loss": 0.9428229928016663, "lr": 2.163652755327646e-05, "epoch": 2.2758620689655173, "percentage": 75.86, "elapsed_time": "1:27:47", "remaining_time": "0:27:56"}
+{"current_steps": 266, "total_steps": 348, "loss": 0.6498053669929504, "lr": 2.0866805120212266e-05, "epoch": 2.293103448275862, "percentage": 76.44, "elapsed_time": "1:28:26", "remaining_time": "0:27:15"}
+{"current_steps": 268, "total_steps": 348, "loss": 1.1316256523132324, "lr": 2.0080877230582565e-05, "epoch": 2.310344827586207, "percentage": 77.01, "elapsed_time": "1:29:08", "remaining_time": "0:26:36"}
+{"current_steps": 270, "total_steps": 348, "loss": 1.0687792301177979, "lr": 1.9281660506477953e-05, "epoch": 2.3275862068965516, "percentage": 77.59, "elapsed_time": "1:29:56", "remaining_time": "0:25:58"}
+{"current_steps": 272, "total_steps": 348, "loss": 1.125198245048523, "lr": 1.8472120885592557e-05, "epoch": 2.344827586206897, "percentage": 78.16, "elapsed_time": "1:30:42", "remaining_time": "0:25:20"}
+{"current_steps": 274, "total_steps": 348, "loss": 0.5221872329711914, "lr": 1.7655262614464347e-05, "epoch": 2.3620689655172415, "percentage": 78.74, "elapsed_time": "1:31:17", "remaining_time": "0:24:39"}
+{"current_steps": 276, "total_steps": 348, "loss": 1.0470247268676758, "lr": 1.683411709954916e-05, "epoch": 2.3793103448275863, "percentage": 79.31, "elapsed_time": "1:32:05", "remaining_time": "0:24:01"}
+{"current_steps": 278, "total_steps": 348, "loss": 0.6991406679153442, "lr": 1.6011731657502948e-05, "epoch": 2.396551724137931, "percentage": 79.89, "elapsed_time": "1:32:43", "remaining_time": "0:23:20"}
+{"current_steps": 280, "total_steps": 348, "loss": 1.0698401927947998, "lr": 1.5191158206420104e-05, "epoch": 2.413793103448276, "percentage": 80.46, "elapsed_time": "1:33:21", "remaining_time": "0:22:40"}
+{"current_steps": 282, "total_steps": 348, "loss": 0.9676493406295776, "lr": 1.4375441939995862e-05, "epoch": 2.4310344827586206, "percentage": 81.03, "elapsed_time": "1:34:09", "remaining_time": "0:22:02"}
+{"current_steps": 284, "total_steps": 348, "loss": 0.8159676194190979, "lr": 1.3567610026643158e-05, "epoch": 2.4482758620689653, "percentage": 81.61, "elapsed_time": "1:34:43", "remaining_time": "0:21:20"}
+{"current_steps": 286, "total_steps": 348, "loss": 0.731606125831604, "lr": 1.2770660375502325e-05, "epoch": 2.4655172413793105, "percentage": 82.18, "elapsed_time": "1:35:18", "remaining_time": "0:20:39"}
+{"current_steps": 288, "total_steps": 348, "loss": 0.9657235145568848, "lr": 1.1987550511033729e-05, "epoch": 2.4827586206896552, "percentage": 82.76, "elapsed_time": "1:35:59", "remaining_time": "0:19:59"}
+{"current_steps": 290, "total_steps": 348, "loss": 0.6879425644874573, "lr": 1.1221186597479921e-05, "epoch": 2.5, "percentage": 83.33, "elapsed_time": "1:36:28", "remaining_time": "0:19:17"}
+{"current_steps": 292, "total_steps": 348, "loss": 0.6442292928695679, "lr": 1.047441265392866e-05, "epoch": 2.5172413793103448, "percentage": 83.91, "elapsed_time": "1:37:03", "remaining_time": "0:18:36"}
+{"current_steps": 294, "total_steps": 348, "loss": 1.2038459777832031, "lr": 9.74999999999999e-06, "epoch": 2.5344827586206895, "percentage": 84.48, "elapsed_time": "1:37:52", "remaining_time": "0:17:58"}
+{"current_steps": 296, "total_steps": 348, "loss": 0.6161110401153564, "lr": 9.05063697132504e-06, "epoch": 2.5517241379310347, "percentage": 85.06, "elapsed_time": "1:38:32", "remaining_time": "0:17:18"}
+{"current_steps": 298, "total_steps": 348, "loss": 0.4709848463535309, "lr": 8.378918942982984e-06, "epoch": 2.5689655172413794, "percentage": 85.63, "elapsed_time": "1:39:05", "remaining_time": "0:16:37"}
+{"current_steps": 300, "total_steps": 348, "loss": 0.9857034683227539, "lr": 7.7373386979199e-06, "epoch": 2.586206896551724, "percentage": 86.21, "elapsed_time": "1:39:53", "remaining_time": "0:15:58"}
+{"current_steps": 302, "total_steps": 348, "loss": 0.93929523229599, "lr": 7.128277176092711e-06, "epoch": 2.603448275862069, "percentage": 86.78, "elapsed_time": "1:40:31", "remaining_time": "0:15:18"}
+{"current_steps": 304, "total_steps": 348, "loss": 0.8182238340377808, "lr": 6.5539946386685424e-06, "epoch": 2.6206896551724137, "percentage": 87.36, "elapsed_time": "1:41:09", "remaining_time": "0:14:38"}
+{"current_steps": 306, "total_steps": 348, "loss": 0.8413806557655334, "lr": 6.01662228006993e-06, "epoch": 2.637931034482759, "percentage": 87.93, "elapsed_time": "1:41:51", "remaining_time": "0:13:58"}
+{"current_steps": 308, "total_steps": 348, "loss": 0.7553901076316833, "lr": 5.5181543189938835e-06, "epoch": 2.655172413793103, "percentage": 88.51, "elapsed_time": "1:42:22", "remaining_time": "0:13:17"}
+{"current_steps": 310, "total_steps": 348, "loss": 0.8772685527801514, "lr": 5.060440597755323e-06, "epoch": 2.6724137931034484, "percentage": 89.08, "elapsed_time": "1:43:09", "remaining_time": "0:12:38"}
+{"current_steps": 312, "total_steps": 348, "loss": 0.9297232031822205, "lr": 4.6451797174195354e-06, "epoch": 2.689655172413793, "percentage": 89.66, "elapsed_time": "1:43:51", "remaining_time": "0:11:59"}
|
| 157 |
+
{"current_steps": 314, "total_steps": 348, "loss": 0.8999103307723999, "lr": 4.273912734199091e-06, "epoch": 2.706896551724138, "percentage": 90.23, "elapsed_time": "1:44:40", "remaining_time": "0:11:20"}
|
| 158 |
+
{"current_steps": 316, "total_steps": 348, "loss": 0.32868385314941406, "lr": 3.948017440508606e-06, "epoch": 2.7241379310344827, "percentage": 90.8, "elapsed_time": "1:45:00", "remaining_time": "0:10:38"}
|
| 159 |
+
{"current_steps": 318, "total_steps": 348, "loss": 1.1546356678009033, "lr": 3.6687032519005313e-06, "epoch": 2.7413793103448274, "percentage": 91.38, "elapsed_time": "1:45:44", "remaining_time": "0:09:58"}
|
| 160 |
+
{"current_steps": 320, "total_steps": 348, "loss": 0.9378973841667175, "lr": 3.4370067188567254e-06, "epoch": 2.7586206896551726, "percentage": 91.95, "elapsed_time": "1:46:33", "remaining_time": "0:09:19"}
|
| 161 |
+
{"current_steps": 322, "total_steps": 348, "loss": 0.6517216563224792, "lr": 3.2537876800920677e-06, "epoch": 2.7758620689655173, "percentage": 92.53, "elapsed_time": "1:47:12", "remaining_time": "0:08:39"}
|
| 162 |
+
{"current_steps": 324, "total_steps": 348, "loss": 0.7699644565582275, "lr": 3.119726071645127e-06, "epoch": 2.793103448275862, "percentage": 93.1, "elapsed_time": "1:47:50", "remaining_time": "0:07:59"}
|
| 163 |
+
{"current_steps": 326, "total_steps": 348, "loss": 1.0573527812957764, "lr": 3.035319403597751e-06, "epoch": 2.810344827586207, "percentage": 93.68, "elapsed_time": "1:48:39", "remaining_time": "0:07:19"}
|
| 164 |
+
{"current_steps": 328, "total_steps": 348, "loss": 1.1476738452911377, "lr": 3.000880913787505e-06, "epoch": 2.8275862068965516, "percentage": 94.25, "elapsed_time": "1:49:26", "remaining_time": "0:06:40"}
|
| 165 |
+
{"current_steps": 330, "total_steps": 348, "loss": 1.1468658447265625, "lr": 3.016538405364614e-06, "epoch": 2.844827586206897, "percentage": 94.83, "elapsed_time": "1:50:16", "remaining_time": "0:06:00"}
|
| 166 |
+
{"current_steps": 332, "total_steps": 348, "loss": 1.062970757484436, "lr": 3.082233772507386e-06, "epoch": 2.862068965517241, "percentage": 95.4, "elapsed_time": "1:51:04", "remaining_time": "0:05:21"}
|
| 167 |
+
{"current_steps": 334, "total_steps": 348, "loss": 0.603091835975647, "lr": 3.1977232160561467e-06, "epoch": 2.8793103448275863, "percentage": 95.98, "elapsed_time": "1:51:40", "remaining_time": "0:04:40"}
|
| 168 |
+
{"current_steps": 336, "total_steps": 348, "loss": 0.7071291208267212, "lr": 3.3625781482654565e-06, "epoch": 2.896551724137931, "percentage": 96.55, "elapsed_time": "1:52:15", "remaining_time": "0:04:00"}
|
| 169 |
+
{"current_steps": 338, "total_steps": 348, "loss": 0.658890962600708, "lr": 3.5761867833170895e-06, "epoch": 2.913793103448276, "percentage": 97.13, "elapsed_time": "1:52:51", "remaining_time": "0:03:20"}
|
| 170 |
+
{"current_steps": 340, "total_steps": 348, "loss": 1.0716102123260498, "lr": 3.8377564076912e-06, "epoch": 2.9310344827586206, "percentage": 97.7, "elapsed_time": "1:53:39", "remaining_time": "0:02:40"}
|
| 171 |
+
{"current_steps": 342, "total_steps": 348, "loss": 1.2615000009536743, "lr": 4.146316321970244e-06, "epoch": 2.9482758620689653, "percentage": 98.28, "elapsed_time": "1:54:20", "remaining_time": "0:02:00"}
|
| 172 |
+
{"current_steps": 344, "total_steps": 348, "loss": 0.8507893681526184, "lr": 4.5007214431585365e-06, "epoch": 2.9655172413793105, "percentage": 98.85, "elapsed_time": "1:55:00", "remaining_time": "0:01:20"}
|
| 173 |
+
{"current_steps": 346, "total_steps": 348, "loss": 0.6322518587112427, "lr": 4.8996565541488335e-06, "epoch": 2.9827586206896552, "percentage": 99.43, "elapsed_time": "1:55:33", "remaining_time": "0:00:40"}
|
| 174 |
+
{"current_steps": 348, "total_steps": 348, "loss": 0.7810978293418884, "lr": 5.341641184566203e-06, "epoch": 3.0, "percentage": 100.0, "elapsed_time": "1:56:09", "remaining_time": "0:00:00"}
|
| 175 |
+
{"current_steps": 348, "total_steps": 348, "epoch": 3.0, "percentage": 100.0, "elapsed_time": "1:56:09", "remaining_time": "0:00:00"}
|
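The records above are plain JSON, one per line, so the curve in training_loss.png can be reproduced without any training code. A minimal sketch, assuming trainer_log.jsonl has been downloaded locally and matplotlib is installed (the script and its output filename are illustrative, not part of this repo):

import json
import matplotlib.pyplot as plt

steps, losses = [], []
with open("trainer_log.jsonl") as f:
    for line in f:
        record = json.loads(line)
        if "loss" in record:  # the last record is a run summary with no loss field
            steps.append(record["current_steps"])
            losses.append(record["loss"])

plt.plot(steps, losses)
plt.xlabel("step")
plt.ylabel("training loss")
plt.savefig("training_loss_replot.png")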
trainer_state.json
ADDED
@@ -0,0 +1,1261 @@
{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 348,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.017241379310344827, "grad_norm": 0.8816090822219849, "learning_rate": 1.6666666666666667e-06, "loss": 2.133296012878418, "step": 2},
    {"epoch": 0.034482758620689655, "grad_norm": 0.29265427589416504, "learning_rate": 4.9999999999999996e-06, "loss": 1.8569927215576172, "step": 4},
    {"epoch": 0.05172413793103448, "grad_norm": 0.4113790988922119, "learning_rate": 8.333333333333334e-06, "loss": 1.9297298192977905, "step": 6},
    {"epoch": 0.06896551724137931, "grad_norm": 0.501505434513092, "learning_rate": 1.1666666666666668e-05, "loss": 4.868577480316162, "step": 8},
    {"epoch": 0.08620689655172414, "grad_norm": 0.43636268377304077, "learning_rate": 1.5e-05, "loss": 2.009166955947876, "step": 10},
    {"epoch": 0.10344827586206896, "grad_norm": 3.4641079902648926, "learning_rate": 1.8333333333333336e-05, "loss": 2.0880112648010254, "step": 12},
    {"epoch": 0.1206896551724138, "grad_norm": 2.272722005844116, "learning_rate": 2.1666666666666667e-05, "loss": 2.3606438636779785, "step": 14},
    {"epoch": 0.13793103448275862, "grad_norm": 0.5703615546226501, "learning_rate": 2.5e-05, "loss": 1.9572478532791138, "step": 16},
    {"epoch": 0.15517241379310345, "grad_norm": 0.23337635397911072, "learning_rate": 2.8333333333333332e-05, "loss": 1.6358482837677002, "step": 18},
    {"epoch": 0.1724137931034483, "grad_norm": 0.2846831679344177, "learning_rate": 2.999373614050377e-05, "loss": 1.5971399545669556, "step": 20},
    {"epoch": 0.1896551724137931, "grad_norm": 0.22005710005760193, "learning_rate": 2.9943660135527193e-05, "loss": 1.9199273586273193, "step": 22},
    {"epoch": 0.20689655172413793, "grad_norm": 0.30223548412323, "learning_rate": 2.984369396041239e-05, "loss": 1.5466594696044922, "step": 24},
    {"epoch": 0.22413793103448276, "grad_norm": 4.415768146514893, "learning_rate": 2.9694208595192654e-05, "loss": 1.039365530014038, "step": 26},
    {"epoch": 0.2413793103448276, "grad_norm": 0.2239375114440918, "learning_rate": 2.949575878836866e-05, "loss": 1.457193374633789, "step": 28},
    {"epoch": 0.25862068965517243, "grad_norm": 0.5077499151229858, "learning_rate": 2.9249080998205997e-05, "loss": 1.247235894203186, "step": 30},
    {"epoch": 0.27586206896551724, "grad_norm": 0.1902107149362564, "learning_rate": 2.895509065969754e-05, "loss": 1.132712721824646, "step": 32},
    {"epoch": 0.29310344827586204, "grad_norm": 0.7180187106132507, "learning_rate": 2.861487878733334e-05, "loss": 1.1004750728607178, "step": 34},
    {"epoch": 0.3103448275862069, "grad_norm": 0.19518446922302246, "learning_rate": 2.8229707926285193e-05, "loss": 1.2603535652160645, "step": 36},
    {"epoch": 0.3275862068965517, "grad_norm": 0.2706547975540161, "learning_rate": 2.7801007467031303e-05, "loss": 1.564131259918213, "step": 38},
    {"epoch": 0.3448275862068966, "grad_norm": 0.11646223813295364, "learning_rate": 2.7330368340808766e-05, "loss": 1.3439611196517944, "step": 40},
    {"epoch": 0.3620689655172414, "grad_norm": 0.18652920424938202, "learning_rate": 2.68195371155793e-05, "loss": 1.653893232345581, "step": 42},
    {"epoch": 0.3793103448275862, "grad_norm": 0.35720935463905334, "learning_rate": 2.6270409514418447e-05, "loss": 1.345064401626587, "step": 44},
    {"epoch": 0.39655172413793105, "grad_norm": 0.18193630874156952, "learning_rate": 2.568502338038184e-05, "loss": 1.3374089002609253, "step": 46},
    {"epoch": 0.41379310344827586, "grad_norm": 0.15458394587039948, "learning_rate": 2.5065551113956345e-05, "loss": 1.3249835968017578, "step": 48},
    {"epoch": 0.43103448275862066, "grad_norm": 0.3153666853904724, "learning_rate": 2.441429161116107e-05, "loss": 1.143474817276001, "step": 50},
    {"epoch": 0.4482758620689655, "grad_norm": 0.1796191781759262, "learning_rate": 2.3733661732216455e-05, "loss": 0.91184401512146, "step": 52},
    {"epoch": 0.46551724137931033, "grad_norm": 0.2718876004219055, "learning_rate": 2.3026187332441612e-05, "loss": 1.1794605255126953, "step": 54},
    {"epoch": 0.4827586206896552, "grad_norm": 0.154719278216362, "learning_rate": 2.2294493888664832e-05, "loss": 1.2965553998947144, "step": 56},
    {"epoch": 0.5, "grad_norm": 0.14152568578720093, "learning_rate": 2.1541296755933106e-05, "loss": 1.4213244915008545, "step": 58},
    {"epoch": 0.5172413793103449, "grad_norm": 0.15456536412239075, "learning_rate": 2.0769391090678594e-05, "loss": 1.25790274143219, "step": 60},
    {"epoch": 0.5344827586206896, "grad_norm": 0.25589054822921753, "learning_rate": 1.998164147773771e-05, "loss": 1.2669916152954102, "step": 62},
    {"epoch": 0.5517241379310345, "grad_norm": 0.19327247142791748, "learning_rate": 1.9180971299717562e-05, "loss": 1.0608755350112915, "step": 64},
    {"epoch": 0.5689655172413793, "grad_norm": 0.1933823823928833, "learning_rate": 1.8370351888160553e-05, "loss": 1.2603998184204102, "step": 66},
    {"epoch": 0.5862068965517241, "grad_norm": 0.37840721011161804, "learning_rate": 1.755279149676784e-05, "loss": 0.9561605453491211, "step": 68},
    {"epoch": 0.603448275862069, "grad_norm": 0.21245446801185608, "learning_rate": 1.6731324137602643e-05, "loss": 1.0841741561889648, "step": 70},
    {"epoch": 0.6206896551724138, "grad_norm": 0.2268361747264862, "learning_rate": 1.590899832170276e-05, "loss": 0.8117428421974182, "step": 72},
    {"epoch": 0.6379310344827587, "grad_norm": 0.36945435404777527, "learning_rate": 1.5088865745886681e-05, "loss": 0.8893211483955383, "step": 74},
    {"epoch": 0.6551724137931034, "grad_norm": 0.43866750597953796, "learning_rate": 1.4273969967736997e-05, "loss": 1.256456732749939, "step": 76},
    {"epoch": 0.6724137931034483, "grad_norm": 0.6824928522109985, "learning_rate": 1.3467335110789045e-05, "loss": 1.0219424962997437, "step": 78},
    {"epoch": 0.6896551724137931, "grad_norm": 0.1600809097290039, "learning_rate": 1.267195464184032e-05, "loss": 1.3244811296463013, "step": 80},
    {"epoch": 0.7068965517241379, "grad_norm": 0.16961514949798584, "learning_rate": 1.1890780262028785e-05, "loss": 1.2907791137695312, "step": 82},
    {"epoch": 0.7241379310344828, "grad_norm": 0.12361691892147064, "learning_rate": 1.1126710952905864e-05, "loss": 1.0599552392959595, "step": 84},
    {"epoch": 0.7413793103448276, "grad_norm": 0.2644699811935425, "learning_rate": 1.0382582218154716e-05, "loss": 0.931882381439209, "step": 86},
    {"epoch": 0.7586206896551724, "grad_norm": 0.3073562681674957, "learning_rate": 9.661155560878463e-06, "loss": 0.7623745203018188, "step": 88},
    {"epoch": 0.7758620689655172, "grad_norm": 0.14720334112644196, "learning_rate": 8.965108235508627e-06, "loss": 0.8441999554634094, "step": 90},
    {"epoch": 0.7931034482758621, "grad_norm": 0.29734253883361816, "learning_rate": 8.29702331236512e-06, "loss": 0.8531256318092346, "step": 92},
    {"epoch": 0.8103448275862069, "grad_norm": 0.5815638899803162, "learning_rate": 7.659380091738652e-06, "loss": 0.8273341655731201, "step": 94},
    {"epoch": 0.8275862068965517, "grad_norm": 1.9160736799240112, "learning_rate": 7.054544903069566e-06, "loss": 0.4431428015232086, "step": 96},
    {"epoch": 0.8448275862068966, "grad_norm": 0.16483919322490692, "learning_rate": 6.484762323367704e-06, "loss": 1.2755130529403687, "step": 98},
    {"epoch": 0.8620689655172413, "grad_norm": 0.42587000131607056, "learning_rate": 5.9521468474623304e-06, "loss": 1.144418716430664, "step": 100},
    {"epoch": 0.8793103448275862, "grad_norm": 0.15736447274684906, "learning_rate": 5.458675040994306e-06, "loss": 1.2321348190307617, "step": 102},
    {"epoch": 0.896551724137931, "grad_norm": 0.3543795645236969, "learning_rate": 5.006178205271082e-06, "loss": 0.9103032946586609, "step": 104},
    {"epoch": 0.9137931034482759, "grad_norm": 0.1326935738325119, "learning_rate": 4.596335581205745e-06, "loss": 1.2454521656036377, "step": 106},
    {"epoch": 0.9310344827586207, "grad_norm": 1.112565517425537, "learning_rate": 4.230668117560634e-06, "loss": 1.1100295782089233, "step": 108},
    {"epoch": 0.9482758620689655, "grad_norm": 0.18412314355373383, "learning_rate": 3.910532826621947e-06, "loss": 1.0047061443328857, "step": 110},
    {"epoch": 0.9655172413793104, "grad_norm": 0.1519099920988083, "learning_rate": 3.6371177482516813e-06, "loss": 0.7593088150024414, "step": 112},
    {"epoch": 0.9827586206896551, "grad_norm": 0.2574796974658966, "learning_rate": 3.411437541005667e-06, "loss": 0.8851341605186462, "step": 114},
    {"epoch": 1.0, "grad_norm": 0.162760391831398, "learning_rate": 3.234329716679273e-06, "loss": 1.245699405670166, "step": 116},
    {"epoch": 1.0172413793103448, "grad_norm": 0.1872638612985611, "learning_rate": 3.1064515322545505e-06, "loss": 1.221221923828125, "step": 118},
    {"epoch": 1.0344827586206897, "grad_norm": 0.21109050512313843, "learning_rate": 3.0282775507830497e-06, "loss": 0.7024672627449036, "step": 120},
    {"epoch": 1.0517241379310345, "grad_norm": 0.14990702271461487, "learning_rate": 3.000097880255948e-06, "loss": 1.2402740716934204, "step": 122},
    {"epoch": 1.0689655172413792, "grad_norm": 0.23416019976139069, "learning_rate": 3.0220170969971693e-06, "loss": 1.1297236680984497, "step": 124},
    {"epoch": 1.0862068965517242, "grad_norm": 0.19604845345020294, "learning_rate": 3.093953857574826e-06, "loss": 1.0468320846557617, "step": 126},
    {"epoch": 1.103448275862069, "grad_norm": 0.1591433882713318, "learning_rate": 3.2156412006712305e-06, "loss": 1.190972089767456, "step": 128},
    {"epoch": 1.1206896551724137, "grad_norm": 0.21266596019268036, "learning_rate": 3.386627537791162e-06, "loss": 0.83074551820755, "step": 130},
    {"epoch": 1.1379310344827587, "grad_norm": 0.1659928411245346, "learning_rate": 3.606278329131906e-06, "loss": 1.2269715070724487, "step": 132},
    {"epoch": 1.1551724137931034, "grad_norm": 0.3175746500492096, "learning_rate": 3.873778438395723e-06, "loss": 0.5150075554847717, "step": 134},
    {"epoch": 1.1724137931034484, "grad_norm": 0.31136131286621094, "learning_rate": 4.1881351578059545e-06, "loss": 0.47654953598976135, "step": 136},
    {"epoch": 1.1896551724137931, "grad_norm": 0.14184990525245667, "learning_rate": 4.5481818921007985e-06, "loss": 1.2832177877426147, "step": 138},
    {"epoch": 1.206896551724138, "grad_norm": 0.25604957342147827, "learning_rate": 4.952582487833158e-06, "loss": 1.1527303457260132, "step": 140},
    {"epoch": 1.2241379310344827, "grad_norm": 0.13393063843250275, "learning_rate": 5.3998361919103724e-06, "loss": 1.4890146255493164, "step": 142},
    {"epoch": 1.2413793103448276, "grad_norm": 0.12155193835496902, "learning_rate": 5.888283220972366e-06, "loss": 1.1686463356018066, "step": 144},
    {"epoch": 1.2586206896551724, "grad_norm": 0.38867977261543274, "learning_rate": 6.416110920939995e-06, "loss": 1.1420594453811646, "step": 146},
    {"epoch": 1.2758620689655173, "grad_norm": 0.1920805424451828, "learning_rate": 6.981360493875075e-06, "loss": 0.8097314834594727, "step": 148},
    {"epoch": 1.293103448275862, "grad_norm": 0.2275863140821457, "learning_rate": 7.581934267188444e-06, "loss": 1.1563327312469482, "step": 150},
    {"epoch": 1.3103448275862069, "grad_norm": 0.6193908452987671, "learning_rate": 8.215603478219558e-06, "loss": 0.7376953959465027, "step": 152},
    {"epoch": 1.3275862068965516, "grad_norm": 0.13849061727523804, "learning_rate": 8.880016545298615e-06, "loss": 0.8840894103050232, "step": 154},
    {"epoch": 1.3448275862068966, "grad_norm": 0.4044109284877777, "learning_rate": 9.572707794596881e-06, "loss": 1.3034617900848389, "step": 156},
    {"epoch": 1.3620689655172413, "grad_norm": 0.1762719750404358, "learning_rate": 1.0291106610379316e-05, "loss": 1.2110811471939087, "step": 158},
    {"epoch": 1.3793103448275863, "grad_norm": 0.21315965056419373, "learning_rate": 1.1032546974702523e-05, "loss": 1.174965262413025, "step": 160},
    {"epoch": 1.396551724137931, "grad_norm": 0.1833634078502655, "learning_rate": 1.1794277361155493e-05, "loss": 0.9785462021827698, "step": 162},
    {"epoch": 1.4137931034482758, "grad_norm": 0.2881973683834076, "learning_rate": 1.2573470945927114e-05, "loss": 0.7486613988876343, "step": 164},
    {"epoch": 1.4310344827586206, "grad_norm": 0.1457541584968567, "learning_rate": 1.3367236098306466e-05, "loss": 1.2081069946289062, "step": 166},
    {"epoch": 1.4482758620689655, "grad_norm": 0.22875168919563293, "learning_rate": 1.4172627111685208e-05, "loss": 1.2631800174713135, "step": 168},
    {"epoch": 1.4655172413793103, "grad_norm": 0.14663054049015045, "learning_rate": 1.4986655135238434e-05, "loss": 0.8129775524139404, "step": 170},
    {"epoch": 1.4827586206896552, "grad_norm": 0.18815924227237701, "learning_rate": 1.5806299265716396e-05, "loss": 1.2445049285888672, "step": 172},
    {"epoch": 1.5, "grad_norm": 0.23885682225227356, "learning_rate": 1.6628517758184285e-05, "loss": 0.7052794694900513, "step": 174},
    {"epoch": 1.5172413793103448, "grad_norm": 0.13497234880924225, "learning_rate": 1.7450259314106843e-05, "loss": 1.4724665880203247, "step": 176},
    {"epoch": 1.5344827586206895, "grad_norm": 0.5048268437385559, "learning_rate": 1.826847440488684e-05, "loss": 0.32006025314331055, "step": 178},
    {"epoch": 1.5517241379310345, "grad_norm": 0.8052246570587158, "learning_rate": 1.9080126588835274e-05, "loss": 0.29676616191864014, "step": 180},
    {"epoch": 1.5689655172413794, "grad_norm": 0.43781599402427673, "learning_rate": 1.9882203779575198e-05, "loss": 1.1562625169754028, "step": 182},
    {"epoch": 1.5862068965517242, "grad_norm": 0.1728672832250595, "learning_rate": 2.0671729424061787e-05, "loss": 0.9202221632003784, "step": 184},
    {"epoch": 1.603448275862069, "grad_norm": 0.5775326490402222, "learning_rate": 2.144577354873615e-05, "loss": 1.2155585289001465, "step": 186},
    {"epoch": 1.6206896551724137, "grad_norm": 0.17796771228313446, "learning_rate": 2.220146363282005e-05, "loss": 1.167516827583313, "step": 188},
    {"epoch": 1.6379310344827587, "grad_norm": 0.3755885064601898, "learning_rate": 2.2935995268400318e-05, "loss": 1.1323333978652954, "step": 190},
    {"epoch": 1.6551724137931034, "grad_norm": 0.21519975364208221, "learning_rate": 2.3646642567742345e-05, "loss": 0.9196767807006836, "step": 192},
    {"epoch": 1.6724137931034484, "grad_norm": 0.16918060183525085, "learning_rate": 2.433076827921117e-05, "loss": 0.9174919724464417, "step": 194},
    {"epoch": 1.6896551724137931, "grad_norm": 0.20906352996826172, "learning_rate": 2.4985833574258856e-05, "loss": 1.3593827486038208, "step": 196},
    {"epoch": 1.706896551724138, "grad_norm": 0.620851457118988, "learning_rate": 2.560940746915833e-05, "loss": 0.9581778645515442, "step": 198},
    {"epoch": 1.7241379310344827, "grad_norm": 0.5852736830711365, "learning_rate": 2.619917584651918e-05, "loss": 0.6433994174003601, "step": 200},
    {"epoch": 1.7413793103448276, "grad_norm": 0.18479883670806885, "learning_rate": 2.6752950043105974e-05, "loss": 1.1657538414001465, "step": 202},
    {"epoch": 1.7586206896551724, "grad_norm": 0.18072469532489777, "learning_rate": 2.7268674972089096e-05, "loss": 1.1429758071899414, "step": 204},
    {"epoch": 1.7758620689655173, "grad_norm": 0.2669624388217926, "learning_rate": 2.7744436749586343e-05, "loss": 1.2312031984329224, "step": 206},
    {"epoch": 1.793103448275862, "grad_norm": 0.16005530953407288, "learning_rate": 2.8178469797192326e-05, "loss": 1.1482793092727661, "step": 208},
    {"epoch": 1.8103448275862069, "grad_norm": 0.24072809517383575, "learning_rate": 2.856916339413811e-05, "loss": 1.1373822689056396, "step": 210},
    {"epoch": 1.8275862068965516, "grad_norm": 0.1878095120191574, "learning_rate": 2.8915067654765562e-05, "loss": 0.8796662092208862, "step": 212},
    {"epoch": 1.8448275862068966, "grad_norm": 0.21740852296352386, "learning_rate": 2.9214898909133586e-05, "loss": 1.0384447574615479, "step": 214},
    {"epoch": 1.8620689655172413, "grad_norm": 0.9018149971961975, "learning_rate": 2.946754446678865e-05, "loss": 0.5491471290588379, "step": 216},
    {"epoch": 1.8793103448275863, "grad_norm": 0.13663442432880402, "learning_rate": 2.967206674602094e-05, "loss": 0.8381883502006531, "step": 218},
    {"epoch": 1.896551724137931, "grad_norm": 0.17705294489860535, "learning_rate": 2.98277067532822e-05, "loss": 0.8339143991470337, "step": 220},
    {"epoch": 1.9137931034482758, "grad_norm": 0.6685168743133545, "learning_rate": 2.993388689985295e-05, "loss": 0.8806865215301514, "step": 222},
    {"epoch": 1.9310344827586206, "grad_norm": 0.27922579646110535, "learning_rate": 2.9990213145306393e-05, "loss": 1.1154509782791138, "step": 224},
    {"epoch": 1.9482758620689655, "grad_norm": 0.42925339937210083, "learning_rate": 2.999647645981429e-05, "loss": 0.8699172735214233, "step": 226},
    {"epoch": 1.9655172413793105, "grad_norm": 0.30161452293395996, "learning_rate": 2.995265359986831e-05, "loss": 1.0112342834472656, "step": 228},
    {"epoch": 1.9827586206896552, "grad_norm": 0.5354682803153992, "learning_rate": 2.9858907194537958e-05, "loss": 1.221873164176941, "step": 230},
    {"epoch": 2.0, "grad_norm": 0.1859283596277237, "learning_rate": 2.9715585141945073e-05, "loss": 0.8529492616653442, "step": 232},
    {"epoch": 2.0172413793103448, "grad_norm": 0.22067049145698547, "learning_rate": 2.9523219318194555e-05, "loss": 1.0910695791244507, "step": 234},
    {"epoch": 2.0344827586206895, "grad_norm": 0.4083843231201172, "learning_rate": 2.9282523603552557e-05, "loss": 0.39936938881874084, "step": 236},
    {"epoch": 2.0517241379310347, "grad_norm": 0.22413796186447144, "learning_rate": 2.899439123319714e-05, "loss": 0.6455128788948059, "step": 238},
    {"epoch": 2.0689655172413794, "grad_norm": 0.44840389490127563, "learning_rate": 2.86598914823729e-05, "loss": 1.0364433526992798, "step": 240},
    {"epoch": 2.086206896551724, "grad_norm": 0.245926633477211, "learning_rate": 2.8280265698251005e-05, "loss": 0.6382094025611877, "step": 242},
    {"epoch": 2.103448275862069, "grad_norm": 1.7387171983718872, "learning_rate": 2.7856922693220947e-05, "loss": 0.6532158255577087, "step": 244},
    {"epoch": 2.1206896551724137, "grad_norm": 0.3799879848957062, "learning_rate": 2.739143351670935e-05, "loss": 0.8184542655944824, "step": 246},
    {"epoch": 2.1379310344827585, "grad_norm": 0.25618430972099304, "learning_rate": 2.688552562492825e-05, "loss": 0.675823986530304, "step": 248},
    {"epoch": 2.1551724137931036, "grad_norm": 0.32830527424812317, "learning_rate": 2.6341076470189065e-05, "loss": 0.7770854830741882, "step": 250},
    {"epoch": 2.1724137931034484, "grad_norm": 0.15962277352809906, "learning_rate": 2.5760106533572648e-05, "loss": 0.713956356048584, "step": 252},
    {"epoch": 2.189655172413793, "grad_norm": 0.42531585693359375, "learning_rate": 2.5144771826811748e-05, "loss": 0.4886062741279602, "step": 254},
    {"epoch": 2.206896551724138, "grad_norm": 0.23626545071601868, "learning_rate": 2.4497355891211547e-05, "loss": 1.1009202003479004, "step": 256},
    {"epoch": 2.2241379310344827, "grad_norm": 0.29544082283973694, "learning_rate": 2.3820261323300925e-05, "loss": 1.0635813474655151, "step": 258},
    {"epoch": 2.2413793103448274, "grad_norm": 0.3267728090286255, "learning_rate": 2.311600085866322e-05, "loss": 0.9566066265106201, "step": 260},
    {"epoch": 2.2586206896551726, "grad_norm": 0.09956636279821396, "learning_rate": 2.238718804703476e-05, "loss": 0.6884672045707703, "step": 262},
    {"epoch": 2.2758620689655173, "grad_norm": 0.15193510055541992, "learning_rate": 2.163652755327646e-05, "loss": 0.9428229928016663, "step": 264},
    {"epoch": 2.293103448275862, "grad_norm": 0.24053514003753662, "learning_rate": 2.0866805120212266e-05, "loss": 0.6498053669929504, "step": 266},
    {"epoch": 2.310344827586207, "grad_norm": 0.13813968002796173, "learning_rate": 2.0080877230582565e-05, "loss": 1.1316256523132324, "step": 268},
    {"epoch": 2.3275862068965516, "grad_norm": 0.3910239636898041, "learning_rate": 1.9281660506477953e-05, "loss": 1.0687792301177979, "step": 270},
    {"epoch": 2.344827586206897, "grad_norm": 0.23670506477355957, "learning_rate": 1.8472120885592557e-05, "loss": 1.125198245048523, "step": 272},
    {"epoch": 2.3620689655172415, "grad_norm": 0.7727434635162354, "learning_rate": 1.7655262614464347e-05, "loss": 0.5221872329711914, "step": 274},
    {"epoch": 2.3793103448275863, "grad_norm": 0.5010542869567871, "learning_rate": 1.683411709954916e-05, "loss": 1.0470247268676758, "step": 276},
    {"epoch": 2.396551724137931, "grad_norm": 1.5273538827896118, "learning_rate": 1.6011731657502948e-05, "loss": 0.6991406679153442, "step": 278},
    {"epoch": 2.413793103448276, "grad_norm": 0.12472137063741684, "learning_rate": 1.5191158206420104e-05, "loss": 1.0698401927947998, "step": 280},
    {"epoch": 2.4310344827586206, "grad_norm": 0.40073373913764954, "learning_rate": 1.4375441939995862e-05, "loss": 0.9676493406295776, "step": 282},
    {"epoch": 2.4482758620689653, "grad_norm": 0.29846158623695374, "learning_rate": 1.3567610026643158e-05, "loss": 0.8159676194190979, "step": 284},
    {"epoch": 2.4655172413793105, "grad_norm": 0.0978626236319542, "learning_rate": 1.2770660375502325e-05, "loss": 0.731606125831604, "step": 286},
    {"epoch": 2.4827586206896552, "grad_norm": 0.1449108123779297, "learning_rate": 1.1987550511033729e-05, "loss": 0.9657235145568848, "step": 288},
    {"epoch": 2.5, "grad_norm": 0.7278546690940857, "learning_rate": 1.1221186597479921e-05, "loss": 0.6879425644874573, "step": 290},
    {"epoch": 2.5172413793103448, "grad_norm": 0.1562328189611435, "learning_rate": 1.047441265392866e-05, "loss": 0.6442292928695679, "step": 292},
    {"epoch": 2.5344827586206895, "grad_norm": 0.412852942943573, "learning_rate": 9.74999999999999e-06, "loss": 1.2038459777832031, "step": 294},
    {"epoch": 2.5517241379310347, "grad_norm": 0.8354277014732361, "learning_rate": 9.05063697132504e-06, "loss": 0.6161110401153564, "step": 296},
    {"epoch": 2.5689655172413794, "grad_norm": 0.3308437466621399, "learning_rate": 8.378918942982984e-06, "loss": 0.4709848463535309, "step": 298},
    {"epoch": 2.586206896551724, "grad_norm": 0.15517888963222504, "learning_rate": 7.7373386979199e-06, "loss": 0.9857034683227539, "step": 300},
    {"epoch": 2.603448275862069, "grad_norm": 0.8485265970230103, "learning_rate": 7.128277176092711e-06, "loss": 0.93929523229599, "step": 302},
    {"epoch": 2.6206896551724137, "grad_norm": 0.7992085218429565, "learning_rate": 6.5539946386685424e-06, "loss": 0.8182238340377808, "step": 304},
    {"epoch": 2.637931034482759, "grad_norm": 0.15321463346481323, "learning_rate": 6.01662228006993e-06, "loss": 0.8413806557655334, "step": 306},
    {"epoch": 2.655172413793103, "grad_norm": 0.26136964559555054, "learning_rate": 5.5181543189938835e-06, "loss": 0.7553901076316833, "step": 308},
    {"epoch": 2.6724137931034484, "grad_norm": 0.6263846158981323, "learning_rate": 5.060440597755323e-06, "loss": 0.8772685527801514, "step": 310},
    {"epoch": 2.689655172413793, "grad_norm": 0.1620139181613922, "learning_rate": 4.6451797174195354e-06, "loss": 0.9297232031822205, "step": 312},
    {"epoch": 2.706896551724138, "grad_norm": 0.14144213497638702, "learning_rate": 4.273912734199091e-06, "loss": 0.8999103307723999, "step": 314},
    {"epoch": 2.7241379310344827, "grad_norm": 0.4446767568588257, "learning_rate": 3.948017440508606e-06, "loss": 0.32868385314941406, "step": 316},
    {"epoch": 2.7413793103448274, "grad_norm": 0.2091308832168579, "learning_rate": 3.6687032519005313e-06, "loss": 1.1546356678009033, "step": 318},
    {"epoch": 2.7586206896551726, "grad_norm": 0.1578148901462555, "learning_rate": 3.4370067188567254e-06, "loss": 0.9378973841667175, "step": 320},
    {"epoch": 2.7758620689655173, "grad_norm": 1.2611632347106934, "learning_rate": 3.2537876800920677e-06, "loss": 0.6517216563224792, "step": 322},
    {"epoch": 2.793103448275862, "grad_norm": 0.19654294848442078, "learning_rate": 3.119726071645127e-06, "loss": 0.7699644565582275, "step": 324},
    {"epoch": 2.810344827586207, "grad_norm": 0.2935585379600525, "learning_rate": 3.035319403597751e-06, "loss": 1.0573527812957764, "step": 326},
    {"epoch": 2.8275862068965516, "grad_norm": 0.18080638349056244, "learning_rate": 3.000880913787505e-06, "loss": 1.1476738452911377, "step": 328},
    {"epoch": 2.844827586206897, "grad_norm": 0.1654699146747589, "learning_rate": 3.016538405364614e-06, "loss": 1.1468658447265625, "step": 330},
    {"epoch": 2.862068965517241, "grad_norm": 0.17703653872013092, "learning_rate": 3.082233772507386e-06, "loss": 1.062970757484436, "step": 332},
    {"epoch": 2.8793103448275863, "grad_norm": 0.37376028299331665, "learning_rate": 3.1977232160561467e-06, "loss": 0.603091835975647, "step": 334},
    {"epoch": 2.896551724137931, "grad_norm": 0.19850152730941772, "learning_rate": 3.3625781482654565e-06, "loss": 0.7071291208267212, "step": 336},
    {"epoch": 2.913793103448276, "grad_norm": 0.1782890260219574, "learning_rate": 3.5761867833170895e-06, "loss": 0.658890962600708, "step": 338},
    {"epoch": 2.9310344827586206, "grad_norm": 0.18476705253124237, "learning_rate": 3.8377564076912e-06, "loss": 1.0716102123260498, "step": 340},
    {"epoch": 2.9482758620689653, "grad_norm": 0.30881527066230774, "learning_rate": 4.146316321970244e-06, "loss": 1.2615000009536743, "step": 342},
    {"epoch": 2.9655172413793105, "grad_norm": 0.3074623644351959, "learning_rate": 4.5007214431585365e-06, "loss": 0.8507893681526184, "step": 344},
    {"epoch": 2.9827586206896552, "grad_norm": 0.21223706007003784, "learning_rate": 4.8996565541488335e-06, "loss": 0.6322518587112427, "step": 346},
    {"epoch": 3.0, "grad_norm": 0.4978598952293396, "learning_rate": 5.341641184566203e-06, "loss": 0.7810978293418884, "step": 348},
    {"epoch": 3.0, "step": 348, "total_flos": 2.7756656592022405e+18, "train_loss": 1.0638441426315526, "train_runtime": 6969.7468, "train_samples_per_second": 5.992, "train_steps_per_second": 0.05}
  ],
  "logging_steps": 2,
  "max_steps": 348,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 99999,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": false,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.7756656592022405e+18,
  "train_batch_size": 3,
  "trial_name": null,
  "trial_params": null
}
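trainer_state.json carries the same training log as trainer_log.jsonl, plus per-step grad_norm values and the final run summary. A minimal sketch for pulling the summary out, assuming the file has been downloaded locally (the script is illustrative, not part of this repo):

import json

with open("trainer_state.json") as f:
    state = json.load(f)

history = state["log_history"]
summary = history[-1]  # the final entry holds train_loss, runtime, and throughput
print(f"global steps: {state['global_step']}, epochs: {state['num_train_epochs']}")
print(f"mean train loss: {summary['train_loss']:.4f}")
print(f"runtime: {summary['train_runtime']:.0f}s "
      f"at {summary['train_samples_per_second']:.2f} samples/s")

# grad_norm is only logged here, not in trainer_log.jsonl
max_grad = max(e["grad_norm"] for e in history if "grad_norm" in e)
print(f"max grad norm: {max_grad:.2f}")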
training_args.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1c4d71b360d33ccb4c5b101097ed739788bea0263881f94749850a678cd0ee97
size 5649
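training_args.bin is the torch-pickled TrainingArguments object that the Trainer typically saves alongside a run. If it needs inspecting, a sketch (assumes a transformers install compatible with the training environment; weights_only=False is required on recent torch because this is a full pickle, not a tensor file):

import torch

# load the pickled TrainingArguments and check a couple of fields
args = torch.load("training_args.bin", weights_only=False)
print(args.per_device_train_batch_size)  # should match train_batch_size above
print(args.gradient_accumulation_steps)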
training_loss.png
ADDED