# ############################################################################
# Hyperparameters for LLaMA2-based response generation (MultiWOZ recipe).
# Reconstructed from a pipe-garbled extraction: original content preserved
# byte-for-byte; only formatting and comments were repaired/added.
# ############################################################################

# HuggingFace hub id of the (fine-tuned) LLaMA2 model to load.
model_hub: "HaNguyen/test_llama2"
# Local folder holding the trained LLaMA2 checkpoint produced by the
# train_with_llama2 recipe (seed 1995).
llama2_folder: recipes/MultiWOZ/response_generation/llama2/results/train_with_llama2/1995/save/llama2_checkpoint/

# Number of previous dialogue turns kept as context.
max_history: 2

# Generation / model settings
freeze_model: True      # keep the backbone weights frozen
num_beams: 8            # beam-search width
max_new_tokens: 50      # cap on generated tokens per response
top_k: 45               # top-k sampling cutoff
top_p: 0.9              # nucleus-sampling cumulative-probability cutoff

# Model construction: custom wrapper around LLaMA2 with PEFT adapters enabled.
# NOTE(review): `custom.LLAMA2_expanded` is project-local — its exact keyword
# contract is assumed to match the fields below; confirm against custom.py.
model: !new:custom.LLAMA2_expanded
    source: !ref <model_hub>
    freeze: !ref <freeze_model>
    save_path: !ref <llama2_folder>
    max_new_tokens: !ref <max_new_tokens>
    num_beams: !ref <num_beams>
    top_k: !ref <top_k>
    top_p: !ref <top_p>
    with_peft: True

# Callable used to build key-padding masks for transformer attention.
padding_mask: !name:speechbrain.lobes.models.transformer.Transformer.get_key_padding_mask

# Pretrainer: loads the trained model weights at inference time.
pretrainer: !new:speechbrain.utils.parameter_transfer.Pretrainer
    loadables:
        model: !ref <model>

# Modules dict handed to the SpeechBrain Brain class.
modules:
    model: !ref <model>