---
# Cache LLM API responses so repeated identical calls are not re-billed.
enable_cache: true

# Retry policy for transient API failures.
n_api_retries: 6
# Wait between retries — presumably seconds; TODO confirm against the backend.
wait_time_between_retries: 20

# Role labels used when assembling the chat message history.
system_name: system
user_name: user
assistant_name: assistant
# LLM backend, instantiated via the Hydra-style `_target_` convention.
backend:
  _target_: backends.llm_lite.LiteLLMBackend
  # `???` is OmegaConf's mandatory-missing marker: an overriding config or the
  # caller MUST supply the API credentials, otherwise resolution raises.
  api_infos: ???
  model_name: "gpt-3.5-turbo"
  # Generation parameters forwarded to the completion API.
  n: 1
  max_tokens: 2000
  temperature: 0.3
  top_p: 0.2
  frequency_penalty: 0
  presence_penalty: 0
  stream: true
# System-prompt template. Only the class is pinned here; the concrete
# `template` / `input_variables` are expected to come from an overriding config.
system_message_prompt_template:
  _target_: flows.prompt_template.JinjaPrompt
# Template for the first human message. Only the class is pinned here; the
# concrete template fields are expected to come from an overriding config.
init_human_message_prompt_template:
  _target_: flows.prompt_template.JinjaPrompt
# Template applied to subsequent human messages: passes the `query` input
# through verbatim via a Jinja placeholder.
human_message_prompt_template:
  _target_: flows.prompt_template.JinjaPrompt
  template: "{{query}}"
  input_variables:
    - "query"
  input_interface_initialized:
    - "query"
# Query-message template. Only the class is pinned here; the concrete
# template fields are expected to come from an overriding config.
query_message_prompt_template:
  _target_: flows.prompt_template.JinjaPrompt
# NOTE(review): first_k/last_k presumably bound how many leading/trailing
# prior messages are kept in context; `null` appears to mean "no limit".
# Confirm against the consuming flow.
previous_messages:
  first_k: null
  last_k: null
# Few-shot demonstrations are disabled by default.
demonstrations: null
demonstrations_response_template: null
# Keys this flow exposes in its output payload.
output_interface:
  - "api_output"