"""Demo script: serve and query a ReAct flow over CoLink using aiflows.

NOTE(review): this file was previously wrapped in markdown-table pipe
characters (`| … |`), which made it syntactically invalid Python; the code
below restores the runnable form.
"""
import os

import hydra

import aiflows
from aiflows.flow_launchers import FlowLauncher
from aiflows.backends.api_info import ApiInfo
from aiflows.utils.general_helpers import read_yaml_file, quick_load_api_keys

from aiflows import logging
from aiflows.flow_cache import CACHING_PARAMETERS, clear_cache
from aiflows.utils import serve_utils
from aiflows.workers import run_dispatch_worker_thread
# NOTE(review): run_dispatch_worker_thread is imported but never started in
# this script — confirm whether a dispatch worker is expected to be launched
# separately (e.g. in another process) before running this demo.
from aiflows.messages import FlowMessage
from aiflows.interfaces import KeyInterface

# Disable response caching so every run hits the backend fresh.
CACHING_PARAMETERS.do_caching = False
# clear_cache() can be called here to also purge previously cached results.

logging.set_verbosity_debug()

from aiflows import flow_verse

# Flow modules to pull in; `revision=os.getcwd()` resolves the module from
# the current working directory rather than a remote revision.
dependencies = [
    {"url": "aiflows/ControllerExecutorFlowModule", "revision": os.getcwd()},
]

flow_verse.sync_dependencies(dependencies)
| |
|
if __name__ == "__main__":
    # Credentials for the LLM backend, read from the environment.
    api_information = [ApiInfo(backend_used="openai", api_key=os.getenv("OPENAI_API_KEY"))]

    FLOW_MODULES_PATH = "./"  # NOTE(review): defined but unused below — confirm whether it is needed.

    # CoLink connection parameters (JWT token and server address) from the environment.
    jwt = os.getenv("COLINK_JWT")
    addr = os.getenv("LOCAL_COLINK_ADDRESS")

    # Start (or connect to) the local CoLink component used to serve flows.
    # NOTE(review): the component name says "Reverse Number Demo" although this
    # script serves a ReAct flow — presumably copied from another demo; confirm.
    cl = serve_utils.start_colink_component(
        "Reverse Number Demo",
        {"jwt": jwt, "addr": addr},
    )

    # Load the flow configuration from demo.yaml next to this script.
    root_dir = "."
    cfg_path = os.path.join(root_dir, "demo.yaml")
    cfg = read_yaml_file(cfg_path)

    # Register the flow type with the serving infrastructure. Subflows are
    # served recursively; requests are routed via the given dispatch point.
    serve_utils.recursive_serve_flow(
        cl=cl,
        flow_type="ReAct_served",
        default_config=cfg,
        default_state=None,
        default_dispatch_point="coflows_dispatch",
    )

    # Inject the API keys into the config *before* mounting, so the mounted
    # instance (which receives `cfg` as config_overrides) carries them.
    quick_load_api_keys(cfg, api_information, key="api_infos")

    # The task payload sent to the flow.
    data = {
        "id": 0,
        "goal": "Answer the following question: What is the profession and date of birth of Michael Jordan?",
    }

    # Instantiate (mount) the served flow and get a local proxy to talk to it.
    proxy_flow = serve_utils.recursive_mount(
        cl=cl,
        client_id="local",
        flow_type="ReAct_served",
        config_overrides=cfg,
        initial_state=None,
        dispatch_point_override=None,
    )

    # Wrap the payload in a FlowMessage addressed to the mounted flow.
    input_message = FlowMessage(
        data=data,
        src_flow="Coflows team",
        dst_flow=proxy_flow,
        is_input_msg=True,
    )

    # Send the message and block on the reply.
    future = proxy_flow.ask(input_message)

    print(future.get_data())
| |
|