import os

import hydra

import aiflows
from aiflows.flow_launchers import FlowLauncher
from aiflows.backends.api_info import ApiInfo
from aiflows.utils.general_helpers import read_yaml_file

from aiflows import logging
from aiflows.flow_cache import CACHING_PARAMETERS, clear_cache

# Disable the flow cache so every run queries the backend fresh.
CACHING_PARAMETERS.do_caching = False
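# clear_cache() is imported above; uncommenting the call below wipes any
# previously cached results (a convenience hook, e.g. after changing the config).
# clear_cache()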

# Debug-level verbosity makes the intermediate flow messages visible in the console.
logging.set_verbosity_debug()

# Fetch the ControllerExecutorFlowModule from the FlowVerse; pointing "revision"
# at the current working directory makes flow_verse use the local copy, so the
# synced module is importable when the config below is instantiated.
dependencies = [
    {"url": "aiflows/ControllerExecutorFlowModule", "revision": os.getcwd()},
]
from aiflows import flow_verse

flow_verse.sync_dependencies(dependencies)

if __name__ == "__main__":
    # Configure the LLM backend with credentials taken from the environment.
    api_information = [ApiInfo(backend_used="openai", api_key=os.getenv("OPENAI_API_KEY"))]
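    # Other backends follow the same pattern. A sketch for Azure, assuming the
    # usual Azure OpenAI settings are exported as environment variables (the
    # variable names below are illustrative, not mandated by aiflows):
    # api_information = [ApiInfo(backend_used="azure",
    #                            api_base=os.getenv("AZURE_API_BASE"),
    #                            api_key=os.getenv("AZURE_OPENAI_KEY"),
    #                            api_version=os.getenv("AZURE_API_VERSION"))]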

    path_to_output_file = None  # set to e.g. "output.jsonl" to also save outputs to disk

    # Load the flow config and inject the API credentials into the Controller subflow.
    root_dir = "."
    cfg_path = os.path.join(root_dir, "demo.yaml")
    cfg = read_yaml_file(cfg_path)
    cfg["flow"]["subflows_config"]["Controller"]["backend"]["api_infos"] = api_information

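    # The keys accessed above imply demo.yaml nests its config roughly like this
    # (a sketch inferred from this script, not the full file):
    #
    #   flow:
    #     subflows_config:
    #       Controller:
    #         backend:
    #           api_infos: ???  # filled in at runtime, as done above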
    # Instantiate the flow itself, plus optional input/output interfaces if the
    # config defines them.
    flow_with_interfaces = {
        "flow": hydra.utils.instantiate(cfg["flow"], _recursive_=False, _convert_="partial"),
        "input_interface": (
            None
            if cfg.get("input_interface", None) is None
            else hydra.utils.instantiate(cfg["input_interface"], _recursive_=False)
        ),
        "output_interface": (
            None
            if cfg.get("output_interface", None) is None
            else hydra.utils.instantiate(cfg["output_interface"], _recursive_=False)
        ),
    }

    # A single input sample; `data` may also be a list of such dicts to run a batch.
    data = {"id": 0, "goal": "Answer the following question: Who was the NBA champion in 2023?"}

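    # Hypothetical batch variant (the second goal is illustrative):
    # data = [
    #     {"id": 0, "goal": "Answer the following question: Who was the NBA champion in 2023?"},
    #     {"id": 1, "goal": "Answer the following question: What is the population of Canada?"},
    # ]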
    # Run inference and collect the outputs (one entry per input sample).
    _, outputs = FlowLauncher.launch(
        flow_with_interfaces=flow_with_interfaces,
        data=data,
        path_to_output_file=path_to_output_file,
    )

    # Print the output for the (single) input sample.
    flow_output_data = outputs[0]
    print(flow_output_data)