| import json |
| from copy import deepcopy |
| from typing import Any, Dict, List |
|
|
| from flow_modules.aiflows.ChatFlowModule import ChatAtomicFlow |
|
|
| from dataclasses import dataclass |
|
|
|
|
@dataclass
class Command:
    """A single command the controller model is allowed to call.

    Rendered into the system prompt as a numbered manual entry
    (see ``ControllerFlow_ExtLib._build_commands_manual``).
    """

    # Command identifier the model must echo back in its JSON reply.
    name: str
    # Human-readable explanation shown to the model.
    description: str
    # Names of the arguments expected under "command_args".
    input_args: List[str]
|
class ControllerFlow_ExtLib(ChatAtomicFlow):
    """Chat-based controller that decides which command to execute next.

    The controller renders a system prompt describing the available
    commands together with the current plan/logs state, queries the
    underlying chat model (via ``ChatAtomicFlow.run``), and parses the
    model's reply as a JSON object of the form::

        {"command": "...", "command_args": {...}}
    """

    def __init__(
            self,
            commands: List[Command],
            **kwargs):
        """Initialize the controller.

        :param commands: commands the model may call; rendered into the
            system prompt as a numbered manual.
        :param kwargs: forwarded unchanged to ``ChatAtomicFlow.__init__``.
        """
        super().__init__(**kwargs)
        # Pre-fill the static part of the system prompt; the dynamic
        # placeholders (plan location, plan, logs) are refreshed on every
        # run by _update_prompts_and_input.
        self.system_message_prompt_template = self.system_message_prompt_template.partial(
            commands=self._build_commands_manual(commands),
            plan_file_location="no location yet",
            plan="no plan yet",
            logs="no logs yet",
        )
        # Appended to the incoming goal/result so the model replies in
        # machine-parseable JSON.
        self.hint_for_model = """
Make sure your response is in the following format:
Response Format:
{
"command": "call one of the commands you have e.g. `write_code`",
"command_args": {
"arg name": "value"
}
}
"""

    @staticmethod
    def _build_commands_manual(commands: List[Command]) -> str:
        """Render *commands* as a numbered, human-readable manual string."""
        ret = ""
        for i, command in enumerate(commands):
            # Example argument values like YOUR_FILE_NAME show the model the
            # expected JSON schema for this command's inputs.
            command_input_json_schema = json.dumps(
                {input_arg: f"YOUR_{input_arg.upper()}" for input_arg in command.input_args})
            ret += f"{i + 1}. {command.name}: {command.description} Input arguments (given in the JSON schema): {command_input_json_schema}\n"
        return ret

    def _get_plan_file_location(self, input_data: Dict[str, Any]) -> str:
        """Return the plan file path stored in ``input_data['memory_files']``."""
        assert "memory_files" in input_data, "memory_files not passed to Extlib/Controller"
        assert "plan" in input_data["memory_files"], "plan not in memory files"
        return input_data["memory_files"]["plan"]

    def _get_plan_content(self, input_data: Dict[str, Any]) -> str:
        """Return the current plan text, or a placeholder when it is empty."""
        assert "plan" in input_data, "plan not passed to Extlib/Controller"
        plan_content = input_data["plan"]
        if len(plan_content) == 0:
            plan_content = 'No plan yet'
        return plan_content

    def _get_logs_content(self, input_data: Dict[str, Any]) -> str:
        """Return the current execution logs, or a placeholder when empty."""
        assert "logs" in input_data, "logs not passed to Extlib/Controller"
        logs_content = input_data["logs"]
        if len(logs_content) == 0:
            logs_content = "No logs yet"
        return logs_content

    @classmethod
    def instantiate_from_config(cls, config):
        """Build a controller instance from *config*.

        Sets up prompts and the model backend (inherited helpers), then
        converts the ``{name: {description, input_args}}`` command mapping
        from the config into ``Command`` objects.
        """
        flow_config = deepcopy(config)

        kwargs = {"flow_config": flow_config}

        # ~~~ Set up prompts and backend via the ChatAtomicFlow helpers ~~~
        kwargs.update(cls._set_up_prompts(flow_config))
        kwargs.update(cls._set_up_backend(flow_config))

        # ~~~ Convert the config's command mapping into Command objects ~~~
        commands = flow_config["commands"]
        commands = [
            Command(name, command_conf["description"], command_conf["input_args"])
            for name, command_conf in commands.items()
        ]
        kwargs.update({"commands": commands})

        return cls(**kwargs)

    def _update_prompts_and_input(self, input_data: Dict[str, Any]) -> None:
        """Refresh the system prompt with the latest plan/logs state.

        NOTE: mutates *input_data* in place — the JSON-format hint is
        appended to ``goal``/``result`` on every call.
        """
        if 'goal' in input_data:
            input_data['goal'] += self.hint_for_model
        if 'result' in input_data:
            input_data['result'] += self.hint_for_model
        plan_file_location = self._get_plan_file_location(input_data)
        plan_content = self._get_plan_content(input_data)
        logs_content = self._get_logs_content(input_data)
        self.system_message_prompt_template = self.system_message_prompt_template.partial(
            plan_file_location=plan_file_location,
            plan=plan_content,
            logs=logs_content
        )

    def run(self, input_data: Dict[str, Any]) -> Dict[str, Any]:
        """Query the model and return its reply parsed as a JSON dict.

        If the first reply is not valid JSON, the model is re-queried once
        with an explicit corrective message; a second parse failure
        propagates ``json.JSONDecodeError`` to the caller.
        """
        self._update_prompts_and_input(input_data)
        api_output = super().run(input_data)["api_output"].strip()
        try:
            response = json.loads(api_output)
            return response
        except json.JSONDecodeError:
            # Single retry: tell the model its previous answer could not be
            # parsed and ask it to answer in JSON.
            new_input_data = input_data.copy()
            new_input_data['result'] = "The previous response cannot be parsed with json.loads. Make sure your next response is in JSON format."
            new_api_output = super().run(new_input_data)["api_output"].strip()
            return json.loads(new_api_output)