From 78dfe092d88d75b160ae79b92915bde484ae0d5c Mon Sep 17 00:00:00 2001 From: wuyiqunLu <132986242+wuyiqunLu@users.noreply.github.com> Date: Tue, 21 May 2024 21:43:29 -0500 Subject: [PATCH] feat: log progress for vision agent v2 (#90) * feat: log progress for vision agent v2 * fix lint * fix lint * fix lint * fix lint --- vision_agent/agent/vision_agent_v2.py | 26 +++++++++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/vision_agent/agent/vision_agent_v2.py b/vision_agent/agent/vision_agent_v2.py index 1c7083a8..340ba42e 100644 --- a/vision_agent/agent/vision_agent_v2.py +++ b/vision_agent/agent/vision_agent_v2.py @@ -165,6 +165,7 @@ def write_and_exec_code( tool_info: str, exec: Execute, retrieved_ltm: str, + log_progress: Callable[..., str], max_retry: int = 3, verbosity: int = 0, ) -> Tuple[bool, str, str, Dict[str, List[str]]]: @@ -178,6 +179,7 @@ def write_and_exec_code( success, result = exec.run_isolation(code) if verbosity == 2: _CONSOLE.print(Syntax(code, "python", theme="gruvbox-dark", line_numbers=True)) + log_progress(f"\tCode success: {success}\n\tResult: {str(result)}", code) _LOGGER.info(f"\tCode success: {success}, result: {str(result)}") working_memory: Dict[str, List[str]] = {} while not success and counter < max_retry: @@ -204,6 +206,7 @@ def write_and_exec_code( _CONSOLE.print( Syntax(code, "python", theme="gruvbox-dark", line_numbers=True) ) + log_progress(f"\tDebugging reflection: {reflection}\n\tResult: {result}") _LOGGER.info(f"\tDebugging reflection: {reflection}, result: {result}") if success: @@ -224,6 +227,7 @@ def run_plan( exec: Execute, code: str, tool_recommender: Sim, + log_progress: Callable[..., str], long_term_memory: Optional[Sim] = None, verbosity: int = 0, ) -> Tuple[str, str, List[Dict[str, Any]], Dict[str, List[str]]]: @@ -234,6 +238,10 @@ def run_plan( working_memory: Dict[str, List[str]] = {} for task in active_plan: + log_progress( + f"""Going to run the following task(s) in sequence: 
+{tabulate(tabular_data=[task], headers="keys", tablefmt="mixed_grid", maxcolwidths=_MAX_TABULATE_COL_WIDTH)}""" + ) _LOGGER.info( f""" {tabulate(tabular_data=[task], headers="keys", tablefmt="mixed_grid", maxcolwidths=_MAX_TABULATE_COL_WIDTH)}""" @@ -242,6 +250,7 @@ def run_plan( tool_info = "\n".join([e["doc"] for e in tools]) if verbosity == 2: + log_progress(f"Tools retrieved: {[e['desc'] for e in tools]}") _LOGGER.info(f"Tools retrieved: {[e['desc'] for e in tools]}") if long_term_memory is not None: @@ -258,6 +267,7 @@ def run_plan( tool_info, exec, retrieved_ltm, + log_progress, verbosity=verbosity, ) if task["type"] == "code": @@ -271,6 +281,8 @@ def run_plan( _CONSOLE.print( Syntax(code, "python", theme="gruvbox-dark", line_numbers=True) ) + + log_progress(f"\tCode success: {success}\n\tResult: {str(result)}") _LOGGER.info(f"\tCode success: {success} result: {str(result)}") task["success"] = success @@ -308,10 +320,12 @@ def __init__( tool_recommender: Optional[Sim] = None, long_term_memory: Optional[Sim] = None, verbosity: int = 0, + report_progress_callback: Optional[Callable[..., Any]] = None, ) -> None: self.planner = OpenAILLM(temperature=0.0, json_mode=True) self.coder = OpenAILLM(temperature=0.0) self.exec = Execute(timeout=timeout) + self.report_progress_callback = report_progress_callback if tool_recommender is None: self.tool_recommender = Sim(TOOLS_DF, sim_key="desc") else: @@ -361,6 +375,10 @@ def chat_with_workflow( working_code = task["code"] user_req, plan = write_plan(chat, plan, TOOL_DESCRIPTIONS, self.planner) + self.log_progress( + f"""Plan: +{tabulate(tabular_data=plan, headers="keys", tablefmt="mixed_grid", maxcolwidths=_MAX_TABULATE_COL_WIDTH)}""" + ) _LOGGER.info( f"""Plan: {tabulate(tabular_data=plan, headers="keys", tablefmt="mixed_grid", maxcolwidths=_MAX_TABULATE_COL_WIDTH)}""" @@ -379,6 +397,7 @@ def chat_with_workflow( self.exec, working_code, self.tool_recommender, + self.log_progress, self.long_term_memory, self.verbosity, ) 
@@ -393,6 +412,9 @@ def chat_with_workflow( retries += 1 + self.log_progress("The Vision Agent V2 has concluded this chat.") + self.log_progress(f"Plan success: {success}") + return { "code": working_code, "test": working_test, @@ -401,5 +423,7 @@ def chat_with_workflow( "plan": plan, } - def log_progress(self, description: str) -> None: + def log_progress(self, description: str, code: Optional[str] = "") -> None: + if self.report_progress_callback is not None: + self.report_progress_callback(description, code) pass