From f10383ffffe53891212341c4f0d731771386f200 Mon Sep 17 00:00:00 2001
From: wuyiqunLu
Date: Fri, 20 Sep 2024 00:37:32 +0800
Subject: [PATCH] fix: address second conversation issue

---
 pyproject.toml                     | 2 +-
 vision_agent/agent/vision_agent.py | 7 +++++--
 2 files changed, 6 insertions(+), 3 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 78dedd8d..93dbb50a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -24,6 +24,7 @@ requests = "2.*"
 tqdm = ">=4.64.0,<5.0.0"
 pandas = "2.*"
 openai = "1.*"
+flake8 = "^7.0.0"
 typing_extensions = "4.*"
 opencv-python = "4.*"
 tabulate = "^0.9.0"
@@ -47,7 +48,6 @@ av = "^11.0.0"
 autoflake = "1.*"
 pytest = "7.*"
 black = ">=23,<25"
-flake8 = "5.*"
 isort = "5.*"
 responses = "^0.23.1"
 mypy = "<1.8.0"

diff --git a/vision_agent/agent/vision_agent.py b/vision_agent/agent/vision_agent.py
index fa68e6b9..1e1abbe6 100644
--- a/vision_agent/agent/vision_agent.py
+++ b/vision_agent/agent/vision_agent.py
@@ -276,8 +276,11 @@ def chat_with_code(
                 # sometimes it gets stuck in a loop, so we force it to exit
                 if last_response == response:
                     response["let_user_respond"] = True
-
-                self.streaming_message({"role": "assistant", "content": response})
+                    self.streaming_message(
+                        {"role": "assistant", "error": "Stuck in loop"}
+                    )
+                else:
+                    self.streaming_message({"role": "assistant", "content": response})
 
                 if response["let_user_respond"]:
                     break
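
Note: for readers who want to see the fixed control flow outside the diff, below is a
minimal, self-contained sketch. run_conversation, streaming_message, the for-loop
driver, and the exact response shape are hypothetical stand-ins reconstructed from the
hunk above, not the repository's real API; only the duplicate-response guard and the
error-vs-content branching mirror the patch.

from typing import Any, Dict, Optional


def streaming_message(message: Dict[str, Any]) -> None:
    # Stand-in for VisionAgent.streaming_message: just print the payload.
    print(message)


def run_conversation(turn: int) -> Dict[str, Any]:
    # Hypothetical stand-in that starts repeating itself after turn 2,
    # which is exactly the situation the patch guards against.
    return {"let_user_respond": False, "response": f"step {min(turn, 2)}"}


def chat_loop(max_turns: int = 10) -> None:
    last_response: Optional[Dict[str, Any]] = None
    for turn in range(max_turns):
        response = run_conversation(turn)

        # sometimes it gets stuck in a loop, so we force it to exit
        if last_response == response:
            response["let_user_respond"] = True
            # Stream an error instead of re-emitting the duplicated content.
            streaming_message({"role": "assistant", "error": "Stuck in loop"})
        else:
            streaming_message({"role": "assistant", "content": response})

        if response["let_user_respond"]:
            break
        last_response = response


if __name__ == "__main__":
    chat_loop()

Running the sketch prints three distinct assistant messages followed by the
"Stuck in loop" error, after which the loop exits, matching the behavior the
patch introduces for a stalled second conversation.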