Skip to content

[RFC] Implementing global run #1341

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 32 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
32 commits
Select commit Hold shift + click to select a range
b47d7a2
WIP
sternakt Mar 12, 2025
acf9e73
WIP
sternakt Mar 12, 2025
b33dc24
WIP
sternakt Mar 12, 2025
ce6d688
WIP
sternakt Mar 13, 2025
ae022e0
Merge branch 'main' into 1314-feature-request-implement-global-run
davorrunje Mar 13, 2025
40ddfee
WIP
sternakt Mar 14, 2025
61ec443
Refactoring (#1343)
davorrunje Mar 14, 2025
74aceb1
WIP
sternakt Mar 14, 2025
650c79d
Merge branch '1314-feature-request-implement-global-run' of https://g…
sternakt Mar 14, 2025
2d8b828
Implement global run prototype using threading
sternakt Mar 14, 2025
be540f8
Merge remote-tracking branch 'origin/main' into 1314-feature-request-…
sternakt Mar 14, 2025
e56837c
WIP: ChatManager rework
sternakt Mar 18, 2025
9349476
WIP: ChatManager rework
sternakt Mar 18, 2025
7b960a5
WIP: AgentMessage parsing
sternakt Mar 19, 2025
89c0bdd
WIP: AgentMessage parsing
sternakt Mar 19, 2025
5f94557
refactoring
davorrunje Mar 19, 2025
34e119c
WIP
sternakt Mar 19, 2025
3740b68
Merge remote-tracking branch 'origin/main' into 1314-feature-request-…
sternakt Mar 20, 2025
0fe0c1b
WIP
sternakt Mar 20, 2025
b7fdae6
WIP
sternakt Mar 21, 2025
d78d37b
Merge remote-tracking branch 'origin/main' into 1314-feature-request-…
sternakt Mar 21, 2025
926475c
WIP: Refactor AgentMessage to Events
sternakt Mar 21, 2025
2553715
WIP: replace *Message with *Event models
sternakt Mar 21, 2025
0df46f6
WIP
sternakt Mar 24, 2025
340457f
Modify messages tests after deprecation
sternakt Mar 24, 2025
f770837
Fix exceptions
sternakt Mar 25, 2025
cc50722
WIP
sternakt Mar 25, 2025
402304a
Merge remote-tracking branch 'origin/main' into 1314-feature-request-…
sternakt Mar 25, 2025
ec85a6b
refactoring
davorrunje Mar 25, 2025
7996d97
Add sequential and nested chat examples
sternakt Mar 25, 2025
25ec68c
WIP
sternakt Mar 25, 2025
50e761c
Fix basic global run examples
sternakt Mar 26, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions autogen/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
ConversableAgent,
GroupChat,
GroupChatManager,
LLMMessageType,
OnCondition,
OnContextCondition,
SwarmAgent,
Expand Down Expand Up @@ -53,6 +54,7 @@
filter_config,
get_config_list,
)
from .run import a_run, run
from .version import __version__

# Set the root logger.
Expand All @@ -79,6 +81,7 @@
"GroupChatManager",
"InvalidCarryOverTypeError",
"LLMConfig",
"LLMMessageType",
"ModelClient",
"NoEligibleSpeakerError",
"OnCondition",
Expand All @@ -92,6 +95,7 @@
"UserProxyAgent",
"__version__",
"a_initiate_swarm_chat",
"a_run",
"config_list_from_dotenv",
"config_list_from_json",
"config_list_from_models",
Expand All @@ -104,4 +108,5 @@
"initiate_swarm_chat",
"register_function",
"register_hand_off",
"run",
]
3 changes: 2 additions & 1 deletion autogen/agentchat/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
#
# Portions derived from https://github.com/microsoft/autogen are under the MIT License.
# SPDX-License-Identifier: MIT
from .agent import Agent, LLMAgent
from .agent import Agent, LLMAgent, LLMMessageType
from .assistant_agent import AssistantAgent
from .chat import ChatResult, a_initiate_chats, initiate_chats

Expand Down Expand Up @@ -43,6 +43,7 @@
"GroupChat",
"GroupChatManager",
"LLMAgent",
"LLMMessageType",
"OnCondition",
"OnContextCondition",
"SwarmAgent",
Expand Down
89 changes: 78 additions & 11 deletions autogen/agentchat/agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,23 @@
#
# Portions derived from https://github.com/microsoft/autogen are under the MIT License.
# SPDX-License-Identifier: MIT
from typing import TYPE_CHECKING, Any, Optional, Protocol, Union, runtime_checkable
from typing import TYPE_CHECKING, Any, Callable, Iterable, Literal, Optional, Protocol, Union, runtime_checkable

from ..cache.abstract_cache_base import AbstractCache
from ..doc_utils import export_module

__all__ = ["Agent", "LLMAgent"]
if TYPE_CHECKING:
# mypy will fail if ConversableAgent does not implement the Agent protocol
from ..tools.tool import Tool
from .chat import ChatResult
from .conversable_agent import ConversableAgent


__all__ = ["DEFAULT_SUMMARY_METHOD", "Agent", "LLMAgent", "LLMMessageType"]

DEFAULT_SUMMARY_METHOD = "last_msg"

LLMMessageType = dict[str, Any]


@runtime_checkable
Expand All @@ -32,9 +44,14 @@ def description(self) -> str:
"""
...

@property
def llm_config(self) -> Union[dict[str, Any], Literal[False]]:
"""The LLM configuration of the agent."""
...

def send(
self,
message: Union[dict[str, Any], str],
message: Union["LLMMessageType", str],
recipient: "Agent",
request_reply: Optional[bool] = None,
) -> None:
Expand All @@ -50,7 +67,7 @@ def send(

async def a_send(
self,
message: Union[dict[str, Any], str],
message: Union["LLMMessageType", str],
recipient: "Agent",
request_reply: Optional[bool] = None,
) -> None:
Expand All @@ -66,7 +83,7 @@ async def a_send(

def receive(
self,
message: Union[dict[str, Any], str],
message: Union["LLMMessageType", str],
sender: "Agent",
request_reply: Optional[bool] = None,
) -> None:
Expand All @@ -81,7 +98,7 @@ def receive(

async def a_receive(
self,
message: Union[dict[str, Any], str],
message: Union["LLMMessageType", str],
sender: "Agent",
request_reply: Optional[bool] = None,
) -> None:
Expand All @@ -97,7 +114,7 @@ async def a_receive(

def generate_reply(
self,
messages: Optional[list[dict[str, Any]]] = None,
messages: Optional[list["LLMMessageType"]] = None,
sender: Optional["Agent"] = None,
**kwargs: Any,
) -> Union[str, dict[str, Any], None]:
Expand All @@ -116,7 +133,7 @@ def generate_reply(

async def a_generate_reply(
self,
messages: Optional[list[dict[str, Any]]] = None,
messages: Optional[list["LLMMessageType"]] = None,
sender: Optional["Agent"] = None,
**kwargs: Any,
) -> Union[str, dict[str, Any], None]:
Expand All @@ -134,6 +151,58 @@ async def a_generate_reply(
"""
...

def initiate_chat(
self,
recipient: "ConversableAgent",
clear_history: bool = True,
silent: Optional[bool] = False,
cache: Optional[AbstractCache] = None,
max_turns: Optional[int] = None,
summary_method: Optional[Union[str, Callable[..., Any]]] = DEFAULT_SUMMARY_METHOD,
summary_args: Optional[dict[str, Any]] = {},
message: Optional[Union["LLMMessageType", str, Callable[..., Any]]] = None,
**kwargs: Any,
) -> "ChatResult": ...

async def a_initiate_chat(
self,
recipient: "ConversableAgent",
clear_history: bool = True,
silent: Optional[bool] = False,
cache: Optional[AbstractCache] = None,
max_turns: Optional[int] = None,
summary_method: Optional[Union[str, Callable[..., Any]]] = DEFAULT_SUMMARY_METHOD,
summary_args: Optional[dict[str, Any]] = {},
message: Optional[Union["LLMMessageType", str, Callable[..., Any]]] = None,
**kwargs: Any,
) -> "ChatResult": ...

def run(
self,
message: str,
*,
tools: Optional[Union["Tool", Iterable["Tool"]]] = None,
executor_kwargs: Optional[dict[str, Any]] = None,
max_turns: Optional[int] = None,
msg_to: Literal["agent", "user"] = "agent",
clear_history: bool = False,
user_input: bool = True,
summary_method: Optional[Union[str, Callable[..., Any]]] = DEFAULT_SUMMARY_METHOD,
) -> "ChatResult": ...

async def a_run(
self,
message: str,
*,
tools: Optional[Union["Tool", Iterable["Tool"]]] = None,
executor_kwargs: Optional[dict[str, Any]] = None,
max_turns: Optional[int] = None,
msg_to: Literal["agent", "user"] = "agent",
clear_history: bool = False,
user_input: bool = True,
summary_method: Optional[Union[str, Callable[..., Any]]] = DEFAULT_SUMMARY_METHOD,
) -> "ChatResult": ...


@runtime_checkable
@export_module("autogen")
Expand All @@ -154,7 +223,5 @@ def update_system_message(self, system_message: str) -> None:

if TYPE_CHECKING:
# mypy will fail if ConversableAgent does not implement the Agent protocol
from .conversable_agent import ConversableAgent

def _check_protocol_implementation(agent: ConversableAgent) -> Agent:
def _check_protocol_implementation(agent: ConversableAgent) -> LLMAgent:
return agent
4 changes: 2 additions & 2 deletions autogen/agentchat/chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,8 @@
from typing import Any

from ..doc_utils import export_module
from ..events.agent_events import PostCarryoverProcessingEvent
from ..io.base import IOStream
from ..messages.agent_messages import PostCarryoverProcessingMessage
from .utils import consolidate_chat_info

logger = logging.getLogger(__name__)
Expand Down Expand Up @@ -133,7 +133,7 @@ def __post_carryover_processing(chat_info: dict[str, Any]) -> None:
UserWarning,
)

iostream.send(PostCarryoverProcessingMessage(chat_info=chat_info))
iostream.send(PostCarryoverProcessingEvent(chat_info=chat_info))


@export_module("autogen")
Expand Down
4 changes: 2 additions & 2 deletions autogen/agentchat/contrib/capabilities/generate_images.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
import re
from typing import Any, Literal, Optional, Protocol, Union

from .... import Agent, ConversableAgent, code_utils
from .... import Agent, ConversableAgent, LLMMessageType, code_utils
from ....cache import AbstractCache
from ....import_utils import optional_import_block, require_optional_import
from ....llm_config import LLMConfig
Expand Down Expand Up @@ -216,7 +216,7 @@ def add_to_agent(self, agent: ConversableAgent):
def _image_gen_reply(
self,
recipient: ConversableAgent,
messages: Optional[list[dict[str, Any]]],
messages: Optional[list["LLMMessageType"]],
sender: Optional[Agent] = None,
config: Optional[Any] = None,
) -> tuple[bool, Optional[Union[str, dict[str, Any]]]]:
Expand Down
11 changes: 7 additions & 4 deletions autogen/agentchat/contrib/capabilities/teachability.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
from ....formatting_utils import colored
from ....import_utils import optional_import_block, require_optional_import
from ....llm_config import LLMConfig
from ...agent import LLMMessageType
from ...assistant_agent import ConversableAgent
from ..text_analyzer_agent import TextAnalyzerAgent
from .agent_capability import AgentCapability
Expand Down Expand Up @@ -92,7 +93,7 @@ def prepopulate_db(self):
"""Adds a few arbitrary memos to the DB."""
self.memo_store.prepopulate()

def process_last_received_message(self, text: Union[dict[str, Any], str]):
def process_last_received_message(self, text: Union["LLMMessageType", str]):
"""Appends any relevant memos to the message text, and stores any apparent teachings in new memos.
Uses TextAnalyzerAgent to make decisions about memo storage and retrieval.
"""
Expand All @@ -107,7 +108,7 @@ def process_last_received_message(self, text: Union[dict[str, Any], str]):
# Return the (possibly) expanded message text.
return expanded_text

def _consider_memo_storage(self, comment: Union[dict[str, Any], str]):
def _consider_memo_storage(self, comment: Union["LLMMessageType", str]):
"""Decides whether to store something from one user comment in the DB."""
memo_added = False

Expand Down Expand Up @@ -165,7 +166,7 @@ def _consider_memo_storage(self, comment: Union[dict[str, Any], str]):
# Yes. Save them to disk.
self.memo_store._save_memos()

def _consider_memo_retrieval(self, comment: Union[dict[str, Any], str]):
def _consider_memo_retrieval(self, comment: Union["LLMMessageType", str]):
"""Decides whether to retrieve memos from the DB, and add them to the chat context."""
# First, use the comment directly as the lookup key.
if self.verbosity >= 1:
Expand Down Expand Up @@ -228,7 +229,9 @@ def _concatenate_memo_texts(self, memo_list: list) -> str:
memo_texts = memo_texts + "\n" + info
return memo_texts

def _analyze(self, text_to_analyze: Union[dict[str, Any], str], analysis_instructions: Union[dict[str, Any], str]):
def _analyze(
self, text_to_analyze: Union["LLMMessageType", str], analysis_instructions: Union["LLMMessageType", str]
):
"""Asks TextAnalyzerAgent to analyze the given text according to specific instructions."""
self.analyzer.reset() # Clear the analyzer's list of messages.
self.teachable_agent.send(
Expand Down
7 changes: 5 additions & 2 deletions autogen/agentchat/contrib/capabilities/transform_messages.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,12 +5,15 @@
# Portions derived from https://github.com/microsoft/autogen are under the MIT License.
# SPDX-License-Identifier: MIT
import copy
from typing import Any
from typing import TYPE_CHECKING, Any

from ....formatting_utils import colored
from ...conversable_agent import ConversableAgent
from .transforms import MessageTransform

if TYPE_CHECKING:
from ... import LLMMessageType


class TransformMessages:
"""Agent capability for transforming messages before reply generation.
Expand Down Expand Up @@ -65,7 +68,7 @@ def add_to_agent(self, agent: ConversableAgent):
"""
agent.register_hook(hookable_method="process_all_messages_before_reply", hook=self._transform_messages)

def _transform_messages(self, messages: list[dict[str, Any]]) -> list[dict[str, Any]]:
def _transform_messages(self, messages: list["LLMMessageType"]) -> list[dict[str, Any]]:
post_transform_messages = copy.deepcopy(messages)
system_message = None

Expand Down
Loading
Loading