Skip to content

Commit

Permalink
ALL group
Browse files Browse the repository at this point in the history
  • Loading branch information
GoldenWind8 committed Nov 3, 2023
1 parent 8c7ec4f commit 7c799d4
Show file tree
Hide file tree
Showing 2 changed files with 92 additions and 161 deletions.
94 changes: 13 additions & 81 deletions swarms/structs/flow.py
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,7 @@ def __init__(
interactive: bool = False,
dashboard: bool = False,
name: str = "flow-agent",
system_message: str = "system",
system_message: str = FLOW_SYSTEM_PROMPT,
dynamic_temperature: bool = False,
**kwargs: Any,
):
Expand All @@ -125,8 +125,7 @@ def __init__(
self.dashboard = dashboard
self.dynamic_temperature = dynamic_temperature
self.system_message = system_message
self._oai_messages = defaultdict(list)
self._oai_system_message = [{"content": system_message, "role": "system"}]
self.name = name

def provide_feedback(self, feedback: str) -> None:
"""Allow users to provide feedback on the responses."""
Expand Down Expand Up @@ -482,86 +481,19 @@ def streamed_token_generation(self, prompt: str) -> Generator[str, None, None]:
time.sleep(0.1)
yield token

def generate_reply(self, history: str, **kwargs) -> Dict[str, str]:
    """Generate a response based on the conversation history.

    Builds a prompt from the agent's system message plus the formatted
    history, queries the underlying LLM, and returns the reply as a
    chat-style message dict.

    Args:
        history: The formatted conversation history so far.
        **kwargs: Extra keyword arguments forwarded to the LLM call.

    Returns:
        A dict with this agent's ``name`` as the role and the LLM output
        as the content.  (The previous ``-> str`` annotation was wrong:
        a dict has always been returned.)
    """
    prompt = f"""SYSTEM_PROMPT:{self.system_message}
History:
{history}
Your response:"""
    response = self.llm(prompt, **kwargs)
    return {"role": self.name, "content": response}

Args:
agent (Agent): The agent in the conversation.
If None and more than one agent's conversations are found, an error will be raised.
If None and only one conversation is found, the last message of the only conversation will be returned.

Returns:
The last message exchanged with the agent.
def update_system_message(self, system_message: str) -> None:
    """Replace the system message used when building prompts.

    Args:
        system_message: The new system prompt text for this agent.
    """
    self.system_message = system_message
159 changes: 79 additions & 80 deletions swarms/swarms/groupchat.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
from typing import Dict, List, Optional, Union
import logging

from .. import Flow
from swarms import Flow, OpenAI

logger = logging.getLogger(__name__)

Expand All @@ -27,8 +27,11 @@ def reset(self):
self.messages.clear()

def agent_by_name(self, name: str) -> "Flow":
    """Resolve an agent from a (possibly noisy) name string.

    Prefers an exact name match; otherwise falls back to the first agent
    whose name appears as a substring of ``name`` — LLM selectors often
    wrap the chosen role name in extra text.  The exact-match pass avoids
    mis-resolution when one agent's name is contained in another's.

    Args:
        name: The raw selector output to resolve.

    Raises:
        ValueError: If no agent name equals or is contained in ``name``.
    """
    for agent in self.agents:
        if agent.name == name:
            return agent
    for agent in self.agents:
        if agent.name in name:
            return agent
    raise ValueError(f"No agent found with a name contained in '{name}'.")

def next_agent(self, agent: Flow) -> Flow:
"""Return the next agent in the list."""
Expand All @@ -53,94 +56,90 @@ def select_speaker(self, last_speaker: Flow, selector: Flow):
f"GroupChat is underpopulated with {n_agents} agents. Direct communication would be more efficient."
)

final, name = selector.generate_oai_reply(
self.messages
+ [
name = selector.generate_reply(
self.format_history(self.messages
+ [
{
"role": "system",
"content": f"Read the above conversation. Then select the next role from {self.agent_names} to play. Only return the role.",
"content": f"Read the above conversation. Then select the next most suitable role from {self.agent_names} to play. Only return the role.",
}
]
])
)
if not final:
# i = self._random.randint(0, len(self._agent_names) - 1) # randomly pick an id
return self.next_agent(last_speaker)
try:
return self.agent_by_name(name)
return self.agent_by_name(name['content'])
except ValueError:
return self.next_agent(last_speaker)

def _participant_roles(self):
return "\n".join([f"{agent.name}: {agent.system_message}" for agent in self.agents])

def format_history(self, messages: List[Dict]) -> str:
    """Render chat messages as newline-separated ``role:content`` lines.

    Args:
        messages: Chat messages, each a dict with 'role' and 'content'.

    Returns:
        One line per message, joined with newlines.  (Fixes a stray
        unmatched leading quote that the previous f-string prepended to
        every line.)
    """
    formatted_messages = []
    for message in messages:
        formatted_messages.append(f"{message['role']}:{message['content']}")
    return "\n".join(formatted_messages)

class GroupChatManager(Flow):
"""(In preview) A chat manager agent that can manage a group chat of multiple agents."""

def __init__(
self,
groupchat: GroupChat,
name: Optional[str] = "chat_manager",
# unlimited consecutive auto reply by default
max_consecutive_auto_reply: Optional[int] = sys.maxsize,
human_input_mode: Optional[str] = "NEVER",
system_message: Optional[str] = "Group chat manager.",
# seed: Optional[int] = 4,
**kwargs,
):
super().__init__(
name=name,
max_consecutive_auto_reply=max_consecutive_auto_reply,
human_input_mode=human_input_mode,
system_message=system_message,
**kwargs,
)
self.register_reply(Flow, GroupChatManager.run_chat, config=groupchat, reset_config=GroupChat.reset)
# self._random = random.Random(seed)

def run_chat(
self,
messages: Optional[List[Dict]] = None,
sender: Optional[Flow] = None,
config: Optional[GroupChat] = None,
) -> Union[str, Dict, None]:
"""Run a group chat."""
if messages is None:
messages = self._oai_messages[sender]
message = messages[-1]
speaker = sender
groupchat = config
for i in range(groupchat.max_round):
# set the name to speaker's name if the role is not function
if message["role"] != "function":
message["name"] = speaker.name
groupchat.messages.append(message)
# broadcast the message to all agents except the speaker
for agent in groupchat.agents:
if agent != speaker:
self.send(message, agent, request_reply=False, silent=True)
if i == groupchat.max_round - 1:
# the last round
break
try:
# select the next speaker
speaker = groupchat.select_speaker(speaker, self)
# let the speaker speak
reply = speaker.generate_reply(sender=self)
except KeyboardInterrupt:
# let the admin agent speak if interrupted
if groupchat.admin_name in groupchat.agent_names:
# admin agent is one of the participants
speaker = groupchat.agent_by_name(groupchat.admin_name)
reply = speaker.generate_reply(sender=self)
else:
# admin agent is not found in the participants
raise
if reply is None:
break
# The speaker sends the message without requesting a reply
speaker.send(reply, self, request_reply=False)
message = self.last_message(speaker)
return True, None
class GroupChatManager:
def __init__(self, groupchat: GroupChat, selector: Flow):
self.groupchat = groupchat
self.selector = selector



def run_chat(self, task: str):
self.groupchat.messages.append({'role':self.selector.name, 'content': task})
for i in range(self.groupchat.max_round):
speaker = self.groupchat.select_speaker(last_speaker=self.selector, selector=self.selector)
reply = speaker.generate_reply(self.groupchat.format_history(self.groupchat.messages))
self.groupchat.messages.append(reply)
print(reply)
if i == self.groupchat.max_round - 1:
break

return reply


import os

# LLM shared by every agent.
# SECURITY FIX: an OpenAI API key was previously hard-coded here (a leaked
# secret that must be revoked); read it from the environment instead.
llm = OpenAI(
    openai_api_key=os.environ["OPENAI_API_KEY"],
    temperature=0.5,
    max_tokens=3000,
)

# Initialize the flows (agents) taking part in the group chat.
flow1 = Flow(
    llm=llm,
    max_loops=1,
    system_message="YOU ARE SILLY, YOU OFFER NOTHING OF VALUE",
    name='silly',
    dashboard=True,
)
flow2 = Flow(
    llm=llm,
    max_loops=1,
    system_message="YOU ARE VERY SMART AND ANSWER RIDDLES",
    name='detective',
    dashboard=True,
)
flow3 = Flow(
    llm=llm,
    max_loops=1,
    system_message="YOU MAKE RIDDLES",
    name='riddler',
    dashboard=True,
)
manager = Flow(
    llm=llm,
    max_loops=1,
    system_message="YOU ARE A GROUP CHAT MANAGER",
    name='manager',
    dashboard=True,
)


# Example usage: three participants, a manager as selector, five rounds.
agents = [flow1, flow2, flow3]

group_chat = GroupChat(agents=agents, messages=[], max_round=5)
chat_manager = GroupChatManager(groupchat=group_chat, selector=manager)
chat_history = chat_manager.run_chat("Write me a riddle and answer it")

0 comments on commit 7c799d4

Please sign in to comment.