Skip to content

Commit

Permalink
Merge branch 'main' into stream-token-count
Browse files Browse the repository at this point in the history
  • Loading branch information
sonichi authored Aug 6, 2024
2 parents beb105a + 2ab74db commit 286d647
Show file tree
Hide file tree
Showing 41 changed files with 445 additions and 244 deletions.
4 changes: 2 additions & 2 deletions autogen/agentchat/contrib/retrieve_user_proxy_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -519,7 +519,7 @@ def _generate_retrieve_user_reply(
self.problem, self.n_results * (2 * _tmp_retrieve_count + 1), self._search_string
)
doc_contents = self._get_context(self._results)
if doc_contents:
if doc_contents or self.n_results * (2 * _tmp_retrieve_count + 1) >= len(self._results[0]):
break
elif update_context_case2:
# Use the current intermediate info as the query text to retrieve docs, and each time we append the top similar
Expand All @@ -531,7 +531,7 @@ def _generate_retrieve_user_reply(
)
self._get_context(self._results)
doc_contents = "\n".join(self._doc_contents) # + "\n" + "\n".join(self._intermediate_answers)
if doc_contents:
if doc_contents or self.n_results * (2 * _tmp_retrieve_count + 1) >= len(self._results[0]):
break

self.clear_history()
Expand Down
2 changes: 2 additions & 0 deletions autogen/agentchat/contrib/society_of_mind_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,7 @@ def __init__(
code_execution_config: Union[Dict, Literal[False]] = False,
llm_config: Optional[Union[Dict, Literal[False]]] = False,
default_auto_reply: Optional[Union[str, Dict, None]] = "",
**kwargs,
):
super().__init__(
name=name,
Expand All @@ -50,6 +51,7 @@ def __init__(
code_execution_config=code_execution_config,
llm_config=llm_config,
default_auto_reply=default_auto_reply,
**kwargs,
)

self.update_chat_manager(chat_manager)
Expand Down
2 changes: 2 additions & 0 deletions autogen/agentchat/contrib/web_surfer.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@ def __init__(
summarizer_llm_config: Optional[Union[Dict, Literal[False]]] = None,
default_auto_reply: Optional[Union[str, Dict, None]] = "",
browser_config: Optional[Union[Dict, None]] = None,
**kwargs,
):
super().__init__(
name=name,
Expand All @@ -53,6 +54,7 @@ def __init__(
code_execution_config=code_execution_config,
llm_config=llm_config,
default_auto_reply=default_auto_reply,
**kwargs,
)

self._create_summarizer_client(summarizer_llm_config, llm_config)
Expand Down
21 changes: 16 additions & 5 deletions autogen/agentchat/conversable_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,7 @@ def __init__(
default_auto_reply: Union[str, Dict] = "",
description: Optional[str] = None,
chat_messages: Optional[Dict[Agent, List[Dict]]] = None,
silent: Optional[bool] = None,
):
"""
Args:
Expand Down Expand Up @@ -126,6 +127,8 @@ def __init__(
chat_messages (dict or None): the previous chat messages that this agent had in the past with other agents.
Can be used to give the agent a memory by providing the chat history. This will allow the agent to
resume previous had conversations. Defaults to an empty chat history.
silent (bool or None): (Experimental) whether to print the message sent. If None, will use the value of
silent in each function.
"""
# we change code_execution_config below and we have to make sure we don't change the input
# in case of UserProxyAgent, without this we could even change the default value {}
Expand All @@ -147,6 +150,7 @@ def __init__(
if is_termination_msg is not None
else (lambda x: content_str(x.get("content")) == "TERMINATE")
)
self.silent = silent
# Take a copy to avoid modifying the given dict
if isinstance(llm_config, dict):
try:
Expand Down Expand Up @@ -263,6 +267,10 @@ def _validate_llm_config(self, llm_config):
)
self.client = None if self.llm_config is False else OpenAIWrapper(**self.llm_config)

@staticmethod
def _is_silent(agent: Agent, silent: Optional[bool] = False) -> bool:
return agent.silent if agent.silent is not None else silent

@property
def name(self) -> str:
"""Get the name of the agent."""
Expand Down Expand Up @@ -606,7 +614,9 @@ def _process_message_before_send(
"""Process the message before sending it to the recipient."""
hook_list = self.hook_lists["process_message_before_send"]
for hook in hook_list:
message = hook(sender=self, message=message, recipient=recipient, silent=silent)
message = hook(
sender=self, message=message, recipient=recipient, silent=ConversableAgent._is_silent(self, silent)
)
return message

def send(
Expand Down Expand Up @@ -648,7 +658,7 @@ def send(
Raises:
ValueError: if the message can't be converted into a valid ChatCompletion message.
"""
message = self._process_message_before_send(message, recipient, silent)
message = self._process_message_before_send(message, recipient, ConversableAgent._is_silent(self, silent))
# When the agent composes and sends the message, the role of the message is "assistant"
# unless it's "function".
valid = self._append_oai_message(message, "assistant", recipient)
Expand Down Expand Up @@ -698,7 +708,7 @@ async def a_send(
Raises:
ValueError: if the message can't be converted into a valid ChatCompletion message.
"""
message = self._process_message_before_send(message, recipient, silent)
message = self._process_message_before_send(message, recipient, ConversableAgent._is_silent(self, silent))
# When the agent composes and sends the message, the role of the message is "assistant"
# unless it's "function".
valid = self._append_oai_message(message, "assistant", recipient)
Expand Down Expand Up @@ -780,7 +790,8 @@ def _process_received_message(self, message: Union[Dict, str], sender: Agent, si
raise ValueError(
"Received message can't be converted into a valid ChatCompletion message. Either content or function_call must be provided."
)
if not silent:

if not ConversableAgent._is_silent(sender, silent):
self._print_received_message(message, sender)

def receive(
Expand Down Expand Up @@ -2185,7 +2196,7 @@ def _format_json_str(jstr):
Ex 2:
"{\n \"location\": \"Boston, MA\"\n}" -> "{"location": "Boston, MA"}"
2. this function also handles JSON escape sequences inside quotes,
2. this function also handles JSON escape sequences inside quotes.
Ex 1:
'{"args": "a\na\na\ta"}' -> '{"args": "a\\na\\na\\ta"}'
"""
Expand Down
4 changes: 4 additions & 0 deletions autogen/agentchat/user_proxy_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ def __init__(
llm_config: Optional[Union[Dict, Literal[False]]] = False,
system_message: Optional[Union[str, List]] = "",
description: Optional[str] = None,
**kwargs,
):
"""
Args:
Expand Down Expand Up @@ -79,6 +80,8 @@ def __init__(
Only used when llm_config is not False. Use it to reprogram the agent.
description (str): a short description of the agent. This description is used by other agents
(e.g. the GroupChatManager) to decide when to call upon this agent. (Default: system_message)
**kwargs (dict): Please refer to other kwargs in
[ConversableAgent](conversable_agent#__init__).
"""
super().__init__(
name=name,
Expand All @@ -93,6 +96,7 @@ def __init__(
description=(
description if description is not None else self.DEFAULT_USER_PROXY_AGENT_DESCRIPTIONS[human_input_mode]
),
**kwargs,
)

if logging_enabled():
Expand Down
1 change: 0 additions & 1 deletion autogen/coding/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@

from pydantic import BaseModel, Field

from ..agentchat.agent import LLMAgent
from ..types import UserMessageImageContentPart, UserMessageTextContentPart

__all__ = ("CodeBlock", "CodeResult", "CodeExtractor", "CodeExecutor", "CodeExecutionConfig")
Expand Down
3 changes: 2 additions & 1 deletion autogen/oai/cohere.py
Original file line number Diff line number Diff line change
Expand Up @@ -415,8 +415,9 @@ def oai_messages_to_cohere_messages(

# If we're adding tool_results, like we are, the last message can't be a USER message
# So, we add a CHATBOT 'continue' message, if so.
# Changed key from "content" to "message" (jaygdesai/autogen_Jay)
if cohere_messages[-1]["role"] == "USER":
cohere_messages.append({"role": "CHATBOT", "content": "Please continue."})
cohere_messages.append({"role": "CHATBOT", "message": "Please continue."})

# We return a blank message when we have tool results
# TODO: Check what happens if tool_results aren't the latest message
Expand Down
2 changes: 1 addition & 1 deletion dotnet/eng/MetaInfo.props
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<PropertyGroup>
<VersionPrefix>0.0.16</VersionPrefix>
<VersionPrefix>0.0.17</VersionPrefix>
<Authors>AutoGen</Authors>
<PackageProjectUrl>https://microsoft.github.io/autogen-for-net/</PackageProjectUrl>
<RepositoryUrl>https://github.com/microsoft/autogen</RepositoryUrl>
Expand Down
5 changes: 4 additions & 1 deletion dotnet/src/AutoGen.Core/Extension/GroupChatExtension.cs
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,10 @@ public static async IAsyncEnumerable<IMessage> SendAsync(
yield break;
}

chatHistory = messages;
// messages will contain the complete chat history, including initialize messages,
// but we only need to add the last message to the chat history
// fix #3268
chatHistory = chatHistory.Append(lastMessage);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,12 +27,9 @@
<EmbeddedResource Include="RestoreInteractive.config" />
</ItemGroup>

<ItemGroup>
<PackageReference Include="Azure.AI.OpenAI" Version="$(AzureOpenAIVersion)" />
</ItemGroup>

<ItemGroup>
<ProjectReference Include="..\AutoGen.Core\AutoGen.Core.csproj" />
<ProjectReference Include="..\AutoGen.SourceGenerator\AutoGen.SourceGenerator.csproj" OutputItemType="Analyzer" ReferenceOutputAssembly="false" />
</ItemGroup>

</Project>
105 changes: 3 additions & 102 deletions dotnet/src/AutoGen.DotnetInteractive/DotnetInteractiveFunction.cs
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,12 @@
// DotnetInteractiveFunction.cs

using System.Text;
using System.Text.Json;
using Azure.AI.OpenAI;
using Microsoft.DotNet.Interactive.Documents;
using Microsoft.DotNet.Interactive.Documents.Jupyter;

namespace AutoGen.DotnetInteractive;

public class DotnetInteractiveFunction : IDisposable
public partial class DotnetInteractiveFunction : IDisposable
{
private readonly InteractiveService? _interactiveService = null;
private string _notebookPath;
Expand Down Expand Up @@ -71,6 +69,7 @@ public DotnetInteractiveFunction(InteractiveService interactiveService, string?
/// Run existing dotnet code from message. Don't modify the code, run it as is.
/// </summary>
/// <param name="code">code.</param>
[Function]
public async Task<string> RunCode(string code)
{
if (this._interactiveService == null)
Expand Down Expand Up @@ -117,6 +116,7 @@ public async Task<string> RunCode(string code)
/// Install nuget packages.
/// </summary>
/// <param name="nugetPackages">nuget package to install.</param>
[Function]
public async Task<string> InstallNugetPackages(string[] nugetPackages)
{
if (this._interactiveService == null)
Expand Down Expand Up @@ -173,105 +173,6 @@ private async Task AddCellAsync(string cellContent, string kernelName)
writeStream.Dispose();
}

private class RunCodeSchema
{
public string code { get; set; } = string.Empty;
}

public Task<string> RunCodeWrapper(string arguments)
{
var schema = JsonSerializer.Deserialize<RunCodeSchema>(
arguments,
new JsonSerializerOptions
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
});

return RunCode(schema!.code);
}

public FunctionDefinition RunCodeFunction
{
get => new FunctionDefinition
{
Name = @"RunCode",
Description = """
Run existing dotnet code from message. Don't modify the code, run it as is.
""",
Parameters = BinaryData.FromObjectAsJson(new
{
Type = "object",
Properties = new
{
code = new
{
Type = @"string",
Description = @"code.",
},
},
Required = new[]
{
"code",
},
},
new JsonSerializerOptions
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
})
};
}

private class InstallNugetPackagesSchema
{
public string[] nugetPackages { get; set; } = Array.Empty<string>();
}

public Task<string> InstallNugetPackagesWrapper(string arguments)
{
var schema = JsonSerializer.Deserialize<InstallNugetPackagesSchema>(
arguments,
new JsonSerializerOptions
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
});

return InstallNugetPackages(schema!.nugetPackages);
}

public FunctionDefinition InstallNugetPackagesFunction
{
get => new FunctionDefinition
{
Name = @"InstallNugetPackages",
Description = """
Install nuget packages.
""",
Parameters = BinaryData.FromObjectAsJson(new
{
Type = "object",
Properties = new
{
nugetPackages = new
{
Type = @"array",
Items = new
{
Type = @"string",
},
Description = @"nuget package to install.",
},
},
Required = new[]
{
"nugetPackages",
},
},
new JsonSerializerOptions
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
})
};
}
public void Dispose()
{
this._interactiveService?.Dispose();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,6 @@ public virtual string TransformText()
using System.Threading.Tasks;
using System;
using AutoGen.Core;
using AutoGen.OpenAI.Extension;
");
if (!String.IsNullOrEmpty(NameSpace)) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,6 @@ using System.Text.Json.Serialization;
using System.Threading.Tasks;
using System;
using AutoGen.Core;
using AutoGen.OpenAI.Extension;

<#if (!String.IsNullOrEmpty(NameSpace)) {#>
namespace <#=NameSpace#>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@ using System.Text.Json.Serialization;
using System.Threading.Tasks;
using System;
using AutoGen.Core;
using AutoGen.OpenAI.Extension;

namespace AutoGen.SourceGenerator.Tests
{
Expand Down
Loading

0 comments on commit 286d647

Please sign in to comment.