From 00f3a6b24186063ec216f51b7f547576c429f516 Mon Sep 17 00:00:00 2001
From: Chris <66376200+crickman@users.noreply.github.com>
Date: Tue, 17 Sep 2024 13:12:53 -0700
Subject: [PATCH] .Net Agents - Streaming Bug Fix and Support Additional
 Assistant Option (#8852)

### Motivation and Context
Respond to two customer-identified issues:

1. Add support for `AdditionalInstructions` when creating an assistant, as well as an invocation-level override. Fixes #8715
2. Fix an issue with a duplicated tool-call result when using `ChatCompletionAgent` streaming. Fixes #8825

### Description
1. The `AdditionalInstructions` option was omitted from the V2 migration as an oversight. This is a pure addition.
2. Unit tests added for the new `AdditionalInstructions` option.
3. Duplication of the terminated function result addressed within `ChatCompletionAgent`.
4. Streaming cases added to the existing sample demonstrating use of `IAutoFunctionInvocationFilter`: `Concepts/Agents/ChatCompletion_FunctionTermination`.

### Contribution Checklist

- [X] The code builds clean without any errors or warnings
- [X] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations
- [X] All unit tests pass, and I have added new tests where possible
- [X] I didn't break anyone :smile:
---
 .../ChatCompletion_FunctionTermination.cs     | 148 ++++++++++++++++--
 dotnet/src/Agents/Core/ChatCompletionAgent.cs |  10 +-
 .../Internal/AssistantRunOptionsFactory.cs    |   1 +
 .../OpenAI/Internal/AssistantThreadActions.cs |   1 -
 .../OpenAI/OpenAIAssistantExecutionOptions.cs |   6 +
 .../OpenAIAssistantInvocationOptions.cs       |   6 +
 .../AssistantRunOptionsFactoryTests.cs        |   9 ++
 .../OpenAI/OpenAIAssistantDefinitionTests.cs  |   2 +
 .../OpenAIAssistantInvocationOptionsTests.cs  |   3 +
 .../Agents/ChatCompletionAgentTests.cs        |   9 +-
 .../Agents/MixedAgentTests.cs                 |   3 +-
 .../Agents/OpenAIAssistantAgentTests.cs       |   5 +-
 ...penAIChatCompletionFunctionCallingTests.cs |   2 +-
 ...enAIChatCompletion_FunctionCallingTests.cs |   2 +-
 14 files changed, 177 insertions(+), 30 deletions(-)

diff --git a/dotnet/samples/Concepts/Agents/ChatCompletion_FunctionTermination.cs b/dotnet/samples/Concepts/Agents/ChatCompletion_FunctionTermination.cs
index f208fc8a7634..e1612bfc83c1 100644
--- a/dotnet/samples/Concepts/Agents/ChatCompletion_FunctionTermination.cs
+++ b/dotnet/samples/Concepts/Agents/ChatCompletion_FunctionTermination.cs
@@ -38,14 +38,8 @@ public async Task UseAutoFunctionInvocationFilterWithAgentInvocationAsync()
         await InvokeAgentAsync("What is the special drink?");
         await InvokeAgentAsync("Thank you");

-        // Display the chat history.
-        Console.WriteLine("================================");
-        Console.WriteLine("CHAT HISTORY");
-        Console.WriteLine("================================");
-        foreach (ChatMessageContent message in chat)
-        {
-            this.WriteAgentChatMessage(message);
-        }
+        // Display the entire chat history.
+        WriteChatHistory(chat);

         // Local function to invoke agent and display the conversation messages.
         async Task InvokeAgentAsync(string input)
@@ -91,15 +85,8 @@ public async Task UseAutoFunctionInvocationFilterWithAgentChatAsync()
         await InvokeAgentAsync("What is the special drink?");
         await InvokeAgentAsync("Thank you");

-        // Display the chat history.
-        Console.WriteLine("================================");
-        Console.WriteLine("CHAT HISTORY");
-        Console.WriteLine("================================");
-        ChatMessageContent[] history = await chat.GetChatMessagesAsync().ToArrayAsync();
-        for (int index = history.Length; index > 0; --index)
-        {
-            this.WriteAgentChatMessage(history[index - 1]);
-        }
+        // Display the entire chat history.
+        WriteChatHistory(await chat.GetChatMessagesAsync().ToArrayAsync());

         // Local function to invoke agent and display the conversation messages.
         async Task InvokeAgentAsync(string input)
@@ -115,6 +102,133 @@ async Task InvokeAgentAsync(string input)
         }
     }

+    [Fact]
+    public async Task UseAutoFunctionInvocationFilterWithStreamingAgentInvocationAsync()
+    {
+        // Define the agent
+        ChatCompletionAgent agent =
+            new()
+            {
+                Instructions = "Answer questions about the menu.",
+                Kernel = CreateKernelWithFilter(),
+                Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }),
+            };
+
+        KernelPlugin plugin = KernelPluginFactory.CreateFromType<MenuPlugin>();
+        agent.Kernel.Plugins.Add(plugin);
+
+        // Create the chat history to capture the agent interaction.
+        ChatHistory chat = [];
+
+        // Respond to user input, invoking functions where appropriate.
+        await InvokeAgentAsync("Hello");
+        await InvokeAgentAsync("What is the special soup?");
+        await InvokeAgentAsync("What is the special drink?");
+        await InvokeAgentAsync("Thank you");
+
+        // Display the entire chat history.
+        WriteChatHistory(chat);
+
+        // Local function to invoke agent and display the conversation messages.
+        async Task InvokeAgentAsync(string input)
+        {
+            ChatMessageContent message = new(AuthorRole.User, input);
+            chat.Add(message);
+            this.WriteAgentChatMessage(message);
+
+            int historyCount = chat.Count;
+
+            bool isFirst = false;
+            await foreach (StreamingChatMessageContent response in agent.InvokeStreamingAsync(chat))
+            {
+                if (string.IsNullOrEmpty(response.Content))
+                {
+                    continue;
+                }
+
+                if (!isFirst)
+                {
+                    Console.WriteLine($"\n# {response.Role} - {response.AuthorName ?? "*"}:");
+                    isFirst = true;
+                }
+
+                Console.WriteLine($"\t > streamed: '{response.Content}'");
+            }
+
+            if (historyCount <= chat.Count)
+            {
+                for (int index = historyCount; index < chat.Count; index++)
+                {
+                    this.WriteAgentChatMessage(chat[index]);
+                }
+            }
+        }
+    }
+
+    [Fact]
+    public async Task UseAutoFunctionInvocationFilterWithStreamingAgentChatAsync()
+    {
+        // Define the agent
+        ChatCompletionAgent agent =
+            new()
+            {
+                Instructions = "Answer questions about the menu.",
+                Kernel = CreateKernelWithFilter(),
+                Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }),
+            };
+
+        KernelPlugin plugin = KernelPluginFactory.CreateFromType<MenuPlugin>();
+        agent.Kernel.Plugins.Add(plugin);
+
+        // Create a chat for agent interaction.
+        AgentGroupChat chat = new();
+
+        // Respond to user input, invoking functions where appropriate.
+        await InvokeAgentAsync("Hello");
+        await InvokeAgentAsync("What is the special soup?");
+        await InvokeAgentAsync("What is the special drink?");
+        await InvokeAgentAsync("Thank you");
+
+        // Display the entire chat history.
+        WriteChatHistory(await chat.GetChatMessagesAsync().ToArrayAsync());
+
+        // Local function to invoke agent and display the conversation messages.
+        async Task InvokeAgentAsync(string input)
+        {
+            ChatMessageContent message = new(AuthorRole.User, input);
+            chat.AddChatMessage(message);
+            this.WriteAgentChatMessage(message);
+
+            bool isFirst = false;
+            await foreach (StreamingChatMessageContent response in chat.InvokeStreamingAsync(agent))
+            {
+                if (string.IsNullOrEmpty(response.Content))
+                {
+                    continue;
+                }
+
+                if (!isFirst)
+                {
+                    Console.WriteLine($"\n# {response.Role} - {response.AuthorName ?? "*"}:");
+                    isFirst = true;
+                }
+
+                Console.WriteLine($"\t > streamed: '{response.Content}'");
+            }
+        }
+    }
+
+    private void WriteChatHistory(IEnumerable<ChatMessageContent> chat)
+    {
+        Console.WriteLine("================================");
+        Console.WriteLine("CHAT HISTORY");
+        Console.WriteLine("================================");
+        foreach (ChatMessageContent message in chat)
+        {
+            this.WriteAgentChatMessage(message);
+        }
+    }
+
     private Kernel CreateKernelWithFilter()
     {
         IKernelBuilder builder = Kernel.CreateBuilder();
diff --git a/dotnet/src/Agents/Core/ChatCompletionAgent.cs b/dotnet/src/Agents/Core/ChatCompletionAgent.cs
index cd56f42a9e20..37982a17613c 100644
--- a/dotnet/src/Agents/Core/ChatCompletionAgent.cs
+++ b/dotnet/src/Agents/Core/ChatCompletionAgent.cs
@@ -94,7 +94,7 @@ public override async IAsyncEnumerable<StreamingChatMessageContent> InvokeStream
         StringBuilder builder = new();
         await foreach (StreamingChatMessageContent message in messages.ConfigureAwait(false))
         {
-            role ??= message.Role;
+            role = message.Role;
             message.Role ??= AuthorRole.Assistant;
             message.AuthorName = this.Name;

@@ -103,8 +103,6 @@ public override async IAsyncEnumerable<StreamingChatMessageContent> InvokeStream
             yield return message;
         }

-        chat.Add(new(role ?? AuthorRole.Assistant, builder.ToString()) { AuthorName = this.Name });
-
         // Capture mutated messages related function calling / tools
         for (int messageIndex = messageCount; messageIndex < chat.Count; messageIndex++)
         {
@@ -114,6 +112,12 @@ public override async IAsyncEnumerable<StreamingChatMessageContent> InvokeStream
             history.Add(message);
         }
+
+        // Do not duplicate terminated function result to history
+        if (role != AuthorRole.Tool)
+        {
+            history.Add(new(role ?? AuthorRole.Assistant, builder.ToString()) { AuthorName = this.Name });
+        }
     }

     internal static (IChatCompletionService service, PromptExecutionSettings? executionSettings) GetChatCompletionService(Kernel kernel, KernelArguments? arguments)
diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs
index 981c646254af..9cef36da3fa3 100644
--- a/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs
+++ b/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs
@@ -24,6 +24,7 @@ public static RunCreationOptions GenerateOptions(OpenAIAssistantDefinition defin
         RunCreationOptions options =
             new()
             {
+                AdditionalInstructions = invocationOptions?.AdditionalInstructions ?? definition.ExecutionOptions?.AdditionalInstructions,
                 MaxCompletionTokens = ResolveExecutionSetting(invocationOptions?.MaxCompletionTokens, definition.ExecutionOptions?.MaxCompletionTokens),
                 MaxPromptTokens = ResolveExecutionSetting(invocationOptions?.MaxPromptTokens, definition.ExecutionOptions?.MaxPromptTokens),
                 ModelOverride = invocationOptions?.ModelName,
diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs
index 933ed120ae2e..643f9dbb1dcc 100644
--- a/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs
+++ b/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs
@@ -11,7 +11,6 @@
 using Azure;
 using Microsoft.Extensions.Logging;
 using Microsoft.SemanticKernel.ChatCompletion;
-
 using OpenAI;
 using OpenAI.Assistants;

diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantExecutionOptions.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantExecutionOptions.cs
index 074b92831c92..845cecb0956c 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantExecutionOptions.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantExecutionOptions.cs
@@ -11,6 +11,12 @@ namespace Microsoft.SemanticKernel.Agents.OpenAI;
 /// </summary>
 public sealed class OpenAIAssistantExecutionOptions
 {
+    /// <summary>
+    /// Appends additional instructions.
+    /// </summary>
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+    public string? AdditionalInstructions { get; init; }
+
     /// <summary>
     /// The maximum number of completion tokens that may be used over the course of the run.
     /// </summary>
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantInvocationOptions.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantInvocationOptions.cs
index 0653c83a13e2..c06921a6f0d0 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantInvocationOptions.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantInvocationOptions.cs
@@ -18,6 +18,12 @@ public sealed class OpenAIAssistantInvocationOptions
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public string? ModelName { get; init; }

+    /// <summary>
+    /// Appends additional instructions.
+    /// </summary>
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+    public string? AdditionalInstructions { get; init; }
+
     /// <summary>
     /// Set if code_interpreter tool is enabled.
     /// </summary>
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs
index d6bcf91b8a94..e3aa50473e81 100644
--- a/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs
+++ b/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs
@@ -23,6 +23,11 @@ public void AssistantRunOptionsFactoryExecutionOptionsNullTest()
             new("gpt-anything")
             {
                 Temperature = 0.5F,
+                ExecutionOptions =
+                    new()
+                    {
+                        AdditionalInstructions = "test",
+                    },
             };

         // Act
@@ -32,6 +37,7 @@ public void AssistantRunOptionsFactoryExecutionOptionsNullTest()
         Assert.NotNull(options);
         Assert.Null(options.Temperature);
         Assert.Null(options.NucleusSamplingFactor);
+        Assert.Equal("test", options.AdditionalInstructions);
         Assert.Empty(options.Metadata);
     }

@@ -77,6 +83,7 @@ public void AssistantRunOptionsFactoryExecutionOptionsOverrideTest()
                 ExecutionOptions =
                     new()
                     {
+                        AdditionalInstructions = "test1",
                         TruncationMessageCount = 5,
                     },
             };
@@ -84,6 +91,7 @@ public void AssistantRunOptionsFactoryExecutionOptionsOverrideTest()
         OpenAIAssistantInvocationOptions invocationOptions =
             new()
             {
+                AdditionalInstructions = "test2",
                 Temperature = 0.9F,
                 TruncationMessageCount = 8,
                 EnableJsonResponse = true,
@@ -96,6 +104,7 @@ public void AssistantRunOptionsFactoryExecutionOptionsOverrideTest()
         Assert.NotNull(options);
         Assert.Equal(0.9F, options.Temperature);
         Assert.Equal(8, options.TruncationStrategy.LastMessages);
+        Assert.Equal("test2", options.AdditionalInstructions);
         Assert.Equal(AssistantResponseFormat.JsonObject, options.ResponseFormat);
         Assert.Null(options.NucleusSamplingFactor);
     }
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs
index f8547f375f13..5c28373744a8 100644
--- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs
+++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs
@@ -62,6 +62,7 @@ public void VerifyOpenAIAssistantDefinitionAssignment()
             ExecutionOptions =
                 new()
                 {
+                    AdditionalInstructions = "test instructions",
                     MaxCompletionTokens = 1000,
                     MaxPromptTokens = 1000,
                     ParallelToolCallsEnabled = false,
@@ -83,6 +84,7 @@ public void VerifyOpenAIAssistantDefinitionAssignment()
         Assert.Equal(2, definition.Temperature);
         Assert.Equal(0, definition.TopP);
         Assert.NotNull(definition.ExecutionOptions);
+        Assert.Equal("test instructions", definition.ExecutionOptions.AdditionalInstructions);
         Assert.Equal(1000, definition.ExecutionOptions.MaxCompletionTokens);
         Assert.Equal(1000, definition.ExecutionOptions.MaxPromptTokens);
         Assert.Equal(12, definition.ExecutionOptions.TruncationMessageCount);
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantInvocationOptionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantInvocationOptionsTests.cs
index 99cbe012f183..a07690f42245 100644
--- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantInvocationOptionsTests.cs
+++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantInvocationOptionsTests.cs
@@ -22,6 +22,7 @@ public void OpenAIAssistantInvocationOptionsInitialState()

         // Assert
         Assert.Null(options.ModelName);
+        Assert.Null(options.AdditionalInstructions);
         Assert.Null(options.Metadata);
         Assert.Null(options.Temperature);
         Assert.Null(options.TopP);
@@ -48,6 +49,7 @@ public void OpenAIAssistantInvocationOptionsAssignment()
             new()
             {
                 ModelName = "testmodel",
+                AdditionalInstructions = "test instructions",
                 Metadata = new Dictionary<string, string>() { { "a", "1" } },
                 MaxCompletionTokens = 1000,
                 MaxPromptTokens = 1000,
@@ -62,6 +64,7 @@ public void OpenAIAssistantInvocationOptionsAssignment()

         // Assert
         Assert.Equal("testmodel", options.ModelName);
+        Assert.Equal("test instructions", options.AdditionalInstructions);
         Assert.Equal(2, options.Temperature);
         Assert.Equal(0, options.TopP);
         Assert.Equal(1000, options.MaxCompletionTokens);
diff --git a/dotnet/src/IntegrationTests/Agents/ChatCompletionAgentTests.cs b/dotnet/src/IntegrationTests/Agents/ChatCompletionAgentTests.cs
index fd8b815b84ab..cf4c7867a0b8 100644
--- a/dotnet/src/IntegrationTests/Agents/ChatCompletionAgentTests.cs
+++ b/dotnet/src/IntegrationTests/Agents/ChatCompletionAgentTests.cs
@@ -12,6 +12,7 @@
 using Microsoft.SemanticKernel.ChatCompletion;
 using Microsoft.SemanticKernel.Connectors.OpenAI;
 using SemanticKernel.IntegrationTests.TestSettings;
+using xRetry;
 using Xunit;

 namespace SemanticKernel.IntegrationTests.Agents;
@@ -32,7 +33,7 @@ public sealed class ChatCompletionAgentTests()
     /// Integration test for using function calling
     /// and targeting Azure OpenAI services.
     /// </summary>
-    [Theory]
+    [RetryTheory(typeof(HttpOperationException))]
     [InlineData("What is the special soup?", "Clam Chowder", false)]
     [InlineData("What is the special soup?", "Clam Chowder", true)]
     public async Task AzureChatCompletionAgentAsync(string input, string expectedAnswerContains, bool useAutoFunctionTermination)
@@ -96,7 +97,7 @@ public async Task AzureChatCompletionAgentAsync(string input, string expectedAns
     /// Integration test for using new function calling model
     /// and targeting Azure OpenAI services.
     /// </summary>
-    [Theory]
+    [RetryTheory(typeof(HttpOperationException))]
     [InlineData("What is the special soup?", "Clam Chowder", false)]
     [InlineData("What is the special soup?", "Clam Chowder", true)]
     public async Task AzureChatCompletionAgentUsingNewFunctionCallingModelAsync(string input, string expectedAnswerContains, bool useAutoFunctionTermination)
@@ -160,7 +161,7 @@ public async Task AzureChatCompletionAgentUsingNewFunctionCallingModelAsync(stri
     /// Integration test for using function calling
     /// and targeting Azure OpenAI services.
     /// </summary>
-    [Fact]
+    [RetryFact(typeof(HttpOperationException))]
     public async Task AzureChatCompletionStreamingAsync()
     {
         // Arrange
@@ -206,7 +207,7 @@ public async Task AzureChatCompletionStreamingAsync()
     /// Integration test for using new function calling model
     /// and targeting Azure OpenAI services.
     /// </summary>
-    [Fact]
+    [RetryFact(typeof(HttpOperationException))]
     public async Task AzureChatCompletionStreamingUsingNewFunctionCallingModelAsync()
     {
         // Arrange
diff --git a/dotnet/src/IntegrationTests/Agents/MixedAgentTests.cs b/dotnet/src/IntegrationTests/Agents/MixedAgentTests.cs
index 4c4792b7694e..94e9b6c34eaf 100644
--- a/dotnet/src/IntegrationTests/Agents/MixedAgentTests.cs
+++ b/dotnet/src/IntegrationTests/Agents/MixedAgentTests.cs
@@ -11,6 +11,7 @@
 using Microsoft.SemanticKernel.ChatCompletion;
 using Microsoft.SemanticKernel.Connectors.OpenAI;
 using SemanticKernel.IntegrationTests.TestSettings;
+using xRetry;
 using Xunit;

 namespace SemanticKernel.IntegrationTests.Agents;
@@ -50,7 +51,7 @@ await this.VerifyAgentExecutionAsync(
     /// Integration test for using function calling
     /// and targeting Azure OpenAI services.
     /// </summary>
-    [Theory]
+    [RetryTheory(typeof(HttpOperationException))]
     [InlineData(false)]
     [InlineData(true)]
     public async Task AzureOpenAIMixedAgentAsync(bool useNewFunctionCallingModel)
diff --git a/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs b/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs
index 62388488d483..e2d1ef2b1bfe 100644
--- a/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs
+++ b/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs
@@ -11,6 +11,7 @@
 using Microsoft.SemanticKernel.Agents.OpenAI;
 using Microsoft.SemanticKernel.ChatCompletion;
 using SemanticKernel.IntegrationTests.TestSettings;
+using xRetry;
 using Xunit;

 namespace SemanticKernel.IntegrationTests.Agents;
@@ -48,7 +49,7 @@ await this.ExecuteAgentAsync(
     /// Integration test for using function calling
     /// and targeting Azure OpenAI services.
     /// </summary>
-    [Theory]
+    [RetryTheory(typeof(HttpOperationException))]
     [InlineData("What is the special soup?", "Clam Chowder")]
     public async Task AzureOpenAIAssistantAgentAsync(string input, string expectedAnswerContains)
     {
@@ -84,7 +85,7 @@ await this.ExecuteStreamingAgentAsync(
     /// Integration test for using function calling
     /// and targeting Azure OpenAI services.
     /// </summary>
-    [Theory/*(Skip = "No supported endpoint configured.")*/]
+    [RetryTheory(typeof(HttpOperationException))]
     [InlineData("What is the special soup?", "Clam Chowder")]
     public async Task AzureOpenAIAssistantAgentStreamingAsync(string input, string expectedAnswerContains)
     {
diff --git a/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletionFunctionCallingTests.cs b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletionFunctionCallingTests.cs
index a149d5075db3..c3616167b2c8 100644
--- a/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletionFunctionCallingTests.cs
+++ b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletionFunctionCallingTests.cs
@@ -482,7 +482,7 @@ public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForAutoFu
         Assert.NotNull(getWeatherForCityFunctionCallResult.Result);
     }

-    [Fact]
+    [Fact(Skip = "Weather in Boston (USA) is not supported.")]
     public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForManualFunctionCallingForStreamingAsync()
     {
         // Arrange
diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_FunctionCallingTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_FunctionCallingTests.cs
index 38dc4b52daff..0423323cfab0 100644
--- a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_FunctionCallingTests.cs
+++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_FunctionCallingTests.cs
@@ -482,7 +482,7 @@ public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForAutoFu
         Assert.NotNull(getWeatherForCityFunctionCallResult.Result);
     }

-    [Fact]
+    [Fact(Skip = "Weather in Boston (USA) is not supported.")]
     public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForManualFunctionCallingForStreamingAsync()
    {
         // Arrange
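
---

Usage sketch (reviewer note, not part of the patch): the following illustrates how the new `AdditionalInstructions` option composes across the definition and invocation levels, based on the properties and the null-coalescing resolution added in `AssistantRunOptionsFactory` above. The model id and instruction strings are placeholder values, and `AssistantRunOptionsFactory.GenerateOptions` is an internal helper, called here only to make the precedence explicit.

```csharp
// Definition-level default: applied to every run created for this assistant.
OpenAIAssistantDefinition definition =
    new("gpt-4o")
    {
        ExecutionOptions = new() { AdditionalInstructions = "Answer in French." },
    };

// Invocation-level override: takes precedence when both are set, per the
// "invocationOptions?.AdditionalInstructions ?? definition.ExecutionOptions?.AdditionalInstructions"
// resolution in AssistantRunOptionsFactory.GenerateOptions.
OpenAIAssistantInvocationOptions invocationOptions =
    new() { AdditionalInstructions = "Answer in English." };

RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, invocationOptions);
// options.AdditionalInstructions is "Answer in English."; with a null invocation
// override, it would fall back to the definition-level "Answer in French."
```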