From d0145347948a0f33eab1c50ea0aeb1dcf3bff89a Mon Sep 17 00:00:00 2001
From: SergeyMenshykh <68852919+SergeyMenshykh@users.noreply.github.com>
Date: Thu, 31 Oct 2024 19:01:06 +0000
Subject: [PATCH] .Net: Parallel function calls option (#9487)
### Motivation and Context
This PR adds the `FunctionChoiceBehaviorOptions.AllowParallelCalls`
option and updates the Azure OpenAI and OpenAI connectors to support it. This
option instructs the AI model to generate multiple function calls in a
single response when set to true.
_"This is especially useful if executing the given functions takes a
long time. For example, the model may call functions to get the weather
in three different locations at the same time, which will result in a
message with three function calls in the tool_calls array."_ **Source**
- [Configuring parallel function
calling](https://platform.openai.com/docs/guides/function-calling/configuring-parallel-function-calling)
Closes: https://github.com/microsoft/semantic-kernel/issues/6636
---
.../FunctionCalling/FunctionCalling.cs | 67 +++++++++++++++++++
.../AzureOpenAIChatCompletionServiceTests.cs | 55 +++++++++++++++
.../Core/AzureClientCore.ChatCompletion.cs | 5 ++
.../OpenAIChatCompletionServiceTests.cs | 52 ++++++++++++++
.../Core/ClientCore.ChatCompletion.cs | 7 +-
...omptExecutionSettingsTypeConverterTests.cs | 44 ++++++++++++
...pletion_AutoFunctionChoiceBehaviorTests.cs | 47 +++++++++++++
...pletion_AutoFunctionChoiceBehaviorTests.cs | 47 +++++++++++++
.../FunctionChoiceBehaviorOptions.cs | 12 +++-
...ctionChoiceBehaviorDeserializationTests.cs | 48 +++++++++++++
10 files changed, 382 insertions(+), 2 deletions(-)
diff --git a/dotnet/samples/Concepts/FunctionCalling/FunctionCalling.cs b/dotnet/samples/Concepts/FunctionCalling/FunctionCalling.cs
index 70dbe2bdd0ef..9ce10a4ae5ea 100644
--- a/dotnet/samples/Concepts/FunctionCalling/FunctionCalling.cs
+++ b/dotnet/samples/Concepts/FunctionCalling/FunctionCalling.cs
@@ -46,7 +46,19 @@ namespace FunctionCalling;
/// * The option enables concurrent invocation of functions by SK.
/// By default, this option is set to false, meaning that functions are invoked sequentially. Concurrent invocation is only possible if the AI model can
/// call or select multiple functions for invocation in a single request; otherwise, there is no distinction between sequential and concurrent invocation.
+/// * The option instructs the AI model to call multiple functions in one request if the model supports parallel function calls.
+/// By default, this option is set to null, meaning that the AI model default value will be used.
///
+/// The following table summarizes the effects of different combinations of these options:
+///
+/// | AllowParallelCalls | AllowConcurrentInvocation | AI function call requests | Concurrent Invocation |
+/// |---------------------|---------------------------|--------------------------------|-----------------------|
+/// | false | false | one request per call | false |
+/// | false | true | one request per call | false* |
+/// | true | false | one request per multiple calls | false |
+/// | true | true | one request per multiple calls | true |
+///
+/// `*` With one function call per request there is only a single function to invoke, so concurrent invocation has no effect.
///
public class FunctionCalling(ITestOutputHelper output) : BaseTest(output)
{
@@ -458,6 +470,61 @@ public async Task RunNonStreamingChatCompletionApiWithConcurrentFunctionInvocati
// Expected output: Good morning! The current UTC time is 07:47 on October 22, 2024. Here are the latest news headlines: 1. Squirrel Steals Show - Discover the unexpected star of a recent event. 2. Dog Wins Lottery - Unbelievably, a lucky canine has hit the jackpot.
}
+ [Fact]
+ ///
+ /// This example demonstrates usage of the non-streaming chat completion API with a function choice behavior that
+ /// advertises all kernel functions to the AI model and instructs the model to call multiple functions in parallel.
+ ///
+ public async Task RunNonStreamingChatCompletionApiWithParallelFunctionCallOptionAsync()
+ {
+ Kernel kernel = CreateKernel();
+
+ // The `AllowParallelCalls` option instructs the AI model to call multiple functions in parallel if the model supports parallel function calls.
+ FunctionChoiceBehaviorOptions options = new() { AllowParallelCalls = true };
+
+ OpenAIPromptExecutionSettings settings = new() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(options: options) };
+
+ IChatCompletionService chatCompletionService = kernel.GetRequiredService();
+
+ ChatMessageContent result = await chatCompletionService.GetChatMessageContentAsync(
+ "Good morning! What’s the current time and latest news headlines?",
+ settings,
+ kernel);
+
+ // Assert
+ Console.WriteLine(result);
+
+ // Expected output: Good morning! The current UTC time is 07:47 on October 22, 2024. Here are the latest news headlines: 1. Squirrel Steals Show - Discover the unexpected star of a recent event. 2. Dog Wins Lottery - Unbelievably, a lucky canine has hit the jackpot.
+ }
+
+ [Fact]
+ ///
+ /// This example demonstrates usage of the non-streaming chat completion API with a function choice behavior that
+ /// advertises all kernel functions to the AI model, instructs the model to call multiple functions in parallel, and invokes them concurrently.
+ ///
+ public async Task RunNonStreamingChatCompletionApiWithParallelFunctionCallAndConcurrentFunctionInvocationOptionsAsync()
+ {
+ Kernel kernel = CreateKernel();
+
+ // The `AllowParallelCalls` option instructs the AI model to call multiple functions in parallel if the model supports parallel function calls.
+ // The `AllowConcurrentInvocation` option enables concurrent invocation of the functions.
+ FunctionChoiceBehaviorOptions options = new() { AllowParallelCalls = true, AllowConcurrentInvocation = true };
+
+ OpenAIPromptExecutionSettings settings = new() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(options: options) };
+
+ IChatCompletionService chatCompletionService = kernel.GetRequiredService();
+
+ ChatMessageContent result = await chatCompletionService.GetChatMessageContentAsync(
+ "Good morning! What’s the current time and latest news headlines?",
+ settings,
+ kernel);
+
+ // Assert
+ Console.WriteLine(result);
+
+ // Expected output: Good morning! The current UTC time is 07:47 on October 22, 2024. Here are the latest news headlines: 1. Squirrel Steals Show - Discover the unexpected star of a recent event. 2. Dog Wins Lottery - Unbelievably, a lucky canine has hit the jackpot.
+ }
+
private static Kernel CreateKernel()
{
// Create kernel
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs
index 995d8c7e4913..074018f14fe6 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs
@@ -1342,6 +1342,61 @@ public async Task ItCreatesCorrectFunctionToolCallsWhenUsingRequiredFunctionChoi
Assert.Equal("required", optionsJson.GetProperty("tool_choice").ToString());
}
+ [Theory]
+ [InlineData("auto", true)]
+ [InlineData("auto", false)]
+ [InlineData("auto", null)]
+ [InlineData("required", true)]
+ [InlineData("required", false)]
+ [InlineData("required", null)]
+ public async Task ItPassesAllowParallelCallsOptionToLLMAsync(string choice, bool? optionValue)
+ {
+ // Arrange
+ var kernel = new Kernel();
+ kernel.Plugins.AddFromFunctions("TimePlugin", [
+ KernelFunctionFactory.CreateFromMethod(() => { }, "Date"),
+ KernelFunctionFactory.CreateFromMethod(() => { }, "Now")
+ ]);
+
+ var sut = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
+
+ using var responseMessage = new HttpResponseMessage(HttpStatusCode.OK)
+ {
+ Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
+ };
+ this._messageHandlerStub.ResponsesToReturn.Add(responseMessage);
+
+ var chatHistory = new ChatHistory();
+ chatHistory.AddUserMessage("Fake prompt");
+
+ var functionChoiceBehaviorOptions = new FunctionChoiceBehaviorOptions() { AllowParallelCalls = optionValue };
+
+ var executionSettings = new OpenAIPromptExecutionSettings()
+ {
+ FunctionChoiceBehavior = choice switch
+ {
+ "auto" => FunctionChoiceBehavior.Auto(options: functionChoiceBehaviorOptions),
+ "required" => FunctionChoiceBehavior.Required(options: functionChoiceBehaviorOptions),
+ _ => throw new ArgumentException("Invalid choice", nameof(choice))
+ }
+ };
+
+ // Act
+ await sut.GetChatMessageContentsAsync(chatHistory, executionSettings, kernel);
+
+ // Assert
+ var optionsJson = JsonSerializer.Deserialize(Encoding.UTF8.GetString(this._messageHandlerStub.RequestContents[0]!));
+
+ if (optionValue is null)
+ {
+ Assert.False(optionsJson.TryGetProperty("parallel_tool_calls", out _));
+ }
+ else
+ {
+ Assert.Equal(optionValue, optionsJson.GetProperty("parallel_tool_calls").GetBoolean());
+ }
+ }
+
[Fact]
public async Task ItDoesNotChangeDefaultsForToolsAndChoiceIfNeitherOfFunctionCallingConfigurationsSetAsync()
{
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.ChatCompletion.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.ChatCompletion.cs
index 6627b7482fae..63d46c7c77e2 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.ChatCompletion.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.ChatCompletion.cs
@@ -90,6 +90,11 @@ protected override ChatCompletionOptions CreateChatCompletionOptions(
}
}
+ if (toolCallingConfig.Options?.AllowParallelCalls is not null)
+ {
+ options.AllowParallelToolCalls = toolCallingConfig.Options.AllowParallelCalls;
+ }
+
return options;
}
}
diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs
index 943e8e577b7d..80b2ad0331c2 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs
@@ -1371,6 +1371,58 @@ public async Task ItCreatesCorrectFunctionToolCallsWhenUsingRequiredFunctionChoi
Assert.Equal("required", optionsJson.GetProperty("tool_choice").ToString());
}
+ [Theory]
+ [InlineData("auto", true)]
+ [InlineData("auto", false)]
+ [InlineData("auto", null)]
+ [InlineData("required", true)]
+ [InlineData("required", false)]
+ [InlineData("required", null)]
+ public async Task ItPassesAllowParallelCallsOptionToLLMAsync(string choice, bool? optionValue)
+ {
+ // Arrange
+ var kernel = new Kernel();
+ kernel.Plugins.AddFromFunctions("TimePlugin", [
+ KernelFunctionFactory.CreateFromMethod(() => { }, "Date"),
+ KernelFunctionFactory.CreateFromMethod(() => { }, "Now")
+ ]);
+
+ var chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient);
+
+ using var response = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(File.ReadAllText("TestData/chat_completion_test_response.json")) };
+ this._messageHandlerStub.ResponseQueue.Enqueue(response);
+
+ var chatHistory = new ChatHistory();
+ chatHistory.AddUserMessage("Fake prompt");
+
+ var functionChoiceBehaviorOptions = new FunctionChoiceBehaviorOptions() { AllowParallelCalls = optionValue };
+
+ var executionSettings = new OpenAIPromptExecutionSettings()
+ {
+ FunctionChoiceBehavior = choice switch
+ {
+ "auto" => FunctionChoiceBehavior.Auto(options: functionChoiceBehaviorOptions),
+ "required" => FunctionChoiceBehavior.Required(options: functionChoiceBehaviorOptions),
+ _ => throw new ArgumentException("Invalid choice", nameof(choice))
+ }
+ };
+
+ // Act
+ await chatCompletion.GetChatMessageContentsAsync(chatHistory, executionSettings, kernel);
+
+ // Assert
+ var optionsJson = JsonSerializer.Deserialize(Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!));
+
+ if (optionValue is null)
+ {
+ Assert.False(optionsJson.TryGetProperty("parallel_tool_calls", out _));
+ }
+ else
+ {
+ Assert.Equal(optionValue, optionsJson.GetProperty("parallel_tool_calls").GetBoolean());
+ }
+ }
+
[Fact]
public async Task ItDoesNotChangeDefaultsForToolsAndChoiceIfNeitherOfFunctionCallingConfigurationsSetAsync()
{
diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs
index ff160b0dfcaf..7017ca1eb929 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs
@@ -468,7 +468,7 @@ protected virtual ChatCompletionOptions CreateChatCompletionOptions(
#pragma warning restore OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
EndUserId = executionSettings.User,
TopLogProbabilityCount = executionSettings.TopLogprobs,
- IncludeLogProbabilities = executionSettings.Logprobs,
+ IncludeLogProbabilities = executionSettings.Logprobs
};
var responseFormat = GetResponseFormat(executionSettings);
@@ -503,6 +503,11 @@ protected virtual ChatCompletionOptions CreateChatCompletionOptions(
}
}
+ if (toolCallingConfig.Options?.AllowParallelCalls is not null)
+ {
+ options.AllowParallelToolCalls = toolCallingConfig.Options.AllowParallelCalls;
+ }
+
return options;
}
diff --git a/dotnet/src/Functions/Functions.UnitTests/Yaml/PromptExecutionSettingsTypeConverterTests.cs b/dotnet/src/Functions/Functions.UnitTests/Yaml/PromptExecutionSettingsTypeConverterTests.cs
index d8c927393ca4..45334b1f39f4 100644
--- a/dotnet/src/Functions/Functions.UnitTests/Yaml/PromptExecutionSettingsTypeConverterTests.cs
+++ b/dotnet/src/Functions/Functions.UnitTests/Yaml/PromptExecutionSettingsTypeConverterTests.cs
@@ -294,6 +294,50 @@ public void ItShouldDeserializedNoneFunctionChoiceBehaviorFromYamlWithSpecifiedF
Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function3");
}
+ [Fact]
+ public void ItShouldDeserializeAutoFunctionChoiceBehaviorFromJsonWithOptions()
+ {
+ // Arrange
+ var yaml = """
+ function_choice_behavior:
+ type: auto
+ options:
+ allow_parallel_calls: true
+ allow_concurrent_invocation: true
+ """;
+
+ var executionSettings = this._deserializer.Deserialize(yaml);
+
+ // Act
+ var config = executionSettings!.FunctionChoiceBehavior!.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel });
+
+ // Assert
+ Assert.True(config.Options.AllowParallelCalls);
+ Assert.True(config.Options.AllowConcurrentInvocation);
+ }
+
+ [Fact]
+ public void ItShouldDeserializeRequiredFunctionChoiceBehaviorFromJsonWithOptions()
+ {
+ // Arrange
+ var yaml = """
+ function_choice_behavior:
+ type: required
+ options:
+ allow_parallel_calls: true
+ allow_concurrent_invocation: true
+ """;
+
+ var executionSettings = this._deserializer.Deserialize(yaml);
+
+ // Act
+ var config = executionSettings!.FunctionChoiceBehavior!.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel });
+
+ // Assert
+ Assert.True(config.Options.AllowParallelCalls);
+ Assert.True(config.Options.AllowConcurrentInvocation);
+ }
+
private readonly string _yaml = """
template_format: semantic-kernel
template: Say hello world to {{$name}} in {{$language}}
diff --git a/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletion_AutoFunctionChoiceBehaviorTests.cs b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletion_AutoFunctionChoiceBehaviorTests.cs
index 32321fb81da9..e3ecebadf687 100644
--- a/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletion_AutoFunctionChoiceBehaviorTests.cs
+++ b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletion_AutoFunctionChoiceBehaviorTests.cs
@@ -350,6 +350,53 @@ public async Task SpecifiedInCodeInstructsConnectorToInvokeKernelFunctionsAutoma
Assert.True(requestIndexLog.All((item) => item == 0)); // Assert that all functions called by the AI model were executed within the same initial request.
}
+ [Theory]
+ [InlineData(true)]
+ [InlineData(false)]
+ public async Task SpecifiedInCodeInstructsAIModelToCallFunctionInParallelOrSequentiallyAsync(bool callInParallel)
+ {
+ // Arrange
+ var requestIndexLog = new ConcurrentBag();
+
+ this._kernel.ImportPluginFromType();
+ this._kernel.ImportPluginFromFunctions("WeatherUtils", [KernelFunctionFactory.CreateFromMethod(() => "Rainy day magic!", "GetCurrentWeather")]);
+
+ var invokedFunctions = new ConcurrentBag();
+
+ this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) =>
+ {
+ requestIndexLog.Add(context.RequestSequenceIndex);
+ invokedFunctions.Add(context.Function.Name);
+
+ await next(context);
+ });
+
+ var settings = new AzureOpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(options: new() { AllowParallelCalls = callInParallel }) };
+
+ var chatHistory = new ChatHistory();
+ chatHistory.AddUserMessage("Give me today's date and weather.");
+
+ // Act
+ var result = await this._chatCompletionService.GetChatMessageContentAsync(chatHistory, settings, this._kernel);
+
+ // Assert
+ Assert.NotNull(result);
+
+ Assert.Contains("GetCurrentDate", invokedFunctions);
+ Assert.Contains("GetCurrentWeather", invokedFunctions);
+
+ if (callInParallel)
+ {
+ // Assert that all functions are called within the same initial request.
+ Assert.True(requestIndexLog.All((item) => item == 0));
+ }
+ else
+ {
+ // Assert that all functions are called in separate requests.
+ Assert.Equal([0, 1], requestIndexLog);
+ }
+ }
+
private Kernel InitializeKernel()
{
var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get();
diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_AutoFunctionChoiceBehaviorTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_AutoFunctionChoiceBehaviorTests.cs
index ab030369ab42..f98918d08eaf 100644
--- a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_AutoFunctionChoiceBehaviorTests.cs
+++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_AutoFunctionChoiceBehaviorTests.cs
@@ -347,6 +347,53 @@ public async Task SpecifiedInCodeInstructsConnectorToInvokeKernelFunctionsAutoma
Assert.True(requestIndexLog.All((item) => item == 0)); // Assert that all functions called by the AI model were executed within the same initial request.
}
+ [Theory]
+ [InlineData(true)]
+ [InlineData(false)]
+ public async Task SpecifiedInCodeInstructsAIModelToCallFunctionInParallelOrSequentiallyAsync(bool callInParallel)
+ {
+ // Arrange
+ var requestIndexLog = new ConcurrentBag();
+
+ this._kernel.ImportPluginFromType();
+ this._kernel.ImportPluginFromFunctions("WeatherUtils", [KernelFunctionFactory.CreateFromMethod(() => "Rainy day magic!", "GetCurrentWeather")]);
+
+ var invokedFunctions = new ConcurrentBag();
+
+ this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) =>
+ {
+ requestIndexLog.Add(context.RequestSequenceIndex);
+ invokedFunctions.Add(context.Function.Name);
+
+ await next(context);
+ });
+
+ var settings = new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(options: new() { AllowParallelCalls = callInParallel }) };
+
+ var chatHistory = new ChatHistory();
+ chatHistory.AddUserMessage("Give me today's date and weather.");
+
+ // Act
+ var result = await this._chatCompletionService.GetChatMessageContentAsync(chatHistory, settings, this._kernel);
+
+ // Assert
+ Assert.NotNull(result);
+
+ Assert.Contains("GetCurrentDate", invokedFunctions);
+ Assert.Contains("GetCurrentWeather", invokedFunctions);
+
+ if (callInParallel)
+ {
+ // Assert that all functions are called within the same initial request.
+ Assert.True(requestIndexLog.All((item) => item == 0));
+ }
+ else
+ {
+ // Assert that all functions are called in separate requests.
+ Assert.Equal([0, 1], requestIndexLog);
+ }
+ }
+
private Kernel InitializeKernel()
{
var openAIConfiguration = this._configuration.GetSection("OpenAI").Get();
diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/FunctionChoiceBehaviorOptions.cs b/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/FunctionChoiceBehaviorOptions.cs
index 870cc75616ec..ecb3988b9611 100644
--- a/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/FunctionChoiceBehaviorOptions.cs
+++ b/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/FunctionChoiceBehaviorOptions.cs
@@ -11,7 +11,17 @@ namespace Microsoft.SemanticKernel;
[Experimental("SKEXP0001")]
public sealed class FunctionChoiceBehaviorOptions
{
- /// Gets or sets whether multiple function invocations requested in parallel by the service may be invoked to run concurrently.
+ ///
+ /// Gets or sets whether AI model should prefer parallel function calls over sequential ones.
+ /// If set to true, instructs the model to call multiple functions in one request if the model supports parallel function calls.
+ /// Otherwise, it will send a request for each function call. If set to null, the AI model default value will be used.
+ ///
+ [JsonPropertyName("allow_parallel_calls")]
+ public bool? AllowParallelCalls { get; set; } = null;
+
+ ///
+ /// Gets or sets whether multiple function invocations requested in parallel by the service may be invoked to run concurrently.
+ ///
///
/// The default value is set to false. However, if the function invocations are safe to execute concurrently,
/// such as when the function does not modify shared state, this setting can be set to true.
diff --git a/dotnet/src/SemanticKernel.UnitTests/AI/FunctionChoiceBehaviors/FunctionChoiceBehaviorDeserializationTests.cs b/dotnet/src/SemanticKernel.UnitTests/AI/FunctionChoiceBehaviors/FunctionChoiceBehaviorDeserializationTests.cs
index 197640eca0f0..1d8b239f3ee2 100644
--- a/dotnet/src/SemanticKernel.UnitTests/AI/FunctionChoiceBehaviors/FunctionChoiceBehaviorDeserializationTests.cs
+++ b/dotnet/src/SemanticKernel.UnitTests/AI/FunctionChoiceBehaviors/FunctionChoiceBehaviorDeserializationTests.cs
@@ -270,6 +270,54 @@ public void ItShouldDeserializedNoneFunctionChoiceBehaviorFromJsonWithNotEmptyFu
Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function3");
}
+ [Fact]
+ public void ItShouldDeserializeAutoFunctionChoiceBehaviorFromJsonWithOptions()
+ {
+ // Arrange
+ var json = """
+ {
+ "type": "auto",
+ "options": {
+ "allow_parallel_calls": true,
+ "allow_concurrent_invocation": true
+ }
+ }
+ """;
+
+ var sut = JsonSerializer.Deserialize(json);
+
+ // Act
+ var config = sut!.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel });
+
+ // Assert
+ Assert.True(config.Options.AllowParallelCalls);
+ Assert.True(config.Options.AllowConcurrentInvocation);
+ }
+
+ [Fact]
+ public void ItShouldDeserializeRequiredFunctionChoiceBehaviorFromJsonWithOptions()
+ {
+ // Arrange
+ var json = """
+ {
+ "type": "required",
+ "options": {
+ "allow_parallel_calls": true,
+ "allow_concurrent_invocation": true
+ }
+ }
+ """;
+
+ var sut = JsonSerializer.Deserialize(json);
+
+ // Act
+ var config = sut!.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel });
+
+ // Assert
+ Assert.True(config.Options.AllowParallelCalls);
+ Assert.True(config.Options.AllowConcurrentInvocation);
+ }
+
private static KernelPlugin GetTestPlugin()
{
var function1 = KernelFunctionFactory.CreateFromMethod(() => { }, "Function1");