diff --git a/dotnet/SK-dotnet.sln b/dotnet/SK-dotnet.sln
index 6574700e6ce6..b6cd87d2040b 100644
--- a/dotnet/SK-dotnet.sln
+++ b/dotnet/SK-dotnet.sln
@@ -318,7 +318,9 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Redis.UnitTests"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Qdrant.UnitTests", "src\Connectors\Connectors.Qdrant.UnitTests\Connectors.Qdrant.UnitTests.csproj", "{E92AE954-8F3A-4A6F-A4F9-DC12017E5AAF}"
EndProject
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StepwisePlannerMigration", "samples\Demos\StepwisePlannerMigration\StepwisePlannerMigration.csproj", "{38374C62-0263-4FE8-A18C-70FC8132912B}"
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "StepwisePlannerMigration", "samples\Demos\StepwisePlannerMigration\StepwisePlannerMigration.csproj", "{38374C62-0263-4FE8-A18C-70FC8132912B}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AIModelRouter", "samples\Demos\AIModelRouter\AIModelRouter.csproj", "{E06818E3-00A5-41AC-97ED-9491070CDEA1}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
@@ -795,6 +797,12 @@ Global
{38374C62-0263-4FE8-A18C-70FC8132912B}.Publish|Any CPU.Build.0 = Debug|Any CPU
{38374C62-0263-4FE8-A18C-70FC8132912B}.Release|Any CPU.ActiveCfg = Release|Any CPU
{38374C62-0263-4FE8-A18C-70FC8132912B}.Release|Any CPU.Build.0 = Release|Any CPU
+ {E06818E3-00A5-41AC-97ED-9491070CDEA1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {E06818E3-00A5-41AC-97ED-9491070CDEA1}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {E06818E3-00A5-41AC-97ED-9491070CDEA1}.Publish|Any CPU.ActiveCfg = Debug|Any CPU
+ {E06818E3-00A5-41AC-97ED-9491070CDEA1}.Publish|Any CPU.Build.0 = Debug|Any CPU
+ {E06818E3-00A5-41AC-97ED-9491070CDEA1}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {E06818E3-00A5-41AC-97ED-9491070CDEA1}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
@@ -904,6 +912,7 @@ Global
{1D4667B9-9381-4E32-895F-123B94253EE8} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C}
{E92AE954-8F3A-4A6F-A4F9-DC12017E5AAF} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C}
{38374C62-0263-4FE8-A18C-70FC8132912B} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263}
+ {E06818E3-00A5-41AC-97ED-9491070CDEA1} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263}
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {FBDC56A3-86AD-4323-AA0F-201E59123B83}
diff --git a/dotnet/samples/Demos/AIModelRouter/AIModelRouter.csproj b/dotnet/samples/Demos/AIModelRouter/AIModelRouter.csproj
new file mode 100644
index 000000000000..fb5862e3270a
--- /dev/null
+++ b/dotnet/samples/Demos/AIModelRouter/AIModelRouter.csproj
@@ -0,0 +1,20 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFrameworks>net8.0;netstandard2.0</TargetFrameworks>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <Nullable>enable</Nullable>
+    <UserSecretsId>5ee045b0-aea3-4f08-8d31-32d1a6f8fed0</UserSecretsId>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <!-- Package and project references omitted -->
+  </ItemGroup>
+
+</Project>
diff --git a/dotnet/samples/Demos/AIModelRouter/CustomRouter.cs b/dotnet/samples/Demos/AIModelRouter/CustomRouter.cs
new file mode 100644
index 000000000000..ff2767a289c8
--- /dev/null
+++ b/dotnet/samples/Demos/AIModelRouter/CustomRouter.cs
@@ -0,0 +1,38 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+#pragma warning disable SKEXP0001
+#pragma warning disable SKEXP0010
+#pragma warning disable CA2249 // Consider using 'string.Contains' instead of 'string.IndexOf'
+
+namespace AIModelRouter;
+
+/// <summary>
+/// This class is for demonstration purposes only.
+/// In a real-world scenario, you would use a more sophisticated routing mechanism, such as another local model for
+/// deciding which service to use based on the user's input or any other criteria.
+/// </summary>
+public class CustomRouter()
+{
+ /// <summary>
+ /// Returns the best service id to use based on the user's input.
+ /// This demonstration uses simple logic: your input is checked for specific keywords as the deciding factor;
+ /// if no keyword is found, it defaults to the first service in the list.
+ /// </summary>
+ /// <param name="lookupPrompt">User's input prompt.</param>
+ /// <param name="serviceIds">List of service ids to choose from, in order of importance, defaulting to the first.</param>
+ /// <returns>Service id.</returns>
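+ /// <remarks>
+ /// For example, a prompt like "Ollama, what is Jupiter?" resolves to the "ollama" service id because it contains
+ /// that keyword, while a prompt containing none of the ids falls back to the first entry in <paramref name="serviceIds"/>.
+ /// </remarks>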
+ public string FindService(string lookupPrompt, IReadOnlyList<string> serviceIds)
+ {
+ // The order matters; if no keyword is found, the first service id is used.
+ foreach (var serviceId in serviceIds)
+ {
+ if (Contains(lookupPrompt, serviceId)) { return serviceId; }
+ }
+
+ return serviceIds[0];
+ }
+
+ // Ensure compatibility with both netstandard2.0 and net8.0 by using IndexOf instead of Contains
+ private static bool Contains(string prompt, string pattern)
+ => prompt.IndexOf(pattern, StringComparison.CurrentCultureIgnoreCase) >= 0;
+}
diff --git a/dotnet/samples/Demos/AIModelRouter/Program.cs b/dotnet/samples/Demos/AIModelRouter/Program.cs
new file mode 100644
index 000000000000..5bafa4934883
--- /dev/null
+++ b/dotnet/samples/Demos/AIModelRouter/Program.cs
@@ -0,0 +1,56 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using Microsoft.Extensions.Configuration;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.SemanticKernel;
+
+#pragma warning disable SKEXP0001
+#pragma warning disable SKEXP0010
+#pragma warning disable CA2249 // Consider using 'string.Contains' instead of 'string.IndexOf'
+
+namespace AIModelRouter;
+
+internal sealed partial class Program
+{
+ private static async Task Main(string[] args)
+ {
+ Console.ForegroundColor = ConsoleColor.White;
+
+ var config = new ConfigurationBuilder().AddUserSecrets<Program>().Build();
+
+ ServiceCollection services = new();
+
+ // Adding multiple connectors targeting different providers / models.
+ services.AddKernel() /* The LMStudio model is selected on the server side. */
+ .AddOpenAIChatCompletion(serviceId: "lmstudio", modelId: "N/A", endpoint: new Uri("http://localhost:1234"), apiKey: null)
+ .AddOpenAIChatCompletion(serviceId: "ollama", modelId: "phi3", endpoint: new Uri("http://localhost:11434"), apiKey: null)
+ .AddOpenAIChatCompletion(serviceId: "openai", modelId: "gpt-4o", apiKey: config["OpenAI:ApiKey"]!)
+
+ // Adding a custom filter to capture the router-selected service id
+ .Services.AddSingleton<IPromptRenderFilter>(new SelectedServiceFilter());
+
+ var kernel = services.BuildServiceProvider().GetRequiredService<Kernel>();
+ var router = new CustomRouter();
+
+ while (true)
+ {
+ Console.Write("\n\nUser > ");
+ var userMessage = Console.ReadLine();
+
+ // Exit application if the user enters an empty message
+ if (string.IsNullOrWhiteSpace(userMessage)) { return; }
+
+ // Find the best service to use based on the user's input
+ KernelArguments arguments = new(new PromptExecutionSettings()
+ {
+ ServiceId = router.FindService(userMessage, ["lmstudio", "ollama", "openai"])
+ });
+
+ // Invoke the prompt and print the response
+ await foreach (var chatChunk in kernel.InvokePromptStreamingAsync(userMessage, arguments).ConfigureAwait(false))
+ {
+ Console.Write(chatChunk);
+ }
+ }
+ }
+}
diff --git a/dotnet/samples/Demos/AIModelRouter/README.md b/dotnet/samples/Demos/AIModelRouter/README.md
new file mode 100644
index 000000000000..92ac37e7c81e
--- /dev/null
+++ b/dotnet/samples/Demos/AIModelRouter/README.md
@@ -0,0 +1,51 @@
+# AI Model Router
+
+This sample demonstrates how to implement an AI Model Router using Semantic Kernel connectors to direct requests to different AI models based on user input. The example integrates LMStudio, Ollama, and OpenAI, using the OpenAI Connector for LMStudio and Ollama because both expose OpenAI-compatible APIs.
+
+> [!IMPORTANT]
+> You can modify this sample to use any other combination of connectors or OpenAI-compatible model providers.
+
+## Semantic Kernel Features Used
+
+- [Chat Completion Service](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatCompletionService.cs) - Using the Chat Completion Service [OpenAI Connector implementation](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletion/OpenAIChatCompletionService.cs) to generate responses from the LLM.
+- [Filters](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatCompletionService.cs) - Used to capture the selected service id and log it to the console.
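+
+## How it works
+
+`Program.cs` registers several chat completion connectors, each under its own `serviceId`, and `CustomRouter` picks one of those ids by looking for keywords in the user's prompt. The selected id is passed to the kernel through `PromptExecutionSettings.ServiceId`, so the prompt is served by the matching connector. The snippet below is a condensed sketch of that flow taken from `Program.cs` (only two of the three connectors are shown, and the endpoints and model ids are just the sample defaults):
+
+```csharp
+// Register multiple connectors, each under a distinct service id.
+services.AddKernel()
+    .AddOpenAIChatCompletion(serviceId: "ollama", modelId: "phi3", endpoint: new Uri("http://localhost:11434"), apiKey: null)
+    .AddOpenAIChatCompletion(serviceId: "openai", modelId: "gpt-4o", apiKey: config["OpenAI:ApiKey"]!);
+
+// Ask the router for a service id, then pass it to the kernel via the execution settings.
+var serviceId = new CustomRouter().FindService(userMessage, ["ollama", "openai"]);
+KernelArguments arguments = new(new PromptExecutionSettings { ServiceId = serviceId });
+
+await foreach (var chunk in kernel.InvokePromptStreamingAsync(userMessage, arguments))
+{
+    Console.Write(chunk);
+}
+```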
+
+## Prerequisites
+
+- [.NET 8](https://dotnet.microsoft.com/download/dotnet/8.0).
+
+## Configuring the sample
+
+The sample can be configured from the command line with the .NET [Secret Manager](https://learn.microsoft.com/en-us/aspnet/core/security/app-secrets) to avoid leaking secrets into the repository, branches, and pull requests.
+
+### Using .NET [Secret Manager](https://learn.microsoft.com/en-us/aspnet/core/security/app-secrets)
+
+```powershell
+# OpenAI (used by the "openai" service id)
+dotnet user-secrets set "OpenAI:ApiKey" "... your api key ... "
+```
+
+## Running the sample
+
+After configuring the sample, build and run the console application by pressing `F5` in Visual Studio.
+
+To build and run the console application from the terminal, use the following commands:
+
+```powershell
+dotnet build
+dotnet run
+```
+
+### Example of a conversation
+
+> **User** > OpenAI, what is Jupiter? Keep it simple.
+
+> **Assistant** > Sure! Jupiter is the largest planet in our solar system. It's a gas giant, mostly made of hydrogen and helium, and it has a lot of storms, including the famous Great Red Spot. Jupiter also has at least 79 moons.
+
+> **User** > Ollama, what is Jupiter? Keep it simple.
+
+> **Assistant** > Jupiter is a giant planet in our solar system known for being the largest and most massive, famous for its spectacled clouds and dozens of moons including Ganymede which is bigger than Earth!
+
+> **User** > LMStudio, what is Jupiter? Keep it simple.
+
+> **Assistant** > Jupiter is the fifth planet from the Sun in our Solar System and one of its gas giants alongside Saturn, Uranus, and Neptune. It's famous for having a massive storm called the Great Red Spot that has been raging for hundreds of years.
\ No newline at end of file
diff --git a/dotnet/samples/Demos/AIModelRouter/SelectedServiceFilter.cs b/dotnet/samples/Demos/AIModelRouter/SelectedServiceFilter.cs
new file mode 100644
index 000000000000..9824d57ebd55
--- /dev/null
+++ b/dotnet/samples/Demos/AIModelRouter/SelectedServiceFilter.cs
@@ -0,0 +1,26 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using Microsoft.SemanticKernel;
+
+#pragma warning disable SKEXP0001
+#pragma warning disable SKEXP0010
+#pragma warning disable CA2249 // Consider using 'string.Contains' instead of 'string.IndexOf'
+
+namespace AIModelRouter;
+
+/// <summary>
+/// Using a filter to log the service being used for the prompt.
+/// </summary>
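+/// <remarks>
+/// Registered in Program.cs as an <see cref="IPromptRenderFilter"/> singleton, so it runs whenever a prompt is rendered.
+/// </remarks>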
+public class SelectedServiceFilter : IPromptRenderFilter
+{
+ /// <inheritdoc/>
+ public Task OnPromptRenderAsync(PromptRenderContext context, Func<PromptRenderContext, Task> next)
+ {
+ Console.ForegroundColor = ConsoleColor.Yellow;
+ Console.WriteLine($"Selected service id: '{context.Arguments.ExecutionSettings?.FirstOrDefault().Key}'");
+
+ Console.ForegroundColor = ConsoleColor.White;
+ Console.Write("Assistant > ");
+ return next(context);
+ }
+}