Skip to content

Commit

Permalink
.Net: Add Amazon's AWS Bedrock Connector (#7627)
Browse files Browse the repository at this point in the history
# Add Amazon's AWS Bedrock Connector

- Resolves #4335 

Adding support for Amazon AWS Connector to the Semantic Kernel.

---------

Co-authored-by: Charlize Yeh <[email protected]>
Co-authored-by: Charlize Yeh <[email protected]>
Co-authored-by: Roger Barreto <[email protected]>
  • Loading branch information
4 people authored Oct 31, 2024
1 parent 5e632fd commit 0edba1b
Show file tree
Hide file tree
Showing 67 changed files with 8,059 additions and 7 deletions.
3 changes: 3 additions & 0 deletions dotnet/Directory.Packages.props
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,9 @@
<ManagePackageVersionsCentrally>true</ManagePackageVersionsCentrally>
</PropertyGroup>
<ItemGroup>
<PackageVersion Include="AWSSDK.BedrockRuntime" Version="3.7.400" />
<PackageVersion Include="AWSSDK.Extensions.NETCore.Setup" Version="3.7.301" />
<PackageVersion Include="AWSSDK.Core" Version="3.7.400" />
<PackageVersion Include="Azure.AI.Inference" Version="1.0.0-beta.2" />
<PackageVersion Include="Dapr.Actors" Version="1.14.0" />
<PackageVersion Include="Dapr.Actors.AspNetCore" Version="1.14.0" />
Expand Down
27 changes: 27 additions & 0 deletions dotnet/SK-dotnet.sln
Original file line number Diff line number Diff line change
Expand Up @@ -317,6 +317,12 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "TimePlugin", "samples\Demos
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Memory.AzureCosmosDBNoSQL", "src\Connectors\Connectors.Memory.AzureCosmosDBNoSQL\Connectors.Memory.AzureCosmosDBNoSQL.csproj", "{B0B3901E-AF56-432B-8FAA-858468E5D0DF}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Connectors.Amazon", "src\Connectors\Connectors.Amazon\Connectors.Amazon.csproj", "{E059E9B0-1302-474D-B1B5-10A6E0F1A769}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AmazonBedrockAIModels", "samples\Demos\AmazonBedrockModels\AmazonBedrockAIModels.csproj", "{ABEAACCD-CF63-4850-8ED5-E01379DBFC46}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Connectors.Amazon.UnitTests", "src\Connectors\Connectors.Amazon.UnitTests\Connectors.Amazon.UnitTests.csproj", "{CCC6DC57-2AC1-4C8E-A448-2CC0537A288E}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Redis.UnitTests", "src\Connectors\Connectors.Redis.UnitTests\Connectors.Redis.UnitTests.csproj", "{1D4667B9-9381-4E32-895F-123B94253EE8}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Qdrant.UnitTests", "src\Connectors\Connectors.Qdrant.UnitTests\Connectors.Qdrant.UnitTests.csproj", "{E92AE954-8F3A-4A6F-A4F9-DC12017E5AAF}"
Expand Down Expand Up @@ -847,6 +853,24 @@ Global
{B0B3901E-AF56-432B-8FAA-858468E5D0DF}.Publish|Any CPU.Build.0 = Publish|Any CPU
{B0B3901E-AF56-432B-8FAA-858468E5D0DF}.Release|Any CPU.ActiveCfg = Release|Any CPU
{B0B3901E-AF56-432B-8FAA-858468E5D0DF}.Release|Any CPU.Build.0 = Release|Any CPU
{E059E9B0-1302-474D-B1B5-10A6E0F1A769}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{E059E9B0-1302-474D-B1B5-10A6E0F1A769}.Debug|Any CPU.Build.0 = Debug|Any CPU
{E059E9B0-1302-474D-B1B5-10A6E0F1A769}.Publish|Any CPU.ActiveCfg = Publish|Any CPU
{E059E9B0-1302-474D-B1B5-10A6E0F1A769}.Publish|Any CPU.Build.0 = Publish|Any CPU
{E059E9B0-1302-474D-B1B5-10A6E0F1A769}.Release|Any CPU.ActiveCfg = Release|Any CPU
{E059E9B0-1302-474D-B1B5-10A6E0F1A769}.Release|Any CPU.Build.0 = Release|Any CPU
{ABEAACCD-CF63-4850-8ED5-E01379DBFC46}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{ABEAACCD-CF63-4850-8ED5-E01379DBFC46}.Debug|Any CPU.Build.0 = Debug|Any CPU
{ABEAACCD-CF63-4850-8ED5-E01379DBFC46}.Publish|Any CPU.ActiveCfg = Publish|Any CPU
{ABEAACCD-CF63-4850-8ED5-E01379DBFC46}.Publish|Any CPU.Build.0 = Publish|Any CPU
{ABEAACCD-CF63-4850-8ED5-E01379DBFC46}.Release|Any CPU.ActiveCfg = Release|Any CPU
{ABEAACCD-CF63-4850-8ED5-E01379DBFC46}.Release|Any CPU.Build.0 = Release|Any CPU
{CCC6DC57-2AC1-4C8E-A448-2CC0537A288E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{CCC6DC57-2AC1-4C8E-A448-2CC0537A288E}.Debug|Any CPU.Build.0 = Debug|Any CPU
{CCC6DC57-2AC1-4C8E-A448-2CC0537A288E}.Publish|Any CPU.ActiveCfg = Debug|Any CPU
{CCC6DC57-2AC1-4C8E-A448-2CC0537A288E}.Publish|Any CPU.Build.0 = Debug|Any CPU
{CCC6DC57-2AC1-4C8E-A448-2CC0537A288E}.Release|Any CPU.ActiveCfg = Release|Any CPU
{CCC6DC57-2AC1-4C8E-A448-2CC0537A288E}.Release|Any CPU.Build.0 = Release|Any CPU
{1D4667B9-9381-4E32-895F-123B94253EE8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{1D4667B9-9381-4E32-895F-123B94253EE8}.Debug|Any CPU.Build.0 = Debug|Any CPU
{1D4667B9-9381-4E32-895F-123B94253EE8}.Publish|Any CPU.ActiveCfg = Debug|Any CPU
Expand Down Expand Up @@ -1151,6 +1175,9 @@ Global
{1D3EEB5B-0E06-4700-80D5-164956E43D0A} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263}
{F312FCE1-12D7-4DEF-BC29-2FF6618509F3} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263}
{B0B3901E-AF56-432B-8FAA-858468E5D0DF} = {24503383-A8C4-4255-9998-28D70FE8E99A}
{E059E9B0-1302-474D-B1B5-10A6E0F1A769} = {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1}
{ABEAACCD-CF63-4850-8ED5-E01379DBFC46} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263}
{CCC6DC57-2AC1-4C8E-A448-2CC0537A288E} = {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1}
{1D4667B9-9381-4E32-895F-123B94253EE8} = {5A7028A7-4DDF-4E4F-84A9-37CE8F8D7E89}
{E92AE954-8F3A-4A6F-A4F9-DC12017E5AAF} = {5A7028A7-4DDF-4E4F-84A9-37CE8F8D7E89}
{E7E60E1D-1A44-4DE9-A44D-D5052E809DDD} = {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1}
Expand Down
11 changes: 6 additions & 5 deletions dotnet/docs/EXPERIMENTS.md
Original file line number Diff line number Diff line change
Expand Up @@ -72,11 +72,12 @@ You can use the following diagnostic IDs to ignore warnings or errors for a part
| SKEXP0060 | Handlebars planner |
| SKEXP0060 | OpenAI Stepwise planner |
| | | | | | | |
| SKEXP0070 | Ollama AI connector |
| SKEXP0070 | Gemini AI connector |
| SKEXP0070 | Mistral AI connector |
| SKEXP0070 | ONNX AI connector |
| SKEXP0070 | Hugging Face AI connector |
| SKEXP0070 | Ollama AI connector | | | | | |
| SKEXP0070 | Gemini AI connector | | | | | |
| SKEXP0070 | Mistral AI connector | | | | | |
| SKEXP0070 | ONNX AI connector | | | | | |
| SKEXP0070 | Hugging Face AI connector | | | | | |
| SKEXP0070 | Amazon AI connector | | | | | |
| | | | | | | |
| SKEXP0080 | Process Framework |
| | | | | | | |
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net8.0</TargetFramework>
<Nullable>enable</Nullable>
<RootNamespace>AmazonBedrockAIModels</RootNamespace>
</PropertyGroup>
<PropertyGroup>
<!-- Suppress experimental-API warnings: SKEXP0001 (SK abstractions) and SKEXP0070 (AI connectors, incl. Amazon Bedrock). -->
<NoWarn>$(NoWarn);SKEXP0001;SKEXP0070</NoWarn>
</PropertyGroup>

<ItemGroup>
<PackageReference Include="AWSSDK.BedrockRuntime" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\..\src\Connectors\Connectors.Amazon\Connectors.Amazon.csproj"/>
</ItemGroup>

</Project>
272 changes: 272 additions & 0 deletions dotnet/samples/Demos/AmazonBedrockModels/Program.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,272 @@
// Copyright (c) Microsoft. All rights reserved.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.TextGeneration;

// Catalog of Bedrock models this demo can exercise, keyed by menu number.
Dictionary<int, ModelDefinition> bedrockModels = GetBedrockModels();

// Ask the user which demo scenario to run (1-4).
int choice = GetUserChoice();

// Dispatch to the selected scenario; any other value is a no-op,
// matching the original switch statement with no default case.
await (choice switch
{
    1 => PerformChatCompletion(),
    2 => PerformTextGeneration(),
    3 => PerformStreamChatCompletion(),
    4 => PerformStreamTextGeneration(),
    _ => Task.CompletedTask,
}).ConfigureAwait(false);

// Runs an interactive, non-streaming chat loop against a user-selected
// Bedrock chat-completion model. Loops until the user submits an empty prompt.
async Task PerformChatCompletion()
{
    string userInput;
    ChatHistory chatHistory = new();

    // Build {menu number -> model id} for models supporting chat completion.
    // Filtering the dictionary entries directly preserves the original keys and
    // avoids the O(n^2) Single()-by-name reverse lookup per model.
    var availableChatModels = bedrockModels
        .Where(kvp => kvp.Value.Modalities.Contains(ModelDefinition.SupportedModality.ChatCompletion))
        .ToDictionary(kvp => kvp.Key, kvp => kvp.Value.Name);

    // Show user what models are available and let them choose
    int chosenModel = GetModelNumber(availableChatModels, "chat completion");

    var kernel = Kernel.CreateBuilder().AddBedrockChatCompletionService(availableChatModels[chosenModel]).Build();
    var chatCompletionService = kernel.GetRequiredService<IChatCompletionService>();

    do
    {
        Console.Write("Enter a prompt (or leave empty to quit): ");
        userInput = Console.ReadLine() ?? string.Empty;

        if (!string.IsNullOrEmpty(userInput))
        {
            chatHistory.AddMessage(AuthorRole.User, userInput);
            var result = await chatCompletionService.GetChatMessageContentsAsync(chatHistory).ConfigureAwait(false);
            string output = "";
            foreach (var message in result)
            {
                output += message.Content;
                Console.WriteLine($"Chat Completion Answer: {message.Content}");
                Console.WriteLine();
            }

            // Record the assistant reply so the next turn has full context.
            chatHistory.AddMessage(AuthorRole.Assistant, output);
        }
    } while (!string.IsNullOrEmpty(userInput));
}

// Performs a single, non-streaming text-generation round trip with a
// user-selected Bedrock text model and prints the first returned content.
async Task PerformTextGeneration()
{
    // Build {menu number -> model id} for models supporting text generation.
    // Filtering entries directly preserves keys and avoids the O(n^2)
    // name-based reverse lookup of the original ToDictionary call.
    var availableTextGenerationModels = bedrockModels
        .Where(kvp => kvp.Value.Modalities.Contains(ModelDefinition.SupportedModality.TextCompletion))
        .ToDictionary(kvp => kvp.Key, kvp => kvp.Value.Name);

    // Show user what models are available and let them choose
    int chosenTextGenerationModel = GetModelNumber(availableTextGenerationModels, "text generation");

    Console.Write("Text Generation Prompt: ");
    string userTextPrompt = Console.ReadLine() ?? "";

    var kernel = Kernel.CreateBuilder().AddBedrockTextGenerationService(availableTextGenerationModels[chosenTextGenerationModel]).Build();

    var textGenerationService = kernel.GetRequiredService<ITextGenerationService>();
    var textGeneration = await textGenerationService.GetTextContentsAsync(userTextPrompt).ConfigureAwait(false);
    if (textGeneration.Count > 0)
    {
        var firstTextContent = textGeneration[0];
        if (firstTextContent != null)
        {
            Console.WriteLine("Text Generation Answer: " + firstTextContent.Text);
        }
        else
        {
            Console.WriteLine("Text Generation Answer: (none)");
        }
    }
    else
    {
        Console.WriteLine("Text Generation Answer: (No output text)");
    }
}

// Runs an interactive streaming chat loop: tokens are printed as they arrive
// from a user-selected, stream-capable Bedrock chat model.
async Task PerformStreamChatCompletion()
{
    string userInput;
    ChatHistory streamChatHistory = new();

    // Build {menu number -> model id} for stream-capable chat models.
    // Filtering entries directly preserves keys and avoids the O(n^2)
    // Single()-by-name reverse lookup per model.
    var availableStreamingChatModels = bedrockModels
        .Where(kvp => kvp.Value.Modalities.Contains(ModelDefinition.SupportedModality.ChatCompletion) && kvp.Value.CanStream)
        .ToDictionary(kvp => kvp.Key, kvp => kvp.Value.Name);

    // Show user what models are available and let them choose
    int chosenStreamChatCompletionModel = GetModelNumber(availableStreamingChatModels, "stream chat completion");

    var kernel = Kernel.CreateBuilder().AddBedrockChatCompletionService(availableStreamingChatModels[chosenStreamChatCompletionModel]).Build();
    var chatStreamCompletionService = kernel.GetRequiredService<IChatCompletionService>();

    do
    {
        Console.Write("Enter a prompt (or leave empty to quit): ");
        userInput = Console.ReadLine() ?? string.Empty;

        if (!string.IsNullOrEmpty(userInput))
        {
            streamChatHistory.AddMessage(AuthorRole.User, userInput);
            var result = chatStreamCompletionService.GetStreamingChatMessageContentsAsync(streamChatHistory).ConfigureAwait(false);
            string output = "";
            await foreach (var message in result)
            {
                Console.Write($"{message.Content}");
                output += message.Content;
            }

            Console.WriteLine();
            // Record the accumulated assistant reply for the next turn's context.
            streamChatHistory.AddMessage(AuthorRole.Assistant, output);
        }
    } while (!string.IsNullOrEmpty(userInput));
}

// Performs a single streaming text-generation request with a user-selected,
// stream-capable Bedrock text model, printing chunks as they arrive.
async Task PerformStreamTextGeneration()
{
    // Build {menu number -> model id} for stream-capable text models.
    // Filtering entries directly preserves keys and avoids the O(n^2)
    // Single()-by-name reverse lookup per model.
    var availableStreamingTextGenerationModels = bedrockModels
        .Where(kvp => kvp.Value.Modalities.Contains(ModelDefinition.SupportedModality.TextCompletion) && kvp.Value.CanStream)
        .ToDictionary(kvp => kvp.Key, kvp => kvp.Value.Name);

    // Show user what models are available and let them choose
    int chosenStreamTextGenerationModel = GetModelNumber(availableStreamingTextGenerationModels, "stream text generation");

    Console.Write("Stream Text Generation Prompt: ");
    string userStreamTextPrompt = Console.ReadLine() ?? "";

    var kernel = Kernel.CreateBuilder().AddBedrockTextGenerationService(availableStreamingTextGenerationModels[chosenStreamTextGenerationModel]).Build();

    var streamTextGenerationService = kernel.GetRequiredService<ITextGenerationService>();
    // ConfigureAwait(false) for consistency with every other await in this file
    // (the original used ConfigureAwait(true) here; a console app has no sync
    // context, so this does not change observable behavior).
    var streamTextGeneration = streamTextGenerationService.GetStreamingTextContentsAsync(userStreamTextPrompt).ConfigureAwait(false);
    await foreach (var textContent in streamTextGeneration)
    {
        Console.Write(textContent.Text);
    }

    Console.WriteLine();
}

// Prompts the user to pick one of the four demo scenarios; re-prompts until
// the input parses to an integer in the 1-4 range.
int GetUserChoice()
{
    // Display the available options
    Console.WriteLine("Choose an option:");
    Console.WriteLine("1. Chat Completion");
    Console.WriteLine("2. Text Generation");
    Console.WriteLine("3. Stream Chat Completion");
    Console.WriteLine("4. Stream Text Generation");

    int selection;
    Console.Write("Enter your choice (1-4): ");
    while (!int.TryParse(Console.ReadLine(), out selection) || selection is < 1 or > 4)
    {
        Console.WriteLine("Invalid input. Please enter a valid number from the list.");
        Console.Write("Enter your choice (1-4): ");
    }

    return selection;
}

// Lists the given {menu number -> model id} options for the named service type
// and loops until the user enters one of the listed keys.
int GetModelNumber(Dictionary<int, string> availableModels, string serviceType)
{
    // Display the model options
    Console.WriteLine($"Available {serviceType} models:");
    foreach (var (number, modelName) in availableModels)
    {
        Console.WriteLine($"{number}. {modelName}");
    }

    int selection;
    Console.Write($"Enter the number of the model you want to use for {serviceType}: ");
    while (!int.TryParse(Console.ReadLine(), out selection) || !availableModels.ContainsKey(selection))
    {
        Console.WriteLine("Invalid input. Please enter a valid number from the list.");
        Console.Write($"Enter the number of the model you want to use for {serviceType}: ");
    }

    return selection;
}

// Builds the catalog of Bedrock models this demo knows about, keyed by the
// menu number shown to the user.
Dictionary<int, ModelDefinition> GetBedrockModels()
{
    // Short aliases and a local factory keep each catalog row on one line.
    const ModelDefinition.SupportedModality Chat = ModelDefinition.SupportedModality.ChatCompletion;
    const ModelDefinition.SupportedModality Text = ModelDefinition.SupportedModality.TextCompletion;

    static ModelDefinition Model(string name, bool canStream, params ModelDefinition.SupportedModality[] modalities) =>
        new() { Modalities = [.. modalities], Name = name, CanStream = canStream };

    return new Dictionary<int, ModelDefinition>
    {
        { 1, Model("anthropic.claude-v2", true, Chat, Text) },
        { 2, Model("anthropic.claude-v2:1", true, Chat, Text) },
        { 3, Model("anthropic.claude-instant-v1", false, Chat, Text) },
        { 4, Model("anthropic.claude-3-sonnet-20240229-v1:0", false, Chat, Text) },
        { 5, Model("anthropic.claude-3-haiku-20240307-v1:0", false, Chat, Text) },
        { 6, Model("cohere.command-light-text-v14", false, Text) },
        { 7, Model("cohere.command-text-v14", false, Text) },
        { 8, Model("cohere.command-r-v1:0", true, Chat, Text) },
        { 9, Model("cohere.command-r-plus-v1:0", true, Chat, Text) },
        { 10, Model("ai21.jamba-instruct-v1:0", true, Chat, Text) },
        { 11, Model("ai21.j2-mid-v1", false, Text) },
        { 12, Model("ai21.j2-ultra-v1", false, Text) },
        { 13, Model("meta.llama3-8b-instruct-v1:0", true, Chat, Text) },
        { 14, Model("meta.llama3-70b-instruct-v1:0", true, Chat, Text) },
        { 15, Model("mistral.mistral-7b-instruct-v0:2", true, Chat, Text) },
        { 16, Model("mistral.mixtral-8x7b-instruct-v0:1", true, Chat, Text) },
        { 17, Model("mistral.mistral-large-2402-v1:0", true, Chat, Text) },
        { 18, Model("mistral.mistral-small-2402-v1:0", true, Chat, Text) },
        { 19, Model("amazon.titan-text-lite-v1", true, Chat, Text) },
        { 20, Model("amazon.titan-text-express-v1", true, Chat, Text) },
        { 21, Model("amazon.titan-text-premier-v1:0", true, Chat, Text) }
    };
}

/// <summary>
/// Describes one Bedrock model entry in the demo catalog: which services it
/// supports, its Bedrock model identifier, and whether it supports streaming.
/// </summary>
internal struct ModelDefinition
{
/// <summary>
/// List of services that the model supports (chat completion and/or text completion).
/// </summary>
internal List<SupportedModality> Modalities { get; set; }
/// <summary>
/// The Bedrock model identifier (e.g. "anthropic.claude-v2") passed to the connector.
/// </summary>
internal string Name { get; set; }
/// <summary>
/// If the model supports streaming responses.
/// </summary>
internal bool CanStream { get; set; }

/// <summary>
/// The services the model supports.
/// </summary>
internal enum SupportedModality
{
/// <summary>
/// Text completion service.
/// </summary>
TextCompletion,
/// <summary>
/// Chat completion service.
/// </summary>
ChatCompletion
}
}
Loading

0 comments on commit 0edba1b

Please sign in to comment.