From cc63d56895ceea00f45315453935969b61b1bf4b Mon Sep 17 00:00:00 2001 From: Chris <66376200+crickman@users.noreply.github.com> Date: Tue, 3 Sep 2024 17:33:08 -0700 Subject: [PATCH] .Net: OpenAI V2 & Assistants V2 - GA (#7151) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Accumulation of incremental changes that were reviewed separately. This change fundamentally grapples with updating the Azure AI OpenAI SDK from V1 to V2. This involves: - Including the OpenAI SDK dependency - Refactoring the OpenAI Connector - Introducing an Azure OpenAI Connector - Update the experimental `Agents.OpenAI` to support V2 assistant features (breaking change) --------- Signed-off-by: dependabot[bot] Co-authored-by: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com> Co-authored-by: SergeyMenshykh Co-authored-by: SergeyMenshykh <68852919+SergeyMenshykh@users.noreply.github.com> Co-authored-by: Dmytro Struk <13853051+dmytrostruk@users.noreply.github.com> Co-authored-by: Roger Barreto Co-authored-by: Dr. 
Artificial曾小健 <875100501@qq.com> Co-authored-by: westey <164392973+westey-m@users.noreply.github.com> Co-authored-by: Mark Wallace <127216156+markwallace-microsoft@users.noreply.github.com> Co-authored-by: Tao Chen Co-authored-by: Evan Mattson <35585003+moonbox3@users.noreply.github.com> Co-authored-by: Maurycy Markowski Co-authored-by: gparmigiani Co-authored-by: Atiqur Rahman Foyshal <113086917+atiq-bs23@users.noreply.github.com> Co-authored-by: Eduard van Valkenburg Co-authored-by: Andrew Desousa <33275002+andrewldesousa@users.noreply.github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Marcelo Garcia <129542431+MarceloAGG@users.noreply.github.com> Co-authored-by: Marcelo Garcia 🛸 Co-authored-by: Eirik Tsarpalis --- .github/_typos.toml | 1 + .github/workflows/dotnet-build-and-test.yml | 9 +- dotnet/Directory.Build.props | 7 +- dotnet/Directory.Packages.props | 5 +- dotnet/SK-dotnet.sln | 27 + dotnet/docs/OPENAI-CONNECTOR-MIGRATION.md | 196 ++ dotnet/nuget/nuget-package.props | 5 +- .../ChatCompletion_FunctionTermination.cs | 35 +- .../Agents/ChatCompletion_Streaming.cs | 23 +- .../Agents/ComplexChat_NestedShopper.cs | 22 +- .../Concepts/Agents/Legacy_AgentAuthoring.cs | 12 +- .../Concepts/Agents/Legacy_AgentCharts.cs | 49 +- .../Agents/Legacy_AgentCollaboration.cs | 25 +- .../Concepts/Agents/Legacy_AgentDelegation.cs | 16 +- .../Concepts/Agents/Legacy_AgentTools.cs | 55 +- .../samples/Concepts/Agents/Legacy_Agents.cs | 29 +- .../Concepts/Agents/MixedChat_Agents.cs | 20 +- .../Concepts/Agents/MixedChat_Files.cs | 51 +- .../Concepts/Agents/MixedChat_Images.cs | 42 +- .../Concepts/Agents/MixedChat_Reset.cs | 22 +- .../Agents/OpenAIAssistant_ChartMaker.cs | 38 +- .../OpenAIAssistant_FileManipulation.cs | 54 +- .../Agents/OpenAIAssistant_FileService.cs | 7 +- .../Agents/OpenAIAssistant_Retrieval.cs | 70 - .../AzureOpenAIWithData_ChatCompletion.cs | 27 +- .../ChatCompletion/ChatHistoryAuthorName.cs | 1 + 
.../ChatCompletion/OpenAI_ChatCompletion.cs | 1 + .../OpenAI_ChatCompletionMultipleChoices.cs | 133 -- .../OpenAI_ChatCompletionStreaming.cs | 1 + ..._ChatCompletionStreamingMultipleChoices.cs | 114 - .../OpenAI_CustomAzureOpenAIClient.cs | 10 +- dotnet/samples/Concepts/Concepts.csproj | 19 +- .../DependencyInjection/Kernel_Injecting.cs | 2 +- .../Functions/FunctionResult_StronglyTyped.cs | 8 +- .../Memory/TextChunkingAndEmbedding.cs | 2 +- ...ugin_RecallJsonSerializationWithOptions.cs | 2 +- .../VectorStore_DataIngestion_CustomMapper.cs | 2 +- .../VectorStore_DataIngestion_MultiStore.cs | 2 +- .../VectorStore_DataIngestion_Simple.cs | 2 +- .../Planners/AutoFunctionCallingPlanning.cs | 4 +- .../Resources/Plugins/LegacyMenuPlugin.cs | 25 - .../Concepts/Resources/Plugins/MenuPlugin.cs | 34 - .../OpenAI_TextGenerationStreaming.cs | 9 +- .../BookingRestaurant.csproj | 2 +- .../sk-chatgpt-azure-function.csproj | 2 +- .../Demos/CreateChatGptPlugin/README.md | 23 +- .../Solution/CreateChatGptPlugin.csproj | 6 +- .../config/KernelBuilderExtensions.cs | 47 +- .../FunctionInvocationApproval.csproj | 2 +- .../{AzureOpenAI.cs => AzureOpenAIOptions.cs} | 2 +- .../samples/Demos/HomeAutomation/Program.cs | 17 +- .../AutoFunctionCallingController.cs | 5 + .../Controllers/StepwisePlannerController.cs | 5 + .../Extensions/ConfigurationExtensions.cs | 1 + .../Plugins/TimePlugin.cs | 5 + .../Plugins/WeatherPlugin.cs | 4 + .../Demos/StepwisePlannerMigration/Program.cs | 5 + .../Services/IPlanProvider.cs | 4 + .../Services/PlanProvider.cs | 6 + .../StepwisePlannerMigration.csproj | 1 - .../TelemetryWithAppInsights.csproj | 2 +- .../GettingStarted/GettingStarted.csproj | 4 +- .../Step4_Dependency_Injection.cs | 2 +- .../GettingStartedWithAgents.csproj | 19 +- .../GettingStartedWithAgents/README.md | 18 +- .../Resources/cat.jpg | Bin 0 -> 37831 bytes .../Resources/employees.pdf | Bin 0 -> 43422 bytes .../{Step1_Agent.cs => Step01_Agent.cs} | 14 +- .../{Step2_Plugins.cs => 
Step02_Plugins.cs} | 35 +- .../{Step3_Chat.cs => Step03_Chat.cs} | 14 +- ....cs => Step04_KernelFunctionStrategies.cs} | 21 +- ...ep5_JsonResult.cs => Step05_JsonResult.cs} | 21 +- ...ction.cs => Step06_DependencyInjection.cs} | 49 +- .../{Step7_Logging.cs => Step07_Logging.cs} | 16 +- ...OpenAIAssistant.cs => Step08_Assistant.cs} | 58 +- .../Step09_Assistant_Vision.cs | 74 + .../Step10_AssistantTool_CodeInterpreter.cs} | 32 +- .../Step11_AssistantTool_FileSearch.cs | 83 + .../LearnResources/LearnResources.csproj | 3 +- .../MicrosoftLearn/AIServices.cs | 14 - .../src/Agents/Abstractions/AgentChannel.cs | 8 + dotnet/src/Agents/Abstractions/AgentChat.cs | 2 +- .../Agents/Abstractions/AggregatorChannel.cs | 2 + .../Logging/AgentChatLogMessages.cs | 2 +- dotnet/src/Agents/Core/ChatCompletionAgent.cs | 8 +- .../ChatHistorySummarizationReducer.cs | 6 +- dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj | 3 +- .../OpenAI/Extensions/AuthorRoleExtensions.cs | 2 +- .../Extensions/KernelFunctionExtensions.cs | 16 +- .../AddHeaderRequestPolicy.cs | 2 +- .../Internal/AssistantMessageFactory.cs | 64 + .../Internal/AssistantRunOptionsFactory.cs | 53 + .../{ => Internal}/AssistantThreadActions.cs | 186 +- .../Internal/AssistantToolResourcesFactory.cs | 51 + .../AssistantThreadActionsLogMessages.cs | 3 +- .../src/Agents/OpenAI/OpenAIAssistantAgent.cs | 297 +-- .../Agents/OpenAI/OpenAIAssistantChannel.cs | 9 +- .../OpenAI/OpenAIAssistantConfiguration.cs | 91 - .../OpenAI/OpenAIAssistantDefinition.cs | 71 +- .../OpenAI/OpenAIAssistantExecutionOptions.cs | 38 + .../OpenAIAssistantInvocationOptions.cs | 88 + .../src/Agents/OpenAI/OpenAIClientProvider.cs | 172 ++ .../OpenAI/OpenAIThreadCreationOptions.cs | 37 + dotnet/src/Agents/OpenAI/RunPollingOptions.cs | 57 + .../src/Agents/UnitTests/AgentChannelTests.cs | 27 +- dotnet/src/Agents/UnitTests/AgentChatTests.cs | 60 +- .../Agents/UnitTests/Agents.UnitTests.csproj | 4 +- .../Agents/UnitTests/AggregatorAgentTests.cs | 24 +- 
.../UnitTests/Core/AgentGroupChatTests.cs | 28 + .../Core/Chat/AgentGroupChatSettingsTests.cs | 7 + .../AggregatorTerminationStrategyTests.cs | 41 +- .../KernelFunctionSelectionStrategyTests.cs | 78 +- .../KernelFunctionTerminationStrategyTests.cs | 23 +- .../Chat/RegExTerminationStrategyTests.cs | 20 +- .../Chat/SequentialSelectionStrategyTests.cs | 57 +- .../Core/ChatCompletionAgentTests.cs | 71 +- .../UnitTests/Core/ChatHistoryChannelTests.cs | 22 +- .../ChatHistoryReducerExtensionsTests.cs | 39 +- .../ChatHistorySummarizationReducerTests.cs | 75 +- .../ChatHistoryTruncationReducerTests.cs | 49 +- .../Extensions/ChatHistoryExtensionsTests.cs | 4 + .../UnitTests/Internal/BroadcastQueueTests.cs | 31 +- .../UnitTests/Internal/KeyEncoderTests.cs | 5 +- dotnet/src/Agents/UnitTests/MockAgent.cs | 2 +- .../UnitTests/OpenAI/AssertCollection.cs | 46 + .../Azure/AddHeaderRequestPolicyTests.cs | 7 +- .../Extensions/AuthorRoleExtensionsTests.cs | 5 +- .../Extensions/KernelExtensionsTests.cs | 6 + .../KernelFunctionExtensionsTests.cs | 20 +- .../Internal/AssistantMessageFactoryTests.cs | 210 ++ .../AssistantRunOptionsFactoryTests.cs | 139 ++ .../OpenAI/OpenAIAssistantAgentTests.cs | 610 +++-- .../OpenAIAssistantConfigurationTests.cs | 61 - .../OpenAI/OpenAIAssistantDefinitionTests.cs | 85 +- .../OpenAIAssistantInvocationOptionsTests.cs | 100 + .../OpenAI/OpenAIClientProviderTests.cs | 86 + .../OpenAIThreadCreationOptionsTests.cs | 75 + .../OpenAI/RunPollingOptionsTests.cs | 71 + .../.editorconfig | 6 + .../AzureOpenAITestHelper.cs | 30 + .../Connectors.AzureOpenAI.UnitTests.csproj | 47 + ...AzureOpenAIKernelBuilderExtensionsTests.cs | 189 ++ ...eOpenAIServiceCollectionExtensionsTests.cs | 189 ++ .../AzureOpenAIAudioToTextServiceTests.cs | 66 +- .../AzureOpenAIChatCompletionServiceTests.cs | 397 +++- ...enAITextEmbeddingGenerationServiceTests.cs | 103 + .../AzureOpenAITextToAudioServiceTests.cs | 215 ++ .../AzureOpenAITextToImageServiceTests.cs | 151 ++ 
...AzureOpenAIPromptExecutionSettingsTests.cs | 294 +++ .../OpenAIPromptExecutionSettingsTests.cs | 63 + ...multiple_function_calls_test_response.json | 0 ...on_single_function_call_test_response.json | 0 ...letion_streaming_async_filter_response.txt | 13 + ..._multiple_function_calls_test_response.txt | 0 ...ing_single_function_call_test_response.txt | 0 ...hat_completion_streaming_test_response.txt | 0 .../chat_completion_test_response.json | 0 ...tion_with_data_streaming_test_response.txt | 0 ...at_completion_with_data_test_response.json | 0 ...multiple_function_calls_test_response.json | 4 +- ..._multiple_function_calls_test_response.txt | 5 + .../text-embeddings-multiple-response.txt | 20 + .../TestData/text-embeddings-response.txt | 15 + .../TestData/text-to-image-response.txt | 9 + ...ext_completion_streaming_test_response.txt | 0 .../text_completion_test_response.json | 0 .../Connectors.AzureOpenAI.csproj | 38 + .../Core/AzureClientCore.ChatCompletion.cs | 93 + .../Core/AzureClientCore.cs} | 87 +- .../AzureOpenAIKernelBuilderExtensions.cs | 524 +++++ .../AzureOpenAIServiceCollectionExtensions.cs | 496 ++++ .../AzureOpenAIAudioToTextService.cs | 32 +- .../AzureOpenAIChatCompletionService.cs | 36 +- ...ureOpenAITextEmbeddingGenerationService.cs | 30 +- .../AzureOpenAITextToAudioService.cs | 40 +- .../Services/AzureOpenAITextToImageService.cs | 136 ++ .../AzureOpenAIPromptExecutionSettings.cs | 105 + .../Connectors.OpenAI.UnitTests.csproj | 88 + .../AutoFunctionInvocationFilterTests.cs | 178 +- .../Core/ClientCoreTests.cs | 243 ++ .../Core}/OpenAIChatMessageContentTests.cs | 36 +- .../Core}/OpenAIFunctionTests.cs | 44 +- .../Core}/OpenAIFunctionToolCallTests.cs | 16 +- ...ithDataStreamingChatMessageContentTests.cs | 138 ++ .../Extensions}/ChatHistoryExtensionsTests.cs | 2 +- .../KernelBuilderExtensionsTests.cs | 163 ++ .../KernelFunctionMetadataExtensionsTests.cs | 20 +- .../OpenAIPluginCollectionExtensionsTests.cs | 10 +- 
.../ServiceCollectionExtensionsTests.cs | 164 ++ .../OpenAIAudioToTextServiceTests.cs | 36 +- .../OpenAIChatCompletionServiceTests.cs | 587 ++++- .../Services}/OpenAIFileServiceTests.cs | 3 +- ...enAITextEmbeddingGenerationServiceTests.cs | 134 ++ .../OpenAITextToAudioServiceTests.cs | 86 +- .../Services/OpenAITextToImageServiceTests.cs | 76 + ...OpenAIAudioToTextExecutionSettingsTests.cs | 16 +- .../OpenAIPromptExecutionSettingsTests.cs | 28 +- ...OpenAITextToAudioExecutionSettingsTests.cs | 2 +- ...letion_invalid_streaming_test_response.txt | 5 + ...multiple_function_calls_test_response.json | 64 + ...on_single_function_call_test_response.json | 32 + ..._multiple_function_calls_test_response.txt | 9 + ...ing_single_function_call_test_response.txt | 3 + ...hat_completion_streaming_test_response.txt | 5 + .../chat_completion_test_response.json | 22 + ...tion_with_data_streaming_test_response.txt | 1 + ...at_completion_with_data_test_response.json | 28 + ...multiple_function_calls_test_response.json | 40 + ..._multiple_function_calls_test_response.txt | 5 + .../text-embeddings-multiple-response.txt | 20 + .../TestData/text-embeddings-response.txt | 15 + .../TestData/text-to-image-response.txt | 8 + .../ToolCallBehaviorTests.cs | 73 +- .../AzureSdk/AddHeaderRequestPolicy.cs | 20 - .../AzureSdk/AzureOpenAITextToAudioClient.cs | 141 -- .../AzureOpenAIWithDataChatMessageContent.cs | 69 - ...enAIWithDataStreamingChatMessageContent.cs | 49 - .../Connectors.OpenAI/AzureSdk/ClientCore.cs | 1591 ------------- .../AzureSdk/CustomHostPipelinePolicy.cs | 23 - .../AzureSdk/OpenAIClientCore.cs | 106 - .../AzureSdk/OpenAIStreamingTextContent.cs | 51 - .../AzureSdk/OpenAITextToAudioClient.cs | 128 -- ...AzureOpenAIChatCompletionWithDataConfig.cs | 53 - ...zureOpenAIChatCompletionWithDataService.cs | 305 --- .../ChatWithDataMessage.cs | 18 - .../ChatWithDataRequest.cs | 71 - .../ChatWithDataResponse.cs | 57 - .../ChatWithDataStreamingResponse.cs | 64 - .../Connectors.OpenAI.csproj | 
15 +- .../Core/ClientCore.AudioToText.cs | 83 + .../Core/ClientCore.ChatCompletion.cs | 1183 ++++++++++ .../Core/ClientCore.Embeddings.cs | 58 + .../Core/ClientCore.TextToAudio.cs | 72 + .../Core/ClientCore.TextToImage.cs | 50 + .../Connectors.OpenAI/Core/ClientCore.cs | 249 ++ .../OpenAIChatMessageContent.cs | 47 +- .../{AzureSdk => Core}/OpenAIFunction.cs | 26 +- .../OpenAIFunctionToolCall.cs | 52 +- .../OpenAIStreamingChatMessageContent.cs | 67 +- .../OpenAITextToImageClientCore.cs | 114 - .../ChatHistoryExtensions.cs | 6 +- .../OpenAIKernelBuilderExtensions.cs | 371 +++ .../OpenAIKernelFunctionMetadataExtensions.cs | 2 +- .../OpenAIMemoryBuilderExtensions.cs | 44 + .../OpenAIPluginCollectionExtensions.cs | 4 +- .../OpenAIServiceCollectionExtensions.cs | 345 +++ .../{Files => Models}/OpenAIFilePurpose.cs | 2 + .../{Files => Models}/OpenAIFileReference.cs | 2 + .../OpenAIMemoryBuilderExtensions.cs | 111 - .../OpenAIServiceCollectionExtensions.cs | 2042 ----------------- .../OpenAIAudioToTextService.cs | 37 +- .../OpenAIChatCompletionService.cs | 81 +- .../{Files => Services}/OpenAIFileService.cs | 5 +- .../OpenAITextEmbbedingGenerationService.cs} | 30 +- .../OpenAITextToAudioService.cs | 14 +- .../Services/OpenAITextToImageService.cs | 45 + .../OpenAIAudioToTextExecutionSettings.cs | 24 +- .../OpenAIFileUploadExecutionSettings.cs | 3 + .../OpenAIPromptExecutionSettings.cs | 141 +- .../OpenAITextToAudioExecutionSettings.cs | 24 +- .../AzureOpenAITextGenerationService.cs | 97 - .../OpenAITextGenerationService.cs | 77 - .../TextToAudio/TextToAudioRequest.cs | 26 - .../AzureOpenAITextToImageService.cs | 212 -- .../TextToImage/OpenAITextToImageService.cs | 117 - .../TextToImage/TextToImageRequest.cs | 42 - .../TextToImage/TextToImageResponse.cs | 44 - .../Connectors.OpenAI/ToolCallBehavior.cs | 60 +- .../Connectors.UnitTests.csproj | 42 +- .../MultipleHttpMessageHandlerStub.cs | 53 - .../OpenAI/AIServicesOpenAIExtensionsTests.cs | 88 - 
...reOpenAIWithDataChatMessageContentTests.cs | 120 - ...ithDataStreamingChatMessageContentTests.cs | 61 - .../OpenAIStreamingTextContentTests.cs | 42 - .../RequestFailedExceptionExtensionsTests.cs | 78 - .../AzureOpenAIChatCompletionWithDataTests.cs | 201 -- .../OpenAIMemoryBuilderExtensionsTests.cs | 66 - .../OpenAIServiceCollectionExtensionsTests.cs | 746 ------ .../OpenAI/OpenAITestHelper.cs | 20 - ..._multiple_function_calls_test_response.txt | 5 - ...enAITextEmbeddingGenerationServiceTests.cs | 188 -- ...enAITextEmbeddingGenerationServiceTests.cs | 164 -- .../AzureOpenAITextGenerationServiceTests.cs | 210 -- .../OpenAITextGenerationServiceTests.cs | 113 - .../AzureOpenAITextToAudioServiceTests.cs | 130 -- .../AzureOpenAITextToImageTests.cs | 174 -- .../OpenAITextToImageServiceTests.cs | 89 - .../Agents/Experimental.Agents.csproj | 2 +- .../Agents/Extensions/OpenAIRestExtensions.cs | 3 +- .../Experimental/Agents/Internal/ChatRun.cs | 18 +- ...Orchestration.Flow.IntegrationTests.csproj | 2 +- .../PromptTemplates.Handlebars.csproj | 4 + .../Functions.Prompty.UnitTests.csproj | 2 +- .../PromptyTest.cs | 4 +- .../TestData/chatJsonObject.prompty | 1 + .../Functions.UnitTests.csproj | 2 +- .../OpenApi/RestApiOperationTests.cs | 7 +- .../Functions.Yaml/Functions.Yaml.csproj | 4 + .../Agents/ChatCompletionAgentTests.cs | 17 +- .../Agents/OpenAIAssistantAgentTests.cs | 38 +- .../AzureOpenAIAudioToTextTests.cs | 52 + .../AzureOpenAIChatCompletionTests.cs | 273 +++ ...enAIChatCompletion_FunctionCallingTests.cs | 942 ++++++++ ...eOpenAIChatCompletion_NonStreamingTests.cs | 172 ++ ...zureOpenAIChatCompletion_StreamingTests.cs | 173 ++ .../AzureOpenAITextEmbeddingTests.cs | 71 + .../AzureOpenAITextToAudioTests.cs | 44 + .../AzureOpenAITextToImageTests.cs | 41 + .../Connectors/OpenAI/AIServiceType.cs | 19 - .../Connectors/OpenAI/ChatHistoryTests.cs | 149 -- .../OpenAI/OpenAIAudioToTextTests.cs | 33 +- .../OpenAI/OpenAIChatCompletionTests.cs | 270 +++ 
...nAIChatCompletion_FunctionCallingTests.cs} | 493 ++-- .../OpenAIChatCompletion_NonStreamingTests.cs | 169 ++ .../OpenAIChatCompletion_StreamingTests.cs | 176 ++ .../OpenAI/OpenAICompletionTests.cs | 668 ------ .../OpenAI/OpenAIFileServiceTests.cs | 21 +- .../OpenAI/OpenAITextEmbeddingTests.cs | 47 +- .../OpenAI/OpenAITextToAudioTests.cs | 28 +- .../OpenAI/OpenAITextToImageTests.cs | 46 +- .../PromptWithChatRolesStreamingTest.json | 21 + .../Data/PromptWithChatRolesTest.json | 5 - ...PromptWithComplexObjectsStreamingTest.json | 13 + .../Data/PromptWithComplexObjectsTest.json | 5 - ...romptWithHelperFunctionsStreamingTest.json | 17 + .../Data/PromptWithHelperFunctionsTest.json | 5 - ...PromptWithSimpleVariableStreamingTest.json | 13 + .../Data/PromptWithSimpleVariableTest.json | 5 - .../Data/SimplePromptStreamingTest.json | 13 + .../CrossLanguage/Data/SimplePromptTest.json | 5 - .../CrossLanguage/KernelRequestTracer.cs | 8 +- .../CrossLanguage/PromptWithChatRolesTest.cs | 10 +- .../PromptWithComplexObjectsTest.cs | 10 +- .../PromptWithHelperFunctionsTest.cs | 5 +- .../PromptWithSimpleVariableTest.cs | 5 +- .../CrossLanguage/SimplePromptTest.cs | 5 +- .../CrossLanguage/YamlPromptTest.cs | 6 +- .../IntegrationTests/IntegrationTests.csproj | 30 +- .../Handlebars/HandlebarsPlannerTests.cs | 41 +- dotnet/src/IntegrationTests/PromptTests.cs | 10 +- .../serializedChatHistoryV1_15_1.json | 125 + dotnet/src/IntegrationTests/TestHelpers.cs | 10 + dotnet/src/IntegrationTests/testsettings.json | 14 +- .../ClientResultExceptionExtensions.cs} | 19 +- .../openai/OpenAIUtilities.props | 5 + .../Policies/GeneratedActionPipelinePolicy.cs | 39 + .../samples/AgentUtilities/BaseAgentsTest.cs | 129 ++ .../samples/SamplesInternalUtilities.props | 5 +- .../src/Diagnostics/ModelDiagnostics.cs | 22 +- .../test/AssertExtensions.cs | 2 +- .../InternalUtilities/test/MoqExtensions.cs | 22 + .../AI/TextToImage/ITextToImageService.cs | 2 +- .../Contents/AnnotationContent.cs | 2 +- 
.../Contents/FileReferenceContent.cs | 2 +- .../SemanticKernel.Abstractions.csproj | 4 + .../SemanticKernel.Core.csproj | 4 + .../SemanticKernel.MetaPackage.csproj | 7 +- .../ClientResultExceptionExtensionsTests.cs | 72 + .../Functions/KernelBuilderTests.cs | 7 +- .../SemanticKernel.UnitTests.csproj | 4 +- .../GenericActionPipelinePolicyTests.cs | 39 + .../Utilities/OpenAI/MockPipelineResponse.cs | 151 ++ .../Utilities/OpenAI/MockResponseHeaders.cs | 32 + python/Makefile | 2 +- 364 files changed, 15290 insertions(+), 12136 deletions(-) create mode 100644 dotnet/docs/OPENAI-CONNECTOR-MIGRATION.md delete mode 100644 dotnet/samples/Concepts/Agents/OpenAIAssistant_Retrieval.cs delete mode 100644 dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionMultipleChoices.cs delete mode 100644 dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreamingMultipleChoices.cs delete mode 100644 dotnet/samples/Concepts/Resources/Plugins/MenuPlugin.cs rename dotnet/samples/Demos/HomeAutomation/Options/{AzureOpenAI.cs => AzureOpenAIOptions.cs} (91%) create mode 100644 dotnet/samples/GettingStartedWithAgents/Resources/cat.jpg create mode 100644 dotnet/samples/GettingStartedWithAgents/Resources/employees.pdf rename dotnet/samples/GettingStartedWithAgents/{Step1_Agent.cs => Step01_Agent.cs} (76%) rename dotnet/samples/GettingStartedWithAgents/{Step2_Plugins.cs => Step02_Plugins.cs} (76%) rename dotnet/samples/GettingStartedWithAgents/{Step3_Chat.cs => Step03_Chat.cs} (86%) rename dotnet/samples/GettingStartedWithAgents/{Step4_KernelFunctionStrategies.cs => Step04_KernelFunctionStrategies.cs} (85%) rename dotnet/samples/GettingStartedWithAgents/{Step5_JsonResult.cs => Step05_JsonResult.cs} (79%) rename dotnet/samples/GettingStartedWithAgents/{Step6_DependencyInjection.cs => Step06_DependencyInjection.cs} (65%) rename dotnet/samples/GettingStartedWithAgents/{Step7_Logging.cs => Step07_Logging.cs} (86%) rename dotnet/samples/GettingStartedWithAgents/{Step8_OpenAIAssistant.cs 
=> Step08_Assistant.cs} (57%) create mode 100644 dotnet/samples/GettingStartedWithAgents/Step09_Assistant_Vision.cs rename dotnet/samples/{Concepts/Agents/OpenAIAssistant_CodeInterpreter.cs => GettingStartedWithAgents/Step10_AssistantTool_CodeInterpreter.cs} (50%) create mode 100644 dotnet/samples/GettingStartedWithAgents/Step11_AssistantTool_FileSearch.cs rename dotnet/src/Agents/OpenAI/{Azure => Internal}/AddHeaderRequestPolicy.cs (87%) create mode 100644 dotnet/src/Agents/OpenAI/Internal/AssistantMessageFactory.cs create mode 100644 dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs rename dotnet/src/Agents/OpenAI/{ => Internal}/AssistantThreadActions.cs (70%) create mode 100644 dotnet/src/Agents/OpenAI/Internal/AssistantToolResourcesFactory.cs delete mode 100644 dotnet/src/Agents/OpenAI/OpenAIAssistantConfiguration.cs create mode 100644 dotnet/src/Agents/OpenAI/OpenAIAssistantExecutionOptions.cs create mode 100644 dotnet/src/Agents/OpenAI/OpenAIAssistantInvocationOptions.cs create mode 100644 dotnet/src/Agents/OpenAI/OpenAIClientProvider.cs create mode 100644 dotnet/src/Agents/OpenAI/OpenAIThreadCreationOptions.cs create mode 100644 dotnet/src/Agents/OpenAI/RunPollingOptions.cs create mode 100644 dotnet/src/Agents/UnitTests/OpenAI/AssertCollection.cs create mode 100644 dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantMessageFactoryTests.cs create mode 100644 dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs delete mode 100644 dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantConfigurationTests.cs create mode 100644 dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantInvocationOptionsTests.cs create mode 100644 dotnet/src/Agents/UnitTests/OpenAI/OpenAIClientProviderTests.cs create mode 100644 dotnet/src/Agents/UnitTests/OpenAI/OpenAIThreadCreationOptionsTests.cs create mode 100644 dotnet/src/Agents/UnitTests/OpenAI/RunPollingOptionsTests.cs create mode 100644 
dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/.editorconfig create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureOpenAITestHelper.cs create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Connectors.AzureOpenAI.UnitTests.csproj create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Extensions/AzureOpenAIKernelBuilderExtensionsTests.cs create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Extensions/AzureOpenAIServiceCollectionExtensionsTests.cs rename dotnet/src/Connectors/{Connectors.UnitTests/OpenAI/AudioToText => Connectors.AzureOpenAI.UnitTests/Services}/AzureOpenAIAudioToTextServiceTests.cs (56%) rename dotnet/src/Connectors/{Connectors.UnitTests/OpenAI/ChatCompletion => Connectors.AzureOpenAI.UnitTests/Services}/AzureOpenAIChatCompletionServiceTests.cs (65%) create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAITextEmbeddingGenerationServiceTests.cs create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAITextToAudioServiceTests.cs create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAITextToImageServiceTests.cs create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/AzureOpenAIPromptExecutionSettingsTests.cs create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs rename dotnet/src/Connectors/{Connectors.UnitTests/OpenAI => Connectors.AzureOpenAI.UnitTests}/TestData/chat_completion_multiple_function_calls_test_response.json (100%) rename dotnet/src/Connectors/{Connectors.UnitTests/OpenAI => Connectors.AzureOpenAI.UnitTests}/TestData/chat_completion_single_function_call_test_response.json (100%) create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_async_filter_response.txt rename 
dotnet/src/Connectors/{Connectors.UnitTests/OpenAI => Connectors.AzureOpenAI.UnitTests}/TestData/chat_completion_streaming_multiple_function_calls_test_response.txt (100%) rename dotnet/src/Connectors/{Connectors.UnitTests/OpenAI => Connectors.AzureOpenAI.UnitTests}/TestData/chat_completion_streaming_single_function_call_test_response.txt (100%) rename dotnet/src/Connectors/{Connectors.UnitTests/OpenAI => Connectors.AzureOpenAI.UnitTests}/TestData/chat_completion_streaming_test_response.txt (100%) rename dotnet/src/Connectors/{Connectors.UnitTests/OpenAI => Connectors.AzureOpenAI.UnitTests}/TestData/chat_completion_test_response.json (100%) rename dotnet/src/Connectors/{Connectors.UnitTests/OpenAI => Connectors.AzureOpenAI.UnitTests}/TestData/chat_completion_with_data_streaming_test_response.txt (100%) rename dotnet/src/Connectors/{Connectors.UnitTests/OpenAI => Connectors.AzureOpenAI.UnitTests}/TestData/chat_completion_with_data_test_response.json (100%) rename dotnet/src/Connectors/{Connectors.UnitTests/OpenAI => Connectors.AzureOpenAI.UnitTests}/TestData/filters_multiple_function_calls_test_response.json (92%) create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/filters_streaming_multiple_function_calls_test_response.txt create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text-embeddings-multiple-response.txt create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text-embeddings-response.txt create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text-to-image-response.txt rename dotnet/src/Connectors/{Connectors.UnitTests/OpenAI => Connectors.AzureOpenAI.UnitTests}/TestData/text_completion_streaming_test_response.txt (100%) rename dotnet/src/Connectors/{Connectors.UnitTests/OpenAI => Connectors.AzureOpenAI.UnitTests}/TestData/text_completion_test_response.json (100%) create mode 100644 
dotnet/src/Connectors/Connectors.AzureOpenAI/Connectors.AzureOpenAI.csproj create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.ChatCompletion.cs rename dotnet/src/Connectors/{Connectors.OpenAI/AzureSdk/AzureOpenAIClientCore.cs => Connectors.AzureOpenAI/Core/AzureClientCore.cs} (50%) create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI/Extensions/AzureOpenAIKernelBuilderExtensions.cs create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI/Extensions/AzureOpenAIServiceCollectionExtensions.cs rename dotnet/src/Connectors/{Connectors.OpenAI/AudioToText => Connectors.AzureOpenAI/Services}/AzureOpenAIAudioToTextService.cs (73%) rename dotnet/src/Connectors/{Connectors.OpenAI/ChatCompletion => Connectors.AzureOpenAI/Services}/AzureOpenAIChatCompletionService.cs (72%) rename dotnet/src/Connectors/{Connectors.OpenAI/TextEmbedding => Connectors.AzureOpenAI/Services}/AzureOpenAITextEmbeddingGenerationService.cs (76%) rename dotnet/src/Connectors/{Connectors.OpenAI/TextToAudio => Connectors.AzureOpenAI/Services}/AzureOpenAITextToAudioService.cs (60%) create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI/Services/AzureOpenAITextToImageService.cs create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI/Settings/AzureOpenAIPromptExecutionSettings.cs create mode 100644 dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Connectors.OpenAI.UnitTests.csproj rename dotnet/src/Connectors/{Connectors.UnitTests/OpenAI/FunctionCalling => Connectors.OpenAI.UnitTests/Core}/AutoFunctionInvocationFilterTests.cs (77%) create mode 100644 dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/ClientCoreTests.cs rename dotnet/src/Connectors/{Connectors.UnitTests/OpenAI/AzureSdk => Connectors.OpenAI.UnitTests/Core}/OpenAIChatMessageContentTests.cs (73%) rename dotnet/src/Connectors/{Connectors.UnitTests/OpenAI/FunctionCalling => Connectors.OpenAI.UnitTests/Core}/OpenAIFunctionTests.cs (83%) rename 
dotnet/src/Connectors/{Connectors.UnitTests/OpenAI/AzureSdk => Connectors.OpenAI.UnitTests/Core}/OpenAIFunctionToolCallTests.cs (86%) create mode 100644 dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/OpenAIWithDataStreamingChatMessageContentTests.cs rename dotnet/src/Connectors/{Connectors.UnitTests/OpenAI => Connectors.OpenAI.UnitTests/Extensions}/ChatHistoryExtensionsTests.cs (96%) create mode 100644 dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/KernelBuilderExtensionsTests.cs rename dotnet/src/Connectors/{Connectors.UnitTests/OpenAI/FunctionCalling => Connectors.OpenAI.UnitTests/Extensions}/KernelFunctionMetadataExtensionsTests.cs (92%) rename dotnet/src/Connectors/{Connectors.UnitTests/OpenAI/AzureSdk => Connectors.OpenAI.UnitTests/Extensions}/OpenAIPluginCollectionExtensionsTests.cs (89%) create mode 100644 dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/ServiceCollectionExtensionsTests.cs rename dotnet/src/Connectors/{Connectors.UnitTests/OpenAI/AudioToText => Connectors.OpenAI.UnitTests/Services}/OpenAIAudioToTextServiceTests.cs (62%) rename dotnet/src/Connectors/{Connectors.UnitTests/OpenAI/ChatCompletion => Connectors.OpenAI.UnitTests/Services}/OpenAIChatCompletionServiceTests.cs (51%) rename dotnet/src/Connectors/{Connectors.UnitTests/OpenAI/Files => Connectors.OpenAI.UnitTests/Services}/OpenAIFileServiceTests.cs (98%) create mode 100644 dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAITextEmbeddingGenerationServiceTests.cs rename dotnet/src/Connectors/{Connectors.UnitTests/OpenAI/TextToAudio => Connectors.OpenAI.UnitTests/Services}/OpenAITextToAudioServiceTests.cs (53%) create mode 100644 dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAITextToImageServiceTests.cs rename dotnet/src/Connectors/{Connectors.UnitTests/OpenAI/AudioToText => Connectors.OpenAI.UnitTests/Settings}/OpenAIAudioToTextExecutionSettingsTests.cs (90%) rename dotnet/src/Connectors/{Connectors.UnitTests/OpenAI => 
Connectors.OpenAI.UnitTests/Settings}/OpenAIPromptExecutionSettingsTests.cs (89%) rename dotnet/src/Connectors/{Connectors.UnitTests/OpenAI/TextToAudio => Connectors.OpenAI.UnitTests/Settings}/OpenAITextToAudioExecutionSettingsTests.cs (98%) create mode 100644 dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_invalid_streaming_test_response.txt create mode 100644 dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_multiple_function_calls_test_response.json create mode 100644 dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_single_function_call_test_response.json create mode 100644 dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_streaming_multiple_function_calls_test_response.txt create mode 100644 dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_streaming_single_function_call_test_response.txt create mode 100644 dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_streaming_test_response.txt create mode 100644 dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_test_response.json create mode 100644 dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_with_data_streaming_test_response.txt create mode 100644 dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_with_data_test_response.json create mode 100644 dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/filters_multiple_function_calls_test_response.json create mode 100644 dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/filters_streaming_multiple_function_calls_test_response.txt create mode 100644 dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/text-embeddings-multiple-response.txt create mode 100644 dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/text-embeddings-response.txt create mode 100644 
dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/text-to-image-response.txt rename dotnet/src/Connectors/{Connectors.UnitTests/OpenAI => Connectors.OpenAI.UnitTests}/ToolCallBehaviorTests.cs (73%) delete mode 100644 dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AddHeaderRequestPolicy.cs delete mode 100644 dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAITextToAudioClient.cs delete mode 100644 dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAIWithDataChatMessageContent.cs delete mode 100644 dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAIWithDataStreamingChatMessageContent.cs delete mode 100644 dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ClientCore.cs delete mode 100644 dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/CustomHostPipelinePolicy.cs delete mode 100644 dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIClientCore.cs delete mode 100644 dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIStreamingTextContent.cs delete mode 100644 dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAITextToAudioClient.cs delete mode 100644 dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/AzureOpenAIChatCompletionWithDataConfig.cs delete mode 100644 dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/AzureOpenAIChatCompletionWithDataService.cs delete mode 100644 dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataMessage.cs delete mode 100644 dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataRequest.cs delete mode 100644 dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataResponse.cs delete mode 100644 dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataStreamingResponse.cs create mode 100644 dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.AudioToText.cs create mode 100644 dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs create mode 100644 
dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.Embeddings.cs create mode 100644 dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.TextToAudio.cs create mode 100644 dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.TextToImage.cs create mode 100644 dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.cs rename dotnet/src/Connectors/Connectors.OpenAI/{AzureSdk => Core}/OpenAIChatMessageContent.cs (65%) rename dotnet/src/Connectors/Connectors.OpenAI/{AzureSdk => Core}/OpenAIFunction.cs (89%) rename dotnet/src/Connectors/Connectors.OpenAI/{AzureSdk => Core}/OpenAIFunctionToolCall.cs (77%) rename dotnet/src/Connectors/Connectors.OpenAI/{AzureSdk => Core}/OpenAIStreamingChatMessageContent.cs (56%) delete mode 100644 dotnet/src/Connectors/Connectors.OpenAI/CustomClient/OpenAITextToImageClientCore.cs rename dotnet/src/Connectors/Connectors.OpenAI/{AzureSdk => Extensions}/ChatHistoryExtensions.cs (90%) create mode 100644 dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIKernelBuilderExtensions.cs rename dotnet/src/Connectors/Connectors.OpenAI/{AzureSdk => Extensions}/OpenAIKernelFunctionMetadataExtensions.cs (98%) create mode 100644 dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIMemoryBuilderExtensions.cs rename dotnet/src/Connectors/Connectors.OpenAI/{AzureSdk => Extensions}/OpenAIPluginCollectionExtensions.cs (97%) create mode 100644 dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIServiceCollectionExtensions.cs rename dotnet/src/Connectors/Connectors.OpenAI/{Files => Models}/OpenAIFilePurpose.cs (95%) rename dotnet/src/Connectors/Connectors.OpenAI/{Files => Models}/OpenAIFileReference.cs (84%) delete mode 100644 dotnet/src/Connectors/Connectors.OpenAI/OpenAIMemoryBuilderExtensions.cs delete mode 100644 dotnet/src/Connectors/Connectors.OpenAI/OpenAIServiceCollectionExtensions.cs rename dotnet/src/Connectors/Connectors.OpenAI/{AudioToText => Services}/OpenAIAudioToTextService.cs (62%) rename 
dotnet/src/Connectors/Connectors.OpenAI/{ChatCompletion => Services}/OpenAIChatCompletionService.cs (61%) rename dotnet/src/Connectors/Connectors.OpenAI/{Files => Services}/OpenAIFileService.cs (97%) rename dotnet/src/Connectors/Connectors.OpenAI/{TextEmbedding/OpenAITextEmbeddingGenerationService.cs => Services/OpenAITextEmbbedingGenerationService.cs} (76%) rename dotnet/src/Connectors/Connectors.OpenAI/{TextToAudio => Services}/OpenAITextToAudioService.cs (77%) create mode 100644 dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAITextToImageService.cs rename dotnet/src/Connectors/Connectors.OpenAI/{AudioToText => Settings}/OpenAIAudioToTextExecutionSettings.cs (89%) rename dotnet/src/Connectors/Connectors.OpenAI/{Files => Settings}/OpenAIFileUploadExecutionSettings.cs (85%) rename dotnet/src/Connectors/Connectors.OpenAI/{ => Settings}/OpenAIPromptExecutionSettings.cs (76%) rename dotnet/src/Connectors/Connectors.OpenAI/{TextToAudio => Settings}/OpenAITextToAudioExecutionSettings.cs (84%) delete mode 100644 dotnet/src/Connectors/Connectors.OpenAI/TextGeneration/AzureOpenAITextGenerationService.cs delete mode 100644 dotnet/src/Connectors/Connectors.OpenAI/TextGeneration/OpenAITextGenerationService.cs delete mode 100644 dotnet/src/Connectors/Connectors.OpenAI/TextToAudio/TextToAudioRequest.cs delete mode 100644 dotnet/src/Connectors/Connectors.OpenAI/TextToImage/AzureOpenAITextToImageService.cs delete mode 100644 dotnet/src/Connectors/Connectors.OpenAI/TextToImage/OpenAITextToImageService.cs delete mode 100644 dotnet/src/Connectors/Connectors.OpenAI/TextToImage/TextToImageRequest.cs delete mode 100644 dotnet/src/Connectors/Connectors.OpenAI/TextToImage/TextToImageResponse.cs delete mode 100644 dotnet/src/Connectors/Connectors.UnitTests/MultipleHttpMessageHandlerStub.cs delete mode 100644 dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AIServicesOpenAIExtensionsTests.cs delete mode 100644 
dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/AzureOpenAIWithDataChatMessageContentTests.cs delete mode 100644 dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/AzureOpenAIWithDataStreamingChatMessageContentTests.cs delete mode 100644 dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIStreamingTextContentTests.cs delete mode 100644 dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/RequestFailedExceptionExtensionsTests.cs delete mode 100644 dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletionWithData/AzureOpenAIChatCompletionWithDataTests.cs delete mode 100644 dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIMemoryBuilderExtensionsTests.cs delete mode 100644 dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIServiceCollectionExtensionsTests.cs delete mode 100644 dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAITestHelper.cs delete mode 100644 dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/filters_streaming_multiple_function_calls_test_response.txt delete mode 100644 dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextEmbedding/AzureOpenAITextEmbeddingGenerationServiceTests.cs delete mode 100644 dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextEmbedding/OpenAITextEmbeddingGenerationServiceTests.cs delete mode 100644 dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextGeneration/AzureOpenAITextGenerationServiceTests.cs delete mode 100644 dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextGeneration/OpenAITextGenerationServiceTests.cs delete mode 100644 dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToAudio/AzureOpenAITextToAudioServiceTests.cs delete mode 100644 dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToImage/AzureOpenAITextToImageTests.cs delete mode 100644 dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToImage/OpenAITextToImageServiceTests.cs create mode 100644 
dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIAudioToTextTests.cs create mode 100644 dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletionTests.cs create mode 100644 dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletion_FunctionCallingTests.cs create mode 100644 dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletion_NonStreamingTests.cs create mode 100644 dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletion_StreamingTests.cs create mode 100644 dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAITextEmbeddingTests.cs create mode 100644 dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAITextToAudioTests.cs create mode 100644 dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAITextToImageTests.cs delete mode 100644 dotnet/src/IntegrationTests/Connectors/OpenAI/AIServiceType.cs delete mode 100644 dotnet/src/IntegrationTests/Connectors/OpenAI/ChatHistoryTests.cs create mode 100644 dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletionTests.cs rename dotnet/src/IntegrationTests/Connectors/OpenAI/{OpenAIToolsTests.cs => OpenAIChatCompletion_FunctionCallingTests.cs} (67%) create mode 100644 dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_NonStreamingTests.cs create mode 100644 dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_StreamingTests.cs delete mode 100644 dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAICompletionTests.cs create mode 100644 dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithChatRolesStreamingTest.json create mode 100644 dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithComplexObjectsStreamingTest.json create mode 100644 dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithHelperFunctionsStreamingTest.json create mode 100644 dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithSimpleVariableStreamingTest.json create mode 100644 
dotnet/src/IntegrationTests/CrossLanguage/Data/SimplePromptStreamingTest.json create mode 100644 dotnet/src/IntegrationTests/TestData/serializedChatHistoryV1_15_1.json rename dotnet/src/{Connectors/Connectors.OpenAI/AzureSdk/RequestFailedExceptionExtensions.cs => InternalUtilities/openai/Extensions/ClientResultExceptionExtensions.cs} (65%) create mode 100644 dotnet/src/InternalUtilities/openai/OpenAIUtilities.props create mode 100644 dotnet/src/InternalUtilities/openai/Policies/GeneratedActionPipelinePolicy.cs create mode 100644 dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs create mode 100644 dotnet/src/InternalUtilities/test/MoqExtensions.cs create mode 100644 dotnet/src/SemanticKernel.UnitTests/Extensions/ClientResultExceptionExtensionsTests.cs create mode 100644 dotnet/src/SemanticKernel.UnitTests/Utilities/OpenAI/GenericActionPipelinePolicyTests.cs create mode 100644 dotnet/src/SemanticKernel.UnitTests/Utilities/OpenAI/MockPipelineResponse.cs create mode 100644 dotnet/src/SemanticKernel.UnitTests/Utilities/OpenAI/MockResponseHeaders.cs diff --git a/.github/_typos.toml b/.github/_typos.toml index 917745e1ae83..08b4ab37f906 100644 --- a/.github/_typos.toml +++ b/.github/_typos.toml @@ -16,6 +16,7 @@ extend-exclude = [ "test_code_tokenizer.py", "*response.json", "test_content.txt", + "serializedChatHistoryV1_15_1.json" ] [default.extend-words] diff --git a/.github/workflows/dotnet-build-and-test.yml b/.github/workflows/dotnet-build-and-test.yml index 366934c73314..dd83478b508b 100644 --- a/.github/workflows/dotnet-build-and-test.yml +++ b/.github/workflows/dotnet-build-and-test.yml @@ -96,6 +96,7 @@ jobs: AzureOpenAI__Label: azure-text-davinci-003 AzureOpenAIEmbedding__Label: azure-text-embedding-ada-002 AzureOpenAI__DeploymentName: ${{ vars.AZUREOPENAI__DEPLOYMENTNAME }} + AzureOpenAI__ChatDeploymentName: ${{ vars.AZUREOPENAI__CHATDEPLOYMENTNAME }} AzureOpenAIEmbeddings__DeploymentName: ${{ vars.AZUREOPENAIEMBEDDING__DEPLOYMENTNAME }} 
AzureOpenAI__Endpoint: ${{ secrets.AZUREOPENAI__ENDPOINT }} AzureOpenAIEmbeddings__Endpoint: ${{ secrets.AZUREOPENAI_EASTUS__ENDPOINT }} @@ -110,14 +111,20 @@ jobs: OpenAITextToAudio__ModelId: ${{ vars.OPENAITEXTTOAUDIO__MODELID }} OpenAIAudioToText__ApiKey: ${{ secrets.OPENAIAUDIOTOTEXT__APIKEY }} OpenAIAudioToText__ModelId: ${{ vars.OPENAIAUDIOTOTEXT__MODELID }} + OpenAITextToImage__ApiKey: ${{ secrets.OPENAITEXTTOIMAGE__APIKEY }} + OpenAITextToImage__ModelId: ${{ vars.OPENAITEXTTOIMAGE__MODELID }} AzureOpenAITextToAudio__ApiKey: ${{ secrets.AZUREOPENAITEXTTOAUDIO__APIKEY }} AzureOpenAITextToAudio__Endpoint: ${{ secrets.AZUREOPENAITEXTTOAUDIO__ENDPOINT }} AzureOpenAITextToAudio__DeploymentName: ${{ vars.AZUREOPENAITEXTTOAUDIO__DEPLOYMENTNAME }} AzureOpenAIAudioToText__ApiKey: ${{ secrets.AZUREOPENAIAUDIOTOTEXT__APIKEY }} AzureOpenAIAudioToText__Endpoint: ${{ secrets.AZUREOPENAIAUDIOTOTEXT__ENDPOINT }} AzureOpenAIAudioToText__DeploymentName: ${{ vars.AZUREOPENAIAUDIOTOTEXT__DEPLOYMENTNAME }} + AzureOpenAITextToImage__ApiKey: ${{ secrets.AZUREOPENAITEXTTOIMAGE__APIKEY }} + AzureOpenAITextToImage__Endpoint: ${{ secrets.AZUREOPENAITEXTTOIMAGE__ENDPOINT }} + AzureOpenAITextToImage__DeploymentName: ${{ vars.AZUREOPENAITEXTTOIMAGE__DEPLOYMENTNAME }} Bing__ApiKey: ${{ secrets.BING__APIKEY }} OpenAI__ApiKey: ${{ secrets.OPENAI__APIKEY }} + OpenAI__ChatModelId: ${{ vars.OPENAI__CHATMODELID }} # Generate test reports and check coverage - name: Generate test reports @@ -126,7 +133,7 @@ jobs: reports: "./TestResults/Coverage/**/coverage.cobertura.xml" targetdir: "./TestResults/Reports" reporttypes: "JsonSummary" - assemblyfilters: "+Microsoft.SemanticKernel.Abstractions;+Microsoft.SemanticKernel.Core;+Microsoft.SemanticKernel.PromptTemplates.Handlebars;+Microsoft.SemanticKernel.Connectors.OpenAI;+Microsoft.SemanticKernel.Yaml;+Microsoft.SemanticKernel.Agents.Abstractions;+Microsoft.SemanticKernel.Agents.Core;+Microsoft.SemanticKernel.Agents.OpenAI" + assemblyfilters: 
"+Microsoft.SemanticKernel.Abstractions;+Microsoft.SemanticKernel.Core;+Microsoft.SemanticKernel.PromptTemplates.Handlebars;+Microsoft.SemanticKernel.Connectors.OpenAI;+Microsoft.SemanticKernel.Connectors.AzureOpenAI;+Microsoft.SemanticKernel.Yaml;+Microsoft.SemanticKernel.Agents.Abstractions;+Microsoft.SemanticKernel.Agents.Core;+Microsoft.SemanticKernel.Agents.OpenAI" - name: Check coverage shell: pwsh diff --git a/dotnet/Directory.Build.props b/dotnet/Directory.Build.props index 751afab85104..94d748c78057 100644 --- a/dotnet/Directory.Build.props +++ b/dotnet/Directory.Build.props @@ -11,6 +11,11 @@ disable + + + false + + disable @@ -30,4 +35,4 @@ <_Parameter1>false - \ No newline at end of file + diff --git a/dotnet/Directory.Packages.props b/dotnet/Directory.Packages.props index e2f8f0ce3e89..75d19fe11d0b 100644 --- a/dotnet/Directory.Packages.props +++ b/dotnet/Directory.Packages.props @@ -5,9 +5,10 @@ true + + - - + diff --git a/dotnet/SK-dotnet.sln b/dotnet/SK-dotnet.sln index a3bc4f9a380d..cb04656ffb01 100644 --- a/dotnet/SK-dotnet.sln +++ b/dotnet/SK-dotnet.sln @@ -326,6 +326,12 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AIModelRouter", "samples\De EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Connectors.AzureCosmosDBNoSQL.UnitTests", "src\Connectors\Connectors.AzureCosmosDBNoSQL.UnitTests\Connectors.AzureCosmosDBNoSQL.UnitTests.csproj", "{385A8FE5-87E2-4458-AE09-35E10BD2E67F}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Connectors.OpenAI.UnitTests", "src\Connectors\Connectors.OpenAI.UnitTests\Connectors.OpenAI.UnitTests.csproj", "{36DDC119-C030-407E-AC51-A877E9E0F660}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Connectors.AzureOpenAI", "src\Connectors\Connectors.AzureOpenAI\Connectors.AzureOpenAI.csproj", "{7AAD7388-307D-41FB-B80A-EF9E3A4E31F0}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Connectors.AzureOpenAI.UnitTests", 
"src\Connectors\Connectors.AzureOpenAI.UnitTests\Connectors.AzureOpenAI.UnitTests.csproj", "{8CF06B22-50F3-4F71-A002-622DB49DF0F5}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -819,6 +825,24 @@ Global {385A8FE5-87E2-4458-AE09-35E10BD2E67F}.Publish|Any CPU.Build.0 = Debug|Any CPU {385A8FE5-87E2-4458-AE09-35E10BD2E67F}.Release|Any CPU.ActiveCfg = Release|Any CPU {385A8FE5-87E2-4458-AE09-35E10BD2E67F}.Release|Any CPU.Build.0 = Release|Any CPU + {36DDC119-C030-407E-AC51-A877E9E0F660}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {36DDC119-C030-407E-AC51-A877E9E0F660}.Debug|Any CPU.Build.0 = Debug|Any CPU + {36DDC119-C030-407E-AC51-A877E9E0F660}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {36DDC119-C030-407E-AC51-A877E9E0F660}.Publish|Any CPU.Build.0 = Debug|Any CPU + {36DDC119-C030-407E-AC51-A877E9E0F660}.Release|Any CPU.ActiveCfg = Release|Any CPU + {36DDC119-C030-407E-AC51-A877E9E0F660}.Release|Any CPU.Build.0 = Release|Any CPU + {7AAD7388-307D-41FB-B80A-EF9E3A4E31F0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7AAD7388-307D-41FB-B80A-EF9E3A4E31F0}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7AAD7388-307D-41FB-B80A-EF9E3A4E31F0}.Publish|Any CPU.ActiveCfg = Publish|Any CPU + {7AAD7388-307D-41FB-B80A-EF9E3A4E31F0}.Publish|Any CPU.Build.0 = Publish|Any CPU + {7AAD7388-307D-41FB-B80A-EF9E3A4E31F0}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7AAD7388-307D-41FB-B80A-EF9E3A4E31F0}.Release|Any CPU.Build.0 = Release|Any CPU + {8CF06B22-50F3-4F71-A002-622DB49DF0F5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {8CF06B22-50F3-4F71-A002-622DB49DF0F5}.Debug|Any CPU.Build.0 = Debug|Any CPU + {8CF06B22-50F3-4F71-A002-622DB49DF0F5}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {8CF06B22-50F3-4F71-A002-622DB49DF0F5}.Publish|Any CPU.Build.0 = Debug|Any CPU + {8CF06B22-50F3-4F71-A002-622DB49DF0F5}.Release|Any CPU.ActiveCfg = Release|Any CPU + {8CF06B22-50F3-4F71-A002-622DB49DF0F5}.Release|Any CPU.Build.0 = Release|Any CPU 
EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -931,6 +955,9 @@ Global {2918478E-BC86-4D53-9D01-9C318F80C14F} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C} {E06818E3-00A5-41AC-97ED-9491070CDEA1} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263} {385A8FE5-87E2-4458-AE09-35E10BD2E67F} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C} + {36DDC119-C030-407E-AC51-A877E9E0F660} = {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1} + {7AAD7388-307D-41FB-B80A-EF9E3A4E31F0} = {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1} + {8CF06B22-50F3-4F71-A002-622DB49DF0F5} = {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {FBDC56A3-86AD-4323-AA0F-201E59123B83} diff --git a/dotnet/docs/OPENAI-CONNECTOR-MIGRATION.md b/dotnet/docs/OPENAI-CONNECTOR-MIGRATION.md new file mode 100644 index 000000000000..784de5347fb0 --- /dev/null +++ b/dotnet/docs/OPENAI-CONNECTOR-MIGRATION.md @@ -0,0 +1,196 @@ +# OpenAI Connector Migration Guide + +This manual prepares you for the migration of your OpenAI Connector to the new OpenAI Connector. The new OpenAI Connector is a complete rewrite of the existing OpenAI Connector and is designed to be more efficient, reliable, and scalable. This manual will guide you through the migration process and help you understand the changes that have been made to the OpenAI Connector. + +## 1. Package Setup when Using Azure + +If you are working with Azure and or OpenAI public APIs, you will need to change the package from `Microsoft.SemanticKernel.Connectors.OpenAI` to `Microsoft.SemanticKernel.Connectors.AzureOpenAI`, + +> [!IMPORTANT] +> The `Microsoft.SemanticKernel.Connectors.AzureOpenAI` package depends on the `Microsoft.SemanticKernel.Connectors.OpenAI` package so there's no need to add both to your project when using `OpenAI` related types. 
+ +```diff +- // Before +- using Microsoft.SemanticKernel.Connectors.OpenAI; ++ After ++ using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +``` + +### 1.1 AzureOpenAIClient + +When using Azure with OpenAI, before where you were using `OpenAIClient` you will need to update your code to use the new `AzureOpenAIClient` type. + +### 1.2 Services + +All services below now belong to the `Microsoft.SemanticKernel.Connectors.AzureOpenAI` namespace. + +- `AzureOpenAIAudioToTextService` +- `AzureOpenAIChatCompletionService` +- `AzureOpenAITextEmbeddingGenerationService` +- `AzureOpenAITextToAudioService` +- `AzureOpenAITextToImageService` + +## 2. Text Generation Deprecated + +The latest `OpenAI` SDK does not support text generation modality, when migrating to their underlying SDK we had to drop the support and removed `TextGeneration` specific services but the existing `ChatCompletion` ones still supports (implements `ITextGenerationService`). + +If you were using any of the `OpenAITextGenerationService` or `AzureOpenAITextGenerationService` you will need to update your code to target a chat completion model instead, using `OpenAIChatCompletionService` or `AzureOpenAIChatCompletionService` instead. + +> [!NOTE] +> OpenAI and AzureOpenAI `ChatCompletion` services also implement the `ITextGenerationService` interface and that may not require any changes to your code if you were targeting the `ITextGenerationService` interface. + +tags: +`OpenAITextGenerationService`,`AzureOpenAITextGenerationService`, +`AddOpenAITextGeneration`,`AddAzureOpenAITextGeneration` + +## 3. ChatCompletion Multiple Choices Deprecated + +The latest `OpenAI` SDK does not support multiple choices, when migrating to their underlying SDK we had to drop the support and removed `ResultsPerPrompt` also from the `OpenAIPromptExecutionSettings`. + +tags: `ResultsPerPrompt`,`results_per_prompt` + +## 4. 
OpenAI File Service Deprecation + +The `OpenAIFileService` was deprecated in the latest version of the OpenAI Connector. We strongly recommend to update your code to use the new `OpenAIClient.GetFileClient()` for file management operations. + +## 5. SemanticKernel MetaPackage + +To be retro compatible with the new OpenAI and AzureOpenAI Connectors, our `Microsoft.SemanticKernel` meta package changed its dependency to use the new `Microsoft.SemanticKernel.Connectors.AzureOpenAI` package that depends on the `Microsoft.SemanticKernel.Connectors.OpenAI` package. This way if you are using the metapackage, no change is needed to get access to `Azure` related types. + +## 6. Contents + +### 6.1 OpenAIChatMessageContent + +- The `Tools` property type has changed from `IReadOnlyList` to `IReadOnlyList`. + +- Inner content type has changed from `ChatCompletionsFunctionToolCall` to `ChatToolCall`. + +- Metadata type `FunctionToolCalls` has changed from `IEnumerable` to `IEnumerable`. + +### 6.2 OpenAIStreamingChatMessageContent + +- The `FinishReason` property type has changed from `CompletionsFinishReason` to `FinishReason`. +- The `ToolCallUpdate` property has been renamed to `ToolCallUpdates` and its type has changed from `StreamingToolCallUpdate?` to `IReadOnlyList?`. +- The `AuthorName` property is not initialized because it's not provided by the underlying library anymore. + +## 6.3 Metrics for AzureOpenAI Connector + +The meter `s_meter = new("Microsoft.SemanticKernel.Connectors.OpenAI");` and the relevant counters still have old names that contain "openai" in them, such as: + +- `semantic_kernel.connectors.openai.tokens.prompt` +- `semantic_kernel.connectors.openai.tokens.completion` +- `semantic_kernel.connectors.openai.tokens.total` + +## 7. Using Azure with your data (Data Sources) + +With the new `AzureOpenAIClient`, you can now specify your datasource thru the options and that requires a small change in your code to the new type. 
+ +Before + +```csharp +var promptExecutionSettings = new OpenAIPromptExecutionSettings +{ + AzureChatExtensionsOptions = new AzureChatExtensionsOptions + { + Extensions = [ new AzureSearchChatExtensionConfiguration + { + SearchEndpoint = new Uri(TestConfiguration.AzureAISearch.Endpoint), + Authentication = new OnYourDataApiKeyAuthenticationOptions(TestConfiguration.AzureAISearch.ApiKey), + IndexName = TestConfiguration.AzureAISearch.IndexName + }] + }; +}; +``` + +After + +```csharp +var promptExecutionSettings = new AzureOpenAIPromptExecutionSettings +{ + AzureChatDataSource = new AzureSearchChatDataSource + { + Endpoint = new Uri(TestConfiguration.AzureAISearch.Endpoint), + Authentication = DataSourceAuthentication.FromApiKey(TestConfiguration.AzureAISearch.ApiKey), + IndexName = TestConfiguration.AzureAISearch.IndexName + } +}; +``` + +## 8. Breaking glass scenarios + +Breaking glass scenarios are scenarios where you may need to update your code to use the new OpenAI Connector. Below are some of the breaking changes that you may need to be aware of. + +#### 8.1 KernelContent Metadata + +Some of the keys in the content metadata dictionary have changed, you will need to update your code to when using the previous key names. + +- `Created` -> `CreatedAt` + +#### 8.2 Prompt Filter Results + +The `PromptFilterResults` metadata type has changed from `IReadOnlyList` to `ContentFilterResultForPrompt`. + +#### 8.3 Content Filter Results + +The `ContentFilterResultsForPrompt` type has changed from `ContentFilterResultsForChoice` to `ContentFilterResultForResponse`. + +#### 8.4 Finish Reason + +The FinishReason metadata string value has changed from `stop` to `Stop` + +#### 8.5 Tool Calls + +The ToolCalls metadata string value has changed from `tool_calls` to `ToolCalls` + +#### 8.6 LogProbs / Log Probability Info + +The `LogProbabilityInfo` type has changed from `ChatChoiceLogProbabilityInfo` to `IReadOnlyList`. 
+ +#### 8.7 Finish Details, Index, and Enhancements + +All of above have been removed. + +#### 8.8 Token Usage + +The Token usage naming convention from `OpenAI` changed from `Completion`, `Prompt` tokens to `Output` and `Input` respectively. You will need to update your code to use the new naming. + +The type also changed from `CompletionsUsage` to `ChatTokenUsage`. + +[Example of Token Usage Metadata Changes](https://github.com/microsoft/semantic-kernel/pull/7151/files#diff-a323107b9f8dc8559a83e50080c6e34551ddf6d9d770197a473f249589e8fb47) + +```diff +- Before +- var usage = FunctionResult.Metadata?["Usage"] as CompletionsUsage; +- var completionTokesn = usage?.CompletionTokens ?? 0; +- var promptTokens = usage?.PromptTokens ?? 0; + ++ After ++ var usage = FunctionResult.Metadata?["Usage"] as ChatTokenUsage; ++ var promptTokens = usage?.InputTokens ?? 0; ++ var completionTokens = completionTokens: usage?.OutputTokens ?? 0; + +totalTokens: usage?.TotalTokens ?? 0; +``` + +#### 8.9 OpenAIClient + +The `OpenAIClient` type previously was a Azure specific namespace type but now it is an `OpenAI` SDK namespace type, you will need to update your code to use the new `OpenAIClient` type. + +When using Azure, you will need to update your code to use the new `AzureOpenAIClient` type. + +#### 8.10 Pipeline Configuration + +The new `OpenAI` SDK uses a different pipeline configuration, and has a dependency on `System.ClientModel` package. You will need to update your code to use the new `HttpClientPipelineTransport` transport configuration where before you were using `HttpClientTransport` from `Azure.Core.Pipeline`. 
+ +[Example of Pipeline Configuration](https://github.com/microsoft/semantic-kernel/pull/7151/files#diff-fab02d9a75bf43cb57f71dddc920c3f72882acf83fb125d8cad963a643d26eb3) + +```diff +var clientOptions = new OpenAIClientOptions +{ +- // Before: From Azure.Core.Pipeline +- Transport = new HttpClientTransport(httpClient), + ++ // After: From OpenAI SDK -> System.ClientModel ++ Transport = new HttpClientPipelineTransport(httpClient), +}; +``` diff --git a/dotnet/nuget/nuget-package.props b/dotnet/nuget/nuget-package.props index 78c6592ad176..851e17bc86f9 100644 --- a/dotnet/nuget/nuget-package.props +++ b/dotnet/nuget/nuget-package.props @@ -1,8 +1,7 @@ - 1.17.2 - + 1.18.2 $(VersionPrefix)-$(VersionSuffix) $(VersionPrefix) @@ -10,7 +9,7 @@ true - 1.17.2 + 1.18.0-rc $(NoWarn);CP0003 diff --git a/dotnet/samples/Concepts/Agents/ChatCompletion_FunctionTermination.cs b/dotnet/samples/Concepts/Agents/ChatCompletion_FunctionTermination.cs index 16c019aebbfd..d0b8e92d39d7 100644 --- a/dotnet/samples/Concepts/Agents/ChatCompletion_FunctionTermination.cs +++ b/dotnet/samples/Concepts/Agents/ChatCompletion_FunctionTermination.cs @@ -12,7 +12,7 @@ namespace Agents; /// Demonstrate usage of for both direction invocation /// of and via . /// -public class ChatCompletion_FunctionTermination(ITestOutputHelper output) : BaseTest(output) +public class ChatCompletion_FunctionTermination(ITestOutputHelper output) : BaseAgentsTest(output) { [Fact] public async Task UseAutoFunctionInvocationFilterWithAgentInvocationAsync() @@ -44,25 +44,25 @@ public async Task UseAutoFunctionInvocationFilterWithAgentInvocationAsync() Console.WriteLine("================================"); foreach (ChatMessageContent message in chat) { - this.WriteContent(message); + this.WriteAgentChatMessage(message); } // Local function to invoke agent and display the conversation messages. 
async Task InvokeAgentAsync(string input) { - ChatMessageContent userContent = new(AuthorRole.User, input); - chat.Add(userContent); - this.WriteContent(userContent); + ChatMessageContent message = new(AuthorRole.User, input); + chat.Add(message); + this.WriteAgentChatMessage(message); - await foreach (ChatMessageContent content in agent.InvokeAsync(chat)) + await foreach (ChatMessageContent response in agent.InvokeAsync(chat)) { // Do not add a message implicitly added to the history. - if (!content.Items.Any(i => i is FunctionCallContent || i is FunctionResultContent)) + if (!response.Items.Any(i => i is FunctionCallContent || i is FunctionResultContent)) { - chat.Add(content); + chat.Add(response); } - this.WriteContent(content); + this.WriteAgentChatMessage(response); } } } @@ -98,28 +98,23 @@ public async Task UseAutoFunctionInvocationFilterWithAgentChatAsync() ChatMessageContent[] history = await chat.GetChatMessagesAsync().ToArrayAsync(); for (int index = history.Length; index > 0; --index) { - this.WriteContent(history[index - 1]); + this.WriteAgentChatMessage(history[index - 1]); } // Local function to invoke agent and display the conversation messages. async Task InvokeAgentAsync(string input) { - ChatMessageContent userContent = new(AuthorRole.User, input); - chat.AddChatMessage(userContent); - this.WriteContent(userContent); + ChatMessageContent message = new(AuthorRole.User, input); + chat.AddChatMessage(message); + this.WriteAgentChatMessage(message); - await foreach (ChatMessageContent content in chat.InvokeAsync(agent)) + await foreach (ChatMessageContent response in chat.InvokeAsync(agent)) { - this.WriteContent(content); + this.WriteAgentChatMessage(response); } } } - private void WriteContent(ChatMessageContent content) - { - Console.WriteLine($"[{content.Items.LastOrDefault()?.GetType().Name ?? 
"(empty)"}] {content.Role} : '{content.Content}'"); - } - private Kernel CreateKernelWithFilter() { IKernelBuilder builder = Kernel.CreateBuilder(); diff --git a/dotnet/samples/Concepts/Agents/ChatCompletion_Streaming.cs b/dotnet/samples/Concepts/Agents/ChatCompletion_Streaming.cs index d3e94386af96..575db7f7f288 100644 --- a/dotnet/samples/Concepts/Agents/ChatCompletion_Streaming.cs +++ b/dotnet/samples/Concepts/Agents/ChatCompletion_Streaming.cs @@ -12,7 +12,7 @@ namespace Agents; /// Demonstrate creation of and /// eliciting its response to three explicit user messages. /// -public class ChatCompletion_Streaming(ITestOutputHelper output) : BaseTest(output) +public class ChatCompletion_Streaming(ITestOutputHelper output) : BaseAgentsTest(output) { private const string ParrotName = "Parrot"; private const string ParrotInstructions = "Repeat the user message in the voice of a pirate and then end with a parrot sound."; @@ -66,32 +66,33 @@ public async Task UseStreamingChatCompletionAgentWithPluginAsync() // Local function to invoke agent and display the conversation messages. private async Task InvokeAgentAsync(ChatCompletionAgent agent, ChatHistory chat, string input) { - chat.Add(new ChatMessageContent(AuthorRole.User, input)); - - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); + ChatMessageContent message = new(AuthorRole.User, input); + chat.Add(message); + this.WriteAgentChatMessage(message); StringBuilder builder = new(); - await foreach (StreamingChatMessageContent message in agent.InvokeStreamingAsync(chat)) + await foreach (StreamingChatMessageContent response in agent.InvokeStreamingAsync(chat)) { - if (string.IsNullOrEmpty(message.Content)) + if (string.IsNullOrEmpty(response.Content)) { continue; } if (builder.Length == 0) { - Console.WriteLine($"# {message.Role} - {message.AuthorName ?? "*"}:"); + Console.WriteLine($"# {response.Role} - {response.AuthorName ?? 
"*"}:"); } - Console.WriteLine($"\t > streamed: '{message.Content}'"); - builder.Append(message.Content); + Console.WriteLine($"\t > streamed: '{response.Content}'"); + builder.Append(response.Content); } if (builder.Length > 0) { // Display full response and capture in chat history - Console.WriteLine($"\t > complete: '{builder}'"); - chat.Add(new ChatMessageContent(AuthorRole.Assistant, builder.ToString()) { AuthorName = agent.Name }); + ChatMessageContent response = new(AuthorRole.Assistant, builder.ToString()) { AuthorName = agent.Name }; + chat.Add(response); + this.WriteAgentChatMessage(response); } } diff --git a/dotnet/samples/Concepts/Agents/ComplexChat_NestedShopper.cs b/dotnet/samples/Concepts/Agents/ComplexChat_NestedShopper.cs index aae984906ba3..0d7b27917d78 100644 --- a/dotnet/samples/Concepts/Agents/ComplexChat_NestedShopper.cs +++ b/dotnet/samples/Concepts/Agents/ComplexChat_NestedShopper.cs @@ -1,10 +1,10 @@ // Copyright (c) Microsoft. All rights reserved. -using Azure.AI.OpenAI; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.Chat; using Microsoft.SemanticKernel.ChatCompletion; using Microsoft.SemanticKernel.Connectors.OpenAI; +using OpenAI.Chat; using Resources; namespace Agents; @@ -13,10 +13,8 @@ namespace Agents; /// Demonstrate usage of and /// to manage execution. /// -public class ComplexChat_NestedShopper(ITestOutputHelper output) : BaseTest(output) +public class ComplexChat_NestedShopper(ITestOutputHelper output) : BaseAgentsTest(output) { - protected override bool ForceOpenAI => true; - private const string InternalLeaderName = "InternalLeader"; private const string InternalLeaderInstructions = """ @@ -98,7 +96,7 @@ public async Task NestedChatWithAggregatorAgentAsync() { Console.WriteLine($"! 
{Model}"); - OpenAIPromptExecutionSettings jsonSettings = new() { ResponseFormat = ChatCompletionsResponseFormat.JsonObject }; + OpenAIPromptExecutionSettings jsonSettings = new() { ResponseFormat = ChatResponseFormat.JsonObject }; OpenAIPromptExecutionSettings autoInvokeSettings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; ChatCompletionAgent internalLeaderAgent = CreateAgent(InternalLeaderName, InternalLeaderInstructions); @@ -154,20 +152,20 @@ public async Task NestedChatWithAggregatorAgentAsync() Console.WriteLine(">>>> AGGREGATED CHAT"); Console.WriteLine(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"); - await foreach (ChatMessageContent content in chat.GetChatMessagesAsync(personalShopperAgent).Reverse()) + await foreach (ChatMessageContent message in chat.GetChatMessagesAsync(personalShopperAgent).Reverse()) { - Console.WriteLine($">>>> {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'"); + this.WriteAgentChatMessage(message); } async Task InvokeChatAsync(string input) { - chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input)); - - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); + ChatMessageContent message = new(AuthorRole.User, input); + chat.AddChatMessage(message); + this.WriteAgentChatMessage(message); - await foreach (ChatMessageContent content in chat.InvokeAsync(personalShopperAgent)) + await foreach (ChatMessageContent response in chat.InvokeAsync(personalShopperAgent)) { - Console.WriteLine($"# {content.Role} - {content.AuthorName ?? 
"*"}: '{content.Content}'"); + this.WriteAgentChatMessage(response); } Console.WriteLine($"\n# IS COMPLETE: {chat.IsComplete}"); diff --git a/dotnet/samples/Concepts/Agents/Legacy_AgentAuthoring.cs b/dotnet/samples/Concepts/Agents/Legacy_AgentAuthoring.cs index 062262fe8a8c..53276c75a24d 100644 --- a/dotnet/samples/Concepts/Agents/Legacy_AgentAuthoring.cs +++ b/dotnet/samples/Concepts/Agents/Legacy_AgentAuthoring.cs @@ -9,12 +9,6 @@ namespace Agents; /// public class Legacy_AgentAuthoring(ITestOutputHelper output) : BaseTest(output) { - /// - /// Specific model is required that supports agents and parallel function calling. - /// Currently this is limited to Open AI hosted services. - /// - private const string OpenAIFunctionEnabledModel = "gpt-4-1106-preview"; - // Track agents for clean-up private static readonly List s_agents = []; @@ -72,7 +66,7 @@ private static async Task CreateArticleGeneratorAsync() return Track( await new AgentBuilder() - .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) + .WithOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey) .WithInstructions("You write concise opinionated articles that are published online. Use an outline to generate an article with one section of prose for each top-level outline element. 
Each section is based on research with a maximum of 120 words.") .WithName("Article Author") .WithDescription("Author an article on a given topic.") @@ -87,7 +81,7 @@ private static async Task CreateOutlineGeneratorAsync() return Track( await new AgentBuilder() - .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) + .WithOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey) .WithInstructions("Produce an single-level outline (no child elements) based on the given topic with at most 3 sections.") .WithName("Outline Generator") .WithDescription("Generate an outline.") @@ -100,7 +94,7 @@ private static async Task CreateResearchGeneratorAsync() return Track( await new AgentBuilder() - .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) + .WithOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey) .WithInstructions("Provide insightful research that supports the given topic based on your knowledge of the outline topic.") .WithName("Researcher") .WithDescription("Author research summary.") diff --git a/dotnet/samples/Concepts/Agents/Legacy_AgentCharts.cs b/dotnet/samples/Concepts/Agents/Legacy_AgentCharts.cs index 877ba0971710..d40755101309 100644 --- a/dotnet/samples/Concepts/Agents/Legacy_AgentCharts.cs +++ b/dotnet/samples/Concepts/Agents/Legacy_AgentCharts.cs @@ -1,8 +1,10 @@ // Copyright (c) Microsoft. All rights reserved. using System.Diagnostics; -using Microsoft.SemanticKernel.Connectors.OpenAI; +using Azure.AI.OpenAI; using Microsoft.SemanticKernel.Experimental.Agents; +using OpenAI; +using OpenAI.Files; namespace Agents; @@ -12,28 +14,15 @@ namespace Agents; /// public sealed class Legacy_AgentCharts(ITestOutputHelper output) : BaseTest(output) { - /// - /// Specific model is required that supports agents and parallel function calling. - /// Currently this is limited to Open AI hosted services. 
- /// - private const string OpenAIFunctionEnabledModel = "gpt-4-1106-preview"; - - /// - /// Flag to force usage of OpenAI configuration if both - /// and are defined. - /// If 'false', Azure takes precedence. - /// - private new const bool ForceOpenAI = false; - /// /// Create a chart and retrieve by file_id. /// - [Fact(Skip = "Launches external processes")] + [Fact] public async Task CreateChartAsync() { Console.WriteLine("======== Using CodeInterpreter tool ========"); - var fileService = CreateFileService(); + FileClient fileClient = CreateFileClient(); var agent = await CreateAgentBuilder().WithCodeInterpreter().BuildAsync(); @@ -69,11 +58,11 @@ async Task InvokeAgentAsync(IAgentThread thread, string imageName, string questi { var filename = $"{imageName}.jpg"; var path = Path.Combine(Environment.CurrentDirectory, filename); - Console.WriteLine($"# {message.Role}: {message.Content}"); + var fileId = message.Content; + Console.WriteLine($"# {message.Role}: {fileId}"); Console.WriteLine($"# {message.Role}: {path}"); - var content = await fileService.GetFileContentAsync(message.Content); - await using var outputStream = File.OpenWrite(filename); - await outputStream.WriteAsync(content.Data!.Value); + BinaryData content = await fileClient.DownloadFileAsync(fileId); + File.WriteAllBytes(filename, content.ToArray()); Process.Start( new ProcessStartInfo { @@ -91,19 +80,23 @@ async Task InvokeAgentAsync(IAgentThread thread, string imageName, string questi } } - private static OpenAIFileService CreateFileService() + private FileClient CreateFileClient() + { - return - ForceOpenAI || string.IsNullOrEmpty(TestConfiguration.AzureOpenAI.Endpoint) ? - new OpenAIFileService(TestConfiguration.OpenAI.ApiKey) : - new OpenAIFileService(new Uri(TestConfiguration.AzureOpenAI.Endpoint), apiKey: TestConfiguration.AzureOpenAI.ApiKey); + OpenAIClient client = + this.ForceOpenAI || string.IsNullOrEmpty(TestConfiguration.AzureOpenAI.Endpoint) ? 
+ new OpenAIClient(TestConfiguration.OpenAI.ApiKey) : + new AzureOpenAIClient(new Uri(TestConfiguration.AzureOpenAI.Endpoint), TestConfiguration.AzureOpenAI.ApiKey); + + return client.GetFileClient(); } +#pragma warning restore CS0618 // Type or member is obsolete - private static AgentBuilder CreateAgentBuilder() + private AgentBuilder CreateAgentBuilder() { return - ForceOpenAI || string.IsNullOrEmpty(TestConfiguration.AzureOpenAI.Endpoint) ? - new AgentBuilder().WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) : + this.ForceOpenAI || string.IsNullOrEmpty(TestConfiguration.AzureOpenAI.Endpoint) ? + new AgentBuilder().WithOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey) : new AgentBuilder().WithAzureOpenAIChatCompletion(TestConfiguration.AzureOpenAI.Endpoint, TestConfiguration.AzureOpenAI.ChatDeploymentName, TestConfiguration.AzureOpenAI.ApiKey); } } diff --git a/dotnet/samples/Concepts/Agents/Legacy_AgentCollaboration.cs b/dotnet/samples/Concepts/Agents/Legacy_AgentCollaboration.cs index 53ae0c07662a..fa257d2764b3 100644 --- a/dotnet/samples/Concepts/Agents/Legacy_AgentCollaboration.cs +++ b/dotnet/samples/Concepts/Agents/Legacy_AgentCollaboration.cs @@ -9,17 +9,6 @@ namespace Agents; /// public class Legacy_AgentCollaboration(ITestOutputHelper output) : BaseTest(output) { - /// - /// Specific model is required that supports agents and function calling. - /// Currently this is limited to Open AI hosted services. - /// - private const string OpenAIFunctionEnabledModel = "gpt-4-turbo-preview"; - - /// - /// Set this to 'true' to target OpenAI instead of Azure OpenAI. 
- /// - private const bool UseOpenAI = false; - // Track agents for clean-up private static readonly List s_agents = []; @@ -29,8 +18,6 @@ public class Legacy_AgentCollaboration(ITestOutputHelper output) : BaseTest(outp [Fact(Skip = "This test take more than 5 minutes to execute")] public async Task RunCollaborationAsync() { - Console.WriteLine($"======== Example72:Collaboration:{(UseOpenAI ? "OpenAI" : "AzureAI")} ========"); - IAgentThread? thread = null; try { @@ -82,8 +69,6 @@ public async Task RunCollaborationAsync() [Fact(Skip = "This test take more than 2 minutes to execute")] public async Task RunAsPluginsAsync() { - Console.WriteLine($"======== Example72:AsPlugins:{(UseOpenAI ? "OpenAI" : "AzureAI")} ========"); - try { // Create copy-writer agent to generate ideas @@ -113,7 +98,7 @@ await CreateAgentBuilder() } } - private static async Task CreateCopyWriterAsync(IAgent? agent = null) + private async Task CreateCopyWriterAsync(IAgent? agent = null) { return Track( @@ -125,7 +110,7 @@ await CreateAgentBuilder() .BuildAsync()); } - private static async Task CreateArtDirectorAsync() + private async Task CreateArtDirectorAsync() { return Track( @@ -136,13 +121,13 @@ await CreateAgentBuilder() .BuildAsync()); } - private static AgentBuilder CreateAgentBuilder() + private AgentBuilder CreateAgentBuilder() { var builder = new AgentBuilder(); return - UseOpenAI ? - builder.WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) : + this.ForceOpenAI || string.IsNullOrEmpty(TestConfiguration.AzureOpenAI.Endpoint) ? 
+ builder.WithOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey) : builder.WithAzureOpenAIChatCompletion(TestConfiguration.AzureOpenAI.Endpoint, TestConfiguration.AzureOpenAI.ChatDeploymentName, TestConfiguration.AzureOpenAI.ApiKey); } diff --git a/dotnet/samples/Concepts/Agents/Legacy_AgentDelegation.cs b/dotnet/samples/Concepts/Agents/Legacy_AgentDelegation.cs index 86dacb9c256d..b4b0ed93199f 100644 --- a/dotnet/samples/Concepts/Agents/Legacy_AgentDelegation.cs +++ b/dotnet/samples/Concepts/Agents/Legacy_AgentDelegation.cs @@ -12,12 +12,6 @@ namespace Agents; /// public class Legacy_AgentDelegation(ITestOutputHelper output) : BaseTest(output) { - /// - /// Specific model is required that supports agents and function calling. - /// Currently this is limited to Open AI hosted services. - /// - private const string OpenAIFunctionEnabledModel = "gpt-3.5-turbo-1106"; - // Track agents for clean-up private static readonly List s_agents = []; @@ -27,8 +21,6 @@ public class Legacy_AgentDelegation(ITestOutputHelper output) : BaseTest(output) [Fact] public async Task RunAsync() { - Console.WriteLine("======== Example71_AgentDelegation ========"); - if (TestConfiguration.OpenAI.ApiKey is null) { Console.WriteLine("OpenAI apiKey not found. 
Skipping example."); @@ -39,11 +31,11 @@ public async Task RunAsync() try { - var plugin = KernelPluginFactory.CreateFromType(); + var plugin = KernelPluginFactory.CreateFromType(); var menuAgent = Track( await new AgentBuilder() - .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) + .WithOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey) .FromTemplate(EmbeddedResource.Read("Agents.ToolAgent.yaml")) .WithDescription("Answer questions about how the menu uses the tool.") .WithPlugin(plugin) @@ -52,14 +44,14 @@ public async Task RunAsync() var parrotAgent = Track( await new AgentBuilder() - .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) + .WithOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey) .FromTemplate(EmbeddedResource.Read("Agents.ParrotAgent.yaml")) .BuildAsync()); var toolAgent = Track( await new AgentBuilder() - .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) + .WithOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey) .FromTemplate(EmbeddedResource.Read("Agents.ToolAgent.yaml")) .WithPlugin(parrotAgent.AsPlugin()) .WithPlugin(menuAgent.AsPlugin()) diff --git a/dotnet/samples/Concepts/Agents/Legacy_AgentTools.cs b/dotnet/samples/Concepts/Agents/Legacy_AgentTools.cs index 66d93ecc88d9..00af8faab617 100644 --- a/dotnet/samples/Concepts/Agents/Legacy_AgentTools.cs +++ b/dotnet/samples/Concepts/Agents/Legacy_AgentTools.cs @@ -1,8 +1,9 @@ // Copyright (c) Microsoft. All rights reserved. 
-using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.OpenAI; +using Azure.AI.OpenAI; using Microsoft.SemanticKernel.Experimental.Agents; +using OpenAI; +using OpenAI.Files; using Resources; namespace Agents; @@ -13,21 +14,8 @@ namespace Agents; /// public sealed class Legacy_AgentTools(ITestOutputHelper output) : BaseTest(output) { - /// - /// Specific model is required that supports agents and parallel function calling. - /// Currently this is limited to Open AI hosted services. - /// - private const string OpenAIFunctionEnabledModel = "gpt-4-1106-preview"; - - /// - /// Flag to force usage of OpenAI configuration if both - /// and are defined. - /// If 'false', Azure takes precedence. - /// - /// - /// NOTE: Retrieval tools is not currently available on Azure. - /// - private new const bool ForceOpenAI = true; + /// + protected override bool ForceOpenAI => true; // Track agents for clean-up private readonly List _agents = []; @@ -79,12 +67,13 @@ public async Task RunRetrievalToolAsync() return; } - Kernel kernel = CreateFileEnabledKernel(); - var fileService = kernel.GetRequiredService(); - var result = - await fileService.UploadContentAsync( - new BinaryContent(await EmbeddedResource.ReadAllAsync("travelinfo.txt")!, "text/plain"), - new OpenAIFileUploadExecutionSettings("travelinfo.txt", OpenAIFilePurpose.Assistants)); + FileClient fileClient = CreateFileClient(); + + OpenAIFileInfo result = + await fileClient.UploadFileAsync( + new BinaryData(await EmbeddedResource.ReadAllAsync("travelinfo.txt")!), + "travelinfo.txt", + FileUploadPurpose.Assistants); var fileId = result.Id; Console.WriteLine($"! 
{fileId}"); @@ -110,7 +99,7 @@ await ChatAsync( } finally { - await Task.WhenAll(this._agents.Select(a => a.DeleteAsync()).Append(fileService.DeleteFileAsync(fileId))); + await Task.WhenAll(this._agents.Select(a => a.DeleteAsync()).Append(fileClient.DeleteFileAsync(fileId))); } } @@ -165,19 +154,21 @@ async Task InvokeAgentAsync(IAgent agent, string question) } } - private static Kernel CreateFileEnabledKernel() + private FileClient CreateFileClient() { - return - ForceOpenAI || string.IsNullOrEmpty(TestConfiguration.AzureOpenAI.Endpoint) ? - Kernel.CreateBuilder().AddOpenAIFiles(TestConfiguration.OpenAI.ApiKey).Build() : - Kernel.CreateBuilder().AddAzureOpenAIFiles(TestConfiguration.AzureOpenAI.Endpoint, TestConfiguration.AzureOpenAI.ApiKey).Build(); + OpenAIClient client = + this.ForceOpenAI || string.IsNullOrEmpty(TestConfiguration.AzureOpenAI.Endpoint) ? + new OpenAIClient(TestConfiguration.OpenAI.ApiKey) : + new AzureOpenAIClient(new Uri(TestConfiguration.AzureOpenAI.Endpoint), TestConfiguration.AzureOpenAI.ApiKey); + + return client.GetFileClient(); } - private static AgentBuilder CreateAgentBuilder() + private AgentBuilder CreateAgentBuilder() { return - ForceOpenAI || string.IsNullOrEmpty(TestConfiguration.AzureOpenAI.Endpoint) ? - new AgentBuilder().WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) : + this.ForceOpenAI || string.IsNullOrEmpty(TestConfiguration.AzureOpenAI.Endpoint) ? 
+ new AgentBuilder().WithOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey) : new AgentBuilder().WithAzureOpenAIChatCompletion(TestConfiguration.AzureOpenAI.Endpoint, TestConfiguration.AzureOpenAI.ChatDeploymentName, TestConfiguration.AzureOpenAI.ApiKey); } diff --git a/dotnet/samples/Concepts/Agents/Legacy_Agents.cs b/dotnet/samples/Concepts/Agents/Legacy_Agents.cs index 5af10987bb3a..31cc4926392b 100644 --- a/dotnet/samples/Concepts/Agents/Legacy_Agents.cs +++ b/dotnet/samples/Concepts/Agents/Legacy_Agents.cs @@ -13,19 +13,6 @@ namespace Agents; /// public class Legacy_Agents(ITestOutputHelper output) : BaseTest(output) { - /// - /// Specific model is required that supports agents and function calling. - /// Currently this is limited to Open AI hosted services. - /// - private const string OpenAIFunctionEnabledModel = "gpt-3.5-turbo-1106"; - - /// - /// Flag to force usage of OpenAI configuration if both - /// and are defined. - /// If 'false', Azure takes precedence. - /// - private new const bool ForceOpenAI = false; - /// /// Chat using the "Parrot" agent. /// Tools/functions: None @@ -61,18 +48,12 @@ public async Task RunWithMethodFunctionsAsync() await ChatAsync( "Agents.ToolAgent.yaml", // Defined under ./Resources/Agents plugin, - arguments: new() { { LegacyMenuPlugin.CorrelationIdArgument, 3.141592653 } }, + arguments: null, "Hello", "What is the special soup?", "What is the special drink?", "Do you have enough soup for 5 orders?", "Thank you!"); - - Console.WriteLine("\nCorrelation Ids:"); - foreach (string correlationId in menuApi.CorrelationIds) - { - Console.WriteLine($"- {correlationId}"); - } } /// @@ -114,7 +95,7 @@ public async Task RunAsFunctionAsync() // Create parrot agent, same as the other cases. 
var agent = await new AgentBuilder() - .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) + .WithOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey) .FromTemplate(EmbeddedResource.Read("Agents.ParrotAgent.yaml")) .BuildAsync(); @@ -187,11 +168,11 @@ await Task.WhenAll( } } - private static AgentBuilder CreateAgentBuilder() + private AgentBuilder CreateAgentBuilder() { return - ForceOpenAI || string.IsNullOrEmpty(TestConfiguration.AzureOpenAI.Endpoint) ? - new AgentBuilder().WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) : + this.ForceOpenAI || string.IsNullOrEmpty(TestConfiguration.AzureOpenAI.Endpoint) ? + new AgentBuilder().WithOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey) : new AgentBuilder().WithAzureOpenAIChatCompletion(TestConfiguration.AzureOpenAI.Endpoint, TestConfiguration.AzureOpenAI.ChatDeploymentName, TestConfiguration.AzureOpenAI.ApiKey); } } diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs b/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs index d3a894dd6c8e..21b19c1d342c 100644 --- a/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs +++ b/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs @@ -10,7 +10,7 @@ namespace Agents; /// Demonstrate that two different agent types are able to participate in the same conversation. /// In this case a and participate. 
/// -public class MixedChat_Agents(ITestOutputHelper output) : BaseTest(output) +public class MixedChat_Agents(ITestOutputHelper output) : BaseAgentsTest(output) { private const string ReviewerName = "ArtDirector"; private const string ReviewerInstructions = @@ -47,12 +47,12 @@ public async Task ChatWithOpenAIAssistantAgentAndChatCompletionAgentAsync() OpenAIAssistantAgent agentWriter = await OpenAIAssistantAgent.CreateAsync( kernel: new(), - config: new(this.ApiKey, this.Endpoint), - definition: new() + clientProvider: this.GetClientProvider(), + definition: new(this.Model) { Instructions = CopyWriterInstructions, Name = CopyWriterName, - ModelId = this.Model, + Metadata = AssistantSampleMetadata, }); // Create a chat for agent interaction. @@ -76,16 +76,16 @@ await OpenAIAssistantAgent.CreateAsync( }; // Invoke chat and display messages. - string input = "concept: maps made out of egg cartons."; - chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input)); - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); + ChatMessageContent input = new(AuthorRole.User, "concept: maps made out of egg cartons."); + chat.AddChatMessage(input); + this.WriteAgentChatMessage(input); - await foreach (ChatMessageContent content in chat.InvokeAsync()) + await foreach (ChatMessageContent response in chat.InvokeAsync()) { - Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'"); + this.WriteAgentChatMessage(response); } - Console.WriteLine($"# IS COMPLETE: {chat.IsComplete}"); + Console.WriteLine($"\n[IS COMPLETED: {chat.IsComplete}]"); } private sealed class ApprovalTerminationStrategy : TerminationStrategy diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Files.cs b/dotnet/samples/Concepts/Agents/MixedChat_Files.cs index 5d96de68da72..0219c25f7712 100644 --- a/dotnet/samples/Concepts/Agents/MixedChat_Files.cs +++ b/dotnet/samples/Concepts/Agents/MixedChat_Files.cs @@ -1,10 +1,9 @@ // Copyright (c) Microsoft. All rights reserved. 
-using System.Text; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; +using OpenAI.Files; using Resources; namespace Agents; @@ -13,24 +12,22 @@ namespace Agents; /// Demonstrate agent interacts with /// when it produces file output. /// -public class MixedChat_Files(ITestOutputHelper output) : BaseTest(output) +public class MixedChat_Files(ITestOutputHelper output) : BaseAgentsTest(output) { - /// - /// Target OpenAI services. - /// - protected override bool ForceOpenAI => true; - private const string SummaryInstructions = "Summarize the entire conversation for the user in natural language."; [Fact] public async Task AnalyzeFileAndGenerateReportAsync() { - OpenAIFileService fileService = new(TestConfiguration.OpenAI.ApiKey); + OpenAIClientProvider provider = this.GetClientProvider(); + + FileClient fileClient = provider.Client.GetFileClient(); - OpenAIFileReference uploadFile = - await fileService.UploadContentAsync( - new BinaryContent(await EmbeddedResource.ReadAllAsync("30-user-context.txt"), mimeType: "text/plain"), - new OpenAIFileUploadExecutionSettings("30-user-context.txt", OpenAIFilePurpose.Assistants)); + OpenAIFileInfo uploadFile = + await fileClient.UploadFileAsync( + new BinaryData(await EmbeddedResource.ReadAllAsync("30-user-context.txt")), + "30-user-context.txt", + FileUploadPurpose.Assistants); Console.WriteLine(this.ApiKey); @@ -38,12 +35,12 @@ await fileService.UploadContentAsync( OpenAIAssistantAgent analystAgent = await OpenAIAssistantAgent.CreateAsync( kernel: new(), - config: new(this.ApiKey, this.Endpoint), - new() + provider, + new(this.Model) { - EnableCodeInterpreter = true, // Enable code-interpreter - ModelId = this.Model, - FileIds = [uploadFile.Id] // Associate uploaded file with assistant + EnableCodeInterpreter = true, + CodeInterpreterFileIds = [uploadFile.Id], // 
Associate uploaded file with assistant code-interpreter + Metadata = AssistantSampleMetadata, }); ChatCompletionAgent summaryAgent = @@ -70,7 +67,7 @@ Create a tab delimited file report of the ordered (descending) frequency distrib finally { await analystAgent.DeleteAsync(); - await fileService.DeleteFileAsync(uploadFile.Id); + await fileClient.DeleteFileAsync(uploadFile.Id); } // Local function to invoke agent and display the conversation messages. @@ -78,21 +75,15 @@ async Task InvokeAgentAsync(Agent agent, string? input = null) { if (!string.IsNullOrWhiteSpace(input)) { + ChatMessageContent message = new(AuthorRole.User, input); chat.AddChatMessage(new(AuthorRole.User, input)); - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); + this.WriteAgentChatMessage(message); } - await foreach (ChatMessageContent content in chat.InvokeAsync(agent)) + await foreach (ChatMessageContent response in chat.InvokeAsync(agent)) { - Console.WriteLine($"\n# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'"); - - foreach (AnnotationContent annotation in content.Items.OfType()) - { - Console.WriteLine($"\t* '{annotation.Quote}' => {annotation.FileId}"); - BinaryContent fileContent = await fileService.GetFileContentAsync(annotation.FileId!); - byte[] byteContent = fileContent.Data?.ToArray() ?? 
[]; - Console.WriteLine($"\n{Encoding.Default.GetString(byteContent)}"); - } + this.WriteAgentChatMessage(response); + await this.DownloadResponseContentAsync(fileClient, response); } } } diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Images.cs b/dotnet/samples/Concepts/Agents/MixedChat_Images.cs index 385577573ac6..437643e25574 100644 --- a/dotnet/samples/Concepts/Agents/MixedChat_Images.cs +++ b/dotnet/samples/Concepts/Agents/MixedChat_Images.cs @@ -3,7 +3,7 @@ using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; +using OpenAI.Files; namespace Agents; @@ -11,13 +11,8 @@ namespace Agents; /// Demonstrate agent interacts with /// when it produces image output. /// -public class MixedChat_Images(ITestOutputHelper output) : BaseTest(output) +public class MixedChat_Images(ITestOutputHelper output) : BaseAgentsTest(output) { - /// - /// Target OpenAI services. 
- /// - protected override bool ForceOpenAI => true; - private const string AnalystName = "Analyst"; private const string AnalystInstructions = "Create charts as requested without explanation."; @@ -27,19 +22,21 @@ public class MixedChat_Images(ITestOutputHelper output) : BaseTest(output) [Fact] public async Task AnalyzeDataAndGenerateChartAsync() { - OpenAIFileService fileService = new(TestConfiguration.OpenAI.ApiKey); + OpenAIClientProvider provider = this.GetClientProvider(); + + FileClient fileClient = provider.Client.GetFileClient(); // Define the agents OpenAIAssistantAgent analystAgent = await OpenAIAssistantAgent.CreateAsync( kernel: new(), - config: new(this.ApiKey, this.Endpoint), - new() + provider, + new(this.Model) { Instructions = AnalystInstructions, Name = AnalystName, EnableCodeInterpreter = true, - ModelId = this.Model, + Metadata = AssistantSampleMetadata, }); ChatCompletionAgent summaryAgent = @@ -86,26 +83,15 @@ async Task InvokeAgentAsync(Agent agent, string? input = null) { if (!string.IsNullOrWhiteSpace(input)) { - chat.AddChatMessage(new(AuthorRole.User, input)); - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); + ChatMessageContent message = new(AuthorRole.User, input); + chat.AddChatMessage(message); + this.WriteAgentChatMessage(message); } - await foreach (ChatMessageContent message in chat.InvokeAsync(agent)) + await foreach (ChatMessageContent response in chat.InvokeAsync(agent)) { - if (!string.IsNullOrWhiteSpace(message.Content)) - { - Console.WriteLine($"\n# {message.Role} - {message.AuthorName ?? "*"}: '{message.Content}'"); - } - - foreach (FileReferenceContent fileReference in message.Items.OfType()) - { - Console.WriteLine($"\t* Generated image - @{fileReference.FileId}"); - BinaryContent fileContent = await fileService.GetFileContentAsync(fileReference.FileId!); - byte[] byteContent = fileContent.Data?.ToArray() ?? 
[]; - string filePath = Path.ChangeExtension(Path.GetTempFileName(), ".png"); - await File.WriteAllBytesAsync($"{filePath}.png", byteContent); - Console.WriteLine($"\t* Local path - {filePath}"); - } + this.WriteAgentChatMessage(response); + await this.DownloadResponseImageAsync(fileClient, response); } } } diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Reset.cs b/dotnet/samples/Concepts/Agents/MixedChat_Reset.cs index 92aa8a9ce9d4..f9afcc55b7f5 100644 --- a/dotnet/samples/Concepts/Agents/MixedChat_Reset.cs +++ b/dotnet/samples/Concepts/Agents/MixedChat_Reset.cs @@ -3,14 +3,13 @@ using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; namespace Agents; /// /// Demonstrate the use of . /// -public class MixedChat_Reset(ITestOutputHelper output) : BaseTest(output) +public class MixedChat_Reset(ITestOutputHelper output) : BaseAgentsTest(output) { private const string AgentInstructions = """ @@ -21,18 +20,17 @@ The user may either provide information or query on information previously provi [Fact] public async Task ResetChatAsync() { - OpenAIFileService fileService = new(TestConfiguration.OpenAI.ApiKey); + OpenAIClientProvider provider = this.GetClientProvider(); // Define the agents OpenAIAssistantAgent assistantAgent = await OpenAIAssistantAgent.CreateAsync( kernel: new(), - config: new(this.ApiKey, this.Endpoint), - new() + provider, + new(this.Model) { Name = nameof(OpenAIAssistantAgent), Instructions = AgentInstructions, - ModelId = this.Model, }); ChatCompletionAgent chatAgent = @@ -74,16 +72,14 @@ async Task InvokeAgentAsync(Agent agent, string? 
input = null) { if (!string.IsNullOrWhiteSpace(input)) { - chat.AddChatMessage(new(AuthorRole.User, input)); - Console.WriteLine($"\n# {AuthorRole.User}: '{input}'"); + ChatMessageContent message = new(AuthorRole.User, input); + chat.AddChatMessage(message); + this.WriteAgentChatMessage(message); } - await foreach (ChatMessageContent message in chat.InvokeAsync(agent)) + await foreach (ChatMessageContent response in chat.InvokeAsync(agent)) { - if (!string.IsNullOrWhiteSpace(message.Content)) - { - Console.WriteLine($"\n# {message.Role} - {message.AuthorName ?? "*"}: '{message.Content}'"); - } + this.WriteAgentChatMessage(response); } } } diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs index ef5ba80154fa..cd81f7c4d187 100644 --- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs +++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs @@ -3,6 +3,7 @@ using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI.Files; namespace Agents; @@ -10,30 +11,29 @@ namespace Agents; /// Demonstrate using code-interpreter with to /// produce image content displays the requested charts. /// -public class OpenAIAssistant_ChartMaker(ITestOutputHelper output) : BaseTest(output) +public class OpenAIAssistant_ChartMaker(ITestOutputHelper output) : BaseAgentsTest(output) { - /// - /// Target Open AI services. 
- /// - protected override bool ForceOpenAI => true; - private const string AgentName = "ChartMaker"; private const string AgentInstructions = "Create charts as requested without explanation."; [Fact] public async Task GenerateChartWithOpenAIAssistantAgentAsync() { + OpenAIClientProvider provider = this.GetClientProvider(); + + FileClient fileClient = provider.Client.GetFileClient(); + // Define the agent OpenAIAssistantAgent agent = await OpenAIAssistantAgent.CreateAsync( kernel: new(), - config: new(this.ApiKey, this.Endpoint), - new() + provider, + new(this.Model) { Instructions = AgentInstructions, Name = AgentName, EnableCodeInterpreter = true, - ModelId = this.Model, + Metadata = AssistantSampleMetadata, }); // Create a chat for agent interaction. @@ -55,6 +55,7 @@ Sum 426 1622 856 2904 """); await InvokeAgentAsync("Can you regenerate this same chart using the category names as the bar colors?"); + await InvokeAgentAsync("Perfect, can you regenerate this as a line chart?"); } finally { @@ -64,21 +65,14 @@ Sum 426 1622 856 2904 // Local function to invoke agent and display the conversation messages. async Task InvokeAgentAsync(string input) { - chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input)); - - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); + ChatMessageContent message = new(AuthorRole.User, input); + chat.AddChatMessage(new(AuthorRole.User, input)); + this.WriteAgentChatMessage(message); - await foreach (ChatMessageContent message in chat.InvokeAsync(agent)) + await foreach (ChatMessageContent response in chat.InvokeAsync(agent)) { - if (!string.IsNullOrWhiteSpace(message.Content)) - { - Console.WriteLine($"# {message.Role} - {message.AuthorName ?? "*"}: '{message.Content}'"); - } - - foreach (FileReferenceContent fileReference in message.Items.OfType()) - { - Console.WriteLine($"# {message.Role} - {message.AuthorName ?? 
"*"}: @{fileReference.FileId}"); - } + this.WriteAgentChatMessage(response); + await this.DownloadResponseImageAsync(fileClient, response); } } } diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs index 8e64006ee9d3..dc4af2ad2743 100644 --- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs +++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs @@ -1,10 +1,9 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Text; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; +using OpenAI.Files; using Resources; namespace Agents; @@ -12,35 +11,31 @@ namespace Agents; /// /// Demonstrate using code-interpreter to manipulate and generate csv files with . /// -public class OpenAIAssistant_FileManipulation(ITestOutputHelper output) : BaseTest(output) +public class OpenAIAssistant_FileManipulation(ITestOutputHelper output) : BaseAgentsTest(output) { - /// - /// Target OpenAI services. 
- /// - protected override bool ForceOpenAI => true; - [Fact] public async Task AnalyzeCSVFileUsingOpenAIAssistantAgentAsync() { - OpenAIFileService fileService = new(TestConfiguration.OpenAI.ApiKey); + OpenAIClientProvider provider = this.GetClientProvider(); - OpenAIFileReference uploadFile = - await fileService.UploadContentAsync( - new BinaryContent(await EmbeddedResource.ReadAllAsync("sales.csv"), mimeType: "text/plain"), - new OpenAIFileUploadExecutionSettings("sales.csv", OpenAIFilePurpose.Assistants)); + FileClient fileClient = provider.Client.GetFileClient(); - Console.WriteLine(this.ApiKey); + OpenAIFileInfo uploadFile = + await fileClient.UploadFileAsync( + new BinaryData(await EmbeddedResource.ReadAllAsync("sales.csv")!), + "sales.csv", + FileUploadPurpose.Assistants); // Define the agent OpenAIAssistantAgent agent = await OpenAIAssistantAgent.CreateAsync( kernel: new(), - config: new(this.ApiKey, this.Endpoint), - new() + provider, + new(this.Model) { - EnableCodeInterpreter = true, // Enable code-interpreter - ModelId = this.Model, - FileIds = [uploadFile.Id] // Associate uploaded file + EnableCodeInterpreter = true, + CodeInterpreterFileIds = [uploadFile.Id], + Metadata = AssistantSampleMetadata, }); // Create a chat for agent interaction. @@ -56,27 +51,20 @@ await OpenAIAssistantAgent.CreateAsync( finally { await agent.DeleteAsync(); - await fileService.DeleteFileAsync(uploadFile.Id); + await fileClient.DeleteFileAsync(uploadFile.Id); } // Local function to invoke agent and display the conversation messages. 
async Task InvokeAgentAsync(string input) { - chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input)); - - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); + ChatMessageContent message = new(AuthorRole.User, input); + chat.AddChatMessage(new(AuthorRole.User, input)); + this.WriteAgentChatMessage(message); - await foreach (ChatMessageContent content in chat.InvokeAsync(agent)) + await foreach (ChatMessageContent response in chat.InvokeAsync(agent)) { - Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'"); - - foreach (AnnotationContent annotation in content.Items.OfType()) - { - Console.WriteLine($"\n* '{annotation.Quote}' => {annotation.FileId}"); - BinaryContent fileContent = await fileService.GetFileContentAsync(annotation.FileId!); - byte[] byteContent = fileContent.Data?.ToArray() ?? []; - Console.WriteLine(Encoding.Default.GetString(byteContent)); - } + this.WriteAgentChatMessage(response); + await this.DownloadResponseContentAsync(fileClient, response); } } } diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileService.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileService.cs index 7537f53da726..a8f31622c753 100644 --- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileService.cs +++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileService.cs @@ -18,6 +18,7 @@ public class OpenAIAssistant_FileService(ITestOutputHelper output) : BaseTest(ou [Fact] public async Task UploadAndRetrieveFilesAsync() { +#pragma warning disable CS0618 // Type or member is obsolete OpenAIFileService fileService = new(TestConfiguration.OpenAI.ApiKey); BinaryContent[] files = [ @@ -27,7 +28,7 @@ public async Task UploadAndRetrieveFilesAsync() new BinaryContent(data: await EmbeddedResource.ReadAllAsync("travelinfo.txt"), mimeType: "text/plain") { InnerContent = "travelinfo.txt" } ]; - var fileContents = new Dictionary(); + Dictionary fileContents = new(); foreach (BinaryContent file in files) { 
OpenAIFileReference result = await fileService.UploadContentAsync(file, new(file.InnerContent!.ToString()!, OpenAIFilePurpose.FineTune)); @@ -48,7 +49,7 @@ public async Task UploadAndRetrieveFilesAsync() string? fileName = fileContents[fileReference.Id].InnerContent!.ToString(); ReadOnlyMemory data = content.Data ?? new(); - var typedContent = mimeType switch + BinaryContent typedContent = mimeType switch { "image/jpeg" => new ImageContent(data, mimeType) { Uri = content.Uri, InnerContent = fileName, Metadata = content.Metadata }, "audio/wav" => new AudioContent(data, mimeType) { Uri = content.Uri, InnerContent = fileName, Metadata = content.Metadata }, @@ -62,5 +63,7 @@ public async Task UploadAndRetrieveFilesAsync() // Delete the test file remotely await fileService.DeleteFileAsync(fileReference.Id); } + +#pragma warning restore CS0618 // Type or member is obsolete } } diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_Retrieval.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_Retrieval.cs deleted file mode 100644 index 6f30b6974ff7..000000000000 --- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_Retrieval.cs +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Agents; -using Microsoft.SemanticKernel.Agents.OpenAI; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Resources; - -namespace Agents; - -/// -/// Demonstrate using retrieval on . -/// -public class OpenAIAssistant_Retrieval(ITestOutputHelper output) : BaseTest(output) -{ - /// - /// Retrieval tool not supported on Azure OpenAI. 
- /// - protected override bool ForceOpenAI => true; - - [Fact] - public async Task UseRetrievalToolWithOpenAIAssistantAgentAsync() - { - OpenAIFileService fileService = new(TestConfiguration.OpenAI.ApiKey); - - OpenAIFileReference uploadFile = - await fileService.UploadContentAsync(new BinaryContent(await EmbeddedResource.ReadAllAsync("travelinfo.txt")!, "text/plain"), - new OpenAIFileUploadExecutionSettings("travelinfo.txt", OpenAIFilePurpose.Assistants)); - - // Define the agent - OpenAIAssistantAgent agent = - await OpenAIAssistantAgent.CreateAsync( - kernel: new(), - config: new(this.ApiKey, this.Endpoint), - new() - { - EnableRetrieval = true, // Enable retrieval - ModelId = this.Model, - FileIds = [uploadFile.Id] // Associate uploaded file - }); - - // Create a chat for agent interaction. - AgentGroupChat chat = new(); - - // Respond to user input - try - { - await InvokeAgentAsync("Where did sam go?"); - await InvokeAgentAsync("When does the flight leave Seattle?"); - await InvokeAgentAsync("What is the hotel contact info at the destination?"); - } - finally - { - await agent.DeleteAsync(); - } - - // Local function to invoke agent and display the conversation messages. - async Task InvokeAgentAsync(string input) - { - chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input)); - - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); - - await foreach (ChatMessageContent content in chat.InvokeAsync(agent)) - { - Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'"); - } - } - } -} diff --git a/dotnet/samples/Concepts/ChatCompletion/AzureOpenAIWithData_ChatCompletion.cs b/dotnet/samples/Concepts/ChatCompletion/AzureOpenAIWithData_ChatCompletion.cs index dcfdf7b511f0..39ce395b27b7 100644 --- a/dotnet/samples/Concepts/ChatCompletion/AzureOpenAIWithData_ChatCompletion.cs +++ b/dotnet/samples/Concepts/ChatCompletion/AzureOpenAIWithData_ChatCompletion.cs @@ -1,9 +1,9 @@ // Copyright (c) Microsoft. All rights reserved. 
-using Azure.AI.OpenAI; +using Azure.AI.OpenAI.Chat; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; using xRetry; namespace ChatCompletion; @@ -47,8 +47,8 @@ public async Task ExampleWithChatCompletionAsync() chatHistory.AddUserMessage(ask); // Chat Completion example - var chatExtensionsOptions = GetAzureChatExtensionsOptions(); - var promptExecutionSettings = new OpenAIPromptExecutionSettings { AzureChatExtensionsOptions = chatExtensionsOptions }; + var dataSource = GetAzureSearchDataSource(); + var promptExecutionSettings = new AzureOpenAIPromptExecutionSettings { AzureChatDataSource = dataSource }; var chatCompletion = kernel.GetRequiredService(); @@ -98,8 +98,8 @@ public async Task ExampleWithKernelAsync() var function = kernel.CreateFunctionFromPrompt("Question: {{$input}}"); - var chatExtensionsOptions = GetAzureChatExtensionsOptions(); - var promptExecutionSettings = new OpenAIPromptExecutionSettings { AzureChatExtensionsOptions = chatExtensionsOptions }; + var dataSource = GetAzureSearchDataSource(); + var promptExecutionSettings = new AzureOpenAIPromptExecutionSettings { AzureChatDataSource = dataSource }; // First question without previous context based on uploaded content. var response = await kernel.InvokeAsync(function, new(promptExecutionSettings) { ["input"] = ask }); @@ -125,20 +125,15 @@ public async Task ExampleWithKernelAsync() } /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the class. 
/// - private static AzureChatExtensionsOptions GetAzureChatExtensionsOptions() + private static AzureSearchChatDataSource GetAzureSearchDataSource() { - var azureSearchExtensionConfiguration = new AzureSearchChatExtensionConfiguration + return new AzureSearchChatDataSource { - SearchEndpoint = new Uri(TestConfiguration.AzureAISearch.Endpoint), - Authentication = new OnYourDataApiKeyAuthenticationOptions(TestConfiguration.AzureAISearch.ApiKey), + Endpoint = new Uri(TestConfiguration.AzureAISearch.Endpoint), + Authentication = DataSourceAuthentication.FromApiKey(TestConfiguration.AzureAISearch.ApiKey), IndexName = TestConfiguration.AzureAISearch.IndexName }; - - return new AzureChatExtensionsOptions - { - Extensions = { azureSearchExtensionConfiguration } - }; } } diff --git a/dotnet/samples/Concepts/ChatCompletion/ChatHistoryAuthorName.cs b/dotnet/samples/Concepts/ChatCompletion/ChatHistoryAuthorName.cs index 05346974da2f..2d08c507aa4c 100644 --- a/dotnet/samples/Concepts/ChatCompletion/ChatHistoryAuthorName.cs +++ b/dotnet/samples/Concepts/ChatCompletion/ChatHistoryAuthorName.cs @@ -2,6 +2,7 @@ using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; using Microsoft.SemanticKernel.Connectors.OpenAI; namespace ChatCompletion; diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs index 46aadfc243b0..42164d3fe8dc 100644 --- a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs +++ b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs @@ -2,6 +2,7 @@ using Azure.Identity; using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; using Microsoft.SemanticKernel.Connectors.OpenAI; namespace ChatCompletion; diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionMultipleChoices.cs 
b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionMultipleChoices.cs deleted file mode 100644 index 9534cac09a63..000000000000 --- a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionMultipleChoices.cs +++ /dev/null @@ -1,133 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; - -namespace ChatCompletion; - -/// -/// The following example shows how to use Semantic Kernel with multiple chat completion results. -/// -public class OpenAI_ChatCompletionMultipleChoices(ITestOutputHelper output) : BaseTest(output) -{ - /// - /// Example with multiple chat completion results using . - /// - [Fact] - public async Task MultipleChatCompletionResultsUsingKernelAsync() - { - var kernel = Kernel - .CreateBuilder() - .AddOpenAIChatCompletion( - modelId: TestConfiguration.OpenAI.ChatModelId, - apiKey: TestConfiguration.OpenAI.ApiKey) - .Build(); - - // Execution settings with configured ResultsPerPrompt property. - var executionSettings = new OpenAIPromptExecutionSettings { MaxTokens = 200, ResultsPerPrompt = 3 }; - - var contents = await kernel.InvokePromptAsync>("Write a paragraph about why AI is awesome", new(executionSettings)); - - foreach (var content in contents!) - { - Console.Write(content.ToString() ?? string.Empty); - Console.WriteLine("\n-------------\n"); - } - } - - /// - /// Example with multiple chat completion results using . - /// - [Fact] - public async Task MultipleChatCompletionResultsUsingChatCompletionServiceAsync() - { - var kernel = Kernel - .CreateBuilder() - .AddOpenAIChatCompletion( - modelId: TestConfiguration.OpenAI.ChatModelId, - apiKey: TestConfiguration.OpenAI.ApiKey) - .Build(); - - // Execution settings with configured ResultsPerPrompt property. 
- var executionSettings = new OpenAIPromptExecutionSettings { MaxTokens = 200, ResultsPerPrompt = 3 }; - - var chatHistory = new ChatHistory(); - chatHistory.AddUserMessage("Write a paragraph about why AI is awesome"); - - var chatCompletionService = kernel.GetRequiredService(); - - foreach (var chatMessageContent in await chatCompletionService.GetChatMessageContentsAsync(chatHistory, executionSettings)) - { - Console.Write(chatMessageContent.Content ?? string.Empty); - Console.WriteLine("\n-------------\n"); - } - } - - /// - /// This example shows how to handle multiple results in case if prompt template contains a call to another prompt function. - /// is used for result selection. - /// - [Fact] - public async Task MultipleChatCompletionResultsInPromptTemplateAsync() - { - var kernel = Kernel - .CreateBuilder() - .AddOpenAIChatCompletion( - modelId: TestConfiguration.OpenAI.ChatModelId, - apiKey: TestConfiguration.OpenAI.ApiKey) - .Build(); - - var executionSettings = new OpenAIPromptExecutionSettings { MaxTokens = 200, ResultsPerPrompt = 3 }; - - // Initializing a function with execution settings for multiple results. - // We ask AI to write one paragraph, but in execution settings we specified that we want 3 different results for this request. - var function = KernelFunctionFactory.CreateFromPrompt("Write a paragraph about why AI is awesome", executionSettings, "GetParagraph"); - var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function]); - - kernel.Plugins.Add(plugin); - - // Add function result selection filter. - kernel.FunctionInvocationFilters.Add(new FunctionResultSelectionFilter(this.Output)); - - // Inside our main request, we call MyPlugin.GetParagraph function for text summarization. - // Taking into account that MyPlugin.GetParagraph function produces 3 results, for text summarization we need to choose only one of them. 
- // Registered filter will be invoked during execution, which will select and return only 1 result, and this result will be inserted in our main request for summarization. - var result = await kernel.InvokePromptAsync("Summarize this text: {{MyPlugin.GetParagraph}}"); - - // It's possible to check what prompt was rendered for our main request. - Console.WriteLine($"Rendered prompt: '{result.RenderedPrompt}'"); - - // Output: - // Rendered prompt: 'Summarize this text: AI is awesome because...' - } - - /// - /// Example of filter which is responsible for result selection in case if some function produces multiple results. - /// - private sealed class FunctionResultSelectionFilter(ITestOutputHelper output) : IFunctionInvocationFilter - { - public async Task OnFunctionInvocationAsync(FunctionInvocationContext context, Func next) - { - await next(context); - - // Selection logic for function which is expected to produce multiple results. - if (context.Function.Name == "GetParagraph") - { - // Get multiple results from function invocation - var contents = context.Result.GetValue>()!; - - output.WriteLine("Multiple results:"); - - foreach (var content in contents) - { - output.WriteLine(content.ToString()); - } - - // Select first result for correct prompt rendering - var selectedContent = contents[0]; - context.Result = new FunctionResult(context.Function, selectedContent, context.Kernel.Culture, selectedContent.Metadata); - } - } - } -} diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs index 4836dcf03d9f..bd1285e29af3 100644 --- a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs +++ b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs @@ -2,6 +2,7 @@ using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; using 
Microsoft.SemanticKernel.Connectors.OpenAI; namespace ChatCompletion; diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreamingMultipleChoices.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreamingMultipleChoices.cs deleted file mode 100644 index 6a23a43ae9f8..000000000000 --- a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreamingMultipleChoices.cs +++ /dev/null @@ -1,114 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; - -namespace ChatCompletion; - -// The following example shows how to use Semantic Kernel with multiple streaming chat completion results. -public class OpenAI_ChatCompletionStreamingMultipleChoices(ITestOutputHelper output) : BaseTest(output) -{ - [Fact] - public Task AzureOpenAIMultiStreamingChatCompletionAsync() - { - Console.WriteLine("======== Azure OpenAI - Multiple Chat Completions - Raw Streaming ========"); - - AzureOpenAIChatCompletionService chatCompletionService = new( - deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, - endpoint: TestConfiguration.AzureOpenAI.Endpoint, - apiKey: TestConfiguration.AzureOpenAI.ApiKey, - modelId: TestConfiguration.AzureOpenAI.ChatModelId); - - return StreamingChatCompletionAsync(chatCompletionService, 3); - } - - [Fact] - public Task OpenAIMultiStreamingChatCompletionAsync() - { - Console.WriteLine("======== OpenAI - Multiple Chat Completions - Raw Streaming ========"); - - OpenAIChatCompletionService chatCompletionService = new( - modelId: TestConfiguration.OpenAI.ChatModelId, - apiKey: TestConfiguration.OpenAI.ApiKey); - - return StreamingChatCompletionAsync(chatCompletionService, 3); - } - - /// - /// Streams the results of a chat completion request to the console. 
- /// - /// Chat completion service to use - /// Number of results to get for each chat completion request - private async Task StreamingChatCompletionAsync(IChatCompletionService chatCompletionService, - int numResultsPerPrompt) - { - var executionSettings = new OpenAIPromptExecutionSettings() - { - MaxTokens = 200, - FrequencyPenalty = 0, - PresencePenalty = 0, - Temperature = 1, - TopP = 0.5, - ResultsPerPrompt = numResultsPerPrompt - }; - - var consoleLinesPerResult = 10; - - // Uncomment this if you want to use a console app to display the results - // ClearDisplayByAddingEmptyLines(); - - var prompt = "Hi, I'm looking for 5 random title names for sci-fi books"; - - await ProcessStreamAsyncEnumerableAsync(chatCompletionService, prompt, executionSettings, consoleLinesPerResult); - - Console.WriteLine(); - - // Set cursor position to after displayed results - // Console.SetCursorPosition(0, executionSettings.ResultsPerPrompt * consoleLinesPerResult); - - Console.WriteLine(); - } - - /// - /// Does the actual streaming and display of the chat completion. - /// - private async Task ProcessStreamAsyncEnumerableAsync(IChatCompletionService chatCompletionService, string prompt, - OpenAIPromptExecutionSettings executionSettings, int consoleLinesPerResult) - { - var messagesPerChoice = new Dictionary(); - var chatHistory = new ChatHistory(prompt); - - // For each chat completion update - await foreach (StreamingChatMessageContent chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory, executionSettings)) - { - // Set cursor position to the beginning of where this choice (i.e. this result of - // a single multi-result request) is to be displayed. - // Console.SetCursorPosition(0, chatUpdate.ChoiceIndex * consoleLinesPerResult + 1); - - // The first time around, start choice text with role information - if (!messagesPerChoice.ContainsKey(chatUpdate.ChoiceIndex)) - { - messagesPerChoice[chatUpdate.ChoiceIndex] = $"Role: {chatUpdate.Role ?? 
new AuthorRole()}\n"; - Console.Write($"Choice index: {chatUpdate.ChoiceIndex}, Role: {chatUpdate.Role ?? new AuthorRole()}"); - } - - // Add latest completion bit, if any - if (chatUpdate.Content is { Length: > 0 }) - { - messagesPerChoice[chatUpdate.ChoiceIndex] += chatUpdate.Content; - } - - // Overwrite what is currently in the console area for the updated choice - // Console.Write(messagesPerChoice[chatUpdate.ChoiceIndex]); - Console.Write($"Choice index: {chatUpdate.ChoiceIndex}, Content: {chatUpdate.Content}"); - } - - // Display the aggregated results - foreach (string message in messagesPerChoice.Values) - { - Console.WriteLine("-------------------"); - Console.WriteLine(message); - } - } -} diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_CustomAzureOpenAIClient.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_CustomAzureOpenAIClient.cs index 9e63e4b46975..64228f692799 100644 --- a/dotnet/samples/Concepts/ChatCompletion/OpenAI_CustomAzureOpenAIClient.cs +++ b/dotnet/samples/Concepts/ChatCompletion/OpenAI_CustomAzureOpenAIClient.cs @@ -1,8 +1,8 @@ // Copyright (c) Microsoft. All rights reserved. 
+using System.ClientModel.Primitives; using Azure; using Azure.AI.OpenAI; -using Azure.Core.Pipeline; using Microsoft.SemanticKernel; namespace ChatCompletion; @@ -28,12 +28,12 @@ public async Task RunAsync() var httpClient = new HttpClient(); httpClient.DefaultRequestHeaders.Add("My-Custom-Header", "My Custom Value"); - // Configure OpenAIClient to use the customized HttpClient - var clientOptions = new OpenAIClientOptions + // Configure AzureOpenAIClient to use the customized HttpClient + var clientOptions = new AzureOpenAIClientOptions { - Transport = new HttpClientTransport(httpClient), + Transport = new HttpClientPipelineTransport(httpClient), }; - var openAIClient = new OpenAIClient(new Uri(endpoint), new AzureKeyCredential(apiKey), clientOptions); + var openAIClient = new AzureOpenAIClient(new Uri(endpoint), new AzureKeyCredential(apiKey), clientOptions); IKernelBuilder builder = Kernel.CreateBuilder(); builder.AddAzureOpenAIChatCompletion(deploymentName, openAIClient); diff --git a/dotnet/samples/Concepts/Concepts.csproj b/dotnet/samples/Concepts/Concepts.csproj index 89cc2c897d61..aa303046bd36 100644 --- a/dotnet/samples/Concepts/Concepts.csproj +++ b/dotnet/samples/Concepts/Concepts.csproj @@ -8,7 +8,7 @@ false true - $(NoWarn);CS8618,IDE0009,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0101,SKEXP0110 + $(NoWarn);CS8618,IDE0009,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0101,SKEXP0110,OPENAI001 Library 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 @@ -41,10 +41,14 @@ - + + + true + + @@ -100,13 +104,16 @@ - + Always - + - + Always - + + + Always + diff --git a/dotnet/samples/Concepts/DependencyInjection/Kernel_Injecting.cs b/dotnet/samples/Concepts/DependencyInjection/Kernel_Injecting.cs index 4c6e38452fc6..21abae070cf0 100644 --- 
a/dotnet/samples/Concepts/DependencyInjection/Kernel_Injecting.cs +++ b/dotnet/samples/Concepts/DependencyInjection/Kernel_Injecting.cs @@ -14,7 +14,7 @@ public async Task RunAsync() { ServiceCollection collection = new(); collection.AddLogging(c => c.AddConsole().SetMinimumLevel(LogLevel.Information)); - collection.AddOpenAITextGeneration(TestConfiguration.OpenAI.ModelId, TestConfiguration.OpenAI.ApiKey); + collection.AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey); collection.AddSingleton(); // Registering class that uses Kernel to execute a plugin diff --git a/dotnet/samples/Concepts/Functions/FunctionResult_StronglyTyped.cs b/dotnet/samples/Concepts/Functions/FunctionResult_StronglyTyped.cs index 0b50562583ea..79826de22bec 100644 --- a/dotnet/samples/Concepts/Functions/FunctionResult_StronglyTyped.cs +++ b/dotnet/samples/Concepts/Functions/FunctionResult_StronglyTyped.cs @@ -2,8 +2,8 @@ using System.Diagnostics; using System.Text.Json; -using Azure.AI.OpenAI; using Microsoft.SemanticKernel; +using OpenAI.Chat; namespace Functions; @@ -79,11 +79,11 @@ public FunctionResultTestDataGen(FunctionResult functionResult, long executionTi private TokenCounts? ParseTokenCounts() { - CompletionsUsage? usage = FunctionResult.Metadata?["Usage"] as CompletionsUsage; + var usage = FunctionResult.Metadata?["Usage"] as ChatTokenUsage; return new TokenCounts( - completionTokens: usage?.CompletionTokens ?? 0, - promptTokens: usage?.PromptTokens ?? 0, + completionTokens: usage?.OutputTokens ?? 0, + promptTokens: usage?.InputTokens ?? 0, totalTokens: usage?.TotalTokens ?? 0); } diff --git a/dotnet/samples/Concepts/Memory/TextChunkingAndEmbedding.cs b/dotnet/samples/Concepts/Memory/TextChunkingAndEmbedding.cs index 04a74656e948..fb96579f32a1 100644 --- a/dotnet/samples/Concepts/Memory/TextChunkingAndEmbedding.cs +++ b/dotnet/samples/Concepts/Memory/TextChunkingAndEmbedding.cs @@ -1,7 +1,7 @@ // Copyright (c) Microsoft. 
All rights reserved. using Microsoft.ML.Tokenizers; -using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; using Microsoft.SemanticKernel.Text; namespace Memory; diff --git a/dotnet/samples/Concepts/Memory/TextMemoryPlugin_RecallJsonSerializationWithOptions.cs b/dotnet/samples/Concepts/Memory/TextMemoryPlugin_RecallJsonSerializationWithOptions.cs index fbc313adebf4..883195b68df9 100644 --- a/dotnet/samples/Concepts/Memory/TextMemoryPlugin_RecallJsonSerializationWithOptions.cs +++ b/dotnet/samples/Concepts/Memory/TextMemoryPlugin_RecallJsonSerializationWithOptions.cs @@ -4,7 +4,7 @@ using System.Text.Json; using System.Text.Unicode; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; using Microsoft.SemanticKernel.Memory; using Microsoft.SemanticKernel.Plugins.Memory; diff --git a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs b/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs index db8e259f4e7a..cbfc5c1b0b24 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs @@ -3,7 +3,7 @@ using System.Text.Json; using System.Text.Json.Nodes; using Memory.VectorStoreFixtures; -using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; using Microsoft.SemanticKernel.Connectors.Redis; using Microsoft.SemanticKernel.Data; using Microsoft.SemanticKernel.Embeddings; diff --git a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_MultiStore.cs b/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_MultiStore.cs index 18f0e5b476ca..6aa4d84cebab 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_MultiStore.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_MultiStore.cs @@ -4,7 +4,7 @@ using 
Memory.VectorStoreFixtures; using Microsoft.Extensions.DependencyInjection; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; using Microsoft.SemanticKernel.Connectors.Qdrant; using Microsoft.SemanticKernel.Connectors.Redis; using Microsoft.SemanticKernel.Data; diff --git a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_Simple.cs b/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_Simple.cs index 341e5c2bbda2..75013b8196ac 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_Simple.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_Simple.cs @@ -2,7 +2,7 @@ using System.Text.Json; using Memory.VectorStoreFixtures; -using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; using Microsoft.SemanticKernel.Connectors.Qdrant; using Microsoft.SemanticKernel.Data; using Microsoft.SemanticKernel.Embeddings; diff --git a/dotnet/samples/Concepts/Planners/AutoFunctionCallingPlanning.cs b/dotnet/samples/Concepts/Planners/AutoFunctionCallingPlanning.cs index 4c287a63a216..38e3e53a0e74 100644 --- a/dotnet/samples/Concepts/Planners/AutoFunctionCallingPlanning.cs +++ b/dotnet/samples/Concepts/Planners/AutoFunctionCallingPlanning.cs @@ -7,13 +7,13 @@ using System.Security.Cryptography; using System.Text; using System.Text.Json; -using Azure.AI.OpenAI; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.ChatCompletion; using Microsoft.SemanticKernel.Connectors.OpenAI; using Microsoft.SemanticKernel.Planning; +using OpenAI.Chat; namespace Planners; @@ -328,7 +328,7 @@ private int GetChatHistoryTokens(ChatHistory? chatHistory) { if (message.Metadata is not null && message.Metadata.TryGetValue("Usage", out object? 
usage) && - usage is CompletionsUsage completionsUsage && + usage is ChatTokenUsage completionsUsage && completionsUsage is not null) { tokens += completionsUsage.TotalTokens; diff --git a/dotnet/samples/Concepts/Resources/Plugins/LegacyMenuPlugin.cs b/dotnet/samples/Concepts/Resources/Plugins/LegacyMenuPlugin.cs index 7111e873cf4c..c383ea9025f1 100644 --- a/dotnet/samples/Concepts/Resources/Plugins/LegacyMenuPlugin.cs +++ b/dotnet/samples/Concepts/Resources/Plugins/LegacyMenuPlugin.cs @@ -7,12 +7,6 @@ namespace Plugins; public sealed class LegacyMenuPlugin { - public const string CorrelationIdArgument = "correlationId"; - - private readonly List _correlationIds = []; - - public IReadOnlyList CorrelationIds => this._correlationIds; - /// /// Returns a mock item menu. /// @@ -20,8 +14,6 @@ public sealed class LegacyMenuPlugin [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1024:Use properties where appropriate", Justification = "Too smart")] public string[] GetSpecials(KernelArguments? arguments) { - CaptureCorrelationId(arguments, nameof(GetSpecials)); - return [ "Special Soup: Clam Chowder", @@ -39,8 +31,6 @@ public string GetItemPrice( string menuItem, KernelArguments? arguments) { - CaptureCorrelationId(arguments, nameof(GetItemPrice)); - return "$9.99"; } @@ -55,21 +45,6 @@ public bool IsItem86d( int count, KernelArguments? arguments) { - CaptureCorrelationId(arguments, nameof(IsItem86d)); - return count < 3; } - - private void CaptureCorrelationId(KernelArguments? arguments, string scope) - { - if (arguments?.TryGetValue(CorrelationIdArgument, out object? correlationId) ?? false) - { - string? 
correlationText = correlationId?.ToString(); - - if (!string.IsNullOrWhiteSpace(correlationText)) - { - this._correlationIds.Add($"{scope}:{correlationText}"); - } - } - } } diff --git a/dotnet/samples/Concepts/Resources/Plugins/MenuPlugin.cs b/dotnet/samples/Concepts/Resources/Plugins/MenuPlugin.cs deleted file mode 100644 index be82177eda5d..000000000000 --- a/dotnet/samples/Concepts/Resources/Plugins/MenuPlugin.cs +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.ComponentModel; -using Microsoft.SemanticKernel; - -namespace Plugins; - -public sealed class MenuPlugin -{ - public const string CorrelationIdArgument = "correlationId"; - - private readonly List _correlationIds = []; - - public IReadOnlyList CorrelationIds => this._correlationIds; - - [KernelFunction, Description("Provides a list of specials from the menu.")] - [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1024:Use properties where appropriate", Justification = "Too smart")] - public string GetSpecials() - { - return @" -Special Soup: Clam Chowder -Special Salad: Cobb Salad -Special Drink: Chai Tea -"; - } - - [KernelFunction, Description("Provides the price of the requested menu item.")] - public string GetItemPrice( - [Description("The name of the menu item.")] - string menuItem) - { - return "$9.99"; - } -} diff --git a/dotnet/samples/Concepts/TextGeneration/OpenAI_TextGenerationStreaming.cs b/dotnet/samples/Concepts/TextGeneration/OpenAI_TextGenerationStreaming.cs index 44b7806a1355..bb906bb6d05c 100644 --- a/dotnet/samples/Concepts/TextGeneration/OpenAI_TextGenerationStreaming.cs +++ b/dotnet/samples/Concepts/TextGeneration/OpenAI_TextGenerationStreaming.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. 
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI; using Microsoft.SemanticKernel.Connectors.OpenAI; using Microsoft.SemanticKernel.TextGeneration; @@ -22,11 +23,11 @@ public Task AzureOpenAITextGenerationStreamAsync() { Console.WriteLine("======== Azure OpenAI - Text Generation - Raw Streaming ========"); - var textGeneration = new AzureOpenAITextGenerationService( - deploymentName: TestConfiguration.AzureOpenAI.DeploymentName, + var textGeneration = new AzureOpenAIChatCompletionService( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, endpoint: TestConfiguration.AzureOpenAI.Endpoint, apiKey: TestConfiguration.AzureOpenAI.ApiKey, - modelId: TestConfiguration.AzureOpenAI.ModelId); + modelId: TestConfiguration.AzureOpenAI.ChatModelId); return this.TextGenerationStreamAsync(textGeneration); } @@ -36,7 +37,7 @@ public Task OpenAITextGenerationStreamAsync() { Console.WriteLine("======== Open AI - Text Generation - Raw Streaming ========"); - var textGeneration = new OpenAITextGenerationService("gpt-3.5-turbo-instruct", TestConfiguration.OpenAI.ApiKey); + var textGeneration = new OpenAIChatCompletionService(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey); return this.TextGenerationStreamAsync(textGeneration); } diff --git a/dotnet/samples/Demos/BookingRestaurant/BookingRestaurant.csproj b/dotnet/samples/Demos/BookingRestaurant/BookingRestaurant.csproj index 2f744127417e..678819305a93 100644 --- a/dotnet/samples/Demos/BookingRestaurant/BookingRestaurant.csproj +++ b/dotnet/samples/Demos/BookingRestaurant/BookingRestaurant.csproj @@ -22,7 +22,7 @@ - + diff --git a/dotnet/samples/Demos/CreateChatGptPlugin/MathPlugin/azure-function/sk-chatgpt-azure-function.csproj b/dotnet/samples/Demos/CreateChatGptPlugin/MathPlugin/azure-function/sk-chatgpt-azure-function.csproj index 3c6ca9a15470..805e10f7d5ac 100644 --- a/dotnet/samples/Demos/CreateChatGptPlugin/MathPlugin/azure-function/sk-chatgpt-azure-function.csproj +++ 
b/dotnet/samples/Demos/CreateChatGptPlugin/MathPlugin/azure-function/sk-chatgpt-azure-function.csproj @@ -28,7 +28,7 @@ - + diff --git a/dotnet/samples/Demos/CreateChatGptPlugin/README.md b/dotnet/samples/Demos/CreateChatGptPlugin/README.md index 3394ad2b1693..e9e035272d3d 100644 --- a/dotnet/samples/Demos/CreateChatGptPlugin/README.md +++ b/dotnet/samples/Demos/CreateChatGptPlugin/README.md @@ -16,17 +16,16 @@ The sample can be configured by using the command line with .NET [Secret Manager This sample has been tested with the following models: -| Service | Model type | Model | Model version | Supported | -| ------------ | --------------- | ---------------- | ------------: | --------- | -| OpenAI | Text Completion | text-davinci-003 | 1 | ❌ | -| OpenAI | Chat Completion | gpt-3.5-turbo | 1 | ❌ | -| OpenAI | Chat Completion | gpt-3.5-turbo | 0301 | ❌ | -| Azure OpenAI | Chat Completion | gpt-3.5-turbo | 0613 | ✅ | -| Azure OpenAI | Chat Completion | gpt-3.5-turbo | 1106 | ✅ | -| OpenAI | Chat Completion | gpt-4 | 1 | ❌ | -| OpenAI | Chat Completion | gpt-4 | 0314 | ❌ | -| Azure OpenAI | Chat Completion | gpt-4 | 0613 | ✅ | -| Azure OpenAI | Chat Completion | gpt-4 | 1106 | ✅ | +| Service | Model | Model version | Supported | +| ------------ | ---------------- | ------------: | --------- | +| OpenAI | gpt-3.5-turbo | 1 | ❌ | +| OpenAI | gpt-3.5-turbo | 0301 | ❌ | +| Azure OpenAI | gpt-3.5-turbo | 0613 | ✅ | +| Azure OpenAI | gpt-3.5-turbo | 1106 | ✅ | +| OpenAI | gpt-4 | 1 | ❌ | +| OpenAI | gpt-4 | 0314 | ❌ | +| Azure OpenAI | gpt-4 | 0613 | ✅ | +| Azure OpenAI | gpt-4 | 1106 | ✅ | This sample uses function calling, so it only works on models newer than 0613. @@ -39,7 +38,6 @@ cd 14-Create-ChatGPT-Plugin/Solution dotnet user-secrets set "Global:LlmService" "OpenAI" -dotnet user-secrets set "OpenAI:ModelType" "chat-completion" dotnet user-secrets set "OpenAI:ChatCompletionModelId" "gpt-4" dotnet user-secrets set "OpenAI:ApiKey" "... your OpenAI key ..." 
dotnet user-secrets set "OpenAI:OrgId" "... your ord ID ..." @@ -52,7 +50,6 @@ cd 14-Create-ChatGPT-Plugin/Solution dotnet user-secrets set "Global:LlmService" "AzureOpenAI" -dotnet user-secrets set "AzureOpenAI:DeploymentType" "chat-completion" dotnet user-secrets set "AzureOpenAI:ChatCompletionDeploymentName" "gpt-35-turbo" dotnet user-secrets set "AzureOpenAI:ChatCompletionModelId" "gpt-3.5-turbo-0613" dotnet user-secrets set "AzureOpenAI:Endpoint" "... your Azure OpenAI endpoint ..." diff --git a/dotnet/samples/Demos/CreateChatGptPlugin/Solution/CreateChatGptPlugin.csproj b/dotnet/samples/Demos/CreateChatGptPlugin/Solution/CreateChatGptPlugin.csproj index a81e39b415e4..a663838e564b 100644 --- a/dotnet/samples/Demos/CreateChatGptPlugin/Solution/CreateChatGptPlugin.csproj +++ b/dotnet/samples/Demos/CreateChatGptPlugin/Solution/CreateChatGptPlugin.csproj @@ -16,8 +16,8 @@ + - @@ -26,4 +26,8 @@ + + + + diff --git a/dotnet/samples/Demos/CreateChatGptPlugin/Solution/config/KernelBuilderExtensions.cs b/dotnet/samples/Demos/CreateChatGptPlugin/Solution/config/KernelBuilderExtensions.cs index 3ba36e2bbdb8..a823ac316880 100644 --- a/dotnet/samples/Demos/CreateChatGptPlugin/Solution/config/KernelBuilderExtensions.cs +++ b/dotnet/samples/Demos/CreateChatGptPlugin/Solution/config/KernelBuilderExtensions.cs @@ -14,47 +14,24 @@ internal static IKernelBuilder WithCompletionService(this IKernelBuilder kernelB switch (Env.Var("Global:LlmService")!) { case "AzureOpenAI": - if (Env.Var("AzureOpenAI:DeploymentType") == "text-completion") - { - kernelBuilder.Services.AddAzureOpenAITextGeneration( - deploymentName: Env.Var("AzureOpenAI:TextCompletionDeploymentName")!, - modelId: Env.Var("AzureOpenAI:TextCompletionModelId"), - endpoint: Env.Var("AzureOpenAI:Endpoint")!, - apiKey: Env.Var("AzureOpenAI:ApiKey")! 
- ); - } - else if (Env.Var("AzureOpenAI:DeploymentType") == "chat-completion") - { - kernelBuilder.Services.AddAzureOpenAIChatCompletion( - deploymentName: Env.Var("AzureOpenAI:ChatCompletionDeploymentName")!, - modelId: Env.Var("AzureOpenAI:ChatCompletionModelId"), - endpoint: Env.Var("AzureOpenAI:Endpoint")!, - apiKey: Env.Var("AzureOpenAI:ApiKey")! - ); - } + kernelBuilder.Services.AddAzureOpenAIChatCompletion( + deploymentName: Env.Var("AzureOpenAI:ChatCompletionDeploymentName")!, + modelId: Env.Var("AzureOpenAI:ChatCompletionModelId"), + endpoint: Env.Var("AzureOpenAI:Endpoint")!, + apiKey: Env.Var("AzureOpenAI:ApiKey")! + ); break; case "OpenAI": - if (Env.Var("OpenAI:ModelType") == "text-completion") - { - kernelBuilder.Services.AddOpenAITextGeneration( - modelId: Env.Var("OpenAI:TextCompletionModelId")!, - apiKey: Env.Var("OpenAI:ApiKey")!, - orgId: Env.Var("OpenAI:OrgId") - ); - } - else if (Env.Var("OpenAI:ModelType") == "chat-completion") - { - kernelBuilder.Services.AddOpenAIChatCompletion( - modelId: Env.Var("OpenAI:ChatCompletionModelId")!, - apiKey: Env.Var("OpenAI:ApiKey")!, - orgId: Env.Var("OpenAI:OrgId") - ); - } + kernelBuilder.Services.AddOpenAIChatCompletion( + modelId: Env.Var("OpenAI:ChatCompletionModelId")!, + apiKey: Env.Var("OpenAI:ApiKey")!, + orgId: Env.Var("OpenAI:OrgId") + ); break; default: - throw new ArgumentException($"Invalid service type value: {Env.Var("OpenAI:ModelType")}"); + throw new ArgumentException($"Invalid service type value: {Env.Var("Global:LlmService")}"); } return kernelBuilder; diff --git a/dotnet/samples/Demos/FunctionInvocationApproval/FunctionInvocationApproval.csproj b/dotnet/samples/Demos/FunctionInvocationApproval/FunctionInvocationApproval.csproj index ead3b5036cb4..e39a7f5b795d 100644 --- a/dotnet/samples/Demos/FunctionInvocationApproval/FunctionInvocationApproval.csproj +++ b/dotnet/samples/Demos/FunctionInvocationApproval/FunctionInvocationApproval.csproj @@ -13,7 +13,7 @@ - + diff --git 
a/dotnet/samples/Demos/HomeAutomation/Options/AzureOpenAI.cs b/dotnet/samples/Demos/HomeAutomation/Options/AzureOpenAIOptions.cs similarity index 91% rename from dotnet/samples/Demos/HomeAutomation/Options/AzureOpenAI.cs rename to dotnet/samples/Demos/HomeAutomation/Options/AzureOpenAIOptions.cs index f4096b5e95d5..ef20853597cc 100644 --- a/dotnet/samples/Demos/HomeAutomation/Options/AzureOpenAI.cs +++ b/dotnet/samples/Demos/HomeAutomation/Options/AzureOpenAIOptions.cs @@ -7,7 +7,7 @@ namespace HomeAutomation.Options; /// /// Azure OpenAI settings. /// -public sealed class AzureOpenAI +public sealed class AzureOpenAIOptions { [Required] public string ChatDeploymentName { get; set; } = string.Empty; diff --git a/dotnet/samples/Demos/HomeAutomation/Program.cs b/dotnet/samples/Demos/HomeAutomation/Program.cs index e55279405ceb..8f4882e3303f 100644 --- a/dotnet/samples/Demos/HomeAutomation/Program.cs +++ b/dotnet/samples/Demos/HomeAutomation/Program.cs @@ -32,24 +32,25 @@ internal static async Task Main(string[] args) builder.Services.AddHostedService(); // Get configuration - builder.Services.AddOptions() - .Bind(builder.Configuration.GetSection(nameof(AzureOpenAI))) + builder.Services.AddOptions() + .Bind(builder.Configuration.GetSection(nameof(AzureOpenAIOptions))) .ValidateDataAnnotations() .ValidateOnStart(); // Chat completion service that kernels will use builder.Services.AddSingleton(sp => { - AzureOpenAI options = sp.GetRequiredService>().Value; + OpenAIOptions options = sp.GetRequiredService>().Value; // A custom HttpClient can be provided to this constructor - return new AzureOpenAIChatCompletionService(options.ChatDeploymentName, options.Endpoint, options.ApiKey); + return new OpenAIChatCompletionService(options.ChatModelId, options.ApiKey); - /* Alternatively, you can use plain, non-Azure OpenAI after loading OpenAIOptions instead - of AzureOpenAI options with builder.Services.AddOptions: - OpenAI options = sp.GetRequiredService>().Value; + /* 
Alternatively, you can use plain, Azure OpenAI after loading AzureOpenAIOptions instead + of OpenAI options with builder.Services.AddOptions: - return new OpenAIChatCompletionService(options.ChatModelId, options.ApiKey);*/ + AzureOpenAIOptions options = sp.GetRequiredService>().Value; + + return new AzureOpenAIChatCompletionService(options.ChatDeploymentName, options.Endpoint, options.ApiKey); */ }); // Add plugins that can be used by kernels diff --git a/dotnet/samples/Demos/StepwisePlannerMigration/Controllers/AutoFunctionCallingController.cs b/dotnet/samples/Demos/StepwisePlannerMigration/Controllers/AutoFunctionCallingController.cs index 8878bc0b57e5..37a390fee69a 100644 --- a/dotnet/samples/Demos/StepwisePlannerMigration/Controllers/AutoFunctionCallingController.cs +++ b/dotnet/samples/Demos/StepwisePlannerMigration/Controllers/AutoFunctionCallingController.cs @@ -1,5 +1,8 @@ // Copyright (c) Microsoft. All rights reserved. +#pragma warning disable IDE0005 // Using directive is unnecessary + +using System.Threading.Tasks; using Microsoft.AspNetCore.Mvc; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.ChatCompletion; @@ -8,6 +11,8 @@ using StepwisePlannerMigration.Plugins; using StepwisePlannerMigration.Services; +#pragma warning restore IDE0005 // Using directive is unnecessary + namespace StepwisePlannerMigration.Controllers; /// diff --git a/dotnet/samples/Demos/StepwisePlannerMigration/Controllers/StepwisePlannerController.cs b/dotnet/samples/Demos/StepwisePlannerMigration/Controllers/StepwisePlannerController.cs index f060268833ca..096ce4795fb3 100644 --- a/dotnet/samples/Demos/StepwisePlannerMigration/Controllers/StepwisePlannerController.cs +++ b/dotnet/samples/Demos/StepwisePlannerMigration/Controllers/StepwisePlannerController.cs @@ -1,5 +1,8 @@ // Copyright (c) Microsoft. All rights reserved. 
+#pragma warning disable IDE0005 // Using directive is unnecessary + +using System.Threading.Tasks; using Microsoft.AspNetCore.Mvc; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.ChatCompletion; @@ -8,6 +11,8 @@ using StepwisePlannerMigration.Plugins; using StepwisePlannerMigration.Services; +#pragma warning restore IDE0005 // Using directive is unnecessary + namespace StepwisePlannerMigration.Controllers; /// diff --git a/dotnet/samples/Demos/StepwisePlannerMigration/Extensions/ConfigurationExtensions.cs b/dotnet/samples/Demos/StepwisePlannerMigration/Extensions/ConfigurationExtensions.cs index a7eca68c33c8..3407d79479ed 100644 --- a/dotnet/samples/Demos/StepwisePlannerMigration/Extensions/ConfigurationExtensions.cs +++ b/dotnet/samples/Demos/StepwisePlannerMigration/Extensions/ConfigurationExtensions.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System.ComponentModel.DataAnnotations; +using Microsoft.Extensions.Configuration; namespace StepwisePlannerMigration.Extensions; diff --git a/dotnet/samples/Demos/StepwisePlannerMigration/Plugins/TimePlugin.cs b/dotnet/samples/Demos/StepwisePlannerMigration/Plugins/TimePlugin.cs index 7a1ce92d0a71..80b976702ed3 100644 --- a/dotnet/samples/Demos/StepwisePlannerMigration/Plugins/TimePlugin.cs +++ b/dotnet/samples/Demos/StepwisePlannerMigration/Plugins/TimePlugin.cs @@ -1,8 +1,13 @@ // Copyright (c) Microsoft. All rights reserved. 
+#pragma warning disable IDE0005 // Using directive is unnecessary + +using System; using System.ComponentModel; using Microsoft.SemanticKernel; +#pragma warning restore IDE0005 // Using directive is unnecessary + namespace StepwisePlannerMigration.Plugins; /// diff --git a/dotnet/samples/Demos/StepwisePlannerMigration/Plugins/WeatherPlugin.cs b/dotnet/samples/Demos/StepwisePlannerMigration/Plugins/WeatherPlugin.cs index dfd72dd36c2c..52658a47e13e 100644 --- a/dotnet/samples/Demos/StepwisePlannerMigration/Plugins/WeatherPlugin.cs +++ b/dotnet/samples/Demos/StepwisePlannerMigration/Plugins/WeatherPlugin.cs @@ -1,8 +1,12 @@ // Copyright (c) Microsoft. All rights reserved. +#pragma warning disable IDE0005 // Using directive is unnecessary + using System.ComponentModel; using Microsoft.SemanticKernel; +#pragma warning restore IDE0005 // Using directive is unnecessary + namespace StepwisePlannerMigration.Plugins; /// diff --git a/dotnet/samples/Demos/StepwisePlannerMigration/Program.cs b/dotnet/samples/Demos/StepwisePlannerMigration/Program.cs index 99b62fba30b7..cd9186d405b2 100644 --- a/dotnet/samples/Demos/StepwisePlannerMigration/Program.cs +++ b/dotnet/samples/Demos/StepwisePlannerMigration/Program.cs @@ -1,5 +1,10 @@ // Copyright (c) Microsoft. All rights reserved. +using System.IO; +using Microsoft.AspNetCore.Builder; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Planning; using StepwisePlannerMigration.Extensions; diff --git a/dotnet/samples/Demos/StepwisePlannerMigration/Services/IPlanProvider.cs b/dotnet/samples/Demos/StepwisePlannerMigration/Services/IPlanProvider.cs index 4bdae07f6ed7..695a3a18e9c9 100644 --- a/dotnet/samples/Demos/StepwisePlannerMigration/Services/IPlanProvider.cs +++ b/dotnet/samples/Demos/StepwisePlannerMigration/Services/IPlanProvider.cs @@ -1,7 +1,11 @@ // Copyright (c) Microsoft. 
All rights reserved. +#pragma warning disable IDE0005 // Using directive is unnecessary + using Microsoft.SemanticKernel.ChatCompletion; +#pragma warning restore IDE0005 // Using directive is unnecessary + namespace StepwisePlannerMigration.Services; /// diff --git a/dotnet/samples/Demos/StepwisePlannerMigration/Services/PlanProvider.cs b/dotnet/samples/Demos/StepwisePlannerMigration/Services/PlanProvider.cs index 13218eeec135..ed5bd4f03fe1 100644 --- a/dotnet/samples/Demos/StepwisePlannerMigration/Services/PlanProvider.cs +++ b/dotnet/samples/Demos/StepwisePlannerMigration/Services/PlanProvider.cs @@ -1,8 +1,14 @@ // Copyright (c) Microsoft. All rights reserved. +using System.IO; using System.Text.Json; + +#pragma warning disable IDE0005 // Using directive is unnecessary + using Microsoft.SemanticKernel.ChatCompletion; +#pragma warning restore IDE0005 // Using directive is unnecessary + namespace StepwisePlannerMigration.Services; /// diff --git a/dotnet/samples/Demos/StepwisePlannerMigration/StepwisePlannerMigration.csproj b/dotnet/samples/Demos/StepwisePlannerMigration/StepwisePlannerMigration.csproj index 1475397e7eb2..abd289077625 100644 --- a/dotnet/samples/Demos/StepwisePlannerMigration/StepwisePlannerMigration.csproj +++ b/dotnet/samples/Demos/StepwisePlannerMigration/StepwisePlannerMigration.csproj @@ -3,7 +3,6 @@ net8.0 enable - enable $(NoWarn);VSTHRD111,CA2007,CS8618,CS1591,SKEXP0001, SKEXP0060 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 diff --git a/dotnet/samples/Demos/TelemetryWithAppInsights/TelemetryWithAppInsights.csproj b/dotnet/samples/Demos/TelemetryWithAppInsights/TelemetryWithAppInsights.csproj index aaf0e5545b76..ac5b79837338 100644 --- a/dotnet/samples/Demos/TelemetryWithAppInsights/TelemetryWithAppInsights.csproj +++ b/dotnet/samples/Demos/TelemetryWithAppInsights/TelemetryWithAppInsights.csproj @@ -18,8 +18,8 @@ + - diff --git a/dotnet/samples/GettingStarted/GettingStarted.csproj b/dotnet/samples/GettingStarted/GettingStarted.csproj index 
bbfb30f31a72..81581e7b4d57 100644 --- a/dotnet/samples/GettingStarted/GettingStarted.csproj +++ b/dotnet/samples/GettingStarted/GettingStarted.csproj @@ -50,7 +50,7 @@ - + @@ -60,6 +60,6 @@ - + \ No newline at end of file diff --git a/dotnet/samples/GettingStarted/Step4_Dependency_Injection.cs b/dotnet/samples/GettingStarted/Step4_Dependency_Injection.cs index 15d90a3c7b53..dd39962d627a 100644 --- a/dotnet/samples/GettingStarted/Step4_Dependency_Injection.cs +++ b/dotnet/samples/GettingStarted/Step4_Dependency_Injection.cs @@ -41,7 +41,7 @@ private ServiceProvider BuildServiceProvider() collection.AddSingleton(new XunitLogger(this.Output)); var kernelBuilder = collection.AddKernel(); - kernelBuilder.Services.AddOpenAITextGeneration(TestConfiguration.OpenAI.ModelId, TestConfiguration.OpenAI.ApiKey); + kernelBuilder.Services.AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey); kernelBuilder.Plugins.AddFromType(); return collection.BuildServiceProvider(); diff --git a/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj b/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj index ea4decbf86bb..df9e025b678f 100644 --- a/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj +++ b/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj @@ -9,7 +9,7 @@ true - $(NoWarn);CS8618,IDE0009,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0101,SKEXP0110 + $(NoWarn);CS8618,IDE0009,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0101,SKEXP0110,OPENAI001 Library 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 @@ -32,13 +32,16 @@ - + + + true + - + @@ -48,4 +51,14 @@ + + + Always + + + + + + + diff --git a/dotnet/samples/GettingStartedWithAgents/README.md 
b/dotnet/samples/GettingStartedWithAgents/README.md index 39952506548c..ed0e68802994 100644 --- a/dotnet/samples/GettingStartedWithAgents/README.md +++ b/dotnet/samples/GettingStartedWithAgents/README.md @@ -19,13 +19,17 @@ The getting started with agents examples include: Example|Description ---|--- -[Step1_Agent](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step1_Agent.cs)|How to create and use an agent. -[Step2_Plugins](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step2_Plugins.cs)|How to associate plug-ins with an agent. -[Step3_Chat](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step3_Chat.cs)|How to create a conversation between agents. -[Step4_KernelFunctionStrategies](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step4_KernelFunctionStrategies.cs)|How to utilize a `KernelFunction` as a _chat strategy_. -[Step5_JsonResult](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step5_JsonResult.cs)|How to have an agent produce JSON. -[Step6_DependencyInjection](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step6_DependencyInjection.cs)|How to define dependency injection patterns for agents. -[Step7_OpenAIAssistant](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step7_OpenAIAssistant.cs)|How to create an Open AI Assistant agent. +[Step01_Agent](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step01_Agent.cs)|How to create and use an agent. +[Step02_Plugins](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step02_Plugins.cs)|How to associate plug-ins with an agent. 
+[Step03_Chat](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step03_Chat.cs)|How to create a conversation between agents. +[Step04_KernelFunctionStrategies](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step04_KernelFunctionStrategies.cs)|How to utilize a `KernelFunction` as a _chat strategy_. +[Step05_JsonResult](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step05_JsonResult.cs)|How to have an agent produce JSON. +[Step06_DependencyInjection](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs)|How to define dependency injection patterns for agents. +[Step07_Logging](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step07_Logging.cs)|How to enable logging for agents. +[Step08_Assistant](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step08_Assistant.cs)|How to create an Open AI Assistant agent. +[Step09_Assistant](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step09_Assistant_Vision.cs)|How to provide an image as input to an Open AI Assistant agent. +[Step10_Assistant](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step10_AssistantTool_CodeInterpreter_.cs)|How to use the code-interpreter tool for an Open AI Assistant agent. +[Step11_Assistant](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step11_AssistantTool_FileSearch.cs)|How to use the file-search tool for an Open AI Assistant agent. 
## Legacy Agents diff --git a/dotnet/samples/GettingStartedWithAgents/Resources/cat.jpg b/dotnet/samples/GettingStartedWithAgents/Resources/cat.jpg new file mode 100644 index 0000000000000000000000000000000000000000..1e9f26de48fc542676a7461020206fab297c0314 GIT binary patch literal 37831 zcmbTdcT|&4^fwp;DN0e1-lQv4dJ7;bARr(p1PE268R@+%(n}&;x=NQ4DIxSuLJ=Z0 z^xkVi4gKZ&?r(R`*}r!8J~QV$=R7lW?&sW@XXf7fnd|ZEdB8nQH4QZY5fKr<^5y|t zPXS&4ZV?gvSN=zcZxjC~q$DK7x5-G!$o{M36n81e$?uSpk=>!XbLTGQjgV1L(@;@T z|M&jiApdp$uh&hXBqt;Puf_j2xo!pACno|B0f>pX0JrWF5#J}e?gVfE07SQMwEa)v z{}G~F#3Z*#Z&bQ-_oe~j-i`Xi#J6sgy-h-L(>n0xJAmZ=?FXDR4 z|0L&rUeQLUKZxUzc<&ZMafhCPk%^g?kN@!#0ZA!o8Cf~`7cW)S)L&_6zI|t42r@D@ zvHoCVYiIB9(cQz-%iG7-?`vpSctm7WbV6cMa!Ts=wDe!OdHDr}Ma91>tEv$-$lAL4 z_Kwaj6uP^ocW8KIbPPK_F}bj~w7jyqw!X26-#<7!IzAzsp8bdGKb-%k{2##nAGq${ z;JS5V0}`_Ta1q_|zNy6bNp5qBkv>q-BeQa){QpAse*ycyaZLg!iHUBEM|>Zk47kV|z5rs}P)ziyFu*3!y9EJYoQ6W-TlG^NkZ?D;E{_qn7PF5Q+>ynxzi z5X^^-Ubtc=`K`F_x!r;lz8FGE!ifT-;;LP%|IJ8me6Z?ZUm}~WZ=v4cz{NsJ3T8yl zqy}dk#g5qCv1bJ;mgu4^EU04tiXrg#f2tn*j+3oE z9Nk#kMc7TiUwrfp8x;B2w7SkZ-Gt&Gx9E%(54%~}WB$2=(`7pRGe%^f z@u)HUE;2S#jm8FZxMiM*<39e7*vV`{h(6b^s;F&vA(h`Ud0z|qS%LOT&I4dpu}sXz}#oRL$n} zd$7-??h)Jqk3r3lOK4aZ%1nxd3~7AREe5=AM4Hy)8Bk_kFKdjK55BM)RD<2f6v-^b zgF>2P_{-8%{7u{cMZBAAx&mETXu}xj63-pLlt;s01@PK6fNIssp=JmjP$HAka24=$ z~g^;c8=>Ccu9T9^TBV>Mn3MjZ*&sdFJwRCbjFJ zi#+r*NKJ{}tY<0GV~5VRvH1=YXH;hf+HAceq!zB(>>O4!O*_iBdt6xq@A3Bc1X`5* zrgeuk<_#h050Pe$6H4~k1KBUY=#M-J3sB>OR*dG^ z-{yxUi%Nw9Nx6HPM?6?jRo;B~^3=>Mdcc-WK<_Kuj^|KAl`qF*_8K7N%$c6_t^Y^G z1LZXC&xXY3HG5mN*7!1bm_GXBknrCJ<`WgSb1`}_Q`fJDyMiM7N7dkKKxK0VRXs|x z&OphE=b%CH)dos%Q*`3s_~{7nZT{ruq(y8aZy_X=#e7aHd-0Wt>AtdG--?s+4A0S) z*+erp#S01sP1l&BnI9|pqs>f;CjKX2Ba8@CpbW$u)%>n$M`Uf^6Qd?L3&( z7~&U}^mKND5HyMqED_9SN_xQkCb!&eNvMw4De&=Fi_KQqtpP!%($%YxS`P^o`5m7x zEb;G-3^~6zcAPiK!HZh#t^wn)#hiAIXs_m{fikbFQ+4Y4XH$=pri#JcB`V0JZ8CNFTjJ%-pCZb24T#c*-LQWji_;2+qV|p{y zh~b6_unxTYOh8uG;jV`9C3-cb7>B~PkM;4S90-UBBsdFm21%f2WHCt6dRT-BRoPV> 
z>lfkvuIkdbl>%T_>*O|#8d>cjJpaN0*FL+YSq0m^nP0!YW)(f{sz^LqtZmo}KbDs1 z_jYzzp3z|M`C!P&!MF^6jgzB;(lVLnJKIHmMI`F4??K>v4HD@!6m6#aW?CJ0iEZfe zSpNtq%eRd*%GC7*G`S;xl30FO_m{oV}NT?qnln=)|aC!8a(U~xZ(@Zc@58lwbvPC zSh=_Hu;44?ScCl%Z+SgqZ>)YJlv?-}B=nZ7_r-X(VpEA;5POH?r!tQr!6hLEM&&ra z+b!Y4y5Fj$#lgG0_nz>=2%ZBF2?N|U;MjDkd_-IH&F4ez%&ZQQAjGP!l8$Nh(E(Q; z|L{v!Y>lDVd)b{aovD1Yzd_B2c;T<);GWKp)GHu1bWmoXV1D}5tiqR7wTd*3`$<+6 z8j3WGyAu7@g!nw;VX`1oG-zFp+EX5 zn&yDsIy=ur_QJQ6?QMs?DG!Qmm^|p1?C<VcC>Gc z4?=2x(M5@teL&Tm<))nMy@0=C7<&zv&uG^_=Y>rOa}HmaAP3?ECsl>wQDptxB%f$7 zq)pbx!8j=tQgP<+%Qaxpq`{~KN866yO2@+{Iy&z;FsEj@ay{>b5?Q(wI-8z@N(jWq z5fuc5f=w>f%25$1-pTkB1?x3VSTWfgD|XdX;|pq05L6f*jpm8+gy-Qm*--^9}o9m%|eM5d3P)tZZ`xvUfRPb zamm?r<)1>v2wmc))*N1MDyaURYP~q{McW$p%A8jf~r;X#YQh>Z0Y-pEaS& zlr$#iH&ey$GV?+ix1>5W)gf<|o#83bRNdmBng)OMsD#PLHLKM+mi_3M;mZ|b{B1*=0`*GM%_aaXUv<)^dRenFAT0m8qw zA;Uu5v=jfpIh<}!4&ybTjY~2og!VF`tG;VxtUf~AXdDci5oP2ZKUAz;ro_fv*b;p6 z0;KWJ20gOqO2NA00TX%7)qTm+-k-e=GA*gIlh69`FFNWAp+LLEEG^xkBPwU3^i_9O z>3`KjujVUJg~Xq!r-{FOv5?wQ{$mlTL=BM-Dl7g-FLv>)tuAHKL#w-tGRM=V+eZA} zz^i3I-(2Bxoj0?!7+uK{zAC-rxoNyxQ-)(cBGExuzjoAKvz4?;&V{Ln*@LKz7ZBa4lUWm<>WK_aP(P0=y5+d1Pq91N zI2sEaA7TL$c@3aBGSkFrV5`ry&aDckCEp15c7~0pDZS|m$JMs+oRBuBUVd#BoVxnQ zY=|g@(0>z+rkCB3Ql+TiJ+kIdX)NMrwe$34+!W>bKHNT-QQ-xtm8+dU+-Jyq``V~`M<_yS_YQjGhU1;+s{q9n?NVD zN@cXIsTocTrT9DwJGj83QgawZ#`MW-!nc8#3NRGF~1Gf&6 zK^!UusDBM%^6>@f+U~t)Zm7Lq4OPzX;KhU63$pu&mZQ4UEF4)ogE?RJ*_>9mLX?5N zgRq?`JN1~r44EF5ZZ#pO?<~9tU-)kLlgYptR>QV7VZ(-NW=XG>t69zq)hd=Su`OYd z8z0opfh)HqdXVS@Xny%dPPwT!#2=J%)vHXIhr_mBx|=o+)1SWq z6A*FAI`!79GJ0yjU#wqhoo0=^IMr0>&e$0glPQDvV%P4tneR0Y2lN8_HB@WTH6}NA zCrxoBHkiByD#6T0d>WOw^_;0TJ|eQq=H9&T4i*96!zNNHG4=f z&ljyM;@d-aJk2Yl>O0mtHMx+NU^+*Y;ZQqcCcC~mY;W8dIC^@37t7SstlU-G>;kc>Z?WU!~YqML=J zaQ7vOUF2P~ouY>-Jm&5w760y{Rpq2^tsvVibR#>xLhQUn$b*UwD-Jv#_l#8Q4cN2a za`ao6wftA@IR(;>nDTCb+9GS5MXdHOD`Ndo;?&}# zPn&ZK*^}3U_;5R6rE$DF=k2{KnLwKu&Av>wj&D924f}98ywdL;$Z|JU%c-nVgZuXM 
z*M9-y2E^yNjHKBhV*_ss=p0Um(tt_zl-0Jv^D;*O?bdI{os*nM#SC>#UZ;6&DI%Zc(9OPU`?+sMtX=-(xp$EZDf^#j3%T)7pE=e z&#g+2S`$Ti+29rE(PVR)qB5o|uF)YDEUQixx+7E*8r;+4)Tru5})*0IZqqFp<%}9RHcBEEOp0Eu66d;t6Mhd4E=To~a23xD|eoa>TvGxBRCva$i9hDe=#<;#O`stnEW*oVxA=d*>yb`g00GVT! zvhx(z_6+K@`$ndBGf2mxtfxO$SGtT|h{ixrGHTPDt{R47VD6w1H@^RdGT^s5OLLy} zK0FOC5sGir_}pPxx%#tD&;0ChS69K%mZMNg`HEhwcyK-I z!k44&LwN2sB00{$r~UA_EJN6wWlB!iDMju_)@wJlCAa$QAFvnZ1N@&yKcyQ(;Gzz> zZxQJZGdwM+`S2jj%cMFJ(;lXrD$y^8=dcNjj%JooE3+56VDEQO*96+XZlUZyKKpRT zgUyKv96v7vzfC*5sg_B6i8GykGIglrcQmWPei@I0FDi^W|GZSB zQv+OtnI#%cXJ`e?lx(?em&e_aRIGmYdpHE9pn1j{+-1?XIuHa_l$KS#N3hVrd5D8I zm;=i~#UBj$iiJ63lp7scXYRZA`(mSnb7^?`IQnJg0s=az<(Un0`u)!zW})dV_R^w^ih- ze+v(cXyh7TyK$%+HR&8B2`;i~8Wy2nVM2}68f?NY#{}bRKlovDeUOX1h*D6>4*$fO zj*@$O@%jXt{9$=&SN&BC?&M{}x&DjGy!fg=x(k>YMobV9kL!RUWKA?GNCAeDK1*H( z86N-Z2#;fH@fFp}_NNy2TVBeZGmf`fNO@u<-dC$V2MRJv6==$8FhdSDL78hAZvQ&T zc@Alb6)ad!rGKi;@-)@Ig6HXNsJAyaai*R(-Vdcn_uBRe1BswuPFjTCfLwpT)*_s- zc#ci-E>ch4jOdJRBurt8j}>V@S{043U+tT|L>HmNTa$M6W{!5`gkD3IMc zD&KT*8eFN|3Lb_Z)u9D4ajW1g-gRz){yGf>|HX!u&n+}GQJ$%uM&m`jFWrVTh83RO z#5+A5+^{E66fhTIsC!JDe+`HqvC@vNtW-j*yPe&XO;lW(2+6bKQrA;<2}?1N6$S z0qrjWCHGA$+uunhgiWvKEept0yw`auH~`cxy$nBt_;N!xH*LEeSav#FYJGurNfs~UVSl-{iUvUX`XPkm!C@i4pC-8=^{?A48{Yzhf+EgRaaN^ zj^h$zq1(AimrM3}8tHaqb<{A)+W-|eN( zlP9W3yxChK5J71H6gbD6&}Mp{my?1U3Fec1?miVLndv`isS1&t&)$$V1qBj_Tl-44 z14=N^y#p&VCde~nJmtGTNp`uc`v;kvY3-%Lk?fkDoUn0R zF2WO+7tg~tBWK^bBs$^dgXWpXYZLr`Tf7M7$#DqT)rx238v_Om-D{xOfRYo(W+p(! 
zTNgHlLZ5Wa1cR>P_ZD0p{k%qA=)qIGMl2YpiC&eM6b#CM|+#s127+&=6Xa z-d_&H-pTtiKrKUbL^jk!^9y85p$^XX`6Qp*5-vR9Bx0}Lz+te%Ggyb!9VU5s?jN~o1l|r4StH+=}DeT*#;6XSkP5hqW`QU z#nJ@lL$>PYCfU3NNk6S2m32+_b*VS?C(x|rE|m{R8QC*Et^v&3rHz_JEXzd8m$PV# z?_TSE{#Y{&0XKcM_)!*ogO&t2=$A9fpBmwa`0`Nm8sM41s4@kG++u0rdUerx`rSH$ z7#^^OBp8I=!`Ke+BxnG-d$XIugVA(1DT7Zc270#HJrh~xiHlC4Rm@SD*Qn~WLUOb7 znz_61mcuuZ!#lkMA(q3EN$+U!cj2eO@o+~STF}>eIbWTFS9RgweBT-g_1rP@;owMIj zrBRW}fP<2w^1|pPd&%EY18oyRQuWwzO}+EMRz)IzRKbvQ+0k9kaL3R_SDiYLDZBBj zjh;sbvF@g*hNb(yW%6dPY6Kto8b{xYq`(k=YxO&1yp`IC{S?BujI58hhFLmG-g_&JSu5r-f|ExmVD8CIBPN5JkmP@aLHO3& zmyrDabNNFCkSS=QYI_>FQ`M_P@v*!CZl_LB^W^=x*VLMP% z$bI!{AL_ehhP#iq13ITWTx|vJ(O#$*C?Ss3TNTxUDSVijoSYstLTwZRiYf;!|8RTB zbG%X0I2uqgBs{^3s^VDty^CI&*z1|m0TKsXd``&)V{ub0p_%Sx+jG@Hg^SA4n^^72 z!JS;*WigoG(+U8OFTk~j$7SZ#jzubd6lf^zlu)olKH;vwb18Mvm}(~73whg7?x27i z^pF>gYq_`kC_y!RqY)vTy5HyNeqi^5jg$x&efnY%8)ud6ABK#|%FL>)T>iNvM6sZ< zfl@Zvy5jX2*xWW18Zu?Cs6mJNx{&`i|x6nC;} za5_Hk$J~bf*)VS@Av}RAF`N28pIG#M)z{*W-6yx7IH@=*mxC+p=Vh-q1pg+v_rvPl z%_=S%YcXy!5nDB3zx_%_Tr*IUgE`a7(xbcImpUz9 z%`;L@S6q|P#yv!LQ#mV;o?WM}&S+b?qEj>V<>4iD#4#M#zLEWCcx~#u%KlH7giww* z30v;&nzy0~n&iOT_=ki3mRpR}H6SH4jK_|VWC@QFoNM_QOm&&`f>|x**p+4HUSLkR z4n^!mPj(+669fxT8m=Kk&-6W>5_I_sy4lOt_@hto-(@uOKl*H>o14O2;tCxtTOj29^UNr-#X9&a@VV`_%MPYE zyfk({am)WB*TW{5(M% zg#mA!q2i?a$CB8z+j|6jQ!29-#BV~Em|f0F8ctn8xnVM3;55Y;B{ zK{LUY@DtFbTDVHq!qYc@@$ZIzBzWlv7uBS@MSE8vcCWl-T5JQ=Ea>f#45<&yRQ(GE z4$Nt!Y{4NPdIbK;JU0DZ-1p=iBi*hNL;AqgtOjL?Ge`OzU7;^J%Jl{+N1J3v@Y@8N zg&BobX2s0cWddB~r*Bsd09zr?*7zA&{g={@YJp(u1uB`HvkwqYR|RrMK5&xl>pwm5 zF3&UBa+!AB+vsKl68hE!#Jn@MHs*%+f!poX?wM_uW+wDQmnUvhB^xJRim9qlHbc~7 z$HKn~(Q5s5Na^9~xF4y4Ax{BYZBqJEsFM7dSIi3e_T!xCs}g+JwY$jkHCEe3=-T4D zV|naF0J?cPe_#hxR+WJ3I5_lj30#jLRIThRRG~AlT!m-oN2;4(k*??z{Pur_Ow* z@Wjmfsx+`_p(WyVA_Pt^j;Rw+!rul@!qRg8;j+q_BgT_D=a5y zb!j=UX7)H?*g5p?i4=kIQy)_eIKvWFG6L|20&%Y>2cZ)gdxDv<^3@_!f#N)|(W=f(56r z$iVt^OBbn%dznR|U(B}b_IJq>n1lrm%^_S7 z@7>CT?r37Lu)2>JwGI~AnTb$)yxF=;O5%ZjE;x&zA%@VJ`3mN4>T<`1(U)s*B+IA~ 
zJWDljC;I<9*FXG|%%b-idvFcVi&{X2Jb4OFgn~I;z6y2D-_m~hej_h;#@aIRU+RTNS04s{PhsLo9Jkwt*C9A)H|h;b zD+s|Ier#irrS$u27Kj5WHhr!(QwK z^L&ae^$gSy)};#aucorredx>BXP;GmFS1p;R~aS@Kt?##sq5$4ZhkFL(Idr*epk_nCC8oz0TsmqY7B*JO@fn3id_p6N2fah z!oOzFc~fD~A#ia|_EB;L_wIXxq-7bFXJ}#)P#PfyjKBr}Jt7a;6m(N&q*=m{-%Pc{ zQ;0=;r>AXv%mruKM}9_|9=-Ej+^+HR8op?Ht4<%7y12C{xadlouOk+nS|`k&s;6OA z=h0L;_ck7{y_{%7Uz^sr+S(fY9j0bJUM{0QJj%!Z=7A#hpStYL)eT>_p zr{Wn*@lASdFf-sHPBTMH2JqaRDItE>h^QO=kkTK7xhvDh=`q+6})QzG5e+256i=_8FS%Nz1QlsjhoDcV1Cv=0@iA z$Zj0;rK6x4^!Im=0Wd_a^$=bb{C^`e4t_KJJmVT5G13B2rnvFa)z1|P z3}7TwR?lO+x8qK7sN-c{}Wy#97|EY?Kp|7cqL*{-_P zZ1WZ#9WtzU866;M=X$4uw=|W0dr`X=^r6mMZ1+JVj%EH+gQO|2<32+Zeb3T{=DIG!I`PV9_3zKK1*PMB0k zlc9pw{-&+Hd6n0SS@<7gzPrp3Y%LC=2Ce|$r&$*%_)X%juJTNymHJ+G}5NIEBO=7tJ;c15* zt9a}_jg!BfPeiA)@9)=RCP8^)+G3uNm}*;M#%Uj>lVv+L!P7aJh40n}capY-++qzA z8cdqb1CX2khmby^&$+N%vnTxx=ov@es?9Tn89@baMdD5+{t+oN8||7OPND@cAB`Y2 zF{sMGQ{TfS+-lR@F(&~A_p$Dj8PYgbfQ!GuPwGsS4R$lRHOaBH%J#s&l^yXlrO2qo zmAfy8S!as61>)mT3fB4lpf;K~cLJ$HI%;)EBf)->w+?EX9C8gHf2S%mzRSY6v}JKO z$E94PhGx^p-koo>yv#PoV;EV`Y`4Ya9K-ndTyuOARQ!0q*;vGw-*Jg&$j`zS?nqOX zJLZJAG~7Rf7z-cnRV&<0s0>VaKrt{zyb5$7b*M)n^1E_-1kIWFhw`|{ZEm}DRmJsP z7_DWpw}jhuSWHnnf;yz@MI$c2<>sa;jDU%8u+g$GG#SSpXI zOU_z;d!=D~?@O#8?wNAA++r0vHiYNKYN6nc(tFHfSSrm{JlrZ zXXA1}0Z$0CSef40_mXi>&7f-@Y4dg!z@zdEbTBoC*}3NsYKt)BKg zW4?HUl-UshCw{7i0~Kl3w;>bhjhk)a{ra21z0h68QE{c!sY58i5OD+O90kITC6ok65?k-ClkN zE_`7O}Szz|Vvkq0xjFTuqvb5Y`s@SS;>A($SH^pPZQYjYP^P_3k2jD>ZT& zSA60Cr|)P-$FY6bAkJ$*D7J$+AW~oM=cq@Sz%1}$k?ggE12(Q`gTxn|q|Bg1({F@!Xv}=Wn0%mVSOv+xo`M_jaX25VH?uU$7l@zAvToM{}@k zoL0HkV*&4b(g#@+dtHGn3owSlj=kF-LG7y(H9awQ5+W;t5ueteanLLs#jSw>iE%5J zWU+guLqmNQe#cuYC!0P5)?ai_8dU>R`da+69nLkk#7*WVHq(!wTj5n)*w@45V+uYz zce~vl27I}-EJ9=zdx*Yaj1-oAO!a=NUgqO9<5J#kx1Z>K^N(j?`u>^PYc2DZd-{nk z<%Po^{8|@y{7755BfW7lEGYpjT?^e{p~<4>HKvnOos{{&{++Z@YjL%Kt+~YOpZygb z%soC**rkZ#isSodiFI8mJS#u6(~^68i5=XqdkEU_K0%gjg3f+wJ-r5eEaB}z)C4E~ zTL3zI7(3DLw`^cW~-5`uNGipO3juzW||Gx|)|tP**VR z$G4*aFquoecAFzQKXcG%!)7DQ=Q5r@v8{!xT;Y@pW&85UF1*9A7=;zT?N 
zJNrd##QdjY5on7rv4TR!k{5yb6B$t?1!s#Jq2ADU%E@;C8t+JK28j5PQkY1LOP!as zE|sBSvetwj#QRiuYP~eW4DtK%f;pS$!ob#Gj*8j1`l9p1l*wQjWjI!&f%c_wt0;9l zOS6moH&UV{xvhP8`y7dVJ*Yc3WG;q_oQuT6?U~u_Lh+xBOR+ZqBU#44UpPkr>uz zO4kRr_w)28Qm(v)K&iKtRW@mU{B9Pm$XO(VHukAO&U>ux0>dKuBS*hhSbu5b=Klv; zISq16R~IW5vPQ@~-%v0S27?hnCk^(Ov`P!p(FJ;Hav#@9m{T0h_qCP1lNG!W3I63P zRAilJRM~~MXI68Hs^n}^X4OE(+@9P;kKm>uVzD5k2?rGhhgwq)pG8qUr}OW6#lx}g ze_zjxR71ZFy3p`Dd%V<{EkJH4Ko!R|WJbF!K5L&qSL^djMY1BfEwI%oc$R&6o+vsO zw{+}3CfQ_2^{PDeVH_=K2zW#z!>8?3w~s(e01dh1L^1^wYo^HGi?xwq!1SiC&;H)s z975_UViFc}s3d?Qyy~QDa%y*i7LyY9_p9M7NAj46l%3)z$*F?ctA9v?gfXAAe23g6 zp2PziHwq@8U4h`764iztVM-4Y2+?)kiK7^ke#O`-bql@X`~4osiF&bi`qrVG)Z!I; z5-ct!W7|UkH4sb@$<0Z!TpO-~Q(2HoPL}m_QCjEj=$M0Hsg@w<2{#=FxE+2B2%Mva`>GTcrBSTypyf>4}S{!py|DNAD4e&@# z#`f&Y6;1~bSugsR#3a|=1*UN{Dcp7Q_dvR@X2Gt6%%NvzMwm zCTs_OqU`B$+-3OAbSGYYmWlNsLE@{qmomi*v9Gz>W+zxIBBv*9xNnvw1)*WAtai0E z!;UGEu5;l|aC|b_QPX3^`;``hyGcf;ow-1Yap4t#`PGgk4kz*8Ka-X9l0lY7iV;dP zC94*cL314UM9K{ISe~ap?h)XvG6w$B0T(SP4>ZLrMC_<@Kqb-0W?XX$GV%5SZ_Nuz zw~(^hO5zZ=cg1K%B}BgvJ3obv$aIf`#}VRqbRCu#_e#`@zBz6q7nwM`*0LD*^_M~(R}yD?hSn;=W6g0ofruf z=jz8HJWetR>OfpUlkPFqzm^5iRe$pneA!6CDaM({C4#a2_wxO{ZunnVEQr3D9IumK z=2O3~A=+pxF))bSE0=cSReJVRer@%5W2h-cmqq`MpXxbg(7!m~C@8z&yDaKKvB~t1 zp@x?~a^KLr5HrW%0b>ZT8{L`e;CP5Avwnj_=s7yF#@W8lGA&_?=bS-kR_-dWK+08G zfv_7V%y!4Ri=7R-JeIOK*EuPc&8FGOtrkn$o5GaBF+W>5U(}uw`%O-2lQ|`ettFZP zb-(8l=4>s0)6sOiqY`h!W7GV0KSW%k`&I*thTv{Y5XRG7(W@?>q(?*qc(cY)Z8;jH z;NP8wI4~|~iAe7$OFxxkgbxe&%e&?aP<+amdzJ4XeJ1?YdkYbdt2Gt=h$xXi@S0H9 zl2g=&dFNJ|iPqscU>CH%eZZ(;kvORxOca93T!Bq#mJSkL8TLqAsuv}ZD$+)L@A(C) zK{qo!8so%6cJsn*-d$H2j zfQ_G#SDYdg{MleBY*@7EsKDek0Ool^t(EBi`*6u5(?Pq4*S5N9#;p0IIa{kB)C3mG zEJ`bq80aGf=YL@I(dN0PYo96~fU={45k5jQK8JdGHIh?R6X7cc&T?*xqmT07vx>=3 z=lx-;7@g3Q_*oYISCOV|lB4TMaAozme;mV`DlVfBwMowLmp=1$!;J7U<$0*ZeJRoK zmTKvW1vabK{1%l7|H>CDFkBZsJ6WxkMxMh%K8wJ|RJQ**snB2Y*WkYGZzi+tzBgjumB_; zN~NVc8&QyqIl~>UELLRNd^6VVpDb5A16R=4P_f3tb}j9^apf zyHa#ySX7-E%Wy57)^L=`_;eBsjIXaT_`buYDDQ4MP=$Z4%t~m-n2i8|B0r$3^dlbU 
zo?_}gGPim)_lf7B^0v*fH+(;QuqvTtA%}c|;QR_Ywzn;PJefE-xtqND<^d?E(3|N5 z)1nvfh@1|3|tQ+J`N!i1i0G{mB$o}PnNdN%5o2LR@M#kyi~gzYPRx9wvV=! z8dY&c9E7PqK@~eA{Cy8+B(#TK`Iqddt*bcGvOkA1rQrug-?o;chCBSQQH>Hs4p@d1 z?*#Rl)m^TPu26B}g9gX;y2L&*o;7kZ&9$BCb43O;{S^ytx2|Xoyy?bUUJzwjaoWqG zTz|icen81IcFlRHDYk`;S0~r2(d=aXJV;4BD>ee_Dpmq>9D;TA_2yxnT|}&FJhtXrV$>8EN=m7pC@PN= zp22>|UK_Bx3#dr&VRzCQLJT7ft06TJHsu6u?R8c5ZNH|0U)KJ}|h`pp_}&lnb8Nwm+d z6JO52?89R1pGLdDc~QnL7kt3G(MB1mQP`L2nD7y)t+u-m(qYfPvCAaUN_T$n6BobN z)iX~q&VK(h%%wy1T~b1Qix54sAyKywQd9P8`t+i=nodN=Hu-qwA|7IlKlFo!#0Cia zE;@Q^e{d*AutepCu%fo-AK4fU8U(Gkww|<}_APS0Y6PuLJBY1IBXufYP|`}qw^9JA zKcvi{ba_rT%n!;594L!>M@-EuU_!3;7$Sy=YV@p2&WAHzXCh!K<ZIq~I+%toPuD65yOKybH`LOU-xCHkxn;sB{m zXzojPsK65l1C74=!z(QoIe+(cBZ6-K+YOK-o|uy=s3;QM(>J1jB@>$!kRl^= zYR+4;=jNNGb%jMy6>Cd=qP#MFljQIR1T9=?<7dlu-Ug|C>0_$3!@KP7A3l4T@D%X0l~scJ zM<%w=;p8eHUU=uY@Z$oo;X}x4<<)}RA6jB0`E_#*`?lv4(v@j9 z_`X>+I~N`uBXSr_wXuyW70~{vks>S2&ot}u`=Qqo5C>Uxr;J(@XlJxn?Lh|TII`_k z92y5Rz30nk9*{_t z3k)`Zp(jP^ii~d9dm3p&f@TB~ij2>>*&Y1Id=AziOtTV!^Fyllq-jdMW8>n4sIaJ^ zzjrE=X?-Hn%cu9VA2sA3R6Vkt|0bDGSoUiYbPb?`JM-F{oz_nCs@46ZZ|K=7RQ)h% zxI1MxrgU%gLS;V9@k2Ad={TcQoualvy7ru!RIMv058$FEPAbdw-<3DQT6utFymOh& zpcLDC)I^zcc-hmcKcDSREX4j7MdumL=G%sGs->;fYVBQAYqzyWwDm_()ZSDnCAEny zt-TdRjZ(Esq*m-zBzEjnV#eNy5kcsi_xtl9$MNL2pX)xa^ZcEI{d)s`2({50VS$^A ztk5;)M)vPMi@b$2vn{Q*)t<@`B@5=iX_ecnW z5i5G>TFR2JUdFl)O9QpeZ@Gz=rJe(9A_AJkS(m@hYt#T+Rh&nG3M#fD^q=zq?1T@Z-YU39XJR)SR+20Ik0c>VR;i$7N$d|I1W zugpkBJj zJ0FsK{G5Ndo}x>pTv0huJ;>nQ2K$VaElqBXMAToq?Bj3B;#zO<_Yy8V z4&@gacBBF{dpc(uvsgh8Uw;}V<-6r<%diaV;4k_r6^k4sKP-}$bi0Y{ROD_X39#OZ zV{!S^*t9CuZQacO>0{y@4R3n)lG0p=l^n)_mDS9Ae(uIj$`4`YH6A6RXr;QB?*h+M zwQp02>5)U!KMG%;(I=Av`C$zgwycbni=e8xcDbe3M? 
zwYsWtSV{p$fWG>)jEl^UmIRbz{lBRBwObs4AD=6+Ij?VP?a~Bc0iwj1<+)SBj`orG8&wqua0e45Pgo zQUu2-YZd}|_|kwwD&ntCTi);0Ieo}rFQS4DYQC9c<}zD9gFIgH#8)ULY1pvlT>3I_ zLp0h-UY0QLFIu^tY!$JJb^#szbWo;Xasc75SeE>HyM^g`S18i0 zP+(g3AJdi_<}rHFhjTQ^qpKgAItFot7G!VrtoWy5`nfdGDUuHM9|hf}qC{bdi=u5t z$>mQ_T5xXgf61bL@*>tEXP?#nKS6qX#V;!d;@cZ8GN<`7@$0Zp2>KH&)p{NkpE{gV z{++kybFz}z{B&-JMkF<$LMKyu#zx{h)c1ULYs;_s#C(pmHoSE$B})&HL(Kidky%-8 z3%s*s#^coKwQ4cq#+Bf~&cv9FO6POva)?Mn6OfnhbKELm z!}K=VeayAaly!jxdI&r|bD+=rLbI^>0y*?KalK_n1Tdn_H44-Wwj2S%TG0z;7>A9G z_#Oobt@iJPg?Z&_oyq`KtLfuSKQ{oc(wq7=2Uq?ki*X)P%0&BU&Efldo6Xx#;6f!N z00*A_l=Lul(QcbFqP@kMa--!?2bp=M3DciwO}QRoHSv)Iz}ISuU-tC_{o}>r%xlDk zwJ0LS9;pUg^Ol!cBOZB4L;?Lger^S2Lff+&boFM~OJMA@onO(T7O!->!yj?o>t8NP z^eNublaO_@B|*eF>Ki~FIfT01{Q&th^$v^nprG?m1g~nZ%`=mBt?GIid4EX;HU z3S0ep=8X^|orvkb_=|0_V+&|S&=@P$e_0d+4USv&(T45!-WQR*vBXTHaIy~TcuZ{U z%W3NfYnH9>qArHN%F*fjqDQ?;-~Ia++o$JU7)<<*r(TgQWVxf2Kt54QkrAfP7HCfHY@ zU7(k>#k4S3sBOU-HRFFLY!6l+m{_}k`g*!!(ZM>kFEa$s0gmWlfnJZ}oI)3@>X|OP zFK->>oa!18OdE*$rMW4rt{}YY*j$y=R?EjYe}US>XF9@j=0tnA7ScXEBm4vGduhI56kxgYw46EM;>cf-}!MO%tyI7-B7xTM#Y>&xoIQK8pn&~FV-XBgT+aM zR^-((LU)!><;L6h+k6{`d{+o{pyWTqn*U*3W4sAh^O~|)K`gS1>zg!J%tQI9~ z&C6WwOC9v{N}cZ5m97PbDsD}e@Bbtm9-Szkn+qBFL)9HR$KMmd*qvJs+ZtOZmZQ~s zVLlx@JrRfB7yho&9rb}gave8jq&cnVWjPn==dC{R9PQCdHwx4N4zfJ^sVJ_=L%)+9 zko4QPRMFxG4YWx@)F)c6EKHi=6e^4p1Og0LxBs4 zwQB&^uU|i&x+3*{p|6B1Ph3RY-;_4DGan!QqY#);w!3)lT0nCQ-EDdY(?Zzc&CeelCBb^lfa!Po$%>58%37D{eoK&N-kq6^EVw(hd0bDW z1V1mL4y)S`l3yJ-j(XLXw?F*E9=FVIl`X#W&KIoQE7{|(GHo1JmE1Bv?*K=r(p^31 z@kZN4gs7MHl|Q1buvA99+49b#jKwH@skvSuNSfC)=n2TWx^=JurTg37_iMAxQl!=T zzsAkcC3j4-7fgY1fua%S(-(N~DeUQX9YUr$)?paA``>l(szC1J?DW)}98dI6uN*3Hd7w=`JvD3U{_3XL!)SFRH zLsSyW7$MQdFXq`c;KhAd|7YLn4Cc&Nv&||z#v*{L>|}8_o$NIT$kmC4V_x7*QJE?- zovYjmeEq-j?vFB1tG17dHWKZzCwPNOdZP%jZuQ6T=E~-;UmEgDer$HGq{o+y9k+owW_?MP6RJ;@ zN?JbT10ti8;?CbPj+QaT6j@0X8E6k^ppIT&DE}y5Q}a~fih)hT*#Qt4;CN& z%nda>j$?}`HuiBZR8MEoL(Az+NCL8nL zI`WD3pz|dT2oAH7cP;>d58P&VQnPlEspKZ`S 
z$dpvuIBd(mA9@DSAf_OKqI&6uEoJLt>yA#N@hL+oqb-ZGs&86ah~JlUo2GkR!0aFd zjhHHO!!~$%p4JqfU*Vy045WMN#12X--{_nKDF&nd^5XREK79!&!ivf1+>?zdF3)sE zP`{YuSz1Yfgt+^~mub?jW*Y-@zicO*8K%Z@K*5$s=q%hfSg9SC;@rKEM zQ)!oH(Z~h1*)Iz3fYZt{%f}N9^xmgNGb_P&-3W%_Rxv`fU)|>Fb8~699G(5OcS*F% zMa3>CCi86%$}^|w@vbNdhsDEUwHT0q;3|24`^?q??~hf9ig0B)2js)VAB#xvIa#nd zNAVSWJ74lg*aDA~u{u&c8tIV1_UV#yKYf$X8Ca-<_p+Da&uxvbc5Mg56Fa5FLOAhg z_jiEoj=}F4h-1?ceMeEL&Yh&}m=Uh{ZEu55kN+mmQ$hvsDRG_4qV3UaA)-FlYHzwh zjxSE>8*kqqQ4eR99y>QXf|i4+exc9Fb}8cT!*1gcYlmzl+b?nf)Z8m5Z=uG0Vi13Tsymn_2or@W} z<}K|!eyzSfO9NoWydN;MbBS#!(AC(FxwOU4?z0(o8WH8&5OZ~RUdgH!yOMV3l9$d zQ7m&$Y&11CCf;%{&KiQ%zj<+1?2xQ4&vi7>%$QJ6b5()c`$sWBQeV!lUKIKn4^m*~ z`yohaOHlvZyPj-R^kQhO1R5$senGbR`Eo+=sc^^*_ZAD^THAypfCJ827U@B-;npau zRQ^v(Bs%FDjI!IKop}4ahw$Mt-1pM?W%Np)tys*$ocSb%o>xCYO2Nk%EX9$V1My3mqQN0hI1uE($7DTm0(-s zBf|ENqUOLuVlQXs_HAoXioxU`OTTbE;S}=F>fxHdP{)e5!h?(G?Vvf@FUw`@?|i7& zIJY=255PSRyLcQY;p4bXJtm12dp%2u&t|KVV)Z_qmA&D;06Nc0%E6WR1UoY6%^*M$ zn-$fdtG~>fgg!ir^B*H=(z*7eWC}J}yE!1AJ~HiJeY>$v{#_aHn)@uK&+J}*d>d#v zD`qPjnQ_AaFt$B#jq7tvs)pjDE~pk!_reiF)bC~S9;(vux+q+Ov<=oM&s}Y1EFR_4 ziv9QKHT}GLn6qWsy|}=v^r|55M|q3%%*H>q>KF2_om-eV5Gt#Rrx+@fJV zwt#~ZN_btO;;lYptc2}}&Z-I(9w~%5Ue>-zFtFS^Q(@f>6(VN z?Qhef4r7W37bG05^+ zZNqEV;S5W!mK@VX^~Sd1jDwCGasR&;S<8TWWh?uSJKV`x;$%yTR;onK#@@%dPyLX? 
zqeZTq3!kM&xyvcL^DFc(Uzt8GwEOz2-hOoWSQ*+LW`2dVY(ONEj7tA*%AR_fIw9i)-Qc9rl=NKTW*qb$oQD$s3TDVr#)80#Rg0PEBjy1C^95o&Tfg;SEjfv$s#K zFw`5pCF%=2KihKv!Y%3Bzoxl#@J(8>b8MlpntFN@CT5r8q&_DEE~`gPZMtakUGl5d zs1AoAxA}p-0Jp_G3+(0XWpO{+jhWr%_bEn^ebNbV&s_z;tjr9N2J2q1Vg-y9Uu&B5 zDDwWZpwJZXaVqYS~1$XEibeeYz}qM5&cVuRyOT9o9-9P zUA~5kqJ7B)S$tEA_u7mKt%#r9A!5^ots5U90kdf_aKw{tvn<8H`AJg~6Y%9%rE&88%{KBO1g0d}Mz~}^>x|ZJBUWy|I1%A!yAhUT@+<&~RTUXMPZ5|%wfiM^d-{*C$ z7Z4C$L)lJLzCf+9R1o&u&!7#!gltF9D0_hI<+JlJnbR!NT_x-sfRWe=$db-C20VZP zrPD$4wO|^+Mhd(QAUf#UR_dOYn}gEJ+y$)#^kYRv<4p|{blQ<)$ixNF3FyJYhUl}C zceCkx{!gCHJ*R#$m~2+g`P`xFz*codl{-LFpIGo?&em#vA;2BJ#Y31u3(2uQx@FHZ z7tP53=B9gC5R$mHLfc6dkiYXLN9$_-_O}NqN#4=@hF-k@VI9tQdrN-pG0a2$6=EH;U(>Cnwc?@JlBosTT;)zJY4?H;;6XGI{i04N&RJNKr9!UxTqY$2v zOcdN2@;n)L9etDPq|NH%pi{*igWi{tvwZ{A7|HCvu67NME$elVdD|US zmKmxltzj77-ZQw`)Px`OL z#`U5%Q!WQG@#6t{?!PbPY;OkR<2!N859*b9t6j%mv^=iv5!n-R=X9YN2bXCtb+8h_ zSNC{XtY%$clVbi%f_LEY{{31MUfWt*2X;-WRGn`NV!$~4gK%_MJ9b1^ccv{^=$Eyv z7MQIm<&|LQU+lh_=Z#olPx&t9lfSP!d~>5zirOb8E)}6d9z@~7PsxAO+U!Qj^m|%T}8JJr6F@YY%i=D z@plns*Ei2gupt^ShKC4n%~ZlUZeWNI;4y7HEK;Cp_+t_iEFU> z9O@M)i;_QL)9mh_i{0&O^b^l7UMUH-Bi+APp*sxGX`6HBKyMp}^ddX{iavUzCMler z#z4u1`U4QcJ7^0^j19<|$Gs3H!x?#}^9P%)g(+2p39 zK20Tk?NZIgvXSzOA_ZiM%gy?Nr<2)Cyy9gzRMfju2Al&hlI+EL4>Lhy%}UX;m55xO zhIZK4^1ib57)HF(YKK!&RQ4v3-B8v{d6$QTeVfpEz|JXm#!~fQ~qbxruG`857;>LPK1Lc zay+Wh;%@CNyzjHTOilFiYGii2;VSnXF|c1VHXg0D9mzA!RdZqE2}{V?G6Y9R%2g^h zHtmo4#XB7@&_&TZ8*?r=v%jO;M=ACds?~W>gj^VFN16^xCKdnLxDP zQ|)Xl7Ro|-b1vwCER*g z&~qd+*~LmM{+!{6e){y~Z*IJ;V^jLg9g(LXkJiRzpYe(`R8bUp2I~}=7}7bOnE@E% zpfP?jY4c$-%#7Qwst+wF%{&OzpC}+caPG?0d33@X$AL@VwdGFKbhQCk z2nm!6SJQ9>UCA8#$Ev!02+_h*pO3D;C+C=$AuJDG{-VDA*k{{kG7W5b#2a;9MtTe; zJ_Mvc%w*x*^K2g(F5Yl~Q|tfcqTI2|?IfpKKZxrs!f$z$4alrbw|6zQLut~LgJ&wFa^R_D^4+KrXcxFuZud@s>#g14cRoCPgVyZYN|ZTpcY zz@u%{`$w1>Ip>UPg<*b$I#nw5SkgtrhFRzK1<_uFxb8 z_>9E->p&G#mxmGnouYO3p*$8NIHT9Wp1I{|xMZ8aFN7cPI_OHfvdAiZZRz4(#zvwu zJK{CUZfWt8sV!j4%y5#uur2q88GO#?3~ywHM~Z=*#ir0wGA=QE=U 
z-HHb4z1h5cG)_+|sHgzXn(Qu!W1b$|kKaCHSz?$$8^r7=n^UIyxX5?{|{IL^0qgP9C$K`|3kSBr?w`+W5QRP{) zmY}2Scl-=ul%sV}I7EaU>99z0S9w|jeN3&{yhoFQLV#NIIQKxAk|LxMnJr`r3d(G56u9MAA}4pmx550v4!k! z2tL~mSy*R%reB(c=YALe1tc%RukMVWz;oC%nw`?uMmZFo_f(vmMZY+fIV|n`F>?8{ zCa$ohalzs%Rk%F&RmKT;_XeI(2hZ9i8XKbH%u~n{-G#1$v;Q#NEj|hl1_>{;@Wyj! zMu+G-AzmlwKbngKX1siM!aHVmw6DY*F@*Onv<-oU)?XlNAsI&B{VlZTWof)#F;{%Qn^@<}RHznHld0BkP!j?Q1#-noq@lJ1-tZoz;X>gEBVKM2~U_v2C@v zc5UV|uRMyP29hE|r@8#P0C|^>1~#;-?f3W&xJfy|JbQ}nYzji{vnyHS3$KhJOCpE^ z`!0h|nx^~ozC8Sw-;}}2(3@KwB4pbb)bc}6TBZ3S@&mrdGdaxk^`S|M1{KXrdnMud zWrDP7iYmR|!To?rIOFoXoK_19iz$qSj8F*l6nI+WIa=so3=f9sDRK}bF|>s6x@m@# zK<8f{FX+5f$}-3}J>1^iUAD8AuOg%1H+i$Rk6jHm2v3L*zxSa^dxro(JyW}O^31ed z#WJ|2Eb@dgO0~hG2ZWDK_b)=vZ4>;pTT0qs*bk9AN6XRLt9}&6&`vVix|%$MB*8ne zv?D6-F%HswIqw~ssy&Oi_Sm`ZPzNHFQ5y#cFk0gu#fn*sxSy`+c`)>zT8OYq*>&q% z<^|7Ob{hG3^x4^<@9|b70w>A7-2`(0;WQwREY zteL$M=!Hl2OJ^=uz(;_UaD0<`tV!kenPoz-4F*4UW`-^3+krS6q@NDmuDqME{Uq%c zjUZFdrOM;~FYq4>7kH!$Khu~=hCaZP+Zsqpe$_^Z&f6jCCB$f5z>0obLssy$#LmYX z&HG^O7`+*34;q&V3uzRq8N-@{K%8t06;Um{OSt-e9-UJMTvpFrnc+0PMd?OlM*ll&*mTrgHGSn$GWiG5w zJpS2acbctp1&;FOz2EWRLnkxIs?x^g%T~v|nhy21wMEJxtHb2f2Z<-DBA+Ob)^w=; zW%(GXKc>EyiS0SFtuie(G9>Q3<}=mv5gm4QBM86+0_jAOu8u}XoXcfX`wt>aa(7J^ zd=1n^L$0*BSECW~HpMG*=P0lD{^c&?ws3NG!4j5!|DB6!T?u6lJ%;;>tktmXEt>T% znlHDw%%Y(zCeh1|Q{o)ky8WX+s;nIRh69(9Mv!ckW;qZ-qy*>gxm%CKvPUTOs^`it zdHPSF)riEq=fSi4)J1x}yA!o*pheBFOmHTl1#s6PZ|<=!`n&=1JwHc~h)2>yCb z@D|yshR(t`xj3c|-?=9sx3CDG{M3F4{DtG)Rlx_Z@WwjX(GzL=a*E?iN;4*h;~3=b z5;A+;y06o1gfg6bES!28#r}dl`VWm$86nBe-0Byfm3Gf~XA5VBIqsWLf4HOcD7jlC2DXr(h6H z&6!AboV9y;5#Vaej<5Hbcg@6O=AT$NG=2Y1pT}FxeZ_uTE~JWvvd-4#AH|KwM2F=9 z>JY6-0AGl3hu!TS?2gNFd-A|HP3~F#(nHp_A6EPx4Zg{scv% zz54rn>+#MGvB1SlV5rHNnF}2`?P@@*N(Tk`?UF6+($6d}oBXI}9!|<6?7i!6xWWVn zXN&8uNX5p)gWgNT5L02&ugRpC?&>;%Az4t3D2PVq1=Vw;&8lK_t4!-KdzmhB_va({ zOrm1&G3oY#Ty)M@l_1)Auuf~|G_)j5MMmJWbAxWPeHt({2ae zx)K}jDLdLp-Ky!d!}gP!V+84bgbfZEA13Xx@Bnv};juh4@~*$5q0D8IFPgUA4e~&Q z6f0y{=IZrDiwY##MN*Z{oP3dS)^%Ho;$2ahyPnb{`g{e!G>hJEZfdMEy(_wh_=e1S 
zO95x({j{lQhElC5BQXWT7we6swsp9I+_QysauYwTOwpPBSNCXHs^gCv?4S%vV$48} z*35<~yE`iqb%Ny^s)zIzl`xR1V9DsK2Lo6};zL1DxW;INQsKawnos({*lprVwDI`+ zr@Al&;d$x9MpB*L#?D`}ZA70w@!`qo?#2ZVu}ojr+;zelUZ?oL?G3p~u_BrlypXyYP;1;IwaguC%;xy@SJoF{p z_(4DQsl-1_nViLcMnt~6>c@K^@1(jHbsIiq{6}FIVW;Oxumd)Pj{wZIYzEsif*9d# z;1%2GMh~H5k(XA3OivZRH;<}XUK-I@C>*Idg?W5aFF({Nj9CVz+}UKomBp;*tdE8c z#g5J-x+xePq@&QV2JoGLI*o}mT_4>x7JPq5^Q@kU{u?l#x*?GnD@j!E;Qv-^Q~JEB zaM5*+y4&zs7u$9)YWhmHX_DNiaZ*Gl^aXC}I9Dsd5ahq2Y}RU6X>6UQ`I#-4@bRqE z5Fb=UhC>Ecs?-GZUp+*Fjrsyf`BZp#`<`o8wOZUv98eB94Y4s05`kvBE5NS^8x z!RnbH>*O<&As)SHo&xfM){djB2Im~TZeGnFWh!eLDT;U`%r?hJ(=GB41~#7+0)p0$a$vB3kL)%2o+WcSNU~QIg_)K z?33$U`VM9YdAxk^Nm4Fe`tre^%&mw&Su?9`1)T!N7u4~5ClBrP#HjkUny6~X0{YcQcc+|NQfi-Jufs7JUiBMLvKm27 z&8kvr6$P{`%{?EuK%{CPrv`SNN5IKVB#n{pIRYvSpfVzv9R;iTDOZ%G53B5BR@ZH# z{d-c-3U(D~E60f(9HI$?eGF;6~wxP?49;tkGUDPFfdzh0)$umUKATvuiRa z64u@}1vSiE+H_nL6s(9$#i zOu=Srq2woD?E<*fmO7(VabI0v=aYSY)r)i1H=#0^3{I9OT!OJbZF+a<;2QiX_I7?( zBjkUPlIkq^at`j5HC!Hqe+9$hbTDsejna&^l(RK=HxLXQ!Z29nS8{73cX*u zN@*wch1o1V-e6nlBi+SavjKORE$}tQ4CJtUSc<{~Yc<|De}qX~p;Gvm4Da}JSqcy* zZeWqU<{56s+z+>Mhod>I{{EvF3u!BO@}B=cvC2_Ro1%&(l0IIvOR+Jk-YTJ@TCinG zI`<4>8nD*O*mQ<}RdrLacA%BQIYk>8>2pgySd42$drzi*zTr@Nq!`-K!pqrf&i2k> zBsL+(Mgp}%{OIGe#gW-sWwjL)^5yTs>pq*(H@Ot-f^}4lT>076c1Ah+RIZbj3pSR( zH05~up{#%h>>F!KFcn7azG+Fn5k}+a{q1dF9D5@-;?!L!egQU4*4knTSKQ+do9(Hr zSyQPUpPO*Lb?2n(@=8C3Vp!P!6g0q1v{`n3dbFm`g6y(!0g66;4w~Xc{-GbMQFGr% zz>ERezJI0vf~c)T1MgF_-Po`%IQ>jTI<7|cYApxqR-(7+D%l;Pmkn23wO7e7--v#M zdROtbHzlC`P|;L4 zrOn^1kXY#cP$x;K@eJ*^3VJN|@(spP$XV>J*Z03&*N<0(2ab7J+N*0PD}S8{@8UwD zzA9`8aD~$cIc2XX>Bj+RbG4=*BMa$e<%fS#2nwY@x;a(GIDJCg>-vY2?=P)#g)5Lky*GLnZjhs6D)|-V7S=B3y}cIO z-spj=ESe9cCaQu#mK8XiUS5`&QSx@bMX~pH&zh!R`M`#?*jrq* z#@QT$CtI4*9eHS0pVQdS=9&|gf@kXuCuY-OEYjVMo)GEZJFhD3bX@*XP_HxYLU3Rm zyno+_ET`&|-H>S?u1rQoz|0{ttj^@K_HxJ!)xJiXeW+Gn zm?crEyOLZxqF7o3^x;JdH2Tq1erBbTKe@6YGeKka#>+pb4_QAGFE{@YiHQ88 
z7ux~CSS055uG~fwwuiKQ)Q7E8QLUNMpD6S4>mtzxq#hu!c}9&j7n2P`c8Y(5{)-g5--(|K=6%WY$ZR?~ck7iPA;!y&N~8+_YgD!Sn+=A7U^ej|~6l zqrc0kJDv8yDaF@=CiAaKOSsR;gU;{Zx@FzN^?-q~@>4xov%_={aP;(0vt6%VB8Ya64xNJX5YsPgXC1B(Zee#chd_9dRZq`rFx5LjIXK36yIS5DfW(0_E)p#k z73Z>d(?Sqwa{1S07VfQi|LtLw?Cr)GDS6Dm$@t~@RauE%632MmX{aT5^&bV*AuCpO zcO!5aSbBTMhgZpwv3~D_Z{w(13&6Q8F^(WOrDkVJ-}csd^5+wcx4wSR0UG&Il?60ugn#e9ge>yZCGSl`?XzF_IpY;!t;>O*;3vxE801+7>HfG(ZTDoNO>i? zr5KY?)G-aM4~}YU8n0KEWBNP%&5@PqSN#Vy$_|-@E=TUN&*$71R3RZ#9dblQtX(ho zzKyG8exZMbMDBH|wtTmGp^)C+jq3j>jCe&H-#!I7Mqt(!;|l0Qw6G8+PCG?a`T%{s zlq;bip<)SvNAk~xjVp5eyPYx`nP#bOQ*af0CaGZ$7=cnm+ac>rL{Cc+#?|bO9uIny zV4|)O9LfHazLIy)n=?Hz-B$K}w&8H|wn*F{a$G2Q7Hq{n(QW*5%CQ>ik-c5Og@_EH z39NMW_w2zIaJXjix;jk>A3=QoGnyPUG}J<}X-Jd<2uSQ&OZvy0PMm7_i*LIEiUG{`&^{fh-(Xf0ViAeLd4-k3g}Wp=J^#U z@7|h?>_s%+QMMwp%2NtB1zdEmDXpLpszy&cm4-@Hdf2v`i-7{kWijN!4AZC(UE@M_ z7bFma*8z*$wB8|d8WV$yTnLtK(CFjPm%6R*(~ToWvSZ|(y}g^2m)=r-wswXuQhtc5 zbZN|IP3Bf#6t&Q74t)t zqA2Afl!72fC+=ujIHu}&Rzg{;!e-GF#}`7|1t6#X z0P`~61-!a4F$PboRpz*44Y)Qmflqf4ZN3(V>}}Nd)F8gu1tV z&D_!+sGiZX2${cf9_y@eCf%pe#f?^$ZKeaC5?Omd_8I(>?;2&d5ne*r!+0g8K#5+Q zil_$xN*ux+5kAd`^Wz$#P_fEtz^9M##yE@%aC7r_4^uWOUk>II&L9s>3UOY-q(q#0 zR@!I=mc34#Io09$14=fT+5l4c>?>?p-^DW%KL2{+l5L^4nI|_m78KkG)V=AN_K2A zgfR^sS?k>6sTB;oTgh-S{b?iT?-ZG3eeXm5<0jy|q&_g%J8aq&ZEWgq8lz(VvRsg= zrmKHf5fyti5GFsR%!{2>eiK!lBR|ZLn?S+-S*Z>lz(%vPWoJDA4NxMa#B(ND4hHK6 zEvVGY&kolN4_(6PZoc2Ar0IBKE8$s7;>_T}(C_gJIU6FiK>ChZ>AHashQy9!QO+Ay zHr|^GHzlW0;41_J{VrWL=L03h0)Js>32r{(jjP?_d4f=Ybvn0 z%_W)*@+geSc-(T8)0b%*t8%w?+Vm*y^#| z8A#Qp%rvpQ#Ly#WNO$)fqKq8Gs3ypv)l!}mYgkuoq$wIeNFQ2eKAJiX6XxtZ^GXSn->#-ty7>b=u@>gTC>mo#fz=#F4=>yW+I4<+e)Zc$es!Tzw&W^AYF z0C7bCZc&OQO==vCC@0b7U=CbOe=(jGGoOA_#!w2m8tL+iCRUi9 z=G3GHt^{nHg^f#lOU^rOkgk}K+gD;ud8FOipeG8*u@mYRtAYp^VJQ%u$1C?7Vu%T0 zV4}A7uGLUs*hN)x{RuaFjoY4ypZRzM+%u`RTi?q6N3j73+UU`bMLVS~i*D{$FXv$A zZix->%C1pnZ6Sw}Bb`bo!)|YR>8vfhcvhQOkOd_YAWC4e2jqo?soJ~P>>W?e)0ieU z)fHQi5^q!`k5=QMFk`Mkp;D=R$n$BkR*=-eUy}@_a)D;TO91nxPA zK!a! 
z4%=>PW2QM%96s}$%F4Q<=NX-StPwoOtAFG}2pMo8s`5~yDGj(Klyy2lx5aMlu!`1L zr$O;CD546C*kk-0ZoU+4fgwl9gevK|d&%cuGlffMgBck+>K#_kzEW#ldk@+HpK{xJ zSy*MG>#y?VZ&Zx4O03m7 zS{zP$N3@l=vNB{Nx4hr(TyB}r?qJvy{o!4@Z}|HoT4s3#-aia;Of;py4Lh_mJYS5c zgZ=eU)~W5`=GU5-FBY$Rmf9r_xlBVijon%y z+|asx$oOKYxX()6$H4}XnKIn0OE5-%c)oGGbgX*0W9FO92f`qPxsCN^Wza%yF6bWF zh-;qasiOG=&uHF8);|*++Aer<9URD@b~KuCewQMeOlG?GHeY{h0&+EKNhnz6rUvdA zE}+b(tK3&Vi+dEx-AP=ESUUzOT=6|eXoGuG5&#c~sg+uU1m53UExPk;93hns7Wcp$9 zw(Ox};~R&wJO2xP5rXcJ0ANQ_IUIdWbQXR)vGG@v4~?wtb#Js>tHz-KOK4R|*tqG_ zBKZ(j zW*fzl)d!mHz^r8F80V+B=Dy;FQygp*=O1S7?ADUEMy+PAzgKH3h7Uihm(`RPysxZZ z!{>DLcIndhXCZTdSQu?)u_Eac)6z z99t3oV8sui>R5jBCk8L+2e)B zTJzr-eWK3m%T<+aZll`~x{T!edSr3>R={*yAj2x$^z%|9m@gn*!J_8K1Sf&s~!y-q) z&#})q{Oe-Z#>=JMCEQon7Zy68DKvLTj3xveAuFDrfgP*F&T0L1r^=h#T{QJq^FA+# zGisR3YsE_GHDzV4it9_-`|o`%*!07!Lmh>J+3MEc+5Z4>BWyO)*KROKW9~Vw%fbHu z5L(XeCeZHIFC`c`okkt`8oJce zaG>PyM)JAio(4d#L-=v~8TgCDwjO@5d1D~SdB@x2hFG!A5QQqo*pNN()8<=WA6|H; z!P;D!tIKX#Ff?O<$2of$vHvl?hWOwggweb7)h}JZ#HG(ZlWzl|7i<_2qh8E#j zTO9}lFv%ka75N4;EvwBsl;a3FN!>y`v0CiAC4CZBS|7h;xwSZ66)857O-fGft94fV zZ+|21f7xgDLHG~x6Ty+*Nv7%(YBuT?{X9i`bq&leMt{{SA`!vCMr9oHkzKC6{{RHG z_$RGhE%Y$T&@`E7Y&9#@M*je30J$R^anin1_*wf*T6jBG9!0IJak{%nAK8TPGh}}D zJ>>0Uj20OT52($33E*Gbg5l$h)o-Q*1@hpxAh{i}k8n@7rFoObLX@93y1e&Zx}RNz z!eV7bI*m8oTF=dYUHTrMp?|?Nz60wzKl(4j-w%0vb~WU}=I%(?3=mB61H0(G>%%{3 zzxW|fg*E$Wuf87K+gaYWSZTNDBaUJI!x5FoPIKD5Qpfh$yR^NQNi^+7$7l>m)7(gn z>SX7FJ--@;Kilt5x4l_D!wT-pTuk>KTNekak~tjn(B#)n7dVB~snvJxpXs;YKdIJE z$tJryKkMXvRQy`~j`bgd-X*xwb?p!PCi>i!Xy=D%VVv%H^0*X7Xib8NQ^={ZWBn(6*W$@(sv zccX5%nnNwQJZ>UFe;U%Zg*@3NX*{=$a2eEPFh9C^6_MlL7Ojgis|J%hcB2k)-2VWD zaL{?34)Uc2H+tnJh_ zz-U!LY-MmgN59jxbJxEbKFcCTlNlJDzbW~f{gK@F>0H*GrQUdwW&YB*n$F(`aEt;N`jz00p9r_COFu5%U?$1(VlB#O;R-|X*uZT6- z(l|7|R_aLc^DVpN{c->p{*>uFU*f-+TSeBSY=h+sbN7Fd$G@$5W{2V%Yy25CVm5<= z7~EUaImb`NwRFD{FENFRZO7$pfN|@`73@*r{1(xV;8*s2G`=6i@hH4`sKYaE(zQUU zpbS9g2b|>f{OeOs_=|mN(X^U=q&AQPHN2mmK9L0GWb8iQ?8%lZx6?)Xmg#f3NZ~B{{Up3-o#hud_}@pbRq5N;k~51nosil 
z53Lu2vM%i?n_<>!*jtahwp3=cwgXV>$tKU?^H;Bn(6zF}}= z)g_V}h$Wm{eC`VtUBKaq2Pc#BSFZTW;m^a}dO!F{d?BUj8itmN&nC&4F73eX>C*rX z+2q%cYJL>cd}pQJPjJxbeq(vMY!SZEXu$B&IR`J(DmnvS50}cF8OdRtN>O?^-&K8; zlh)harJ?F)nOnmsskqd>soSEp+iyjEtgnB(ddzVtt}bk3JS87 z^LKULqi;-PgOl2g16Nyrhgys=Pj7f<%$JPOuKAgd-7+3XBb~p?HJ`3{e@DBrn&Rs6 zJB!I2<|xk9wUZbi9{C{l$0M#QAH|niAAq#0d2}0%PD{z-G0ha%WetE>fI1#=o}=qu zZHLO~;-sf$rrd7abbPD+AIkp#BjfP7(!o}B6jZ(0C1%rH-F%;>wz-v~-s)F&%l(<7 z!E0w_m3wqy9_$_oCz4N7&q3C*JY9a;rOa^MTH5Jhwz;}aNyk6{IAh7=`qdv0Y7pE? zMU2-8l^$f5Qw3Hz=L4YnXZhAdnlNJn*Az_U9Q^u zzUv=5FJ&vr)_PlAeDB@guii(f+3B7xjyJTqmeS%U#G3~Aaq77|9FCnnm2*lhW3Jo5 zGRHHZ8*U?Jjb0iC*WR~vpR`MCjMs@1ZR)M} zeL&57lHX&?t(Ub0N1xrcm*3X={{X=AJKq_2vqzKc+H4Aw+)XOwkr!-u%Op<$= zvGBv)z>c&1)Nx(SBAVt7E1QU&guXMOJo_>0{5s&`Yt1M2zcunck=Hao z0N?1b$>IGQP|$u+=0_%>_Q-m3kjM)D2OW5?o%KySO@qgmg5K`lJwDyciVS8&W87eM z8?#Qi_=l|Ok=onOcF~)s0V7kebKIO(KZ~FIk=wVg9+<97MKu)VX6@Ulq_EX0)KsOXuG)3erMsQhg{)0$7N7Rb zK+kJ)Ahr8EXUj2Xo=0$cbI{jef1(Xy{wr8xzM9%#s|&?;%J%A~a`h!jhV>a7^NRAD z-9{(fiF&tvWn*{|ZBo$$v_v|H^huWl7%Ep6ww zmc4#*t&prv3jwqff=4;8cGdp?Z!h>)EfuSOWGsvcYi%|~SP@ufcI6y_-=WS2OjqWl ze;r|p0MS{(*FXs#cYTI#0ouuprEoE{b|<|u4~~l^thU!y5X!e-D^E5WZoH6v0N@NB zoon&D)5AZn;vOdyoSS;9@9D0~?>>S#{W>(&Dq1DCW$UN>1N4&P{t64LS!}nudtF1$ zjbw>Tv7nI~<-t*mV3p%J3(f{>QtRX2h5jW;ANKO-Hx}~cBzucc41{D68w`+r4;_d# z`I`@m=Ci!Fdl!P@8&J}#-W)eQ4%q#_g_p)FjVDc?H$gQE#tRZ5+`d>-l08Q~ z4@&ZDcvC@ISZ3odEgrv?nw3k69Aw;=;(mkb`bUEAOIv8ZByS0eq~a7|CdVLQ31CYx z9Q@rm=QZ=E?Hl_p&8ow1b*K3A`$JEW37%66t;&9o~69y^i0`8rx91os|@cC32yO845W0RCOb{;=OFEgY!%U zMOu|s+Wu&!-_5K2?f$2wM~P|GYBg6f^ZdJ--WUCdwGC@sg59RGmDn>$G_DuP1E6** z^0CJXLC@DBrud`#G1~Z6>T7!>nZzF>%90j@xjg~k{(`goG4X>?*K~!kj!87jk2vqX z^BLKJ=nAfKJ@Jw2T}OjHEnMptZKdkhi6m%SE+GBfyz?O~jO5^f{?Dm3^;nE%bxN*|IjN`>IN* zJ92T6liIwW#uoaOyEJ!JvB-+1KQ97BW1fQp2iHAD4SkkH#8oQW+AqIH@;(P08uZ)d z_m`2CYo$p7eX~rswz`Fo7LI4%wY_uO@vf@J;!8D(+uqyV!m2*}WjADZ+y_s_yp`-O zEwxD{(!}=dA3K&{gD18>9+f1o;)!Nr%?z=te8YAFcj!M_`-tKx$tNiey z7031JaH}emjp+ORe65nAdJAG61( 
zM-Te4X1FfC#4kbl^rXB?Td3q{B)sy##?m*R&yGF0qUXfLt^B68w`*9y+vc6IfH=re X-`J9C)u%;8=#JWSR9(_<@Uj2d>mT>!jGU~c z?9A-JHhwKZrU0-QNI(E62XZko1sepgu>B#aZV7S+IVn1U%s@^cI}`8@ENpDQ_7D(2 zMgZBF{&wY;=-+($?F&%U-qp?pzyg%9GaE11N>F*m!UuPFMiEGV`FX(9v%*6R&EmmaXTYp8;~i@FUP@g{y$X* z2Wnw&N~7XpWM^vRWcoj_pknC)k}{>?Vm0I8<~BBC4fA6OSQaj~?w1Ka0h1(`9kv4I%bxH&i&*-ebO7>(FjO&E5la_m8fLJ*uq*gHoM=QXoGhII%9bY9wno3;BkBY)asgj|eu0pK zu!JyyaDuRha0dUFLAXFvKmZ^#z-uQ6Q}D{{_X_ZD3<`k=wxr|Y6AM-a_q05{Dsp$LHWDDet{WW zU10cw>*p7Ufyy9fdsioLF@Wnw0(_9a9#{aJzwb$~0$7;;Sg`?EIR03%16a6zt-uu~ z?&0zdj5-$(Sm~V#H-P)M#5)xp0M9=pz_FRAfLwHd;L-=Gf;?P+Qnp|hME-aX{o_IE zS0#bV)zs2R#NI;(tjY}FWMKtx@v!QF)AmPpodJLB|IW$Y)d9TU?{I!U0jdC1os8_9 z9e#yn;t3R00g8d#EKNYl??k|MRgH|D!6JV&k3T|p1~C8ilztih^OPhkZCpT3KnWXg zRAL|#ds7fl7G!7cVgX=d;o$nEq89T@P>c z8u+Dj2ne+KdHHzU1S;r`%F1PT@C!It^iv5i8k>p8A+lW<7b?62xe5B!O;)}*a(>Y+ zG+l-7Pji%z!c134AiHpdaym8A+*Xq&^#I+KjX&>cFi$y%?5rxLw{)4>2Q2 zd}tXor&_6Oq`oz^YIi(l2wjkWPnrzs235?KL9d-Foyt1f&1lUpsfo+D2!}3%BGfZ0 zt){W3qMGn;J-knXI^WV(o=Ww-qL$`G$Tj=Y@0!RA`<1sKpTgy^M*M(gO~Vvdh`rR# z*!SAKG#K@1+fKbHyi>G!qLK0eogi4}$)^{kNlRJVisb@Pnx52Lps0>4@wi5uzC0)s zByRa!17BP|GwnZ4X>!xrnq;35e78<>9AHlAIMwq=gNJdmG;H7bvN62V`6AHXqwpNW z-J6cxmTDMMy(YNRlT6};@<2OEg2Fa`%-!gwKUnKXVQ(Jc3pIFYbLaM4Q*k{@wwU)B zo>C}hBq%tCNGC`rmZ~lYEi4W92&c#-v_TNMT7AC=%aibnB=RH2wQW^pI{M6*4IwhaU8ABmbrqUcz4NnkX&$BhXFCvweRE5^PLkzo5v`T!PJ*ncnDFEY zOHnV0UZl;7F44HvZ7oeiz6TKJZafq5Q|unPY}mRQ*o3$)TR`GY=0cwL+z1B&uWu%w zvIeR{R(Jz!@MF*L(V8%wsFDSXM?3!K}`M; zSn316Cpq^NdwWM%<>GzGr_u8EvwE%8*qG}kDOBS}+s~vkLEjiYJ9y*8{ooHhUhiM~ z=p(#NB`gSBPR%BB??k!C&8FLf-b!1x3Z6trkTI}|UyEOmUPh0e41;s^YY6GO2>VH{ zmE(*$FA2N33!CcuU4qqEB`Zf)K=t`G^ibB%9266Sv!>3j4)|+|R0pCtE@44>_2{tR zIwk}?*MRyst)H^>Dv00A+uvyV*%YCk()Ql2vLtqKle>ayKhD08DcQdB<|exlcqv9e=m9u$knO8G{xXn z@s?doJi6C1Ow9b3N$ zkoq=+1HP0k+O{3VUG9MX20&)(K~+eT|BN=WF_S^#IH@x%M&2!ezct4~T6RjPHL_mV zZyN77Z(-7J34Xl;CJ!#HDUZg1e1}Ume3+A(Ovw##N`%oX2Xkvb;2e)6^p!lGC`%54 zA^2nofnOnHIN}!W_6baNYkCmNI%mp*Bl>Ho`KFO|r>OYXhH!SV&VjIEkp8!tLJspp 
z2(Q(64_K$o{73%l)5@;nzU2BmA^B3q~1EcBA7UE^iR=` z#xyU)dYdW7(0AtJ#VDU9i6n+Qki>0CMjC$!sfIu*qHXQ`Gmt-;NDeu+N9)bupc%yv ze(Mhv(}#qXV^0@b0%Mq>!X&t3eGWuX>x^DGcA!>4Jfi-;Pnmh}DDh>5_j$3TsVH^GCKTi9-fmGt_*mD-e^| zr>fxntwV+xzpJ10YU2g`5H%wEwk3I3^dK_EWnEm5!aQao{nv2)riA|L#$sX6S_ghE z?lA`++|Pq4h=CSKAi=|lRY6+LOsTsssu$5-`I9t%6!nKMw@s?<_+8=f0gO~5OH9gD zJ21Elb>kH?S-k{&McLUAE^2H50X2zdJpyszs6^YY)84ON(`2APt z!5gO}%1za9CwV<_`;@_aMZ^sX4nH{-#N(0_YmhOabASjyM)#>}^tFvd9ts~5nPu@c z@=TeYbnG3t<}ZBT`qxUd?9H(XUCRDd$Wn&q{_$ zRx@_?x!*6W;P$Z71w?=rt^0inWLTPy^wEdXnu88^_2Cj~59-Y;`rKMusyUMs*2!3# zeBc2NlLA73pROlakdB#|tB+nWmkyZ3gaY=~gr^Om=8XI%_w^EV$d*g9itcLGn-^LK zBGph|F>RoZ8x;F6cwx!Q2TbDhJ{~QnkiGJ38VaYbh$eP#-m^k~vWiUP^TiKCSGJ*% zh|;swFbSd&?0-##(*E`b1b*PU{3CZcbBw*7mQFTx)+>Vu=>?G46Pp713JOfwa}z&j z@mr=!tw9VYDkm=-;$(X_tC-<~0#>A&?Tz!6Vx(m3O2t7mae|{w$3q`0#lQ!%paSpF zG4zwZ4El;%FSM)1Kb<94*O#~-<0AliPi&Ab^1i|n5FBD9`UZd0~AIjwEGZLpjC zIyjE<-5l3O-lnGds@$#&`M!3oEWnt5q`nPa zAU|9`PJpvuspZQIv`>BVL-Ni@!(F?w^2x3U{(_tO0XWd;UxB9o3bX+nsGz^PABW+Y z)}3UR0~0ZVrpcSdsQ@EL#n+ns>6UFBS4->-9R}0-zp2wzWdj>+}+=5}f*SrFesYc@GZ8zHnFpPeSi>0@Z*{{MGghH+Rx4J4CIdA;b2m?b+KaGKki$QEJ|cx5tfr6ZXYd9+KtLGw(kV&5l>l z%p-8Q^>LKdcN@fV_scJy^?~MBWF{U;$RQ;bTM`Lr+smJhG(Jd);I=+F#UvdZ1)tkD zd)=K0%60~qx__oKahEeOSOMKJYtatuaB3Oyyvja5xz7WV?5V3y7NZsiF%s zx1rv1+^r=a2T`9_lf+D1s6w&-XcI?Kq65!`=n^jWX0Qf9X+>I-CH@OhA~lW1Yn+Ia z9g`wA#u1xjiqJ;(X$r$-E~%Q6d9f`^jjC6^sn8)utm{8tLnSf=`7;7K)vbbXMUPMl z*0EGtiTj)mG#{a0@2-^de>UTtekkT{u$t+u(s)QFP8%Dz3&Zzc>?BFlq4ns=Sdc z2t2z1MU0$5za>OO#l*$M>4hC!?45z)cHm*g(#{;HVQD99=WO}U=f6b7K+YykmS8z2 z!0*`(C}HX3>>_Gm$gT6V`vsfd3*+eqH!Ky!@Bp-^1l^7ysrK zc+fI;{R^@MG9texPDT!9X27ov z04yxvU*K)QgXb?3;BgQHzR3D}_WbM2_-oYtTeSb2g@Gb+KzVy7TO%8w37C8OtJ1Hj z_`g*W75=-@?+N^GTECO~&-8;a!2GwIOZ=I01`&H3(|;vi$`pL0mM)%*lK(hUQzLMI z;2^>2{Kdp616A$S>@0uHyx@p`&+>nD{<|fPeZ66w?6H5a1+&Z>o4YgR}RG?gMjozkU9t z_AitHSbkG;K(GeeZ|34}5&V^-Q*bf`If3he=Fe@wT#Yiw+!9=To;1Q>F7Ou#;0KeZvYzd`cde}LqF(~DUCK|}p_&4}$ceE*i} zf7>(vLxTQ`cmKal(7)}#|4PvR(I_PU%*y}09`Oge_tzfryVm}8?Eh?w;2!ZCL4WHJ 
zf9-#N?GgWSOO%{Cj&~{iD%=+XL%wZ2i^w-x_}w_5Z8(@Ozg(E&a1p{sa2{ z75x7a@cuu4EoA*`UVn$HU~Hx0VhfZ9_idIx{+0l6voM2K|A|~>;b8eM z|II-lVO(})S6LsdEFt3gk+*hW6kZ})cI;c_8u$z9ujbMy7dvUOnbk0c@E0@08)ydT zL250_#=0@|PJP+611uZFf_R@d3A<|8#y;SmuJXTkVrJ5e6!jUQt}b5J&Z7;b zmZ)+_W;{mEW|8WC{05_fX5autr6-Pzux@>XEaHpm@-I_H!HYJiFfr0PghvHWnv9NSLHN+Vk;j7YW$g0jItO5aRh}8;YMVh(aN_;7o zndNqgULPsV5!1uQbbArVz|>t*#!+=yqB?q%4PVnVz3Z8d=ay7Y!Ne9_ij@d0%B1Z6 zoJAFV3%OtZjX#zBRf$L+8N*&hECNFn9RwqMsWK5`7(9vh(xu@L*`P(Pq_x!KD%ENr z)Y4~)7;g)NcmxV-$+E)rb?I+C&O;Ez{o$y%jxaGaAPV0$@0m2#5tD8H!rjri6v}cmZ%;gjzljTnT2Q!lQ@pPo>NREU~XKLYR4->|QKdTn4tCTV6 ztu>!HG{1zYag?1RFmP)j4vz(z8^=F5u};cD?of|ScYbCmtM~@Lg!O=dq1r75Psza0 zK%0_-K((itEml?l=1x)`3I2FIM<|=clKl?DmXwfchQTdp*|1wTQi^9hwf}DAD?$lJ zn423!f@QcI^AvBnMUaePGrYPg^8|cyR~n@bT9>L4Rl4OW^q0AKH5J?_j;j*Lc9yAQdSkLnNB%m?Apl~j;1lv7}C)ujIuktH{RdB-V&=E z5mytkJT?&LAf)!%dL_GA%#K`G6tW@at^;XJAU_e)o7c)1SRccMh%lR+IA}(|EzIKT z3gw)q@u`eHr#VpVm*u2ie$KOJln_K?E`j~DWlYXSRNYleuBB0XfvhMbX;($W&h zGeK{yjIEgdMpF&BKSJLV0@E|ELj)>{0FfYL*FM5QZalXl7?zrMkxcg?Q#!d@Rm04u zz<5ka-e752g6sC25V{kQscqvUO{268vK4Y48AUQ1+IwghM#ne6-%$Dhv9 zu5`XbCA$~+7MRGZ=m)k`drpWJlNZqSa5i%8w^@4RMG>!Y;90kzUtNI}9&pZd7h*E@ z-wD@sg4)cUK4GyW{X^HD?TJHQFd|~LQPB@Om7SBw{cfc94GG^$*{x?L#=Z>KrSa8; zU2M6J;JekYFC?^>Om+}<&y7#)`fry|(=&biW}&>ENt~WhSMviRj+ir|HY)huoP?Z| z!m%MA$!qAy%5^GJr(_+*2(iHcJF}2F-B&`)O+5F3Db{SqA;&=i0`?WCT)24m&W}r( zs2Z)&^8vkX_wp@h?LQWc6uR7^{=2o8Y8aw@>H!7j< zFE`KJ@Z1tND;_#p=A+-;kl3Gjy1ZDnx4f~BCeBzXul$%_jfoPnWTSDjlzcThi6f(R zqPSbx@W8!{x&Lu=>L?VOzl?LFcX0A)gL2ks3w2vT{ib(g%f-9s$0afQRZ8>IaHiI= zmJ?qbpYLMOMl{Y%S`*^eC;Veq2i*4$P>LechH{wtX#^fo3zAOAc_!!ic;kwS0-08m4S2W7^@HjP+LJ zvvwD<(pX;WogRB-U0PbBh*s&=oHp|DD_jp{4cW~e>;N7u`E0$NO%?o0GB=h$go2k+ z>0#o+N>%bCx_Km{w?8u25ov9zj}|=z^>CTCv)8Y9t#B}%*(Xsl1!Ck%ZB3$iH6@N| zXPq{($i5e}@+F7Plg!`r#i#MPVo|oGgeB+TsRlwTWdthciM?md`>7(u;p_ISb}*eL z#)2=Z=gl$Z>9vwRlDnW7Wh0)Lm#!Otk>`Z0EJm+Dsic-qLs+q;fhcxL5$7Dm9hQ*} zE#~JLq$OF>#ZxkjwCTJpRt;G$w5zR-O5F=17lJ4&;g>Dz$A!n_AQunYJDDgc4$xF) 
zd2LJ2I`Bn<3Sg?lOk?Dyf=-JhCzUtSzlAFI+FePW$_I|@-ml%AN-Sz2D!*UKnnJI& z^Ztb4UAm)M+f%mh^W;u&Te*^V^|URTEo1pu8+-di<2T}uj?rl=3c8k<#{U5cf1N!3 zGZMfb=HO=j$Gf#G09I~J9@f7jsCyOPe& zc3%xngot(bX2vz8wV`dsUUcAuylb#@p-#BGdF2EqX#44PU$*6;)jrL_4&^%<0CMQD zjn#>*Y>dK6)b~e|*TLSLTqy^=@-JK5tMbB=98f}>TW6f#wIQFXwxf~XE(^|d>mptH zd|2Q+7!gG9LERkWi-}cTHagymcNtbOT7p^Qg}F=!1em&xJuG z&=n=vUtL#;v<80MmsfC2Cwtt?V44z7qV#2s`w43EDLY<#q)C0Q)3w_v?6GlYU+DmM z?J3m*DM#gy?fAG2`Y9Vy;noS1zaqYu%}HgkW=e(oj69Ee^fQwi`Z?9}$@to52sLU6 zS>ckGmm}du4`lsx?z_Ft;h=-FKQZStagyKX&yh{=VZ?`el$iZ_{62dpkL1>PBijwW z!HeB?Sher0=l-qA+yWk$Lj_wx8i1?bIcpw2$Mf(Xxt}@C8em zHt%#S%!n1-x%l_#w2{hjwfWhJ>I^O5`F&p1ar+&zt@$c>KXGk!qxyx*&h0YPb!#z0 zd0jdmxFJvZ?$6#RF6?}{Inn=V@csA&s}krXwS0Txqj9@`ub_uFc5}N0Ozm$qV0wC4 zhT(Z6;2+?^ITxQue!`K;f^5Ue3Ud&_&f&3O(hiMRq?3N9MFY2$Z>$p#A+0CEGOR^rsdTyY%$*zU?iw!`Cs*xpgu}s@oNcrS{yZr-(-WOy)hyg- z8}3f9zXyN~j(LVix?p zxgCVWSBnvz7PK!6ZMJat{g>k~0++3qBzx`w(r5^<_}0Rmz7|+ZLY-or5+pe>0hOrN z{c)NRxEq>nSjmP^6*1l3c)AjQnGlhMm^+qsGlHjSc3P`L7J$KUJzNTv{WiEsBVGr77pL(BVhP*<%J3 zCYV(;PV>M;3PGB6>jL6*zHU}9aurRMQ@No5zSVjydgNJ0`qwHIe}mNt!SQ0dPqkT@ z8f`J*Uf>xivMuVeD$f>k;qJN`k{aoFt#X4ej7e@%I>k!R_Z^}s0fS-^D_wa_Zri7K zUpA+xgYI4Z_x+)i(iDGsuGyaj5+*o?I-6PvbKTUCDlfl z`I`^HuvK`R^yLH+`J4omyjjRpvyreCN;IJOZtrRN{j;n*r(O;ovCAyuUU3AK;sUG< zQ@lw0isJ4)hD0(&ND3m3y&PSntY%3mO`k1}(0*Yz)h@_cKO&3n9twFp30DnT6sjFO z@v6wnD&>O;geLTMOdSYjc(^y4RXb<17-8Eqh5bn$p4i+W6l0TVfI6vU6a@`Y&4o*G*!R}TA|s+M zh2rnxc9q|xz*3rITwc|?eNho?W5?t0xgc zNb8HSWhvaK8`|w-D-6S+Y>ZStl~$1T6OnAh)$E7DWLHb$xu~pmnqyz2OZ`*<*Gwt| z`O*McuQaqFuZjB1ha_RWDqVD_%B?L^dB^5%x>ECf{HiK_dyWtD(yp0GmK@X7I3Mo- z1mV#b%VY9BPI&`qsFl`13u+uC94z&O0cB%+Msn_u1h1^AA_qwiXQ|DyrwOUriJ2wY zW)>_$xuaT^2ZE943@`c$$=2{CtxjoW7U>MO4~;tDh;FrPL_5eBGk zq=YGaM`k&+wRKaS!c1`tWjZFN}7da$8q5S5+3899DIPqbv>;W2xJ;k{0kVJKQlX@dG7JjJMK%XEv ztT;L1(}N$Z+prE~YlWO#RnBZey3@QtX{?zOc&~yH%x&RAKG-P9*tx2QO6XvhaT+!C z?AxkQYmr*33HM-&{Y*+T)KMIs!nc~9G7B{|4zl^d21$xC!I%N)@wvt|I`}TH9@c9w zI%`n)N%T{ds&I|Shw`p4GBf|vALaE*ufmum6Z?Z>1)#oH=vB+g_eB%9C~`?Fr};MgSueaN?kj17E)gXwAq_p0r~ 
z_eH`q>|_;)<-nvg5+iJuGa_LRs8Y;Eq7te5Ad&8o*n+lwj{*p%&tasa78Hk+4CJgI zC~dc1V>*WotS-rRJFoj4ZzhkHFk>=+{EgMkSo?}8GHgATy=Dl<0b@=C&txp3b%fg* zl%2-}i4A7?4O!`w>ugN~taPTwE%i3wcuE5&LP=u+@2g6FHjpvekd@i>W~ezg9SY@P zm^0Hr8Hf4{KRZO4)-X4dYHG`Vih5sh5Y;S6%3y>s*c&C4@wp1G=B<>36DmH1mvv@w z)58U&6juqO~ z!r|kx)J(83g@5J>11UlwCW@@$<$n6m{Hf+T5<)50UXM5I8#@2{Sb*Z1Ou4$d`7`w% z$(V!JSDfSli*`)IHxT{-m2Zy)$eJS22Y_&vC8KYXegaMHoycERp*`}Q z@@G5GIvIx;X`*R`v*kL69(@~2{9f!j<~m;VfP%C%Uk|lkP|0}V{ zwh*J5WQhs)D`qDSd5OVAqtLZ+{3^KDJ;;MY6yzE76}*0!Hk-=WFzNht#--Yf8eOq! zN62u<3uxB*evw8-RS^+n9t?TiXbkcwF_OUivSx8ZXeY7+D!MP5i97s;M3c|8MjWZA zT0`o+mlmP4HeY0cSct`ua^)34iLbjx4D+WD7K)Qdsf(4N+^I#m=~7f-KZmb5CUrYV zv@RvVBsj1cxbmaRM$0hS>0~T2bVapkW**wOx^GG3A!#^N!wRZDN)Z`zKfiySRb2l= z8)Zi{iZO_Jdn=&AyejSsL_8sRJfJ<_EKXFdeKxlwyt;_aL_!Ul1x4*n%kHk0l;{(IplgqH0ZDx!SF(=xW_Q^>qIFEp+zFE1q~s}XD{ei?HuQfB8;EoDbZ(!U6``f?v)6+EA- zT|ulyS!$v>EW%jwB^O04ekfO36}bdMgn>>kR1}-UNwl8gHiV98Ha8sEPpsY}PQ0Gk zs+o>=C{LTYQyUwlt>=AWVA6_ zsxZ0W=PmsvN8(I>ch@?3Rbl~aQxuGXSi#LBkKQx-`y0u?*bON2S`zn1VLYK{In2F$ zzd9n~ zpY#TTcmsdnTn{G)ei7>Mt?%8X$`R;NCc6#(ZXGz0DO3w>F%9+e{d69yDh%F|B5!P^ z>Nuh^T2xysYPbZ;eGbg4{TxZ76rueEGVFmZMA04}A7+p+1|kg!y+5fsIviJd4nBjK#Bi>G^CI!ai_ySZf4kH3$L{xoc zo`$S1BG>Crj5;Os42%_fyzRdIzA(k`&M-Y0I>{?x@Q4wlM?tGPDkQiK;_pEFpSIp0 zz+PWAV*dnAZ1*LcL|_zckiema#$nN35i19ZQHTIa1F&&SWy8Z%0um1#3C%2!kMeUV zZ|!H!NU{ksp0k%UPH+5}WSd=yZstGI^3)@DM~^CFy`>HZ7%)^zo?1bx{!FHIk91U^ z{lP?Pz-}8*i{IpxEA%x7t-X+fh6YVPusXw>P=?zZa|{8wuwAe@BE>x`h;RNTYxJv++dvuR^SxX5J!0x z1N;_1ry#RJ=*8zERQ=jcF5YlagWBh<$Q??poz#*^pt^d8$Nk~(`SAG$NjFoT_Z=w1 zg6*x~bd`$#<;Gcc3~IK3>v_G|s{G+9ikri`K(ACqGGWN7fCX8^s{04aTeE!nf+*FS zOJOUK6jn1JuhmR6Y1UZ3Ja3(9xR}s8C+bY@&^m4yDEEbgpRKq+`O;w8NubhMV+Ce> zkbKjIi8PXn>A*!zN{!(%S_DpVbwkKp862zoi~?Q|hs zdxqOZ4v2Mv$jO8H8*~+Mux5`K#<8V0ac4r5|Lw>of_D$qLmqZdVzI$QU}FVh2j>+$ zlgP9QloJ^9`2OiOEQg-WuO!acq~0Huk~_YUd=e3uJvALclzD8s;RBK z9c@|IGE0AI1$NrJWZ7O081(nO@bcczoK<%clMI#I%b;+>hZUV(>yuIi4IPH3<83N zbF-?A3NsxUKT#v#mWr z<7X9kmV!@xM@lhWRSorwJ8{f@t49O?*xBi0-!x?w7-Yd57KB4RdF~> 
zxE@tl0M2DY_Add8$dNP5?@Xb0-@VuMHz$WsFL_O_%}fr#{nBqjMmT*dt}K|v^aZW} zKaChYDDvcK^T;u_a_a2W6Y~=ag7`(OMCI#vZ3?7~VABrc2s*rGOzA@_DPs=MQk59) zCkH;{4~Sj62vo`x?^h*5anatbVt#uFA6qZNeXu4dD@I_mq^6RLm??pP0rG zS4V7sEK%FXqR^tsy~>CHoi^l4{wjG-GIj{2a#;ZLk+opMz|mK+MbXT3l2Xeiua%0o z3->xGz|h4Fy+qq!?3XnHm77+~9Xsu7zl$^K>^-`wlTnrrzB)uNGMxk3aM!P505JfH z-&2Ac+m_79-qFI&+slX#H*JEqYjTx$b{y1ljIMGwWnUt_QHaFI675I| z4A3^o6|f@n6j_HvU}&UB;&D3*D5?24I0P7c?#_47{9kAqcP+4hH6EnE9i}H~1}%ds z6RK*i`0whDHB=N1Jrca`GMt%rRZ^6s0V^XIX1A{DlIW!p`!4W)&f>TEa2-lM9=h#Y zy&8Tut%wiC4YvGgpK7P_3rvjsJe3~99ZWh3%=79F*E#~7gT;5?YvHIx&TIIhC3;0}J+y`=4bqnt)IeoAT8%boQQw}ra->rR)t%4j zm0#g#Y4*%(c+^}*Dj)q6b41d@kxBkhDfAw(Aqru5<|-kV{*PJU6t6-m&Ch^rQ$OUdiZy zk0Y>Bz<#{v#BZe{IOqqgYDxxKYv`Zfe~dte=f|Mgrg-vrZH55 zg*Mlr>sX}VSejHn#4_S&ymohsSc%>_eN%Y%opRJL%tKVvisje0 za3mHP_HmEji{ZrQou3NdjeXEalc;;oLF%bBN{$GXEDss$2eQ!`zCv8h8gc#pSaz>NFRLK0lqN*Swx=ot@KuUX6cCNAIJRD>T(qOsO-1LKpnnQ%gT6=i`$EBM24s%yz za}_JpC(Hp6KQDJKYG>N^PKx!=Si`5RxTni=epJ&mz)yEZN|4y&~Cc>>Z( zlmO0%*|c|5=V`e6qE=Zv#zmfs=2QH!(K*{|bY2asTEf+>jJ2lf-B4lI>T4D=Ln?6E z)(tMljTr+nMGKjYW=*trE}hkF^lmD?{S%8IM!2@#^M)jylA4HGR`#Kh=sY!7+O(>v zx8Fv;+}BBFy2&f5HyXK`Z|jnMA-F6@<40im{L?#}|LxL}_P1lQ2rd%wYVCp2G0L8= z{onfU0{0)4@3socSbgiX>O%tP zU5_jmqx;JF+z_{J*dKd8_K$tU*{S3s@O@5X-bSx1B+0ukJdv@~*N&6b%LpgZnU|2) zW^m0VK7U8={i6kmGUZT8Fe*TfME4m`I0+{NiGHgrJ!F~>GuZb3u{t2C6R>CXoWQ_ zzxoB{AuiZ?#wBSkMkm>vAmH}FWkB_?pk^A+F6t@p8UKm%&B>aV0kBw{ssQImNCmU*48nC>_dXiQFAJ6$vEO(H(4j1`$C!da+!`x?*cb3_( z`OGNz1J<(F&@yKaGsdb5V2d-NzfP?#F#X5GRSMX!2eFe#$$CB+iI1| zT`083^ky))f=M7tFO2C9V;w7t{jCZwo!f;9&@z3y%(d;xh^g+0T~dcWK@QpBjkEGZdN|EBZp-PZu)z9MQ$K zg@zXh1SW_xhX-V=<)Xk(NVZWp?*b?;gjpO{qLBmA0H3}x8=&uEnE7g-W-I-u8AROq z>{592xa#6Mz27%63^$DOi4YCQgQilYcJX6lPW3XrV9M6^f<{kE+pNQNxaW#-(*H;^ zYdW*@93r#T%@Tncy*>P`ISl!@j1#;Sd~5&(TX;Ml_>bz74qF~`_N-c)7DY8zqe&rA zRok<^doQ~A$IY%)%7nWg4T037?Wor{DBJwAD-VE)C20I@ToK0ez>B~~=^|B&GjZZZ z=gUYj$YOF-;zKOXZ?wo_$ukvMbq%Xh~jL1|P4^oIKLxPAN(s@zFNLjc5)_99YrA#gtP= z-hYAYU^Q|6*pq&q9Dp78osuu5^Qi}d9u5*!l!gG8b_3DeePaaFDRQi 
z7fj8|G|hR;-&S$r6b48<=ZbLa zpB1P=3ngLZ8+P``Ms5xLMC$bqY8EAUHbKU|deXtXIiB;n?e?{vMme0Hn6I_YyE=H% z)8KQfyUsd%jJ`qD-{o86Y6A62`m2-+QA}D11Snej!wJi{DPxrGicoB)EA(9xy*t%t?C;wqs&XGNd;jj!(i3 zH`#ckz_MU>$?xRpWJ0S-0~k?6i%Ky-gj@g%Fr_%&)zTcV$gGJ`eVMv!Hfe!iG=5Y|HW@n zUBqUbsDnkEQtsnKYoP%y5_oD!&@T!&5bjJzGa=1cWBvTYTk0`CpPcMF1~8ZMvLR&C zIlC4sAYrhKy}J)nt4lYIGJ184`FSjb8dG+g$Ix_gn=n&e|k zeEk6it#o5WQD^pkuJJ+5!a+V=ZO#6AC{c+}HmY!T@K=N9AwUw^b1c8I$Gc6MjOX`H zfv&OZZUyA6;vx`-NsW`Fce@ll*A@MI?ZlsMxMBp=xpfVR2y_jqTLpAkD!mu$*1|#> zP7aqY`aV6ox6csTGFslhQK!)JDSwo;KY9Lo{!ItNN<;N+we4zi1?hv1|83M0+`F!8 zBEUh+?PHO+u-CZfMLi-kjjFV0o=a;j;S8I#xNyqu>F z83cKK1L-z4*JQ4Txy>W>Ij$~g+MXikC5=b?aBkZX9#-p{aq<)USJ`mRm?M-xoiWd`&*+AD5F1KaA@okU<1-N7fvaW?-aMBxHNJk?!G5GI zI!NcO=$8@bE$NZE$H9KI;Ey_BHB)RlVO$~~cD|`}_mO;wi|Y2@IV7k)_;Ph%Fkk=h z@P^CJztPiMddSUrOTFFipiFa7!?)neRqp=&M{S)k)dY`6MrOY^>u1;d?QF#M8p74i zR&JX+N4Chsg@kE|-1sDMIbrzhz7ZYp4JQw$OlOd2L|-HLo~U-`U)(Rh@iW|Gp?-kD z230xmhcd#3pZTsqp@KkI@+Y!8Y%UjF<0n0fjEu_jej)+3UaX>QinocY@cEp%Kc|F+q5IUy!2EdHl9=ClC$BC zW<1Na7Qb2F`_HVIOsyWE@j%8SOF7Dm<61N(-l|CKCI-NYwA831f`lWD)fp-X87ua( znObK8z0AJ)%>?Cf4UlJ8s5>YVqvk`1qDKTb@`+Io(Mt z61A*P`o*VZ^t;F8XT(U2n03H(QOGOZdxs0O9Gq90u|Akvk$5K~gnqF+r@hx#9xsaK zg}y}B1dWQvlm7dJ{bV+fM3_STr1R-d#D}@3mq-EYitA8x_f>Q)G=9>0G#x~Qv#KrL zql$BBhd+LdP|m$d?>V!+HtBc{rmwwsQa_2`)1GFKV;mSCSBG6iJTN#z)H5zFo6U&4 zeZ2_dmi3`$RSb~ua9gB$9B9lP8m=dD%F2zJfkgkFO?2h?1QIQQu{Yrkj;{5F1~w_v zx^%CqKrQoKU))@Mp}Yg5;n!sS`-=C;y?39po}y@jSV#dm{4Nm?wj-q@NZuR_@% zr_o67K+3ma+7=ioHj<8@6}*Po9p$TQmi^`Bu|2vX{&h`6*j{41u=UVKzje=x?J?wQ zF-F9B7BsZD&B=iD1*RQXLX>ZK@5-P?>n2Lw=;_6$t7(o~Nk3kG76AsHI+7~5B zQQPc#QepBM>U27bb)2os3yoH4Zq4dCS((a5Ch1m zS?ybxm9Mb!W;hu33TuwhXJ|wtH8ru`o|4R~8C_p8Mi?kwIfOW{P_rSDwz*Z;%Adx< zFk1(GYtH`owYs*1C)M*<9$w+uzG?LK*`GPxffkMdEB&L|WZvOn_TY)IWK`>=V? 
z(t*1mOD-}~|M@_j#|hnRNtU1F6L~&6*cMwfEYRy@&v^Lj z@v;r<*}VpnuKyh9V5a?Uw0+X#put9qi!(|KZad~jncpF+tkMtdaI-$0%Sew>b59Ym znH633v*|1s2J`*Qi+49|ZsU{`$0fGjFSk}@seDG?sP}y5RxoCQWoNuO8Z`zY-+V=4 z()dQ7(Z+W-J?MWhe4@RaZ6Nrz-OpzjXR9~PsayDrHvYqfLdZpHVSgW&J@Lop^*3BY zL_XBnV(+xh@%gbyKqPv`vj zC(4nCde_}F!d9)zyGpyOo#GfGAZtb0cRS@mtXbA}aUa~>T^o0I85rCLcXxMpcOTq!aCe7626uONxXgU}oV(8- z_e9)(2hkDPN!6QK)m;QD)_O9hx-14-ooED1*`wg9F!8jRIA%RXdfz46qJ2mkgX$(* z?*)&`^i`^w{XITKO`txB=krC77ZA$eP$m(|o4ya-a9%qp>G?zps0!L&zfH z>C0A_ zXmt^oA30X4SZC4yHNQqtDA5=4+s6X5Hq9iAmVX8rt~%737hSVj?}DO`6Y#1t%;Wo5 z2w4Vt5KaouB}_X=b$6OT%&ddp)OvQCd-l1PuubKIS4WqOjjYeEQJ-$%2hm&OEy@we z(ZMhNuOEN{Lw5j%$-OaOYzyy=+J4x0-1y9R?Ra&aIa?Ml#mmYPyRoW~czwcB>FM6m zNu5s?Z(;|-%lG@Z)5ZF)Mmb}h(=LJ=`<-GN!d@S(j(4-)k7T8NN)GPsp2^<;USc1- zYcu8U+Wfg2wi>Qo-VyJ1Z*p%n@4oL3&x^MdcL%o^w?2ypFKsV(?~Atxx7EkGD?V3a zzCZ5?Vr>t9Tz-f2e5=@0?SfWIe)oYmNjc*XrhLosZzeep8ZEdwK(y6q$=2B`))cKl z%^qMGa=YOst0W(61k2KYJ8mOgOTxS`#?f$8c?x$A`0?_)Y%VE&0p<10N;`b8_1m|i zc>U5j0*O|N9_A51DS!N}*yTr9GfKz;D9P(wjyY)NCE%)MH;kU<*? 
zSC9A5Bo?<+G!x?i*gP-^0M=;e(m1MQH+Zl@H7c?d3%jX)gUX}R0LOa%a)J8I#?pP?K92J@sJZqq7g3YVNej9>?@MF|*q*X`s78ESlGF90ESgC}}+R5DqKOBy~P`i(4=hT89X*W`3Fn zj#n8P8oDsi4P0-Ps&}Yf1??7mrrsE?7)3cgSfS~>SrFRO9hu2g6QB4yS{+>wbtpX~ zlFNT{o*0Za5@Zn>5ZrB;WYJWU=hl3xqE+hGi*tY^WI@D(=nRjAHsx$GP#|pm)F`y{ zMOrxc3}z-sBd|vvPD4(4+@HXWfl&xD2pF3Flw5aZ0V!sBJHl5CAoAm<&NX#bc4&Sn zk(1s?9%oZXY}}w%G+G!^58p}Bn2vU{v77P=8h#LpQm}E)%0(=VokqD1n6v?`$m^T) zx4pa?!`@qek{z^^lC(>5#N|FHHy0##TJUc_r^i;w`LBiay39oU%(&5*Q!N0E7>Z1n z=6v%}s#g=#sJl(v0~$+k&cw966>+PV=_8c|I|gTxdJ_+0^t31^R3@kQ=3)bup%rAw zSV9Ki(Nxy~T2DsxNRaCYTDYQ_#EBSEaz%ouc*p8$6b@$Jos@=kCC3My?EDeDHnPGfDUEyA)!xfIUy#P2og z$f-aA=o>`jL>+6pgH%?^RmZZ;T*Rg#h4;XE@es;5#_w zj4_~*SkYa{es1>eYt>8ZxrMYyG5gO|<_BNoWjq#&;frcUICJKIcWH4}T0SlSD10Ik ztdow0#QdiywMVp!Ykl2l`#)$S7}>z7zw_y#kkFSyClbL?0MrkoZv7PG>~{M|JGGQPRJ{i z6J|%Fr}yi<2t99QAT*Xqf+>^{y$(7V64g#-xI)ka-kwLLr-aJN_P6=4<#Mbpp@)-@ zbN2Qw@BvW|X&0DHwsud^5xS0QE!HM(^s*({fM@vj9FQtAw_VX<%@>NOfYb-{c&$91 zK)_1yxKvm)Cm>n0ER=dlzn*9`ontvVIhNX9vHv&Xz+%C;l3C~1S)o!vSB$u!`Dk2? 
z*%%ayAL<28vs)4)Y`npab0Ec3XDDc-V&<0oxNCF_69?UuoY)mILY(~|!5T7`spknWLHUq>O6^$l!bA5bxmk#DuGKbljuhXU0Q@F@ej(!29TXh zHrCMS&WwD+*dnoV4BF0>L1in2rtyX6=*5n&OqPxn5;@dt*%p#i(Vo`yC9@^-VLNXH zX)BYRdcz%+@g-f8aZAzO&d*#|M=i2#TBM8ip5DkOc;-jyhl1^ZV*q4fLVtx@aGk9Y z9l8fovCJIdqw2Q8oM+y=i(Df9ifvP^m9)(TdBg>8gj?+DBG*~E%$)Bf={*^-*M!4> z@BLix=?|yt)dDB)vSnuOveCSDS7h;Pxc*q~Jcft9$JLsp8+}f)Wvf@k1yfIS&7?Fl zX`8B8;~ca*#eCkWB)iEh+bWD4?Ld=xgzYSW$GQmXZtM%7Hi0F>d6naIA@fxi69HY& zNweeOt7pB+jTkQP@<-kZ(GS!oO7941x@IHI*(U8O^s7hU*O(wCI`3L@8Lj@)%XR>L zz~cUb_3)Rk*p2EohHUAKVh$Glzw}$SQhI4tdjpxWL1nO$!++f5-FOCP!F8{Eo#9@e2F9oN{ftfYOW%bhWqw zOsCKie7#7mJNo*i+oxm|$}{2GY_lz~O>aeSkMVGd<^j}4;feS%B~iTJ1ias(demUF z3&=6F(c`9HpykFy(3yxGP&1 zjHlnB+rSg_Qp}s_615B26=ws2r_TlGVzn`_?gSEv^^I3Inhx-N3xd`Gq;K`X zBIw5W2Ewc0J4QFbUJgiSgb&1{jyJ>Q!Q+AN#ld9+e|HzyD`SZ6E7SXzO`9tlQ2vn| zFzflm5Hdz-R5s&*HG`OxlEmOqztO9G_)G2mx%G zglvo~e=Lqq>rYKfLYB{AtgLLG7W=2g&dC00aeR(rX8tpkll{}j$@uC2GlGec_46(B zpJ`0YoS*a9{(4zH2eGjK3Bbh8qDjc{SC@(Nk1*4p0f0Z#K5LQwF`55(+5RyBpC*9y zulF;Ze|cFx6<7g(vH-AsdRaLBrpx|Ef%We+_D?V1PgN8`0OzNdh2!s7&d+oJY>b~? z=0DLmSegGdo|);7AnTu;nOXkq#NXh|f5I>`|Iub<`5Wd>qRgzHD`5sO|1sJAWXJj^ z3^Uv3cKpla_*7u|M~ve$;y)(GpABGTX8Hf~`qbm(_)HMM_$LY{=cnE$9`@&@P53_w z6#T7f^B3Iz*WkY&_+Q6A6;b{R>i^# zOATly7hY&;qc*cgug|YV1H|RsH>32t>RUUgCQbCtrA?L$Sk2SrEg%HJM|};(EWX+S z@ur{c{4qNix49;R0W3yzxK??^?w(a1--(Lo$`=OPp6VJn`NJQ^plF5AS!9?S7^9p( zixw!LnpU$Dt)x9>fgC+BFrW+Zq9;tOGPO=CGH*Df@I;XmWE#k&Vza|S5bL6tAjPQ~H`i^*YO!~$N5=M! z1pXLNKA%Tbb-^GZ)sCdD6^$pXG{}D*9!6bI%b;KjyVTn>LsBzZ}zlFH`Wpe}JFW zdzt?YBV=c0|33iJQx9lQrTO{C4x2RIgtQ97tTZ_0K$-gyAqgm9wz_!UaYKSSP(Ma! 
zy$D~z2oe!+L4ml5uc)ZxYG|f~&&nTnHidf>jSG0kvl?$J1BK|r<1XhOHsrvi^G!_~ z7Y`3>ZWn8x2;o7d$2sRg_6x`-N%*?eLnv)z_07Cx%aldPl}up8;$(lW18@ftB|mkM z%R4(`LojXGB^@SzY2sUOiitma+yu@aB7cg1=u_m8TkP|W_f8!sghkj|YjRa~ZV&;H zfs}47DsdxYlhX04=H`aP*W=51qz=%d*C_*phS_xX@(O>$bDf~qeqrw@ zZBdPh%1L>XKn80j79o1~x7&`|;TMxw_q_X^av{v$`mR3!2AR#7yND~&CxIr_D>7{V z5jEW#swbzwFGL$(W4zA;w=~pMdQ+0&4!UmNFTbnb5V{QV){x>XTB#BNPuz87(GC|+ zI9eh42y|4yP~&D;6k3!MLiPqSvn#f-HQ*Tx@1@QEayjvmwx-K}zG0^jwNVL|-!?;Z zAN29cGcxe{>=T0faUOv$>b8NJq8^&wC9TeZZ(y3};j*5IvGa^`*>fSscKmq}-Riy1 z$iN(8A@H-C*PR63@1yTkbEYVxXF$Q!5pa<)+Vs+<%S=#RE9w4t>fS;qv+}X-dc3Gx zeQ@!d&;K?w*wg-D#YK*>(t8rt5-Oy|-zj$?FmP+8^m>uF^K$ZEKOfZ*R6@$O7XfXU z=OE3+y;%q@dDH9&=RDW%DK|O+`!i;g(MO^!>IoUSWyjxJrYTLiMQx z1)`2H!}SnLoLGy5^~7&kNYYLjk?uEE8)!xr}Pg1JDf8aK1{a$}u? zHUA2HX77D5xx33rgd})Hx7U~nwqdXT;Kf*rRCy~ldVGI=Pj7y#*4`j`qv+Ip{%C>K z=Xg?)?lg?Q@E5%7U(FEQ%`hRJ6_O3pmq#BJ4k`h2Zl<1#dY%Tt_NTOki!dcR3;Aghjiei;Wb)H8}bYs%dZF7A^V-z!^OjgQ~^vhc!yBYgBi#} zmqV0WtmGX5Tz^>(@Z#;XW_(RM=JWg<3K}b_wYG!xHBB~~R;!(0&Bba>kFnwO`Er#i zOdUR!Lp8PxEkJdIWZuH+P|3H6M3r}C}oEqnqlWwR-$E2jg56}n9l>lZu z_z7dzcTd`Nfvzm%3Bto*!_!Ir0t)PfjYTanKqG%ML9~)#{$Y zE@3}4tDW+wi|ZZWW*~eui3b@~^=>X|dSoo!7eNwqihnu-pQnORIXay^$t${AsBvRZ zntv^y25IKrKly5qw_xgIw$f%b;2T955b-jfEq3&r(bbrk2sQtxJ+g-=N9^cP#ksM(*$ExH8*8dUJN^+j`X5 zad8HQz+X=|CN6o=8Q4G>p%FBv42EESBOfZnW!Hy-bnS9I!8U-=7{t}0hl!di$a|w! zfCFcN46Ao1>EAYkyljOs<*N7jecib;_$J+t75y#Bw*{XO+Oh-T59=rS$@=kMJ%?e< zv1<^h66Q|v0+YmOL0<(K%<&|6_g0iw*0={5xMr8)$z-sY3Fo1YFB_FMBCp;<)wtE1 z`vV|r&3u_9!+C3 zf`;R-LU@w_9h*)!wV^j;%6c=&tf9An}-)ht;VVILUa&SJx@CDZ>tKdaiW8N39(PGfN>@Q8zFInZ-&8a05Am8=m3@-Y(P@m-vLa{^)?V3+ zNm9#GKH13F+xXk~I`6mk35IEwMv`|?HcxYp!^qmXeqLQ?pPYX}=x=-=Dd5~jdLT^* zf6+6!>VY1{!NZ+h4gJ<=(m*9G$O>&&DzIQ$gTH5{;aB`s3h8F;rL7>P$)zZX=x`9XkqROFp>|qwX69|MEs$`? 
z30G}O!^D$qKY2gw^77FL;(c#_o*YC|i#G?jZ$Z~aH)s(@oDxDOZ zRP@}MugDBd16_I^^?6g%*blmpZ+j$g`9u#f)OMA$ex`o;>WBo4B6OLpsf#(EG zz2PRp#=Zg_cT3THmZ>$vz>ds|$Uy1BGnae}6zQp=aOpRjVpc{ZS-IP7T7vR`9tw@z zl;C#j711W1bz@eB%6!bpe3fdZl7Sd&&Ph#3yS-A>uU2i(?O@I}L9FJ?vPH@us$V5` zE!=d`Myk@uC`}+BLndxUC#DuTk2a@YF$J-iKIums`u@DBtf5*7&4y%ct?62sd_Ci* zMn*qdKwLwV=PwisdlbRsMrcdtRU6@7R9(a`N<=#|MaWRu8~8CQwOj-w|n^ zRfVV|kCL7eC=Zax7zj~47?mJ9385649^Dq97RsuuO#fEO1Yrm~+9uyvMiVZV@+a?y z#Do4i*-fPPdv{voIVdzf&MvSMlgB z%bnnp_dD@Im9&7PA%rw0R@*e4D-+OW=JdiAR5Il@iPo!C5cm!aVw8I1INEWQ6ZpZr zd)P~sGQ8i8wAl*20J^YzvCkWiljn`Are?&ykpTqm61&&7`$XzP?~BDdZ9dE!^OEg`*$Oph$pT zS-9?#_d8V9$PBQ5?2FamFzLqc#;o8(QfK2(&Aj)-G}{sW!~3l|$yKTp2GbX_0l{Ne z$!59-@$o_iXlogw)(pe3`8iy>inxRC(Uyag#iFEHiUmeyUbjP5@D^2{EW}?S7HU~N zCvoM&s0MhWk-|Q?r>LyJ5h3|HkXXr4SQg9ia!RtQJy9I>Ud53z6RJo0;vhkrG=voq z$ERkzH5^RZNq0{WS7h%{QsiDMC=I(YE!&BU68iAM9f2EZbY8OUf z?@%@X(oyUq=}o_5LYIZZ!(XU?%_*GO7Z62RD+tTxBm2QgJy#G0cV$XE+h@!sI|7xq zglb(%d?)`60e%er$O>*8rbN}G^cd8VIzu-(KYJqo7^@kL?E}uJgGXA0VG?9&LpAuV>qP)KO2OlZZhHvz^!<25#{`tl8= zRsMlhMn?(QTYCz%e?QJ2gs0loTe{2d73tb@yJPKz=OgBeD6l6^RIa81ej(U<#oUc0RleY}K%#Fv z?Ur5M@}=3Sk{J3Ng@IC8e@UsIU#dM}7$|v8*)xBsIWv#UvF~)nZlaNS)-s}j%-1o- z;$SeyFtsd=kSMG)EUY6lF|SBUwTm;P2oe2fHy44jfq3(5-W6>raUBl?im-9cwEc?J z)piQ}_0BWbvsDq^v1A>$>ofH*Do#$Su=>n#KRe+7-boJT^~AW8#`9>Wv19ZfruE3f z4}y%K`-*D7O!0Y85jl}@p(0R*MjU#AgB7Ip*gtUlxhe{r9CM?Kt~^|^y*J*(+PK?y zh*sU>C)(`chgyAPOG}}^JlMn)Z&BjnJRSOp_c6d@75ODcm+FI7$+pF(1bCHN;YM`^@!;8{xq&JewioF0f*nzAg` z`5${S+#*bD&fRVd@8aizWe_jAq8XM8BRn0?k`wL-j?E*mX>=;fCr4w5Zy-5;d&UX} z`^O!Ze*ez!PIWvu)Cfb7&vW@#A`Lah2iX zqvP#q&4sVc_w_^pFr=1CC$5JMP7_B6F)U3n6>Wf2fpRW3(y>v`#)e-Ut~kR-9f#Ne;Jbi^RDtEq5baR{Dq;Dki^2#r;F30mimA@GCk1!PWS^G{#=y2v#}jc`PF zY91$=IeCjCmN=ajA#M?n%StL_>(fUa#0-P|I+1Csak*?ttyY;sjw~xZF)V`-<{uz* znZ4X*8B+VcRJ1*M&hV0>y;x_Xex4G)3K=lEfTio*EM(O(-??{>N?uu6QgNP)hc5dx z2);A6FhS>LuX7CAoKM6aqIOntfKrs*FZ%;{Lm^0VS3d-bQ!R}o_CDNrdmqML7Ue{* zR|Hq5@YlDXVPch#KzS1kPQ_i-S{AE^cwkkRk0pw;H{J!yd<#;f%5+LI0D(NY=Go2v7kU-vM9LP3AGh>Jy4 
z&ugnXHYn6)wH7AUU;U*ZC$;O_{-toVE$1bI(o-_|ykTp!52a~O8>V?R>Ejt0MceZf zywe4vU*qVnd$SW23sz88SoDXIly;BnIChLhv`re-$u$M72mKRE+@zo1r?e4Hw~QVG zq5YmhNbtgRk%)!~v|>Uxm4ScT3nV{rVM(C=5>@QIWvWIjyB=PNF04NH!Sl3gPnJ5a zvN{|{HG=dx?t~;Lgv7~6fvj-9*z`(KapO~wNx`V1rJ`{ZRFSJ`KnxE8u!}|2}}Zuoqw_uqN4~6E{TQ2Mx`KClC#-) z3h6;NpY|weY@0A$E5piXm9;a>T&6snL6$&Qj>Q!g>H|u%?<}*d*bojKn)pMDQ0D70 zx_IqvQ+#+ha`O@FVtiWJ>^K$~88bi{iGiXsrL$XHf&ncXB9i3MP#5_zERqA&Tz{YH zVWq}x4F3Q#!#g_@B$3VUyF>Hg56^f}_%udUC~QL{KOK3f!%tqz>9;?b9a{bVxrU^V zJzY`$BSxk!G3l~!*OzAiVFwu@msaskBe^~1uKzlrmhb-NKxG)`EzR=Hd=a#E0Y2MP zTwK)C0sm@{rJ!)dA&zXP13h4g{bm@>>b!oCYK<8>7YMT!lc zD0|h8EGh6H73AWD`}Eu5A*Uf8r^?sN)c)|B0Db`T(U{?epK>M$eC{<@_uV-|KfX6c z)KKpnArLfZRK{cp84^T%1gzKbfgtMe_;lwjS8bq&RX zic-6UdCSWE+tc--xqu7kcY+d?Q9QlghS+%8PWFXG_k)32jEKlzbAAas8;58gk-Qzz zOvs?i&Scu>TA$ZF&3xau57&o`Ps=KF?V-55Va`mkn}G#ogFj(Yu+m0X z({02jtWD z%R#~Aem*)_msT5w^y^NLNVKnVVcv*ooufX(4Y>kpJ5r)FQ5qwBk67IIyLc%%a~N}b zL9ANAju6}dqtYy|IBf`m!4XY$>ZoRqS zafk#&j~JC&N&2o|Bk9`Mczk>fp!k$Ze`9^?uo1Z;A2gBq}>F5K5bSv%3??4u@4mYFg0)3>!U(Uh-zulKXToSIc+HVma6c@zFl$@?$ot6i}9 zh^BFkHExkeV?@uPwV7OghzKV8^{jlZ!=#S{%C+L&BlrhT;llAAi*uD<<`!#q5+(D%oqC>t>1qsxXY=5StHE=W|9V^CiSR$Vo^qFaR zI2Kne54XotSYBSjn1g+C4sN;vK)W&a;Y-Bd9Qt$xOYU*8*UT@yNvo-_nd( z-@mQ2mXcrmAv!glw2wSysY{eE@3PP2SE`pNtzxMsU!b`3l+JUl+^_UR5vx0tJ7fd zOC8h@v-z{l+PU%@9`6ZvxmA?(=@&Zfb2Nr3Ja4`EvTQ#O2!=ABC-b4%g(~m`Sh~F% zsg>iVY)i_eHv9SelIf0>g)Ga!`79XpY6B-*@|%EhP0_JEf%&i0MusqueesttkZH8c zA~=BF5M?&O48Ueq%rL2{kjhM%)5hdyAS(qB zb~>qU41;2yRr$qvi8?$)!<|OW@i5N*EN6XB;?UHy?ttEDeqPGTQbLQ)N|~2uZ6vi2 zGoqal#;-t!y^$m>M?*%c(#s>9XSs(MQ}9q7usb%1@G?oL&ex| zwDlAG@|L{`W}7%4sa?S-A4~Zy6Pf2Fy|S6$^@ef2j zf$gDiE`5r0=|(eWx%vFbZ{qwX-}BttwX@)Z4^n;sbdY9Z?Jrq7xbWl|MDjv*V-#H! 
zs)1j&lxWO{*prj^&R^iOa3@Ewf=$)22@nx=xk%~*+ohXM^Aabk1IIVeD!6{n^@ofX z6$O;T7|+dExDR`a{dmcgMGrfaLYT3{1-muU!VUVx)vrK%;{_+t!4%FWx&WuFQf-~) ze%DF~qvSC1;h~>^g{1ukz7-E&{J`&Umd+p7gsA9iLHD!VH~@9=x9MT^OMF9wo|h4> zH?QonRPF=n;)ZQ>!`LGp_&P1uapEdfo~aSoyA|SH z<31}u5&hq6MvvyvPoe#r7W&V$l$mGEo2AxrG>YGe34ik4X%LUF31ZB|){4h7aC%E% z1BF?*HeWc#k9s*svmVRmawk7z^=p{*7WI_1L!LMojTv3MJD=y;FR_;0h z;2E2eAM`CP0LhMugcF_Db!C)Gp@JW^1w{{2LD|Rc!w0L zZw07U3to5_C|7%+F(?and0tENw@IFMfVFWzuQk{iSJfLD*3)m;Gl)QPQq(8XT?6EX z%HbDf((w8opIy@Aj=e^w6P=a1ySq=YX+vW+JAPK@+gx_yoOc%8dzrq0(fnX|c$9Jy z4oxVp4j9(N_ZXCQj{@H3_LT7Orpij2=E^ZBh|)31cvy63W-5@0>06`ex8sKyRXPYl zh3`zijv^Qr<2F7LXlS2tXyCAA?mWft3Rp8Cp5UCr*#q7@p8KS^#m7d)xVe8OC0SiC znuG)yWlSt>Z7so^AY{7m@cFzWG_AciJ5Aw=OFrce0MvB0Q-yEY!C*1OAOrXzc?MdT zU4f*wEkE2%tf!sX55}irJTph<4XNZFze>ln+#}`10;5LzkL%Tde8a`2en2=$ctk0! zf8>3B9=D4pFxkA`MZL+KO`||K`)$v%g53qmUO6!OTiwo%S<2Y5?IC%qcU@|&4iXBZ z{=ngw!#0d$qg7&30$I$CA5{b%X9J+vFZ{rJt0p>gx;nZVdAj6aJTcsqS+c67 zSzB6^U-RRx#Ivuc;WV)!vR^^b($dnt-AN(5w6LMONM)G@ViD0}v3tjiD>Q;w51r3f z;tg|d$QNS7Whv}+di)?HcWCW=vtu*^zB2n8h7$EMZoYS=Aj#mNCZ@b5E^F5%y3_S$<)_V z6rtE24~}^UVpo#e{zdJYkw$Kx-!*MsYbDEQT>RCMQD*hD>u;N9EiLu+GRia88Uw#D ze)5}X*Vj0gl5klY8qQ~NUyqwH>*pWS2;l5a2~!B-5dUzGC-u(fOUK;*(Qx6sdNP^E z){O8if}q946Vwpj%>P3~U|)~?8ZyGbz(Aiksf0mcWb9(>k;A1`a`0dcpJ;>RX#Gbe zGDCTL+ki^Cwvv;D*+^@dy+du|(`4D)sJ(-^laYqLcN|q-i9p<{s_rr}=A3D%DN~vT z^*#$Y7+ccf1KEX?^jB7c%;fmwXe-I0{^n*8#rRyi>hbTQaRC0ouegEF`u^l9Ss&5X z`(2t5B0?Ys+E!diHxE$7C7VpNkR-8td_ihOMpajjclD_ z5SsD1;is~Mhu|U-mY%~MV>aCY+HOL}&v~ffrVfnu39*1UN4HLq2#sCHoiYPX(Fn1- z-LTm&0k9lca|O6rFJKY?0sI}YH3k9X?{kvyCS5?1HzPOQKNNU&eSstka|)uv-n++S zbO`~!FiII`E?m=&B1DXRanat=Qqlrak1$Gha1E=0W~MPpibXH301l4h0#6DvFYV*F zq*#P8cJ1dFgwTw~q#^aXzCWIJ-M-IK;@kTQbZfA?zDMeAy~AM0$Qeo$#xr@<0o85U zSE<7eiCib8BY%B&ebju;gs0`X2a}$I!$I^~O1EL_}Rwv|BV8Lv}!2}I`X$j%RZGCt|eY4H;BlJC3hxRQ> zC^aLz#}N2&?twVR>>HB@2xJ>SF;hatc=;-{P|);`zyR29)ncp_Ri&C6*6nQX5S2fC z_#3-VAKc^x9iEe@hnouNJpaf;#RF1~ujCxnSvcH8_zuup~kfbY_r{1wA)_=%jQ3ORHjb2wL_I((( 
zJpPO@9pzUm5|TvU6IdeUdhvSd^wjNCZXsU?x7rr(?}`_ZN|rj0^`xeYJAWG7%e|$T z&5>k~+KBC>d6Rf9y)BDOHX608YcqN=WEiC%U61Xccu~7=vDBIzecSE1rfd<+Cj6Q* z6^}8xID#-1GeVH^A^)EFO7=u^{d1Rqk_9G0U;e6L+pEjWJK~M)Mf;naY7upP68I>L zQ9zySQn^-S*G|D)$b&6|qi8MbMkec|QH9FNtzDFx%B&TBny2f?-w+Cl_zzNu zhCZVEAV)IIynYvm$|ZJ_FL+Y$_lH3ihU$Toe4Q-ST$^W3_(Ce%$>(*^a`2I@IX|Wp zk<{VB?QU}*?a_67=(WB24jSEAX0-C&Vinq#^ujEDfkd$CGc%*s@evWdzOt>00eR2A zd5BYU--b|1LL(ksrn!^LxDYATs29G9>W!6AQ5{VY#)T47Fk8mFe5@}W;^o9PYkuTU zUq`0=+$TcA{LKhelNF#|O5UlQw>}#-;#9INv*)c)HafVZ_B5KT%%R*@%9R~7@OGR` z9a;gvTCh+)6tQ_y?*@_NqZ@f^t~vg$l-#0?12<@grQGNJebx^F+TBeA86w&vtp&2L z9}cqAMp_$AE^C#;Ze>5r8yo+)@296mEVu3RWo^?JFQ+#8+20z?m77vOV874|1?41h zot7%(EmaU+N7SB|&QkJclxbn0!;aV7?o`|}`bMjZQ0Aj_O@D_^vZ;jDC%9HJ^{1X! zAJJ1O;F!6L)30ebQlZ7uC(FA>fgzq?RW$b}Of3`Xwp8y29y#=*k|>L{G%ZTth{}S1 zx5*NRs-)#1h%r8bgr*< zI7tm@*hQV54GO)JBTk@q7ZOz@`rOfWauz5oi*|8HvM9utVbZX)SPQ?A-c+7C3$5Vy zX-hNbLOBa#DK!mEfvc$lNhd|NYrSXrHrIKwFE)5 z%Gnf5r-j-JsxIkLh3|>>OUs^6YVxKzb-{4^Vf>Z9;xL$ukvfn?Lxdbh^1g{AB}~xw z!FZ7GQF7=jsgA)YHc z3ilRP7&+75FW}d}a%PQa96;el<`vFS-8B&hFHFnS^*?Uu-@bAwwGz6(DFK%QhOC9Y z7CZxg{gL3wly9173c<=v7D!#qsK67c=g@Ve{rk2lhYe{LUq#L#06#(e7qCalDyR_KD59dkdA}-cDTdX`%LpQ1#Tc-GJ#-0d z__qLI)16FZde;?w=lwA?r5eBSd@X0#DEWgAWh=eO_ zEPehbY{DZ%Gx_g z=TG+8@T5b}S5e9&a~=fR<@r}s70?!wzUOv1qKT=~x(=~A-)8;*0?@p}mU6+bs8T#; zUaPHyxZO297+9GNc+@S1Q_N7A&H~cC7Ah_Gw0=U1b8pAR9xWdet$cG>#xm)ywQ`() z!nD1R=h1M9Tn_#{;E}z!roU*h*LY8sqiotz8NUB6$DwgV<}mA+K6=JAmg&GtL?W5} z;)=ilvY>gxGFz%k)-Fy_Ujc5ZIP5$)+1aSV{^Q)Fxp<;}cD)j7PzvLhGhF&}&QM;5 zB8IB^x}Vcr$mEHeS7Ter2Ym~bai?b0&&a|>W!)SYhl_QZ6tCN?OcE25tB154F6gBM zz(EjRyfb&&;A@M2zgt&c=7FWrnV{xQU2|GJYSW-|bji|hPE~bT!x#bVyD29-4K0c) zjPlEB$6{plLx+dOD)fx{Ts~;>*JFn!^taR;*xNwHaMLwa2Z>bu22SY8lQ%0oZq-;v z?t~W2Ia+brgEftVt{H)~54q?Pec4~?8^7NSt6T)S)9y?pW)B7 znQq%7c1Vw>m#3Yz+5Y_fD?P9@V3R-4WtpH)J!YAzqiO4H(3#%L`})*d?rdN11x~BM zqC**USMp4HbF&}{e#=(2^N$;~%e`mF~d=B>Rn45%+T*OVTP*ZqS 
z*U)Rq3hMl?)kO*MwOV#TU9cg^^SwK3`!3t-$)Cnwp;zB?4%Uy%0Tg$}uhfA~+7w-^$4cP%ENIaPbF_+I@H?FdhHl^bRb<$kEX-M<;H$yPd#Jj`-OUX&NO|v$gic_28_^gR z3;M3?dG%n$l`v9Wbt5=HvYRtMsNaMm#e-N{Iu0vgnR>L77tz+i*frMii40cN-EPZS zQzBwE+nfDz(5ZNzt>r>#_+Dk77aiZf;B1{GeGLfobRP;e?2J8v|J56HQI0}5<=dH& zGj({STjKg#;Wld@+c}NGx_=CXJuks^yMFto+hbtSR~KUgl#kXM##5>t!c)5)*c0X6 zecfn!$4%8$739G!#5dV^40&rXj8R4#4pA*rnhORV@U9$-vjwE@&WEr z{bI3u!@UcX@8S*rQTzgYlV+W!TeU;g&!@n~uY;Y(^kMyW9prt{L)^8FgaOoBKnHkL zyo-*sI++06TgDsrcY@bu84pCepf^l2kq!7d(FKt8XnnWCHP5Bo6V~P$^5-4=cF5FG z)YLItj<^Sqi-ZT@ixdQ)E~2h4uhFl3mq<^-n+DfRn<&@Nn+eF?BRL}`ep8ZN)u0=(mr9*vACR7!opkN+THrbhg5%sT zTra+=PE{Z}Yc1HWvIp3kev`nPjae3c(pg5EQYPT8iU%CF1@%Pj=TX|btseOvAg-zp z^qT=|kbeA!eAHZP1WrD1&sXO;f?ac7(6{`pA7GtLCJ2l6AzcUc9*|Gx==zb#|`g@1e^Gyi}*fB7Az zzf8&}zVZ*N^OxUY{{xe-Gk=mIoPUrO&QGF+{V&GC#Qce|urdCHZ2nR>f7z9PD4ajY z$tOwkhjU^1i__Wc@qnAN1yLZvUV+f3^QXZ`l4>#h-|O8KHmB8`eKznE#SBe;4u}lQsVXrTN>F z^?yNWnE$j7{x6i~Z^KAgBS$?$Jx9I&?pCTMEAy{bPK3;yjQ<0v;o*0+)iba#awOFM z?AL3}OMKDRMNDXB$V;rsD$6KqD{N$HCgE;xr06cEWZ-UL!1)Og^YOTHxmwv;eTE@) zwX(E!;Bw_9*89`tm+RC1<7OZx{5!$KEQ!PaM~f(D<`=_kTM-@hMiGKa>C82|Nsc0{>eg|61I?dH!c=|7_Ez#^+A` zYsvp-YOepDn=6;BqNKd8jGn%crLHjxKu_G*dqBGLh=V0aJ zWMu?!80d;x>*-q>8S**WI~o1W_CJ^SyF35)(?7pFLt}4A~6~8JU<2^;y{e!~Fgg;Xlkz z(7^HYYx|iY8^Fkzk(t?uj+uj%m5#+gpPf#R1zSDl82@38 z!k=A!Kbv$5nK?S}G5&`+{JRFs|MhA4*FiQk`0vAF>tt{F_pgJY0fW&$O~f5O&kNJv zvutR0)B0I$rfH4l#WeEeX)_p6Om-W5Revb z^5s0G+LbQrE%%ogD_UB%tdGE(*V$e>2yx=rt-7CYe*-GX$!>I$Y==9ZSmXy5K(E|g*;eYOyL}IIF!eMVHH8Pod zIebsZKRFpY;ML$?UNt5(cbAa4TX3V>Z7M={cQV0wmliSuS|E>EuA2h|Nl$VD{LqMX zEwz>C67bS>xSZFa4XN5lMZH|@u4;Ex)B_b2uFkG%cU9B_6&0?|uFATruuqoB9zAyH$9*bXBZ2vT)Y3Clb>vyf6AB%8gt^0M9ZCr-~8gw z(H%}{*S_qmoZg9a}TV~ChLNp;~l#~k($eqy6vsoFUZ@*n-|8L z=X;iBVoP%mEzR#AoS7b+>l&Q3Ez}@MH5*6ba&9u1dLIZC*^5@^@0Q zC}SGD#XEPE$P@)8hD~M|@NlSH#+VKncFJW!$Oe+BI?PL3IfJ44gn?}lCPVe9vWn_c zWdq9;fpt0bi_I3u_=7l9okLoZa+_7tfK^g1(-=eT!chauxQunVj>kh*I371e5HgjJ zX@pECWCkHKu?!x(qB#T)5V4GyYn91Qjutcy7yeH~E_b1)fmFxca98w3Xh)%5gf4B;&KEBBRC-T!MX$o 
zBRCX-17ajp9}#mgYCp&0Ybdb4*YNJvLJyHy|#jqgW3#QJlk70c)$g z4Xzr%i)1L4;t&qmV@HsI-ze&4(veu)O1qps-_{{S{jX0Rg)D!Obfyp$!xLWvqT;r; tt_Qna&aFsUb~*nK+dRE#YYMVVf=Qm-$*ez@Z! literal 0 HcmV?d00001 diff --git a/dotnet/samples/GettingStartedWithAgents/Step1_Agent.cs b/dotnet/samples/GettingStartedWithAgents/Step01_Agent.cs similarity index 76% rename from dotnet/samples/GettingStartedWithAgents/Step1_Agent.cs rename to dotnet/samples/GettingStartedWithAgents/Step01_Agent.cs index d7d4a0471b01..bc5bee5249e5 100644 --- a/dotnet/samples/GettingStartedWithAgents/Step1_Agent.cs +++ b/dotnet/samples/GettingStartedWithAgents/Step01_Agent.cs @@ -9,7 +9,7 @@ namespace GettingStarted; /// Demonstrate creation of and /// eliciting its response to three explicit user messages. /// -public class Step1_Agent(ITestOutputHelper output) : BaseTest(output) +public class Step01_Agent(ITestOutputHelper output) : BaseAgentsTest(output) { private const string ParrotName = "Parrot"; private const string ParrotInstructions = "Repeat the user message in the voice of a pirate and then end with a parrot sound."; @@ -37,15 +37,15 @@ public async Task UseSingleChatCompletionAgentAsync() // Local function to invoke agent and display the conversation messages. async Task InvokeAgentAsync(string input) { - chat.Add(new ChatMessageContent(AuthorRole.User, input)); + ChatMessageContent message = new(AuthorRole.User, input); + chat.Add(message); + this.WriteAgentChatMessage(message); - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); - - await foreach (ChatMessageContent content in agent.InvokeAsync(chat)) + await foreach (ChatMessageContent response in agent.InvokeAsync(chat)) { - chat.Add(content); + chat.Add(response); - Console.WriteLine($"# {content.Role} - {content.AuthorName ?? 
"*"}: '{content.Content}'"); + this.WriteAgentChatMessage(response); } } } diff --git a/dotnet/samples/GettingStartedWithAgents/Step2_Plugins.cs b/dotnet/samples/GettingStartedWithAgents/Step02_Plugins.cs similarity index 76% rename from dotnet/samples/GettingStartedWithAgents/Step2_Plugins.cs rename to dotnet/samples/GettingStartedWithAgents/Step02_Plugins.cs index 7946adc7f687..29394991dcc4 100644 --- a/dotnet/samples/GettingStartedWithAgents/Step2_Plugins.cs +++ b/dotnet/samples/GettingStartedWithAgents/Step02_Plugins.cs @@ -11,7 +11,7 @@ namespace GettingStarted; /// Demonstrate creation of with a , /// and then eliciting its response to explicit user messages. /// -public class Step2_Plugins(ITestOutputHelper output) : BaseTest(output) +public class Step02_Plugins(ITestOutputHelper output) : BaseAgentsTest(output) { private const string HostName = "Host"; private const string HostInstructions = "Answer questions about the menu."; @@ -45,37 +45,34 @@ public async Task UseChatCompletionWithPluginAgentAsync() // Local function to invoke agent and display the conversation messages. async Task InvokeAgentAsync(string input) { - chat.Add(new ChatMessageContent(AuthorRole.User, input)); - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); + ChatMessageContent message = new(AuthorRole.User, input); + chat.Add(message); + this.WriteAgentChatMessage(message); - await foreach (ChatMessageContent content in agent.InvokeAsync(chat)) + await foreach (ChatMessageContent response in agent.InvokeAsync(chat)) { - chat.Add(content); + chat.Add(response); - Console.WriteLine($"# {content.Role} - {content.AuthorName ?? 
"*"}: '{content.Content}'"); + this.WriteAgentChatMessage(response); } } } - public sealed class MenuPlugin + private sealed class MenuPlugin { [KernelFunction, Description("Provides a list of specials from the menu.")] [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1024:Use properties where appropriate", Justification = "Too smart")] - public string GetSpecials() - { - return @" -Special Soup: Clam Chowder -Special Salad: Cobb Salad -Special Drink: Chai Tea -"; - } + public string GetSpecials() => + """ + Special Soup: Clam Chowder + Special Salad: Cobb Salad + Special Drink: Chai Tea + """; [KernelFunction, Description("Provides the price of the requested menu item.")] public string GetItemPrice( [Description("The name of the menu item.")] - string menuItem) - { - return "$9.99"; - } + string menuItem) => + "$9.99"; } } diff --git a/dotnet/samples/GettingStartedWithAgents/Step3_Chat.cs b/dotnet/samples/GettingStartedWithAgents/Step03_Chat.cs similarity index 86% rename from dotnet/samples/GettingStartedWithAgents/Step3_Chat.cs rename to dotnet/samples/GettingStartedWithAgents/Step03_Chat.cs index 5d0c185f95f5..1ada85d512f3 100644 --- a/dotnet/samples/GettingStartedWithAgents/Step3_Chat.cs +++ b/dotnet/samples/GettingStartedWithAgents/Step03_Chat.cs @@ -11,7 +11,7 @@ namespace GettingStarted; /// that inform how chat proceeds with regards to: Agent selection, chat continuation, and maximum /// number of agent interactions. /// -public class Step3_Chat(ITestOutputHelper output) : BaseTest(output) +public class Step03_Chat(ITestOutputHelper output) : BaseAgentsTest(output) { private const string ReviewerName = "ArtDirector"; private const string ReviewerInstructions = @@ -74,16 +74,16 @@ public async Task UseAgentGroupChatWithTwoAgentsAsync() }; // Invoke chat and display messages. 
- string input = "concept: maps made out of egg cartons."; - chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input)); - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); + ChatMessageContent input = new(AuthorRole.User, "concept: maps made out of egg cartons."); + chat.AddChatMessage(input); + this.WriteAgentChatMessage(input); - await foreach (ChatMessageContent content in chat.InvokeAsync()) + await foreach (ChatMessageContent response in chat.InvokeAsync()) { - Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'"); + this.WriteAgentChatMessage(response); } - Console.WriteLine($"# IS COMPLETE: {chat.IsComplete}"); + Console.WriteLine($"\n[IS COMPLETED: {chat.IsComplete}]"); } private sealed class ApprovalTerminationStrategy : TerminationStrategy diff --git a/dotnet/samples/GettingStartedWithAgents/Step4_KernelFunctionStrategies.cs b/dotnet/samples/GettingStartedWithAgents/Step04_KernelFunctionStrategies.cs similarity index 85% rename from dotnet/samples/GettingStartedWithAgents/Step4_KernelFunctionStrategies.cs rename to dotnet/samples/GettingStartedWithAgents/Step04_KernelFunctionStrategies.cs index d71b6ae26767..f97c6e733421 100644 --- a/dotnet/samples/GettingStartedWithAgents/Step4_KernelFunctionStrategies.cs +++ b/dotnet/samples/GettingStartedWithAgents/Step04_KernelFunctionStrategies.cs @@ -10,7 +10,7 @@ namespace GettingStarted; /// Demonstrate usage of and /// to manage execution. 
/// -public class Step4_KernelFunctionStrategies(ITestOutputHelper output) : BaseTest(output) +public class Step04_KernelFunctionStrategies(ITestOutputHelper output) : BaseAgentsTest(output) { private const string ReviewerName = "ArtDirector"; private const string ReviewerInstructions = @@ -64,16 +64,17 @@ public async Task UseKernelFunctionStrategiesWithAgentGroupChatAsync() KernelFunction selectionFunction = KernelFunctionFactory.CreateFromPrompt( $$$""" - Your job is to determine which participant takes the next turn in a conversation according to the action of the most recent participant. + Determine which participant takes the next turn in a conversation based on the the most recent participant. State only the name of the participant to take the next turn. + No participant should take more than one turn in a row. Choose only from these participants: - {{{ReviewerName}}} - {{{CopyWriterName}}} Always follow these rules when selecting the next participant: - - After {{{CopyWriterName}}} replies, it is {{{ReviewerName}}}'s turn. - - After {{{ReviewerName}}} provides feedback, it is {{{CopyWriterName}}}'s turn. + - After {{{CopyWriterName}}}, it is {{{ReviewerName}}}'s turn. + - After {{{ReviewerName}}}, it is {{{CopyWriterName}}}'s turn. History: {{$history}} @@ -117,15 +118,15 @@ State only the name of the participant to take the next turn. }; // Invoke chat and display messages. - string input = "concept: maps made out of egg cartons."; - chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input)); - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); + ChatMessageContent message = new(AuthorRole.User, "concept: maps made out of egg cartons."); + chat.AddChatMessage(message); + this.WriteAgentChatMessage(message); - await foreach (ChatMessageContent content in chat.InvokeAsync()) + await foreach (ChatMessageContent responese in chat.InvokeAsync()) { - Console.WriteLine($"# {content.Role} - {content.AuthorName ?? 
"*"}: '{content.Content}'"); + this.WriteAgentChatMessage(responese); } - Console.WriteLine($"# IS COMPLETE: {chat.IsComplete}"); + Console.WriteLine($"\n[IS COMPLETED: {chat.IsComplete}]"); } } diff --git a/dotnet/samples/GettingStartedWithAgents/Step5_JsonResult.cs b/dotnet/samples/GettingStartedWithAgents/Step05_JsonResult.cs similarity index 79% rename from dotnet/samples/GettingStartedWithAgents/Step5_JsonResult.cs rename to dotnet/samples/GettingStartedWithAgents/Step05_JsonResult.cs index 20ad4c2096d4..8806c7d3b62d 100644 --- a/dotnet/samples/GettingStartedWithAgents/Step5_JsonResult.cs +++ b/dotnet/samples/GettingStartedWithAgents/Step05_JsonResult.cs @@ -10,14 +10,14 @@ namespace GettingStarted; /// /// Demonstrate parsing JSON response. /// -public class Step5_JsonResult(ITestOutputHelper output) : BaseTest(output) +public class Step05_JsonResult(ITestOutputHelper output) : BaseAgentsTest(output) { private const int ScoreCompletionThreshold = 70; private const string TutorName = "Tutor"; private const string TutorInstructions = """ - Think step-by-step and rate the user input on creativity and expressivness from 1-100. + Think step-by-step and rate the user input on creativity and expressiveness from 1-100. Respond in JSON format with the following JSON schema: @@ -60,19 +60,20 @@ public async Task UseKernelFunctionStrategiesWithJsonResultAsync() // Local function to invoke agent and display the conversation messages. async Task InvokeAgentAsync(string input) { - chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input)); + ChatMessageContent message = new(AuthorRole.User, input); + chat.AddChatMessage(message); + this.WriteAgentChatMessage(message); - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); - - await foreach (ChatMessageContent content in chat.InvokeAsync(agent)) + await foreach (ChatMessageContent response in chat.InvokeAsync(agent)) { - Console.WriteLine($"# {content.Role} - {content.AuthorName ?? 
"*"}: '{content.Content}'"); - Console.WriteLine($"# IS COMPLETE: {chat.IsComplete}"); + this.WriteAgentChatMessage(response); + + Console.WriteLine($"[IS COMPLETED: {chat.IsComplete}]"); } } } - private record struct InputScore(int score, string notes); + private record struct WritingScore(int score, string notes); private sealed class ThresholdTerminationStrategy : TerminationStrategy { @@ -80,7 +81,7 @@ protected override Task ShouldAgentTerminateAsync(Agent agent, IReadOnlyLi { string lastMessageContent = history[history.Count - 1].Content ?? string.Empty; - InputScore? result = JsonResultTranslator.Translate(lastMessageContent); + WritingScore? result = JsonResultTranslator.Translate(lastMessageContent); return Task.FromResult((result?.score ?? 0) >= ScoreCompletionThreshold); } diff --git a/dotnet/samples/GettingStartedWithAgents/Step6_DependencyInjection.cs b/dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs similarity index 65% rename from dotnet/samples/GettingStartedWithAgents/Step6_DependencyInjection.cs rename to dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs index 21af5db70dce..a0d32f8cefba 100644 --- a/dotnet/samples/GettingStartedWithAgents/Step6_DependencyInjection.cs +++ b/dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs @@ -3,23 +3,19 @@ using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; -using Microsoft.SemanticKernel.Agents.Chat; using Microsoft.SemanticKernel.ChatCompletion; -using Resources; namespace GettingStarted; /// /// Demonstrate creation of an agent via dependency injection. 
/// -public class Step6_DependencyInjection(ITestOutputHelper output) : BaseTest(output) +public class Step06_DependencyInjection(ITestOutputHelper output) : BaseAgentsTest(output) { - private const int ScoreCompletionThreshold = 70; - private const string TutorName = "Tutor"; private const string TutorInstructions = """ - Think step-by-step and rate the user input on creativity and expressivness from 1-100. + Think step-by-step and rate the user input on creativity and expressiveness from 1-100. Respond in JSON format with the following JSON schema: @@ -80,50 +76,27 @@ public async Task UseDependencyInjectionToCreateAgentAsync() // Local function to invoke agent and display the conversation messages. async Task WriteAgentResponse(string input) { - Console.WriteLine($"# {AuthorRole.User}: {input}"); + ChatMessageContent message = new(AuthorRole.User, input); + this.WriteAgentChatMessage(message); - await foreach (ChatMessageContent content in agentClient.RunDemoAsync(input)) + await foreach (ChatMessageContent response in agentClient.RunDemoAsync(message)) { - Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'"); + this.WriteAgentChatMessage(response); } } } private sealed class AgentClient([FromKeyedServices(TutorName)] ChatCompletionAgent agent) { - private readonly AgentGroupChat _chat = - new() - { - ExecutionSettings = - new() - { - // Here a TerminationStrategy subclass is used that will terminate when - // the response includes a score that is greater than or equal to 70. - TerminationStrategy = new ThresholdTerminationStrategy() - } - }; - - public IAsyncEnumerable RunDemoAsync(string input) - { - // Create a chat for agent interaction. 
+ private readonly AgentGroupChat _chat = new(); - this._chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input)); + public IAsyncEnumerable RunDemoAsync(ChatMessageContent input) + { + this._chat.AddChatMessage(input); return this._chat.InvokeAsync(agent); } } - private record struct InputScore(int score, string notes); - - private sealed class ThresholdTerminationStrategy : TerminationStrategy - { - protected override Task ShouldAgentTerminateAsync(Agent agent, IReadOnlyList history, CancellationToken cancellationToken) - { - string lastMessageContent = history[history.Count - 1].Content ?? string.Empty; - - InputScore? result = JsonResultTranslator.Translate(lastMessageContent); - - return Task.FromResult((result?.score ?? 0) >= ScoreCompletionThreshold); - } - } + private record struct WritingScore(int score, string notes); } diff --git a/dotnet/samples/GettingStartedWithAgents/Step7_Logging.cs b/dotnet/samples/GettingStartedWithAgents/Step07_Logging.cs similarity index 86% rename from dotnet/samples/GettingStartedWithAgents/Step7_Logging.cs rename to dotnet/samples/GettingStartedWithAgents/Step07_Logging.cs index 1ab559e668fb..3a48d407dea9 100644 --- a/dotnet/samples/GettingStartedWithAgents/Step7_Logging.cs +++ b/dotnet/samples/GettingStartedWithAgents/Step07_Logging.cs @@ -8,13 +8,13 @@ namespace GettingStarted; /// -/// A repeat of with logging enabled via assignment +/// A repeat of with logging enabled via assignment /// of a to . /// /// /// Samples become super noisy with logging always enabled. /// -public class Step7_Logging(ITestOutputHelper output) : BaseTest(output) +public class Step07_Logging(ITestOutputHelper output) : BaseAgentsTest(output) { private const string ReviewerName = "ArtDirector"; private const string ReviewerInstructions = @@ -81,16 +81,16 @@ public async Task UseLoggerFactoryWithAgentGroupChatAsync() }; // Invoke chat and display messages. 
- string input = "concept: maps made out of egg cartons."; - chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input)); - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); + ChatMessageContent input = new(AuthorRole.User, "concept: maps made out of egg cartons."); + chat.AddChatMessage(input); + this.WriteAgentChatMessage(input); - await foreach (ChatMessageContent content in chat.InvokeAsync()) + await foreach (ChatMessageContent response in chat.InvokeAsync()) { - Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'"); + this.WriteAgentChatMessage(response); } - Console.WriteLine($"# IS COMPLETE: {chat.IsComplete}"); + Console.WriteLine($"\n[IS COMPLETED: {chat.IsComplete}]"); } private sealed class ApprovalTerminationStrategy : TerminationStrategy diff --git a/dotnet/samples/GettingStartedWithAgents/Step8_OpenAIAssistant.cs b/dotnet/samples/GettingStartedWithAgents/Step08_Assistant.cs similarity index 57% rename from dotnet/samples/GettingStartedWithAgents/Step8_OpenAIAssistant.cs rename to dotnet/samples/GettingStartedWithAgents/Step08_Assistant.cs index d9e9760e3fa6..ba4ab065c2a6 100644 --- a/dotnet/samples/GettingStartedWithAgents/Step8_OpenAIAssistant.cs +++ b/dotnet/samples/GettingStartedWithAgents/Step08_Assistant.cs @@ -8,36 +8,35 @@ namespace GettingStarted; /// -/// This example demonstrates that outside of initialization (and cleanup), using -/// is no different from -/// even with with a . +/// This example demonstrates similarity between using +/// and (see: Step 2). 
/// -public class Step8_OpenAIAssistant(ITestOutputHelper output) : BaseTest(output) +public class Step08_Assistant(ITestOutputHelper output) : BaseAgentsTest(output) { private const string HostName = "Host"; private const string HostInstructions = "Answer questions about the menu."; [Fact] - public async Task UseSingleOpenAIAssistantAgentAsync() + public async Task UseSingleAssistantAgentAsync() { // Define the agent OpenAIAssistantAgent agent = await OpenAIAssistantAgent.CreateAsync( kernel: new(), - config: new(this.ApiKey, this.Endpoint), - new() + clientProvider: this.GetClientProvider(), + new(this.Model) { Instructions = HostInstructions, Name = HostName, - ModelId = this.Model, + Metadata = AssistantSampleMetadata, }); // Initialize plugin and add to the agent's Kernel (same as direct Kernel usage). KernelPlugin plugin = KernelPluginFactory.CreateFromType(); agent.Kernel.Plugins.Add(plugin); - // Create a thread for the agent interaction. - string threadId = await agent.CreateThreadAsync(); + // Create a thread for the agent conversation. + string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata }); // Respond to user input try @@ -56,45 +55,32 @@ await OpenAIAssistantAgent.CreateAsync( // Local function to invoke agent and display the conversation messages. async Task InvokeAgentAsync(string input) { - await agent.AddChatMessageAsync(threadId, new ChatMessageContent(AuthorRole.User, input)); + ChatMessageContent message = new(AuthorRole.User, input); + await agent.AddChatMessageAsync(threadId, message); + this.WriteAgentChatMessage(message); - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); - - await foreach (ChatMessageContent content in agent.InvokeAsync(threadId)) + await foreach (ChatMessageContent response in agent.InvokeAsync(threadId)) { - if (content.Role != AuthorRole.Tool) - { - Console.WriteLine($"# {content.Role} - {content.AuthorName ?? 
"*"}: '{content.Content}'"); - } + this.WriteAgentChatMessage(response); } } } private sealed class MenuPlugin { - public const string CorrelationIdArgument = "correlationId"; - - private readonly List _correlationIds = []; - - public IReadOnlyList CorrelationIds => this._correlationIds; - [KernelFunction, Description("Provides a list of specials from the menu.")] [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1024:Use properties where appropriate", Justification = "Too smart")] - public string GetSpecials() - { - return @" -Special Soup: Clam Chowder -Special Salad: Cobb Salad -Special Drink: Chai Tea -"; - } + public string GetSpecials() => + """ + Special Soup: Clam Chowder + Special Salad: Cobb Salad + Special Drink: Chai Tea + """; [KernelFunction, Description("Provides the price of the requested menu item.")] public string GetItemPrice( [Description("The name of the menu item.")] - string menuItem) - { - return "$9.99"; - } + string menuItem) => + "$9.99"; } } diff --git a/dotnet/samples/GettingStartedWithAgents/Step09_Assistant_Vision.cs b/dotnet/samples/GettingStartedWithAgents/Step09_Assistant_Vision.cs new file mode 100644 index 000000000000..62845f2c4366 --- /dev/null +++ b/dotnet/samples/GettingStartedWithAgents/Step09_Assistant_Vision.cs @@ -0,0 +1,74 @@ +// Copyright (c) Microsoft. All rights reserved. +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents.OpenAI; +using Microsoft.SemanticKernel.ChatCompletion; +using Resources; + +namespace GettingStarted; + +/// +/// Demonstrate providing image input to . +/// +public class Step09_Assistant_Vision(ITestOutputHelper output) : BaseAgentsTest(output) +{ + /// + /// Azure currently only supports message of type=text. 
+ /// + protected override bool ForceOpenAI => true; + + [Fact] + public async Task UseSingleAssistantAgentAsync() + { + // Define the agent + OpenAIClientProvider provider = this.GetClientProvider(); + OpenAIAssistantAgent agent = + await OpenAIAssistantAgent.CreateAsync( + kernel: new(), + provider, + new(this.Model) + { + Metadata = AssistantSampleMetadata, + }); + + // Upload an image + await using Stream imageStream = EmbeddedResource.ReadStream("cat.jpg")!; + string fileId = await agent.UploadFileAsync(imageStream, "cat.jpg"); + + // Create a thread for the agent conversation. + string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata }); + + // Respond to user input + try + { + // Refer to public image by url + await InvokeAgentAsync(CreateMessageWithImageUrl("Describe this image.", "https://upload.wikimedia.org/wikipedia/commons/thumb/4/47/New_york_times_square-terabass.jpg/1200px-New_york_times_square-terabass.jpg")); + await InvokeAgentAsync(CreateMessageWithImageUrl("What are is the main color in this image?", "https://upload.wikimedia.org/wikipedia/commons/5/56/White_shark.jpg")); + // Refer to uploaded image by file-id. + await InvokeAgentAsync(CreateMessageWithImageReference("Is there an animal in this image?", fileId)); + } + finally + { + await agent.DeleteThreadAsync(threadId); + await agent.DeleteAsync(); + await provider.Client.GetFileClient().DeleteFileAsync(fileId); + } + + // Local function to invoke agent and display the conversation messages. 
+ async Task InvokeAgentAsync(ChatMessageContent message) + { + await agent.AddChatMessageAsync(threadId, message); + this.WriteAgentChatMessage(message); + + await foreach (ChatMessageContent response in agent.InvokeAsync(threadId)) + { + this.WriteAgentChatMessage(response); + } + } + } + + private ChatMessageContent CreateMessageWithImageUrl(string input, string url) + => new(AuthorRole.User, [new TextContent(input), new ImageContent(new Uri(url))]); + + private ChatMessageContent CreateMessageWithImageReference(string input, string fileId) + => new(AuthorRole.User, [new TextContent(input), new FileReferenceContent(fileId)]); +} diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_CodeInterpreter.cs b/dotnet/samples/GettingStartedWithAgents/Step10_AssistantTool_CodeInterpreter.cs similarity index 50% rename from dotnet/samples/Concepts/Agents/OpenAIAssistant_CodeInterpreter.cs rename to dotnet/samples/GettingStartedWithAgents/Step10_AssistantTool_CodeInterpreter.cs index 75b237489025..1205771d66be 100644 --- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_CodeInterpreter.cs +++ b/dotnet/samples/GettingStartedWithAgents/Step10_AssistantTool_CodeInterpreter.cs @@ -1,34 +1,31 @@ // Copyright (c) Microsoft. All rights reserved. using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; -namespace Agents; +namespace GettingStarted; /// /// Demonstrate using code-interpreter on . 
/// -public class OpenAIAssistant_CodeInterpreter(ITestOutputHelper output) : BaseTest(output) +public class Step10_AssistantTool_CodeInterpreter(ITestOutputHelper output) : BaseAgentsTest(output) { - protected override bool ForceOpenAI => true; - [Fact] - public async Task UseCodeInterpreterToolWithOpenAIAssistantAgentAsync() + public async Task UseCodeInterpreterToolWithAssistantAgentAsync() { // Define the agent OpenAIAssistantAgent agent = await OpenAIAssistantAgent.CreateAsync( kernel: new(), - config: new(this.ApiKey, this.Endpoint), - new() + clientProvider: this.GetClientProvider(), + new(this.Model) { - EnableCodeInterpreter = true, // Enable code-interpreter - ModelId = this.Model, + EnableCodeInterpreter = true, + Metadata = AssistantSampleMetadata, }); - // Create a chat for agent interaction. - AgentGroupChat chat = new(); + // Create a thread for the agent conversation. + string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata }); // Respond to user input try @@ -37,19 +34,20 @@ await OpenAIAssistantAgent.CreateAsync( } finally { + await agent.DeleteThreadAsync(threadId); await agent.DeleteAsync(); } // Local function to invoke agent and display the conversation messages. async Task InvokeAgentAsync(string input) { - chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input)); - - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); + ChatMessageContent message = new(AuthorRole.User, input); + await agent.AddChatMessageAsync(threadId, message); + this.WriteAgentChatMessage(message); - await foreach (var content in chat.InvokeAsync(agent)) + await foreach (ChatMessageContent response in agent.InvokeAsync(threadId)) { - Console.WriteLine($"# {content.Role} - {content.AuthorName ?? 
"*"}: '{content.Content}'"); + this.WriteAgentChatMessage(response); } } } diff --git a/dotnet/samples/GettingStartedWithAgents/Step11_AssistantTool_FileSearch.cs b/dotnet/samples/GettingStartedWithAgents/Step11_AssistantTool_FileSearch.cs new file mode 100644 index 000000000000..70985d0fc27b --- /dev/null +++ b/dotnet/samples/GettingStartedWithAgents/Step11_AssistantTool_FileSearch.cs @@ -0,0 +1,83 @@ +// Copyright (c) Microsoft. All rights reserved. +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents.OpenAI; +using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI.Files; +using OpenAI.VectorStores; +using Resources; + +namespace GettingStarted; + +/// +/// Demonstrate using code-interpreter on . +/// +public class Step11_AssistantTool_FileSearch(ITestOutputHelper output) : BaseAgentsTest(output) +{ + [Fact] + public async Task UseFileSearchToolWithAssistantAgentAsync() + { + // Define the agent + OpenAIClientProvider provider = this.GetClientProvider(); + OpenAIAssistantAgent agent = + await OpenAIAssistantAgent.CreateAsync( + kernel: new(), + clientProvider: this.GetClientProvider(), + new(this.Model) + { + EnableFileSearch = true, + Metadata = AssistantSampleMetadata, + }); + + // Upload file - Using a table of fictional employees. + FileClient fileClient = provider.Client.GetFileClient(); + await using Stream stream = EmbeddedResource.ReadStream("employees.pdf")!; + OpenAIFileInfo fileInfo = await fileClient.UploadFileAsync(stream, "employees.pdf", FileUploadPurpose.Assistants); + + // Create a vector-store + VectorStoreClient vectorStoreClient = provider.Client.GetVectorStoreClient(); + VectorStore vectorStore = + await vectorStoreClient.CreateVectorStoreAsync( + new VectorStoreCreationOptions() + { + FileIds = [fileInfo.Id], + Metadata = { { AssistantSampleMetadataKey, bool.TrueString } } + }); + + // Create a thread associated with a vector-store for the agent conversation. 
+ string threadId = + await agent.CreateThreadAsync( + new OpenAIThreadCreationOptions + { + VectorStoreId = vectorStore.Id, + Metadata = AssistantSampleMetadata, + }); + + // Respond to user input + try + { + await InvokeAgentAsync("Who is the youngest employee?"); + await InvokeAgentAsync("Who works in sales?"); + await InvokeAgentAsync("I have a customer request, who can help me?"); + } + finally + { + await agent.DeleteThreadAsync(threadId); + await agent.DeleteAsync(CancellationToken.None); + await vectorStoreClient.DeleteVectorStoreAsync(vectorStore); + await fileClient.DeleteFileAsync(fileInfo.Id); + } + + // Local function to invoke agent and display the conversation messages. + async Task InvokeAgentAsync(string input) + { + ChatMessageContent message = new(AuthorRole.User, input); + await agent.AddChatMessageAsync(threadId, message); + this.WriteAgentChatMessage(message); + + await foreach (ChatMessageContent response in agent.InvokeAsync(threadId)) + { + this.WriteAgentChatMessage(response); + } + } + } +} diff --git a/dotnet/samples/LearnResources/LearnResources.csproj b/dotnet/samples/LearnResources/LearnResources.csproj index d210f8effa91..d639fc8a0cee 100644 --- a/dotnet/samples/LearnResources/LearnResources.csproj +++ b/dotnet/samples/LearnResources/LearnResources.csproj @@ -51,6 +51,7 @@ + @@ -68,6 +69,6 @@ - + \ No newline at end of file diff --git a/dotnet/samples/LearnResources/MicrosoftLearn/AIServices.cs b/dotnet/samples/LearnResources/MicrosoftLearn/AIServices.cs index a56e6591f8ad..d957358cac77 100644 --- a/dotnet/samples/LearnResources/MicrosoftLearn/AIServices.cs +++ b/dotnet/samples/LearnResources/MicrosoftLearn/AIServices.cs @@ -45,25 +45,11 @@ public async Task RunAsync() .Build(); // - // You could instead create a kernel with a legacy Azure OpenAI text completion service - // - kernel = Kernel.CreateBuilder() - .AddAzureOpenAITextGeneration(textModelId, endpoint, apiKey) - .Build(); - // - // You can also create a kernel with a 
(non-Azure) OpenAI chat completion service // kernel = Kernel.CreateBuilder() .AddOpenAIChatCompletion(openAImodelId, openAIapiKey) .Build(); // - - // Or a kernel with a legacy OpenAI text completion service - // - kernel = Kernel.CreateBuilder() - .AddOpenAITextGeneration(openAItextModelId, openAIapiKey) - .Build(); - // } } diff --git a/dotnet/src/Agents/Abstractions/AgentChannel.cs b/dotnet/src/Agents/Abstractions/AgentChannel.cs index 34f7a8030896..3c239360468d 100644 --- a/dotnet/src/Agents/Abstractions/AgentChannel.cs +++ b/dotnet/src/Agents/Abstractions/AgentChannel.cs @@ -40,6 +40,10 @@ public abstract class AgentChannel /// The agent actively interacting with the chat. /// The to monitor for cancellation requests. The default is . /// Asynchronous enumeration of messages. + /// + /// In the enumeration returned by this method, a message is considered visible if it is intended to be displayed to the user. + /// Example of a non-visible message is function-content for functions that are automatically executed. + /// protected internal abstract IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync( Agent agent, CancellationToken cancellationToken = default); @@ -68,6 +72,10 @@ public abstract class AgentChannel : AgentChannel where TAgent : Agent /// The agent actively interacting with the chat. /// The to monitor for cancellation requests. The default is . /// Asynchronous enumeration of messages. + /// + /// In the enumeration returned by this method, a message is considered visible if it is intended to be displayed to the user. + /// Example of a non-visible message is function-content for functions that are automatically executed. 
+ /// protected internal abstract IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync( TAgent agent, CancellationToken cancellationToken = default); diff --git a/dotnet/src/Agents/Abstractions/AgentChat.cs b/dotnet/src/Agents/Abstractions/AgentChat.cs index cdc46024ece7..6813e98f4c1d 100644 --- a/dotnet/src/Agents/Abstractions/AgentChat.cs +++ b/dotnet/src/Agents/Abstractions/AgentChat.cs @@ -308,7 +308,7 @@ private void ClearActivitySignal() /// The activity signal is used to manage ability and visibility for taking actions based /// on conversation history. /// - private void SetActivityOrThrow() + protected void SetActivityOrThrow() { // Note: Interlocked is the absolute lightest synchronization mechanism available in dotnet. int wasActive = Interlocked.CompareExchange(ref this._isActive, 1, 0); diff --git a/dotnet/src/Agents/Abstractions/AggregatorChannel.cs b/dotnet/src/Agents/Abstractions/AggregatorChannel.cs index c7123abf9b71..dfd33bd53299 100644 --- a/dotnet/src/Agents/Abstractions/AggregatorChannel.cs +++ b/dotnet/src/Agents/Abstractions/AggregatorChannel.cs @@ -13,11 +13,13 @@ internal sealed class AggregatorChannel(AgentChat chat) : AgentChannel protected internal override IAsyncEnumerable GetHistoryAsync(CancellationToken cancellationToken = default) { return this._chat.GetChatMessagesAsync(cancellationToken); } + /// protected internal override async IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync(AggregatorAgent agent, [EnumeratorCancellation] CancellationToken cancellationToken = default) { ChatMessageContent? 
lastMessage = null; diff --git a/dotnet/src/Agents/Abstractions/Logging/AgentChatLogMessages.cs b/dotnet/src/Agents/Abstractions/Logging/AgentChatLogMessages.cs index 314d68ce8cd8..b971fe2ce8d4 100644 --- a/dotnet/src/Agents/Abstractions/Logging/AgentChatLogMessages.cs +++ b/dotnet/src/Agents/Abstractions/Logging/AgentChatLogMessages.cs @@ -61,7 +61,7 @@ public static partial void LogAgentChatAddingMessages( [LoggerMessage( EventId = 0, Level = LogLevel.Information, - Message = "[{MethodName}] Adding Messages: {MessageCount}.")] + Message = "[{MethodName}] Added Messages: {MessageCount}.")] public static partial void LogAgentChatAddedMessages( this ILogger logger, string methodName, diff --git a/dotnet/src/Agents/Core/ChatCompletionAgent.cs b/dotnet/src/Agents/Core/ChatCompletionAgent.cs index 212c56038484..87e0e9c2a7cb 100644 --- a/dotnet/src/Agents/Core/ChatCompletionAgent.cs +++ b/dotnet/src/Agents/Core/ChatCompletionAgent.cs @@ -27,7 +27,7 @@ public override async IAsyncEnumerable InvokeAsync( kernel ??= this.Kernel; arguments ??= this.Arguments; - (IChatCompletionService chatCompletionService, PromptExecutionSettings? executionSettings) = this.GetChatCompletionService(kernel, arguments); + (IChatCompletionService chatCompletionService, PromptExecutionSettings? executionSettings) = GetChatCompletionService(kernel, arguments); ChatHistory chat = this.SetupAgentChatHistory(history); @@ -54,7 +54,7 @@ await chatCompletionService.GetChatMessageContentsAsync( history.Add(message); } - foreach (ChatMessageContent message in messages ?? []) + foreach (ChatMessageContent message in messages) { message.AuthorName = this.Name; @@ -72,7 +72,7 @@ public override async IAsyncEnumerable InvokeStream kernel ??= this.Kernel; arguments ??= this.Arguments; - (IChatCompletionService chatCompletionService, PromptExecutionSettings? executionSettings) = this.GetChatCompletionService(kernel, arguments); + (IChatCompletionService chatCompletionService, PromptExecutionSettings? 
executionSettings) = GetChatCompletionService(kernel, arguments); ChatHistory chat = this.SetupAgentChatHistory(history); @@ -107,7 +107,7 @@ public override async IAsyncEnumerable InvokeStream } } - private (IChatCompletionService service, PromptExecutionSettings? executionSettings) GetChatCompletionService(Kernel kernel, KernelArguments? arguments) + internal static (IChatCompletionService service, PromptExecutionSettings? executionSettings) GetChatCompletionService(Kernel kernel, KernelArguments? arguments) { // Need to provide a KernelFunction to the service selector as a container for the execution-settings. KernelFunction nullPrompt = KernelFunctionFactory.CreateFromPrompt("placeholder", arguments?.ExecutionSettings?.Values); diff --git a/dotnet/src/Agents/Core/History/ChatHistorySummarizationReducer.cs b/dotnet/src/Agents/Core/History/ChatHistorySummarizationReducer.cs index a45bfa57011d..8c2f022830d1 100644 --- a/dotnet/src/Agents/Core/History/ChatHistorySummarizationReducer.cs +++ b/dotnet/src/Agents/Core/History/ChatHistorySummarizationReducer.cs @@ -80,7 +80,7 @@ Provide a concise and complete summarizion of the entire dialog that does not ex IEnumerable summarizedHistory = history.Extract( this.UseSingleSummary ? 0 : insertionPoint, - truncationIndex, + truncationIndex - 1, (m) => m.Items.Any(i => i is FunctionCallContent || i is FunctionResultContent)); try @@ -154,7 +154,9 @@ public override bool Equals(object? obj) ChatHistorySummarizationReducer? 
other = obj as ChatHistorySummarizationReducer; return other != null && this._thresholdCount == other._thresholdCount && - this._targetCount == other._targetCount; + this._targetCount == other._targetCount && + this.UseSingleSummary == other.UseSingleSummary && + string.Equals(this.SummarizationInstructions, other.SummarizationInstructions, StringComparison.Ordinal); } /// diff --git a/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj b/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj index 222ea5c5be88..a5a4cde76d6f 100644 --- a/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj +++ b/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj @@ -19,6 +19,7 @@ + @@ -32,7 +33,7 @@ - + diff --git a/dotnet/src/Agents/OpenAI/Extensions/AuthorRoleExtensions.cs b/dotnet/src/Agents/OpenAI/Extensions/AuthorRoleExtensions.cs index cd4e80c3abf1..895482927515 100644 --- a/dotnet/src/Agents/OpenAI/Extensions/AuthorRoleExtensions.cs +++ b/dotnet/src/Agents/OpenAI/Extensions/AuthorRoleExtensions.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. 
-using Azure.AI.OpenAI.Assistants; using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI.Assistants; namespace Microsoft.SemanticKernel.Agents.OpenAI; diff --git a/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs b/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs index 9665fb680498..c4acca58770f 100644 --- a/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs +++ b/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs @@ -2,7 +2,7 @@ using System; using System.Collections.Generic; using System.Linq; -using Azure.AI.OpenAI.Assistants; +using OpenAI.Assistants; namespace Microsoft.SemanticKernel.Agents.OpenAI; @@ -13,9 +13,8 @@ internal static class KernelFunctionExtensions /// /// The source function /// The plugin name - /// The delimiter character /// An OpenAI tool definition - public static FunctionToolDefinition ToToolDefinition(this KernelFunction function, string pluginName, string delimiter) + public static FunctionToolDefinition ToToolDefinition(this KernelFunction function, string pluginName) { var metadata = function.Metadata; if (metadata.Parameters.Count > 0) @@ -47,10 +46,17 @@ public static FunctionToolDefinition ToToolDefinition(this KernelFunction functi required, }; - return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName, delimiter), function.Description, BinaryData.FromObjectAsJson(spec)); + return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName)) + { + Description = function.Description, + Parameters = BinaryData.FromObjectAsJson(spec) + }; } - return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName, delimiter), function.Description); + return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName)) + { + Description = function.Description + }; } private static string ConvertType(Type? 
type) diff --git a/dotnet/src/Agents/OpenAI/Azure/AddHeaderRequestPolicy.cs b/dotnet/src/Agents/OpenAI/Internal/AddHeaderRequestPolicy.cs similarity index 87% rename from dotnet/src/Agents/OpenAI/Azure/AddHeaderRequestPolicy.cs rename to dotnet/src/Agents/OpenAI/Internal/AddHeaderRequestPolicy.cs index 084e533fe757..d017fb403f23 100644 --- a/dotnet/src/Agents/OpenAI/Azure/AddHeaderRequestPolicy.cs +++ b/dotnet/src/Agents/OpenAI/Internal/AddHeaderRequestPolicy.cs @@ -2,7 +2,7 @@ using Azure.Core; using Azure.Core.Pipeline; -namespace Microsoft.SemanticKernel.Agents.OpenAI.Azure; +namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal; /// /// Helper class to inject headers into Azure SDK HTTP pipeline diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantMessageFactory.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantMessageFactory.cs new file mode 100644 index 000000000000..4c31a1bcf291 --- /dev/null +++ b/dotnet/src/Agents/OpenAI/Internal/AssistantMessageFactory.cs @@ -0,0 +1,64 @@ +// Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using OpenAI.Assistants; + +namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal; + +/// +/// Factory for creating based on . +/// Also able to produce . +/// +/// +/// Improves testability. +/// +internal static class AssistantMessageFactory +{ + /// + /// Produces based on . + /// + /// The message content. + public static MessageCreationOptions CreateOptions(ChatMessageContent message) + { + MessageCreationOptions options = new(); + + if (message.Metadata != null) + { + foreach (var metadata in message.Metadata) + { + options.Metadata.Add(metadata.Key, metadata.Value?.ToString() ?? string.Empty); + } + } + + return options; + } + + /// + /// Translates into enumeration of . + /// + /// The message content. 
+ public static IEnumerable GetMessageContents(ChatMessageContent message) + { + foreach (KernelContent content in message.Items) + { + if (content is TextContent textContent) + { + yield return MessageContent.FromText(content.ToString()); + } + else if (content is ImageContent imageContent) + { + if (imageContent.Uri != null) + { + yield return MessageContent.FromImageUrl(imageContent.Uri); + } + else if (string.IsNullOrWhiteSpace(imageContent.DataUri)) + { + yield return MessageContent.FromImageUrl(new(imageContent.DataUri!)); + } + } + else if (content is FileReferenceContent fileContent) + { + yield return MessageContent.FromImageFileId(fileContent.FileId); + } + } + } +} diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs new file mode 100644 index 000000000000..981c646254af --- /dev/null +++ b/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs @@ -0,0 +1,53 @@ +// Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using OpenAI.Assistants; + +namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal; + +/// +/// Factory for creating definition. +/// +/// +/// Improves testability. +/// +internal static class AssistantRunOptionsFactory +{ + /// + /// Produce by reconciling and . + /// + /// The assistant definition + /// The run specific options + public static RunCreationOptions GenerateOptions(OpenAIAssistantDefinition definition, OpenAIAssistantInvocationOptions? invocationOptions) + { + int? 
truncationMessageCount = ResolveExecutionSetting(invocationOptions?.TruncationMessageCount, definition.ExecutionOptions?.TruncationMessageCount); + + RunCreationOptions options = + new() + { + MaxCompletionTokens = ResolveExecutionSetting(invocationOptions?.MaxCompletionTokens, definition.ExecutionOptions?.MaxCompletionTokens), + MaxPromptTokens = ResolveExecutionSetting(invocationOptions?.MaxPromptTokens, definition.ExecutionOptions?.MaxPromptTokens), + ModelOverride = invocationOptions?.ModelName, + NucleusSamplingFactor = ResolveExecutionSetting(invocationOptions?.TopP, definition.TopP), + ParallelToolCallsEnabled = ResolveExecutionSetting(invocationOptions?.ParallelToolCallsEnabled, definition.ExecutionOptions?.ParallelToolCallsEnabled), + ResponseFormat = ResolveExecutionSetting(invocationOptions?.EnableJsonResponse, definition.EnableJsonResponse) ?? false ? AssistantResponseFormat.JsonObject : null, + Temperature = ResolveExecutionSetting(invocationOptions?.Temperature, definition.Temperature), + TruncationStrategy = truncationMessageCount.HasValue ? RunTruncationStrategy.CreateLastMessagesStrategy(truncationMessageCount.Value) : null, + }; + + if (invocationOptions?.Metadata != null) + { + foreach (var metadata in invocationOptions.Metadata) + { + options.Metadata.Add(metadata.Key, metadata.Value ?? string.Empty); + } + } + + return options; + } + + private static TValue? ResolveExecutionSetting(TValue? setting, TValue? agentSetting) where TValue : struct + => + setting.HasValue && (!agentSetting.HasValue || !EqualityComparer.Default.Equals(setting.Value, agentSetting.Value)) ? 
+ setting.Value : + null; +} diff --git a/dotnet/src/Agents/OpenAI/AssistantThreadActions.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs similarity index 70% rename from dotnet/src/Agents/OpenAI/AssistantThreadActions.cs rename to dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs index cfc7a905cfc7..06c49f7a1905 100644 --- a/dotnet/src/Agents/OpenAI/AssistantThreadActions.cs +++ b/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs @@ -1,4 +1,5 @@ // Copyright (c) Microsoft. All rights reserved. +using System.ClientModel; using System.Collections.Generic; using System.Linq; using System.Net; @@ -7,19 +8,18 @@ using System.Threading; using System.Threading.Tasks; using Azure; -using Azure.AI.OpenAI.Assistants; using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI; +using OpenAI.Assistants; -namespace Microsoft.SemanticKernel.Agents.OpenAI; +namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal; /// /// Actions associated with an Open Assistant thread. /// internal static class AssistantThreadActions { - private const string FunctionDelimiter = "-"; - private static readonly HashSet s_pollingStatuses = [ RunStatus.Queued, @@ -34,6 +34,45 @@ internal static class AssistantThreadActions RunStatus.Cancelled, ]; + /// + /// Create a new assistant thread. + /// + /// The assistant client + /// The options for creating the thread + /// The to monitor for cancellation requests. The default is . + /// The thread identifier + public static async Task CreateThreadAsync(AssistantClient client, OpenAIThreadCreationOptions? 
options, CancellationToken cancellationToken = default) + { + ThreadCreationOptions createOptions = + new() + { + ToolResources = AssistantToolResourcesFactory.GenerateToolResources(options?.VectorStoreId, options?.CodeInterpreterFileIds), + }; + + if (options?.Messages is not null) + { + foreach (ChatMessageContent message in options.Messages) + { + ThreadInitializationMessage threadMessage = new( + role: message.Role == AuthorRole.User ? MessageRole.User : MessageRole.Assistant, + content: AssistantMessageFactory.GetMessageContents(message)); + createOptions.InitialMessages.Add(threadMessage); + } + } + + if (options?.Metadata != null) + { + foreach (KeyValuePair item in options.Metadata) + { + createOptions.Metadata[item.Key] = item.Value; + } + } + + AssistantThread thread = await client.CreateThreadAsync(createOptions, cancellationToken).ConfigureAwait(false); + + return thread.Id; + } + /// /// Create a message in the specified thread. /// @@ -42,18 +81,21 @@ internal static class AssistantThreadActions /// The message to add /// The to monitor for cancellation requests. The default is . /// if a system message is present, without taking any other action - public static async Task CreateMessageAsync(AssistantsClient client, string threadId, ChatMessageContent message, CancellationToken cancellationToken) + public static async Task CreateMessageAsync(AssistantClient client, string threadId, ChatMessageContent message, CancellationToken cancellationToken) { if (message.Items.Any(i => i is FunctionCallContent)) { return; } + MessageCreationOptions options = AssistantMessageFactory.CreateOptions(message); + await client.CreateMessageAsync( threadId, - message.Role.ToMessageRole(), - message.Content, - cancellationToken: cancellationToken).ConfigureAwait(false); + message.Role == AuthorRole.User ? 
MessageRole.User : MessageRole.Assistant, + AssistantMessageFactory.GetMessageContents(message), + options, + cancellationToken).ConfigureAwait(false); } /// @@ -63,18 +105,16 @@ await client.CreateMessageAsync( /// The thread identifier /// The to monitor for cancellation requests. The default is . /// Asynchronous enumeration of messages. - public static async IAsyncEnumerable GetMessagesAsync(AssistantsClient client, string threadId, [EnumeratorCancellation] CancellationToken cancellationToken) + public static async IAsyncEnumerable GetMessagesAsync(AssistantClient client, string threadId, [EnumeratorCancellation] CancellationToken cancellationToken) { Dictionary agentNames = []; // Cache agent names by their identifier - PageableList messages; - - string? lastId = null; - do + await foreach (PageResult page in client.GetMessagesAsync(threadId, new() { Order = ListOrder.NewestFirst }, cancellationToken).ConfigureAwait(false)) { - messages = await client.GetMessagesAsync(threadId, limit: 100, ListSortOrder.Descending, after: lastId, null, cancellationToken).ConfigureAwait(false); - foreach (ThreadMessage message in messages) + foreach (var message in page.Values) { + AuthorRole role = new(message.Role.ToString()); + string? assistantName = null; if (!string.IsNullOrWhiteSpace(message.AssistantId) && !agentNames.TryGetValue(message.AssistantId, out assistantName)) @@ -94,20 +134,19 @@ public static async IAsyncEnumerable GetMessagesAsync(Assist { yield return content; } - - lastId = message.Id; } } - while (messages.HasMore); } /// /// Invoke the assistant on the specified thread. + /// In the enumeration returned by this method, a message is considered visible if it is intended to be displayed to the user. + /// Example of a non-visible message is function-content for functions that are automatically executed. /// /// The assistant agent to interact with the thread. 
/// The assistant client /// The thread identifier - /// Config to utilize when polling for run state. + /// Options to utilize for the invocation /// The logger to utilize (might be agent or channel scoped) /// The plugins and other state. /// Optional arguments to pass to the agents's invocation, including any . @@ -118,9 +157,9 @@ public static async IAsyncEnumerable GetMessagesAsync(Assist /// public static async IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync( OpenAIAssistantAgent agent, - AssistantsClient client, + AssistantClient client, string threadId, - OpenAIAssistantConfiguration.PollingConfiguration pollingConfiguration, + OpenAIAssistantInvocationOptions? invocationOptions, ILogger logger, Kernel kernel, KernelArguments? arguments, @@ -131,19 +170,15 @@ public static async IAsyncEnumerable GetMessagesAsync(Assist throw new KernelException($"Agent Failure - {nameof(OpenAIAssistantAgent)} agent is deleted: {agent.Id}."); } - ToolDefinition[]? tools = [.. agent.Tools, .. kernel.Plugins.SelectMany(p => p.Select(f => f.ToToolDefinition(p.Name, FunctionDelimiter)))]; - logger.LogOpenAIAssistantCreatingRun(nameof(InvokeAsync), threadId); - CreateRunOptions options = - new(agent.Id) - { - OverrideInstructions = agent.Instructions, - OverrideTools = tools, - }; + ToolDefinition[]? tools = [.. agent.Tools, .. 
kernel.Plugins.SelectMany(p => p.Select(f => f.ToToolDefinition(p.Name)))]; + + RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(agent.Definition, invocationOptions); + + options.ToolsOverride.AddRange(tools); - // Create run - ThreadRun run = await client.CreateRunAsync(threadId, options, cancellationToken).ConfigureAwait(false); + ThreadRun run = await client.CreateRunAsync(threadId, agent.Id, options, cancellationToken).ConfigureAwait(false); logger.LogOpenAIAssistantCreatedRun(nameof(InvokeAsync), run.Id, threadId); @@ -154,7 +189,7 @@ public static async IAsyncEnumerable GetMessagesAsync(Assist do { // Poll run and steps until actionable - PageableList steps = await PollRunStatusAsync().ConfigureAwait(false); + await PollRunStatusAsync().ConfigureAwait(false); // Is in terminal state? if (s_terminalStatuses.Contains(run.Status)) @@ -162,13 +197,19 @@ public static async IAsyncEnumerable GetMessagesAsync(Assist throw new KernelException($"Agent Failure - Run terminated: {run.Status} [{run.Id}]: {run.LastError?.Message ?? "Unknown"}"); } + List steps = []; + await foreach (var page in client.GetRunStepsAsync(run).ConfigureAwait(false)) + { + steps.AddRange(page.Values); + }; + // Is tool action required? if (run.Status == RunStatus.RequiresAction) { logger.LogOpenAIAssistantProcessingRunSteps(nameof(InvokeAsync), run.Id, threadId); // Execute functions in parallel and post results at once. 
- FunctionCallContent[] activeFunctionSteps = steps.Data.SelectMany(step => ParseFunctionStep(agent, step)).ToArray(); + FunctionCallContent[] activeFunctionSteps = steps.SelectMany(step => ParseFunctionStep(agent, step)).ToArray(); if (activeFunctionSteps.Length > 0) { // Emit function-call content @@ -183,7 +224,7 @@ public static async IAsyncEnumerable GetMessagesAsync(Assist // Process tool output ToolOutput[] toolOutputs = GenerateToolOutputs(functionResults); - await client.SubmitToolOutputsToRunAsync(run, toolOutputs, cancellationToken).ConfigureAwait(false); + await client.SubmitToolOutputsToRunAsync(threadId, run.Id, toolOutputs, cancellationToken).ConfigureAwait(false); } logger.LogOpenAIAssistantProcessedRunSteps(nameof(InvokeAsync), activeFunctionSteps.Length, run.Id, threadId); @@ -200,26 +241,24 @@ public static async IAsyncEnumerable GetMessagesAsync(Assist int messageCount = 0; foreach (RunStep completedStep in completedStepsToProcess) { - if (completedStep.Type.Equals(RunStepType.ToolCalls)) + if (completedStep.Type == RunStepType.ToolCalls) { - RunStepToolCallDetails toolCallDetails = (RunStepToolCallDetails)completedStep.StepDetails; - - foreach (RunStepToolCall toolCall in toolCallDetails.ToolCalls) + foreach (RunStepToolCall toolCall in completedStep.Details.ToolCalls) { bool isVisible = false; ChatMessageContent? 
content = null; // Process code-interpreter content - if (toolCall is RunStepCodeInterpreterToolCall toolCodeInterpreter) + if (toolCall.ToolKind == RunStepToolCallKind.CodeInterpreter) { - content = GenerateCodeInterpreterContent(agent.GetName(), toolCodeInterpreter); + content = GenerateCodeInterpreterContent(agent.GetName(), toolCall.CodeInterpreterInput); isVisible = true; } // Process function result content - else if (toolCall is RunStepFunctionToolCall toolFunction) + else if (toolCall.ToolKind == RunStepToolCallKind.Function) { - FunctionCallContent functionStep = functionSteps[toolFunction.Id]; // Function step always captured on invocation - content = GenerateFunctionResultContent(agent.GetName(), functionStep, toolFunction.Output); + FunctionCallContent functionStep = functionSteps[toolCall.ToolCallId]; // Function step always captured on invocation + content = GenerateFunctionResultContent(agent.GetName(), functionStep, toolCall.FunctionOutput); } if (content is not null) @@ -230,12 +269,10 @@ public static async IAsyncEnumerable GetMessagesAsync(Assist } } } - else if (completedStep.Type.Equals(RunStepType.MessageCreation)) + else if (completedStep.Type == RunStepType.MessageCreation) { - RunStepMessageCreationDetails messageCreationDetails = (RunStepMessageCreationDetails)completedStep.StepDetails; - // Retrieve the message - ThreadMessage? message = await RetrieveMessageAsync(messageCreationDetails, cancellationToken).ConfigureAwait(false); + ThreadMessage? message = await RetrieveMessageAsync(completedStep.Details.CreatedMessageId, cancellationToken).ConfigureAwait(false); if (message is not null) { @@ -260,7 +297,7 @@ public static async IAsyncEnumerable GetMessagesAsync(Assist logger.LogOpenAIAssistantCompletedRun(nameof(InvokeAsync), run.Id, threadId); // Local function to assist in run polling (participates in method closure). 
- async Task> PollRunStatusAsync() + async Task PollRunStatusAsync() { logger.LogOpenAIAssistantPollingRunStatus(nameof(PollRunStatusAsync), run.Id, threadId); @@ -269,7 +306,7 @@ async Task> PollRunStatusAsync() do { // Reduce polling frequency after a couple attempts - await Task.Delay(count >= 2 ? pollingConfiguration.RunPollingInterval : pollingConfiguration.RunPollingBackoff, cancellationToken).ConfigureAwait(false); + await Task.Delay(agent.PollingOptions.GetPollingInterval(count), cancellationToken).ConfigureAwait(false); ++count; #pragma warning disable CA1031 // Do not catch general exception types @@ -286,39 +323,37 @@ async Task> PollRunStatusAsync() while (s_pollingStatuses.Contains(run.Status)); logger.LogOpenAIAssistantPolledRunStatus(nameof(PollRunStatusAsync), run.Status, run.Id, threadId); - - return await client.GetRunStepsAsync(run, cancellationToken: cancellationToken).ConfigureAwait(false); } // Local function to capture kernel function state for further processing (participates in method closure). 
IEnumerable ParseFunctionStep(OpenAIAssistantAgent agent, RunStep step) { - if (step.Status == RunStepStatus.InProgress && step.StepDetails is RunStepToolCallDetails callDetails) + if (step.Status == RunStepStatus.InProgress && step.Type == RunStepType.ToolCalls) { - foreach (RunStepFunctionToolCall toolCall in callDetails.ToolCalls.OfType()) + foreach (RunStepToolCall toolCall in step.Details.ToolCalls) { - var nameParts = FunctionName.Parse(toolCall.Name, FunctionDelimiter); + var nameParts = FunctionName.Parse(toolCall.FunctionName); KernelArguments functionArguments = []; - if (!string.IsNullOrWhiteSpace(toolCall.Arguments)) + if (!string.IsNullOrWhiteSpace(toolCall.FunctionArguments)) { - Dictionary arguments = JsonSerializer.Deserialize>(toolCall.Arguments)!; + Dictionary arguments = JsonSerializer.Deserialize>(toolCall.FunctionArguments)!; foreach (var argumentKvp in arguments) { functionArguments[argumentKvp.Key] = argumentKvp.Value.ToString(); } } - var content = new FunctionCallContent(nameParts.Name, nameParts.PluginName, toolCall.Id, functionArguments); + var content = new FunctionCallContent(nameParts.Name, nameParts.PluginName, toolCall.ToolCallId, functionArguments); - functionSteps.Add(toolCall.Id, content); + functionSteps.Add(toolCall.ToolCallId, content); yield return content; } } } - async Task RetrieveMessageAsync(RunStepMessageCreationDetails detail, CancellationToken cancellationToken) + async Task RetrieveMessageAsync(string messageId, CancellationToken cancellationToken) { ThreadMessage? 
message = null; @@ -328,7 +363,7 @@ IEnumerable ParseFunctionStep(OpenAIAssistantAgent agent, R { try { - message = await client.GetMessageAsync(threadId, detail.MessageCreation.MessageId, cancellationToken).ConfigureAwait(false); + message = await client.GetMessageAsync(threadId, messageId, cancellationToken).ConfigureAwait(false); } catch (RequestFailedException exception) { @@ -340,7 +375,7 @@ IEnumerable ParseFunctionStep(OpenAIAssistantAgent agent, R if (retry) { - await Task.Delay(pollingConfiguration.MessageSynchronizationDelay, cancellationToken).ConfigureAwait(false); + await Task.Delay(agent.PollingOptions.MessageSynchronizationDelay, cancellationToken).ConfigureAwait(false); } ++count; @@ -361,57 +396,58 @@ private static ChatMessageContent GenerateMessageContent(string? assistantName, AuthorName = assistantName, }; - foreach (MessageContent itemContent in message.ContentItems) + foreach (MessageContent itemContent in message.Content) { // Process text content - if (itemContent is MessageTextContent contentMessage) + if (!string.IsNullOrEmpty(itemContent.Text)) { - content.Items.Add(new TextContent(contentMessage.Text.Trim())); + content.Items.Add(new TextContent(itemContent.Text)); - foreach (MessageTextAnnotation annotation in contentMessage.Annotations) + foreach (TextAnnotation annotation in itemContent.TextAnnotations) { content.Items.Add(GenerateAnnotationContent(annotation)); } } // Process image content - else if (itemContent is MessageImageFileContent contentImage) + else if (itemContent.ImageFileId != null) { - content.Items.Add(new FileReferenceContent(contentImage.FileId)); + content.Items.Add(new FileReferenceContent(itemContent.ImageFileId)); } } return content; } - private static AnnotationContent GenerateAnnotationContent(MessageTextAnnotation annotation) + private static AnnotationContent GenerateAnnotationContent(TextAnnotation annotation) { string? 
fileId = null; - if (annotation is MessageTextFileCitationAnnotation citationAnnotation) + + if (!string.IsNullOrEmpty(annotation.OutputFileId)) { - fileId = citationAnnotation.FileId; + fileId = annotation.OutputFileId; } - else if (annotation is MessageTextFilePathAnnotation pathAnnotation) + else if (!string.IsNullOrEmpty(annotation.InputFileId)) { - fileId = pathAnnotation.FileId; + fileId = annotation.InputFileId; } return new() { - Quote = annotation.Text, + Quote = annotation.TextToReplace, StartIndex = annotation.StartIndex, EndIndex = annotation.EndIndex, FileId = fileId, }; } - private static ChatMessageContent GenerateCodeInterpreterContent(string agentName, RunStepCodeInterpreterToolCall contentCodeInterpreter) + private static ChatMessageContent GenerateCodeInterpreterContent(string agentName, string pythonCode) { return new ChatMessageContent( AuthorRole.Assistant, [ - new TextContent(contentCodeInterpreter.Input) + new TextContent(pythonCode) ]) { AuthorName = agentName, diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantToolResourcesFactory.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantToolResourcesFactory.cs new file mode 100644 index 000000000000..6874e1d21755 --- /dev/null +++ b/dotnet/src/Agents/OpenAI/Internal/AssistantToolResourcesFactory.cs @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using OpenAI.Assistants; + +namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal; + +/// +/// Factory for creating definition. +/// +/// +/// Improves testability. +/// +internal static class AssistantToolResourcesFactory +{ + /// + /// Produces a definition based on the provided parameters. + /// + /// An optional vector-store-id for the 'file_search' tool + /// An optionallist of file-identifiers for the 'code_interpreter' tool. + public static ToolResources? GenerateToolResources(string? vectorStoreId, IReadOnlyList? 
codeInterpreterFileIds) + { + bool hasVectorStore = !string.IsNullOrWhiteSpace(vectorStoreId); + bool hasCodeInterpreterFiles = (codeInterpreterFileIds?.Count ?? 0) > 0; + + ToolResources? toolResources = null; + + if (hasVectorStore || hasCodeInterpreterFiles) + { + toolResources = + new ToolResources() + { + FileSearch = + hasVectorStore ? + new FileSearchToolResources() + { + VectorStoreIds = [vectorStoreId!], + } : + null, + CodeInterpreter = + hasCodeInterpreterFiles ? + new CodeInterpreterToolResources() + { + FileIds = (IList)codeInterpreterFileIds!, + } : + null, + }; + } + + return toolResources; + } +} diff --git a/dotnet/src/Agents/OpenAI/Logging/AssistantThreadActionsLogMessages.cs b/dotnet/src/Agents/OpenAI/Logging/AssistantThreadActionsLogMessages.cs index bc7c8d9919f0..3a39c314c5c3 100644 --- a/dotnet/src/Agents/OpenAI/Logging/AssistantThreadActionsLogMessages.cs +++ b/dotnet/src/Agents/OpenAI/Logging/AssistantThreadActionsLogMessages.cs @@ -1,7 +1,8 @@ // Copyright (c) Microsoft. All rights reserved. using System.Diagnostics.CodeAnalysis; -using Azure.AI.OpenAI.Assistants; using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Agents.OpenAI.Internal; +using OpenAI.Assistants; namespace Microsoft.SemanticKernel.Agents.OpenAI; diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs index 6746c6c50d9a..28c8dba9e3a8 100644 --- a/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs +++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs @@ -1,17 +1,16 @@ // Copyright (c) Microsoft. All rights reserved. 
-using System; using System.Collections.Generic; +using System.IO; using System.Linq; using System.Runtime.CompilerServices; +using System.Text.Json; using System.Threading; using System.Threading.Tasks; -using Azure; -using Azure.AI.OpenAI.Assistants; -using Azure.Core; -using Azure.Core.Pipeline; using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Agents.OpenAI.Azure; -using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.Agents.OpenAI.Internal; +using OpenAI; +using OpenAI.Assistants; +using OpenAI.Files; namespace Microsoft.SemanticKernel.Agents.OpenAI; @@ -25,9 +24,12 @@ public sealed class OpenAIAssistantAgent : KernelAgent /// public const string CodeInterpreterMetadataKey = "code"; + internal const string OptionsMetadataKey = "__run_options"; + + private readonly OpenAIClientProvider _provider; private readonly Assistant _assistant; - private readonly AssistantsClient _client; - private readonly OpenAIAssistantConfiguration _config; + private readonly AssistantClient _client; + private readonly string[] _channelKeys; /// /// Optional arguments for the agent. @@ -38,57 +40,55 @@ public sealed class OpenAIAssistantAgent : KernelAgent public KernelArguments? Arguments { get; init; } /// - /// A list of previously uploaded file IDs to attach to the assistant. + /// The assistant definition. /// - public IReadOnlyList FileIds => this._assistant.FileIds; + public OpenAIAssistantDefinition Definition { get; private init; } /// - /// A set of up to 16 key/value pairs that can be attached to an agent, used for - /// storing additional information about that object in a structured format.Keys - /// may be up to 64 characters in length and values may be up to 512 characters in length. + /// Set when the assistant has been deleted via . + /// An assistant removed by other means will result in an exception when invoked. 
/// - public IReadOnlyDictionary Metadata => this._assistant.Metadata; + public bool IsDeleted { get; private set; } /// - /// Expose predefined tools. + /// Defines polling behavior for run processing /// - internal IReadOnlyList Tools => this._assistant.Tools; + public RunPollingOptions PollingOptions { get; } = new(); /// - /// Set when the assistant has been deleted via . - /// An assistant removed by other means will result in an exception when invoked. + /// Expose predefined tools for run-processing. /// - public bool IsDeleted { get; private set; } + internal IReadOnlyList Tools => this._assistant.Tools; /// /// Define a new . /// /// The containing services, plugins, and other state for use throughout the operation. - /// Configuration for accessing the Assistants API service, such as the api-key. + /// OpenAI client provider for accessing the API service. /// The assistant definition. /// The to monitor for cancellation requests. The default is . /// An instance public static async Task CreateAsync( Kernel kernel, - OpenAIAssistantConfiguration config, + OpenAIClientProvider clientProvider, OpenAIAssistantDefinition definition, CancellationToken cancellationToken = default) { // Validate input Verify.NotNull(kernel, nameof(kernel)); - Verify.NotNull(config, nameof(config)); + Verify.NotNull(clientProvider, nameof(clientProvider)); Verify.NotNull(definition, nameof(definition)); // Create the client - AssistantsClient client = CreateClient(config); + AssistantClient client = CreateClient(clientProvider); // Create the assistant AssistantCreationOptions assistantCreationOptions = CreateAssistantCreationOptions(definition); - Assistant model = await client.CreateAssistantAsync(assistantCreationOptions, cancellationToken).ConfigureAwait(false); + Assistant model = await client.CreateAssistantAsync(definition.ModelId, assistantCreationOptions, cancellationToken).ConfigureAwait(false); // Instantiate the agent return - new OpenAIAssistantAgent(client, model, 
config) + new OpenAIAssistantAgent(model, clientProvider, client) { Kernel = kernel, }; @@ -97,79 +97,49 @@ public static async Task CreateAsync( /// /// Retrieve a list of assistant definitions: . /// - /// Configuration for accessing the Assistants API service, such as the api-key. - /// The maximum number of assistant definitions to retrieve - /// The identifier of the assistant beyond which to begin selection. + /// Configuration for accessing the API service. /// The to monitor for cancellation requests. The default is . /// An list of objects. public static async IAsyncEnumerable ListDefinitionsAsync( - OpenAIAssistantConfiguration config, - int maxResults = 100, - string? lastId = null, + OpenAIClientProvider provider, [EnumeratorCancellation] CancellationToken cancellationToken = default) { // Create the client - AssistantsClient client = CreateClient(config); - - // Retrieve the assistants - PageableList assistants; + AssistantClient client = CreateClient(provider); - int resultCount = 0; - do + // Query and enumerate assistant definitions + await foreach (var page in client.GetAssistantsAsync(new AssistantCollectionOptions() { Order = ListOrder.NewestFirst }, cancellationToken).ConfigureAwait(false)) { - assistants = await client.GetAssistantsAsync(limit: Math.Min(maxResults, 100), ListSortOrder.Descending, after: lastId, cancellationToken: cancellationToken).ConfigureAwait(false); - foreach (Assistant assistant in assistants) + foreach (Assistant model in page.Values) { - if (resultCount >= maxResults) - { - break; - } - - resultCount++; - - yield return - new() - { - Id = assistant.Id, - Name = assistant.Name, - Description = assistant.Description, - Instructions = assistant.Instructions, - EnableCodeInterpreter = assistant.Tools.Any(t => t is CodeInterpreterToolDefinition), - EnableRetrieval = assistant.Tools.Any(t => t is RetrievalToolDefinition), - FileIds = assistant.FileIds, - Metadata = assistant.Metadata, - ModelId = assistant.Model, - }; - - 
lastId = assistant.Id; + yield return CreateAssistantDefinition(model); } } - while (assistants.HasMore && resultCount < maxResults); } /// /// Retrieve a by identifier. /// /// The containing services, plugins, and other state for use throughout the operation. - /// Configuration for accessing the Assistants API service, such as the api-key. + /// Configuration for accessing the API service. /// The agent identifier /// The to monitor for cancellation requests. The default is . /// An instance public static async Task RetrieveAsync( Kernel kernel, - OpenAIAssistantConfiguration config, + OpenAIClientProvider provider, string id, CancellationToken cancellationToken = default) { // Create the client - AssistantsClient client = CreateClient(config); + AssistantClient client = CreateClient(provider); // Retrieve the assistant Assistant model = await client.GetAssistantAsync(id, cancellationToken).ConfigureAwait(false); // Instantiate the agent return - new OpenAIAssistantAgent(client, model, config) + new OpenAIAssistantAgent(model, provider, client) { Kernel = kernel, }; @@ -180,12 +150,17 @@ public static async Task RetrieveAsync( /// /// The to monitor for cancellation requests. The default is . /// The thread identifier - public async Task CreateThreadAsync(CancellationToken cancellationToken = default) - { - AssistantThread thread = await this._client.CreateThreadAsync(cancellationToken).ConfigureAwait(false); + public Task CreateThreadAsync(CancellationToken cancellationToken = default) + => AssistantThreadActions.CreateThreadAsync(this._client, options: null, cancellationToken); - return thread.Id; - } + /// + /// Create a new assistant thread. + /// + /// The options for creating the thread + /// The to monitor for cancellation requests. The default is . + /// The thread identifier + public Task CreateThreadAsync(OpenAIThreadCreationOptions? 
options, CancellationToken cancellationToken = default) + => AssistantThreadActions.CreateThreadAsync(this._client, options, cancellationToken); /// /// Create a new assistant thread. @@ -203,6 +178,25 @@ public async Task DeleteThreadAsync( return await this._client.DeleteThreadAsync(threadId, cancellationToken).ConfigureAwait(false); } + /// + /// Uploads an file for the purpose of using with assistant. + /// + /// The content to upload + /// The name of the file + /// The to monitor for cancellation requests. The default is . + /// The file identifier + /// + /// Use the directly for more advanced file operations. + /// + public async Task UploadFileAsync(Stream stream, string name, CancellationToken cancellationToken = default) + { + FileClient client = this._provider.Client.GetFileClient(); + + OpenAIFileInfo fileInfo = await client.UploadFileAsync(stream, name, FileUploadPurpose.Assistants, cancellationToken).ConfigureAwait(false); + + return fileInfo.Id; + } + /// /// Adds a message to the specified thread. /// @@ -232,7 +226,7 @@ public IAsyncEnumerable GetThreadMessagesAsync(string thread /// /// Delete the assistant definition. /// - /// + /// The to monitor for cancellation requests. The default is . /// True if assistant definition has been deleted /// /// Assistant based agent will not be useable after deletion. @@ -258,8 +252,28 @@ public async Task DeleteAsync(CancellationToken cancellationToken = defaul /// /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility. /// + public IAsyncEnumerable InvokeAsync( + string threadId, + KernelArguments? arguments = null, + Kernel? kernel = null, + CancellationToken cancellationToken = default) + => this.InvokeAsync(threadId, options: null, arguments, kernel, cancellationToken); + + /// + /// Invoke the assistant on the specified thread. 
+ /// + /// The thread identifier + /// Optional invocation options + /// Optional arguments to pass to the agents's invocation, including any . + /// The containing services, plugins, and other state for use by the agent. + /// The to monitor for cancellation requests. The default is . + /// Asynchronous enumeration of messages. + /// + /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility. + /// public async IAsyncEnumerable InvokeAsync( string threadId, + OpenAIAssistantInvocationOptions? options, KernelArguments? arguments = null, Kernel? kernel = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) @@ -269,7 +283,7 @@ public async IAsyncEnumerable InvokeAsync( kernel ??= this.Kernel; arguments ??= this.Arguments; - await foreach ((bool isVisible, ChatMessageContent message) in AssistantThreadActions.InvokeAsync(this, this._client, threadId, this._config.Polling, this.Logger, kernel, arguments, cancellationToken).ConfigureAwait(false)) + await foreach ((bool isVisible, ChatMessageContent message) in AssistantThreadActions.InvokeAsync(this, this._client, threadId, options, this.Logger, kernel, arguments, cancellationToken).ConfigureAwait(false)) { if (isVisible) { @@ -282,29 +296,11 @@ public async IAsyncEnumerable InvokeAsync( protected override IEnumerable GetChannelKeys() { // Distinguish from other channel types. - yield return typeof(AgentChannel).FullName!; + yield return typeof(OpenAIAssistantChannel).FullName!; - // Distinguish between different Azure OpenAI endpoints or OpenAI services. - yield return this._config.Endpoint ?? "openai"; - - // Distinguish between different API versioning. - if (this._config.Version.HasValue) + foreach (string key in this._channelKeys) { - yield return this._config.Version.ToString()!; - } - - // Custom client receives dedicated channel. 
- if (this._config.HttpClient is not null) - { - if (this._config.HttpClient.BaseAddress is not null) - { - yield return this._config.HttpClient.BaseAddress.AbsoluteUri; - } - - foreach (string header in this._config.HttpClient.DefaultRequestHeaders.SelectMany(h => h.Value)) - { - yield return header; - } + yield return key; } } @@ -313,10 +309,12 @@ protected override async Task CreateChannelAsync(CancellationToken { this.Logger.LogOpenAIAssistantAgentCreatingChannel(nameof(CreateChannelAsync), nameof(OpenAIAssistantChannel)); - AssistantThread thread = await this._client.CreateThreadAsync(cancellationToken).ConfigureAwait(false); + AssistantThread thread = await this._client.CreateThreadAsync(options: null, cancellationToken).ConfigureAwait(false); + + this.Logger.LogInformation("[{MethodName}] Created assistant thread: {ThreadId}", nameof(CreateChannelAsync), thread.Id); OpenAIAssistantChannel channel = - new(this._client, thread.Id, this._config.Polling) + new(this._client, thread.Id) { Logger = this.LoggerFactory.CreateLogger() }; @@ -338,13 +336,16 @@ internal void ThrowIfDeleted() /// Initializes a new instance of the class. /// private OpenAIAssistantAgent( - AssistantsClient client, Assistant model, - OpenAIAssistantConfiguration config) + OpenAIClientProvider provider, + AssistantClient client) { + this._provider = provider; this._assistant = model; - this._client = client; - this._config = config; + this._client = provider.Client.GetAssistantClient(); + this._channelKeys = provider.ConfigurationKeys.ToArray(); + + this.Definition = CreateAssistantDefinition(model); this.Description = this._assistant.Description; this.Id = this._assistant.Id; @@ -352,64 +353,94 @@ private OpenAIAssistantAgent( this.Instructions = this._assistant.Instructions; } + private static OpenAIAssistantDefinition CreateAssistantDefinition(Assistant model) + { + OpenAIAssistantExecutionOptions? options = null; + + if (model.Metadata.TryGetValue(OptionsMetadataKey, out string? 
optionsJson)) + { + options = JsonSerializer.Deserialize(optionsJson); + } + + IReadOnlyList? fileIds = (IReadOnlyList?)model.ToolResources?.CodeInterpreter?.FileIds; + string? vectorStoreId = model.ToolResources?.FileSearch?.VectorStoreIds?.SingleOrDefault(); + bool enableJsonResponse = model.ResponseFormat is not null && model.ResponseFormat == AssistantResponseFormat.JsonObject; + + return new(model.Model) + { + Id = model.Id, + Name = model.Name, + Description = model.Description, + Instructions = model.Instructions, + CodeInterpreterFileIds = fileIds, + EnableCodeInterpreter = model.Tools.Any(t => t is CodeInterpreterToolDefinition), + EnableFileSearch = model.Tools.Any(t => t is FileSearchToolDefinition), + Metadata = model.Metadata, + EnableJsonResponse = enableJsonResponse, + TopP = model.NucleusSamplingFactor, + Temperature = model.Temperature, + VectorStoreId = string.IsNullOrWhiteSpace(vectorStoreId) ? null : vectorStoreId, + ExecutionOptions = options, + }; + } + private static AssistantCreationOptions CreateAssistantCreationOptions(OpenAIAssistantDefinition definition) { AssistantCreationOptions assistantCreationOptions = - new(definition.ModelId) + new() { Description = definition.Description, Instructions = definition.Instructions, Name = definition.Name, - Metadata = definition.Metadata?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value), + ToolResources = + AssistantToolResourcesFactory.GenerateToolResources( + definition.EnableFileSearch ? definition.VectorStoreId : null, + definition.EnableCodeInterpreter ? definition.CodeInterpreterFileIds : null), + ResponseFormat = definition.EnableJsonResponse ? AssistantResponseFormat.JsonObject : AssistantResponseFormat.Auto, + Temperature = definition.Temperature, + NucleusSamplingFactor = definition.TopP, }; - assistantCreationOptions.FileIds.AddRange(definition.FileIds ?? 
[]); + if (definition.Metadata != null) + { + foreach (KeyValuePair item in definition.Metadata) + { + assistantCreationOptions.Metadata[item.Key] = item.Value; + } + } + + if (definition.ExecutionOptions != null) + { + string optionsJson = JsonSerializer.Serialize(definition.ExecutionOptions); + assistantCreationOptions.Metadata[OptionsMetadataKey] = optionsJson; + } if (definition.EnableCodeInterpreter) { - assistantCreationOptions.Tools.Add(new CodeInterpreterToolDefinition()); + assistantCreationOptions.Tools.Add(ToolDefinition.CreateCodeInterpreter()); } - if (definition.EnableRetrieval) + if (definition.EnableFileSearch) { - assistantCreationOptions.Tools.Add(new RetrievalToolDefinition()); + assistantCreationOptions.Tools.Add(ToolDefinition.CreateFileSearch()); } return assistantCreationOptions; } - private static AssistantsClient CreateClient(OpenAIAssistantConfiguration config) + private static AssistantClient CreateClient(OpenAIClientProvider config) { - AssistantsClientOptions clientOptions = CreateClientOptions(config); - - // Inspect options - if (!string.IsNullOrWhiteSpace(config.Endpoint)) - { - // Create client configured for Azure OpenAI, if endpoint definition is present. - return new AssistantsClient(new Uri(config.Endpoint), new AzureKeyCredential(config.ApiKey), clientOptions); - } - - // Otherwise, create client configured for OpenAI. - return new AssistantsClient(config.ApiKey, clientOptions); + return config.Client.GetAssistantClient(); } - private static AssistantsClientOptions CreateClientOptions(OpenAIAssistantConfiguration config) + private static IEnumerable DefineChannelKeys(OpenAIClientProvider config) { - AssistantsClientOptions options = - config.Version.HasValue ? 
- new(config.Version.Value) : - new(); - - options.Diagnostics.ApplicationId = HttpHeaderConstant.Values.UserAgent; - options.AddPolicy(new AddHeaderRequestPolicy(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(OpenAIAssistantAgent))), HttpPipelinePosition.PerCall); + // Distinguish from other channel types. + yield return typeof(AgentChannel).FullName!; - if (config.HttpClient is not null) + foreach (string key in config.ConfigurationKeys) { - options.Transport = new HttpClientTransport(config.HttpClient); - options.RetryPolicy = new RetryPolicy(maxRetries: 0); // Disable Azure SDK retry policy if and only if a custom HttpClient is provided. - options.Retry.NetworkTimeout = Timeout.InfiniteTimeSpan; // Disable Azure SDK default timeout + yield return key; } - - return options; } } diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs index 5b4600e64542..72fbb026b05a 100644 --- a/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs +++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs @@ -2,17 +2,18 @@ using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; -using Azure.AI.OpenAI.Assistants; +using Microsoft.SemanticKernel.Agents.OpenAI.Internal; +using OpenAI.Assistants; namespace Microsoft.SemanticKernel.Agents.OpenAI; /// /// A specialization for use with . 
/// -internal sealed class OpenAIAssistantChannel(AssistantsClient client, string threadId, OpenAIAssistantConfiguration.PollingConfiguration pollingConfiguration) +internal sealed class OpenAIAssistantChannel(AssistantClient client, string threadId) : AgentChannel { - private readonly AssistantsClient _client = client; + private readonly AssistantClient _client = client; private readonly string _threadId = threadId; /// @@ -31,7 +32,7 @@ protected override async Task ReceiveAsync(IEnumerable histo { agent.ThrowIfDeleted(); - return AssistantThreadActions.InvokeAsync(agent, this._client, this._threadId, pollingConfiguration, this.Logger, agent.Kernel, agent.Arguments, cancellationToken); + return AssistantThreadActions.InvokeAsync(agent, this._client, this._threadId, invocationOptions: null, this.Logger, agent.Kernel, agent.Arguments, cancellationToken); } /// diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantConfiguration.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantConfiguration.cs deleted file mode 100644 index aa037266e7d5..000000000000 --- a/dotnet/src/Agents/OpenAI/OpenAIAssistantConfiguration.cs +++ /dev/null @@ -1,91 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. -using System; -using System.Net.Http; -using Azure.AI.OpenAI.Assistants; - -namespace Microsoft.SemanticKernel.Agents.OpenAI; - -/// -/// Configuration to target an OpenAI Assistant API. -/// -public sealed class OpenAIAssistantConfiguration -{ - /// - /// The Assistants API Key. - /// - public string ApiKey { get; } - - /// - /// An optional endpoint if targeting Azure OpenAI Assistants API. - /// - public string? Endpoint { get; } - - /// - /// An optional API version override. - /// - public AssistantsClientOptions.ServiceVersion? Version { get; init; } - - /// - /// Custom for HTTP requests. - /// - public HttpClient? HttpClient { get; init; } - - /// - /// Defineds polling behavior for Assistant API requests. 
- /// - public PollingConfiguration Polling { get; } = new PollingConfiguration(); - - /// - /// Initializes a new instance of the class. - /// - /// The Assistants API Key - /// An optional endpoint if targeting Azure OpenAI Assistants API - public OpenAIAssistantConfiguration(string apiKey, string? endpoint = null) - { - Verify.NotNullOrWhiteSpace(apiKey); - if (!string.IsNullOrWhiteSpace(endpoint)) - { - // Only verify `endpoint` when provided (AzureOAI vs OpenAI) - Verify.StartsWith(endpoint, "https://", "The Azure OpenAI endpoint must start with 'https://'"); - } - - this.ApiKey = apiKey; - this.Endpoint = endpoint; - } - - /// - /// Configuration and defaults associated with polling behavior for Assistant API requests. - /// - public sealed class PollingConfiguration - { - /// - /// The default polling interval when monitoring thread-run status. - /// - public static TimeSpan DefaultPollingInterval { get; } = TimeSpan.FromMilliseconds(500); - - /// - /// The default back-off interval when monitoring thread-run status. - /// - public static TimeSpan DefaultPollingBackoff { get; } = TimeSpan.FromSeconds(1); - - /// - /// The default polling delay when retrying message retrieval due to a 404/NotFound from synchronization lag. - /// - public static TimeSpan DefaultMessageSynchronizationDelay { get; } = TimeSpan.FromMilliseconds(500); - - /// - /// The polling interval when monitoring thread-run status. - /// - public TimeSpan RunPollingInterval { get; set; } = DefaultPollingInterval; - - /// - /// The back-off interval when monitoring thread-run status. - /// - public TimeSpan RunPollingBackoff { get; set; } = DefaultPollingBackoff; - - /// - /// The polling delay when retrying message retrieval due to a 404/NotFound from synchronization lag. 
- /// - public TimeSpan MessageSynchronizationDelay { get; set; } = DefaultMessageSynchronizationDelay; - } -} diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantDefinition.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantDefinition.cs index 3699e07ee1ed..7b7015aa3b4a 100644 --- a/dotnet/src/Agents/OpenAI/OpenAIAssistantDefinition.cs +++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantDefinition.cs @@ -1,57 +1,112 @@ // Copyright (c) Microsoft. All rights reserved. using System.Collections.Generic; +using System.Text.Json.Serialization; namespace Microsoft.SemanticKernel.Agents.OpenAI; /// -/// The data associated with an assistant's definition. +/// Defines an assistant. /// public sealed class OpenAIAssistantDefinition { /// - /// Identifies the AI model (OpenAI) or deployment (AzureOAI) this agent targets. + /// Identifies the AI model targeted by the agent. /// - public string? ModelId { get; init; } + public string ModelId { get; } /// /// The description of the assistant. /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public string? Description { get; init; } /// /// The assistant's unique id. (Ignored on create.) /// - public string? Id { get; init; } + public string Id { get; init; } = string.Empty; /// /// The system instructions for the assistant to use. /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public string? Instructions { get; init; } /// /// The name of the assistant. /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public string? Name { get; init; } + /// + /// Optional file-ids made available to the code_interpreter tool, if enabled. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public IReadOnlyList? CodeInterpreterFileIds { get; init; } + /// /// Set if code-interpreter is enabled. /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public bool EnableCodeInterpreter { get; init; } /// - /// Set if retrieval is enabled. 
+ /// Set if file-search is enabled. /// - public bool EnableRetrieval { get; init; } + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public bool EnableFileSearch { get; init; } /// - /// A list of previously uploaded file IDs to attach to the assistant. + /// Set if json response-format is enabled. /// - public IEnumerable? FileIds { get; init; } + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public bool EnableJsonResponse { get; init; } /// /// A set of up to 16 key/value pairs that can be attached to an agent, used for /// storing additional information about that object in a structured format.Keys /// may be up to 64 characters in length and values may be up to 512 characters in length. /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public IReadOnlyDictionary? Metadata { get; init; } + + /// + /// The sampling temperature to use, between 0 and 2. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public float? Temperature { get; init; } + + /// + /// An alternative to sampling with temperature, called nucleus sampling, where the model + /// considers the results of the tokens with top_p probability mass. + /// So 0.1 means only the tokens comprising the top 10% probability mass are considered. + /// + /// + /// Recommended to set this or temperature but not both. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public float? TopP { get; init; } + + /// + /// Requires file-search if specified. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? VectorStoreId { get; init; } + + /// + /// Default execution options for each agent invocation. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public OpenAIAssistantExecutionOptions? ExecutionOptions { get; init; } + + /// + /// Initializes a new instance of the class. 
+ /// + /// The targeted model + [JsonConstructor] + public OpenAIAssistantDefinition(string modelId) + { + Verify.NotNullOrWhiteSpace(modelId); + + this.ModelId = modelId; + } } diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantExecutionOptions.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantExecutionOptions.cs new file mode 100644 index 000000000000..074b92831c92 --- /dev/null +++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantExecutionOptions.cs @@ -0,0 +1,38 @@ +// Copyright (c) Microsoft. All rights reserved. +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Agents.OpenAI; + +/// +/// Defines assistant execution options for each invocation. +/// +/// +/// These options are persisted as a single entry of the assistant's metadata with key: "__run_options" +/// +public sealed class OpenAIAssistantExecutionOptions +{ + /// + /// The maximum number of completion tokens that may be used over the course of the run. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? MaxCompletionTokens { get; init; } + + /// + /// The maximum number of prompt tokens that may be used over the course of the run. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? MaxPromptTokens { get; init; } + + /// + /// Enables parallel function calling during tool use. Enabled by default. + /// Use this property to disable. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public bool? ParallelToolCallsEnabled { get; init; } + + /// + /// When set, the thread will be truncated to the N most recent messages in the thread. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? 
TruncationMessageCount { get; init; } +} diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantInvocationOptions.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantInvocationOptions.cs new file mode 100644 index 000000000000..0653c83a13e2 --- /dev/null +++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantInvocationOptions.cs @@ -0,0 +1,88 @@ +// Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Agents.OpenAI; + +/// +/// Defines per invocation execution options that override the assistant definition. +/// +/// +/// Not applicable to usage. +/// +public sealed class OpenAIAssistantInvocationOptions +{ + /// + /// Override the AI model targeted by the agent. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? ModelName { get; init; } + + /// + /// Set if code_interpreter tool is enabled. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public bool EnableCodeInterpreter { get; init; } + + /// + /// Set if file_search tool is enabled. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public bool EnableFileSearch { get; init; } + + /// + /// Set if json response-format is enabled. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public bool? EnableJsonResponse { get; init; } + + /// + /// The maximum number of completion tokens that may be used over the course of the run. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? MaxCompletionTokens { get; init; } + + /// + /// The maximum number of prompt tokens that may be used over the course of the run. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? MaxPromptTokens { get; init; } + + /// + /// Enables parallel function calling during tool use. Enabled by default. + /// Use this property to disable. 
+ /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public bool? ParallelToolCallsEnabled { get; init; } + + /// + /// When set, the thread will be truncated to the N most recent messages in the thread. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? TruncationMessageCount { get; init; } + + /// + /// The sampling temperature to use, between 0 and 2. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public float? Temperature { get; init; } + + /// + /// An alternative to sampling with temperature, called nucleus sampling, where the model + /// considers the results of the tokens with top_p probability mass. + /// So 0.1 means only the tokens comprising the top 10% probability mass are considered. + /// + /// + /// Recommended to set this or temperature but not both. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public float? TopP { get; init; } + + /// + /// A set of up to 16 key/value pairs that can be attached to an agent, used for + /// storing additional information about that object in a structured format.Keys + /// may be up to 64 characters in length and values may be up to 512 characters in length. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public IReadOnlyDictionary? Metadata { get; init; } +} diff --git a/dotnet/src/Agents/OpenAI/OpenAIClientProvider.cs b/dotnet/src/Agents/OpenAI/OpenAIClientProvider.cs new file mode 100644 index 000000000000..0b60b66fa84a --- /dev/null +++ b/dotnet/src/Agents/OpenAI/OpenAIClientProvider.cs @@ -0,0 +1,172 @@ +// Copyright (c) Microsoft. All rights reserved. 
+using System; +using System.ClientModel; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Linq; +using System.Net.Http; +using System.Threading; +using Azure.AI.OpenAI; +using Azure.Core; +using Microsoft.SemanticKernel.Http; +using OpenAI; + +namespace Microsoft.SemanticKernel.Agents.OpenAI; + +/// +/// Provides an for use by . +/// +public sealed class OpenAIClientProvider +{ + /// + /// Avoids an exception from OpenAI Client when a custom endpoint is provided without an API key. + /// + private const string SingleSpaceKey = " "; + + /// + /// An active client instance. + /// + public OpenAIClient Client { get; } + + /// + /// Configuration keys required for management. + /// + internal IReadOnlyList ConfigurationKeys { get; } + + private OpenAIClientProvider(OpenAIClient client, IEnumerable keys) + { + this.Client = client; + this.ConfigurationKeys = keys.ToArray(); + } + + /// + /// Produce a based on . + /// + /// The API key + /// The service endpoint + /// Custom for HTTP requests. + public static OpenAIClientProvider ForAzureOpenAI(ApiKeyCredential apiKey, Uri endpoint, HttpClient? httpClient = null) + { + Verify.NotNull(apiKey, nameof(apiKey)); + Verify.NotNull(endpoint, nameof(endpoint)); + + AzureOpenAIClientOptions clientOptions = CreateAzureClientOptions(httpClient); + + return new(new AzureOpenAIClient(endpoint, apiKey!, clientOptions), CreateConfigurationKeys(endpoint, httpClient)); + } + + /// + /// Produce a based on . + /// + /// The credentials + /// The service endpoint + /// Custom for HTTP requests. + public static OpenAIClientProvider ForAzureOpenAI(TokenCredential credential, Uri endpoint, HttpClient? 
httpClient = null) + { + Verify.NotNull(credential, nameof(credential)); + Verify.NotNull(endpoint, nameof(endpoint)); + + AzureOpenAIClientOptions clientOptions = CreateAzureClientOptions(httpClient); + + return new(new AzureOpenAIClient(endpoint, credential, clientOptions), CreateConfigurationKeys(endpoint, httpClient)); + } + + /// + /// Produce a based on . + /// + /// An optional endpoint + /// Custom for HTTP requests. + public static OpenAIClientProvider ForOpenAI(Uri? endpoint = null, HttpClient? httpClient = null) + { + OpenAIClientOptions clientOptions = CreateOpenAIClientOptions(endpoint, httpClient); + return new(new OpenAIClient(SingleSpaceKey, clientOptions), CreateConfigurationKeys(endpoint, httpClient)); + } + + /// + /// Produce a based on . + /// + /// The API key + /// An optional endpoint + /// Custom for HTTP requests. + public static OpenAIClientProvider ForOpenAI(ApiKeyCredential apiKey, Uri? endpoint = null, HttpClient? httpClient = null) + { + OpenAIClientOptions clientOptions = CreateOpenAIClientOptions(endpoint, httpClient); + return new(new OpenAIClient(apiKey ?? SingleSpaceKey, clientOptions), CreateConfigurationKeys(endpoint, httpClient)); + } + + /// + /// Directly provide a client instance. + /// + public static OpenAIClientProvider FromClient(OpenAIClient client) + { + return new(client, [client.GetType().FullName!, client.GetHashCode().ToString()]); + } + + private static AzureOpenAIClientOptions CreateAzureClientOptions(HttpClient? httpClient) + { + AzureOpenAIClientOptions options = new() + { + ApplicationId = HttpHeaderConstant.Values.UserAgent + }; + + ConfigureClientOptions(httpClient, options); + + return options; + } + + private static OpenAIClientOptions CreateOpenAIClientOptions(Uri? endpoint, HttpClient? httpClient) + { + OpenAIClientOptions options = new() + { + ApplicationId = HttpHeaderConstant.Values.UserAgent, + Endpoint = endpoint ?? 
httpClient?.BaseAddress, + }; + + ConfigureClientOptions(httpClient, options); + + return options; + } + + private static void ConfigureClientOptions(HttpClient? httpClient, ClientPipelineOptions options) + { + options.AddPolicy(CreateRequestHeaderPolicy(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(OpenAIAssistantAgent))), PipelinePosition.PerCall); + + if (httpClient is not null) + { + options.Transport = new HttpClientPipelineTransport(httpClient); + options.RetryPolicy = new ClientRetryPolicy(maxRetries: 0); // Disable retry policy if and only if a custom HttpClient is provided. + options.NetworkTimeout = Timeout.InfiniteTimeSpan; // Disable default timeout + } + } + + private static GenericActionPipelinePolicy CreateRequestHeaderPolicy(string headerName, string headerValue) + => + new((message) => + { + if (message?.Request?.Headers?.TryGetValue(headerName, out string? _) == false) + { + message.Request.Headers.Set(headerName, headerValue); + } + }); + + private static IEnumerable CreateConfigurationKeys(Uri? endpoint, HttpClient? httpClient) + { + if (endpoint != null) + { + yield return endpoint.ToString(); + } + + if (httpClient is not null) + { + if (httpClient.BaseAddress is not null) + { + yield return httpClient.BaseAddress.AbsoluteUri; + } + + foreach (string header in httpClient.DefaultRequestHeaders.SelectMany(h => h.Value)) + { + yield return header; + } + } + } +} diff --git a/dotnet/src/Agents/OpenAI/OpenAIThreadCreationOptions.cs b/dotnet/src/Agents/OpenAI/OpenAIThreadCreationOptions.cs new file mode 100644 index 000000000000..3f39c43d03dc --- /dev/null +++ b/dotnet/src/Agents/OpenAI/OpenAIThreadCreationOptions.cs @@ -0,0 +1,37 @@ +// Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Agents.OpenAI; + +/// +/// Thread creation options. 
+/// +public sealed class OpenAIThreadCreationOptions +{ + /// + /// Optional file-ids made available to the code_interpreter tool, if enabled. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public IReadOnlyList? CodeInterpreterFileIds { get; init; } + + /// + /// Optional messages to initialize thread with.. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public IReadOnlyList? Messages { get; init; } + + /// + /// Enables file-search if specified. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? VectorStoreId { get; init; } + + /// + /// A set of up to 16 key/value pairs that can be attached to an agent, used for + /// storing additional information about that object in a structured format.Keys + /// may be up to 64 characters in length and values may be up to 512 characters in length. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public IReadOnlyDictionary? Metadata { get; init; } +} diff --git a/dotnet/src/Agents/OpenAI/RunPollingOptions.cs b/dotnet/src/Agents/OpenAI/RunPollingOptions.cs new file mode 100644 index 000000000000..756ba689131c --- /dev/null +++ b/dotnet/src/Agents/OpenAI/RunPollingOptions.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft. All rights reserved. +using System; + +namespace Microsoft.SemanticKernel.Agents.OpenAI; + +/// +/// Configuration and defaults associated with polling behavior for Assistant API run processing. +/// +public sealed class RunPollingOptions +{ + /// + /// The default polling interval when monitoring thread-run status. + /// + public static TimeSpan DefaultPollingInterval { get; } = TimeSpan.FromMilliseconds(500); + + /// + /// The default back-off interval when monitoring thread-run status. + /// + public static TimeSpan DefaultPollingBackoff { get; } = TimeSpan.FromSeconds(1); + + /// + /// The default number of polling iterations before using . 
+ /// + public static int DefaultPollingBackoffThreshold { get; } = 2; + + /// + /// The default polling delay when retrying message retrieval due to a 404/NotFound from synchronization lag. + /// + public static TimeSpan DefaultMessageSynchronizationDelay { get; } = TimeSpan.FromMilliseconds(500); + + /// + /// The polling interval when monitoring thread-run status. + /// + public TimeSpan RunPollingInterval { get; set; } = DefaultPollingInterval; + + /// + /// The back-off interval when monitoring thread-run status. + /// + public TimeSpan RunPollingBackoff { get; set; } = DefaultPollingBackoff; + + /// + /// The number of polling iterations before using . + /// + public int RunPollingBackoffThreshold { get; set; } = DefaultPollingBackoffThreshold; + + /// + /// The polling delay when retrying message retrieval due to a 404/NotFound from synchronization lag. + /// + public TimeSpan MessageSynchronizationDelay { get; set; } = DefaultMessageSynchronizationDelay; + + /// + /// Gets the polling interval for the specified iteration count. + /// + /// The number of polling iterations already attempted + public TimeSpan GetPollingInterval(int iterationCount) => + iterationCount > this.RunPollingBackoffThreshold ? 
this.RunPollingBackoff : this.RunPollingInterval; +} diff --git a/dotnet/src/Agents/UnitTests/AgentChannelTests.cs b/dotnet/src/Agents/UnitTests/AgentChannelTests.cs index 50aa328ebc67..84558e002b4f 100644 --- a/dotnet/src/Agents/UnitTests/AgentChannelTests.cs +++ b/dotnet/src/Agents/UnitTests/AgentChannelTests.cs @@ -23,20 +23,26 @@ public class AgentChannelTests [Fact] public async Task VerifyAgentChannelUpcastAsync() { + // Arrange TestChannel channel = new(); + // Assert Assert.Equal(0, channel.InvokeCount); - var messages = channel.InvokeAgentAsync(new TestAgent()).ToArrayAsync(); + // Act + var messages = channel.InvokeAgentAsync(new MockAgent()).ToArrayAsync(); + // Assert Assert.Equal(1, channel.InvokeCount); + // Act await Assert.ThrowsAsync(() => channel.InvokeAgentAsync(new NextAgent()).ToArrayAsync().AsTask()); + // Assert Assert.Equal(1, channel.InvokeCount); } /// /// Not using mock as the goal here is to provide entrypoint to protected method. /// - private sealed class TestChannel : AgentChannel + private sealed class TestChannel : AgentChannel { public int InvokeCount { get; private set; } @@ -44,7 +50,7 @@ private sealed class TestChannel : AgentChannel => base.InvokeAsync(agent, cancellationToken); #pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously - protected internal override async IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync(TestAgent agent, [EnumeratorCancellation] CancellationToken cancellationToken = default) + protected internal override async IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync(MockAgent agent, [EnumeratorCancellation] CancellationToken cancellationToken = default) #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously { this.InvokeCount++; @@ -68,18 +74,5 @@ protected internal override Task ResetAsync(CancellationToken cancellationToken } } - private sealed class NextAgent : 
TestAgent; - - private class TestAgent : KernelAgent - { - protected internal override Task CreateChannelAsync(CancellationToken cancellationToken) - { - throw new NotImplementedException(); - } - - protected internal override IEnumerable GetChannelKeys() - { - throw new NotImplementedException(); - } - } + private sealed class NextAgent : MockAgent; } diff --git a/dotnet/src/Agents/UnitTests/AgentChatTests.cs b/dotnet/src/Agents/UnitTests/AgentChatTests.cs index fc295e2b5550..fe6af0b3aee6 100644 --- a/dotnet/src/Agents/UnitTests/AgentChatTests.cs +++ b/dotnet/src/Agents/UnitTests/AgentChatTests.cs @@ -3,9 +3,11 @@ using System.Linq; using System.Threading; using System.Threading.Tasks; +using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.ChatCompletion; +using Moq; using Xunit; namespace SemanticKernel.Agents.UnitTests; @@ -21,36 +23,36 @@ public class AgentChatTests [Fact] public async Task VerifyAgentChatLifecycleAsync() { - // Create chat + // Arrange: Create chat TestChat chat = new(); - // Verify initial state + // Assert: Verify initial state Assert.False(chat.IsActive); await this.VerifyHistoryAsync(expectedCount: 0, chat.GetChatMessagesAsync()); // Primary history await this.VerifyHistoryAsync(expectedCount: 0, chat.GetChatMessagesAsync(chat.Agent)); // Agent history - // Inject history + // Act: Inject history chat.AddChatMessages([new ChatMessageContent(AuthorRole.User, "More")]); chat.AddChatMessages([new ChatMessageContent(AuthorRole.User, "And then some")]); - // Verify updated history + // Assert: Verify updated history await this.VerifyHistoryAsync(expectedCount: 2, chat.GetChatMessagesAsync()); // Primary history await this.VerifyHistoryAsync(expectedCount: 0, chat.GetChatMessagesAsync(chat.Agent)); // Agent hasn't joined - // Invoke with input & verify (agent joins chat) + // Act: Invoke with input & verify (agent joins chat) chat.AddChatMessage(new 
ChatMessageContent(AuthorRole.User, "hi")); await chat.InvokeAsync().ToArrayAsync(); - Assert.Equal(1, chat.Agent.InvokeCount); - // Verify updated history + // Assert: Verify updated history + Assert.Equal(1, chat.Agent.InvokeCount); await this.VerifyHistoryAsync(expectedCount: 4, chat.GetChatMessagesAsync()); // Primary history await this.VerifyHistoryAsync(expectedCount: 4, chat.GetChatMessagesAsync(chat.Agent)); // Agent history - // Invoke without input & verify + // Act: Invoke without input await chat.InvokeAsync().ToArrayAsync(); - Assert.Equal(2, chat.Agent.InvokeCount); - // Verify final history + // Assert: Verify final history + Assert.Equal(2, chat.Agent.InvokeCount); await this.VerifyHistoryAsync(expectedCount: 5, chat.GetChatMessagesAsync()); // Primary history await this.VerifyHistoryAsync(expectedCount: 5, chat.GetChatMessagesAsync(chat.Agent)); // Agent history @@ -63,19 +65,46 @@ public async Task VerifyAgentChatLifecycleAsync() await this.VerifyHistoryAsync(expectedCount: 0, chat.GetChatMessagesAsync(chat.Agent)); // Agent history } + /// + /// Verify throw exception for system message. + /// + [Fact] + public void VerifyAgentChatRejectsSystemMessage() + { + // Arrange: Create chat + TestChat chat = new() { LoggerFactory = new Mock().Object }; + + // Assert and Act: Verify system message not accepted + Assert.Throws(() => chat.AddChatMessage(new ChatMessageContent(AuthorRole.System, "hi"))); + } + + /// + /// Verify throw exception for if invoked when active. + /// + [Fact] + public async Task VerifyAgentChatThrowsWhenActiveAsync() + { + // Arrange: Create chat + TestChat chat = new(); + + // Assert and Act: Verify system message not accepted + await Assert.ThrowsAsync(() => chat.InvalidInvokeAsync().ToArrayAsync().AsTask()); + } + /// /// Verify the management of instances as they join . 
/// [Fact(Skip = "Not 100% reliable for github workflows, but useful for dev testing.")] public async Task VerifyGroupAgentChatConcurrencyAsync() { + // Arrange TestChat chat = new(); Task[] tasks; int isActive = 0; - // Queue concurrent tasks + // Act: Queue concurrent tasks object syncObject = new(); lock (syncObject) { @@ -97,7 +126,7 @@ public async Task VerifyGroupAgentChatConcurrencyAsync() await Task.Yield(); - // Verify failure + // Assert: Verify failure await Assert.ThrowsAsync(() => Task.WhenAll(tasks)); async Task SynchronizedInvokeAsync() @@ -127,5 +156,12 @@ private sealed class TestChat : AgentChat public override IAsyncEnumerable InvokeAsync( CancellationToken cancellationToken = default) => this.InvokeAgentAsync(this.Agent, cancellationToken); + + public IAsyncEnumerable InvalidInvokeAsync( + CancellationToken cancellationToken = default) + { + this.SetActivityOrThrow(); + return this.InvokeAgentAsync(this.Agent, cancellationToken); + } } } diff --git a/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj b/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj index d46a4ee0cd1e..6b9fea49fde2 100644 --- a/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj +++ b/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj @@ -8,7 +8,7 @@ true false 12 - $(NoWarn);CA2007,CA1812,CA1861,CA1063,VSTHRD111,SKEXP0001,SKEXP0050,SKEXP0110 + $(NoWarn);CA2007,CA1812,CA1861,CA1063,VSTHRD111,SKEXP0001,SKEXP0050,SKEXP0110;OPENAI001 @@ -32,9 +32,9 @@ + - diff --git a/dotnet/src/Agents/UnitTests/AggregatorAgentTests.cs b/dotnet/src/Agents/UnitTests/AggregatorAgentTests.cs index 1a607ea7e6c7..e6668c7ea568 100644 --- a/dotnet/src/Agents/UnitTests/AggregatorAgentTests.cs +++ b/dotnet/src/Agents/UnitTests/AggregatorAgentTests.cs @@ -21,6 +21,7 @@ public class AggregatorAgentTests [InlineData(AggregatorMode.Flat, 2)] public async Task VerifyAggregatorAgentUsageAsync(AggregatorMode mode, int modeOffset) { + // Arrange Agent agent1 = CreateMockAgent(); Agent agent2 = 
CreateMockAgent(); Agent agent3 = CreateMockAgent(); @@ -44,38 +45,57 @@ public async Task VerifyAggregatorAgentUsageAsync(AggregatorMode mode, int modeO // Add message to outer chat (no agent has joined) uberChat.AddChatMessage(new ChatMessageContent(AuthorRole.User, "test uber")); + // Act var messages = await uberChat.GetChatMessagesAsync().ToArrayAsync(); + // Assert Assert.Single(messages); + // Act messages = await uberChat.GetChatMessagesAsync(uberAgent).ToArrayAsync(); + // Assert Assert.Empty(messages); // Agent hasn't joined chat, no broadcast + // Act messages = await groupChat.GetChatMessagesAsync().ToArrayAsync(); + // Assert Assert.Empty(messages); // Agent hasn't joined chat, no broadcast - // Add message to inner chat (not visible to parent) + // Arrange: Add message to inner chat (not visible to parent) groupChat.AddChatMessage(new ChatMessageContent(AuthorRole.User, "test inner")); + // Act messages = await uberChat.GetChatMessagesAsync().ToArrayAsync(); + // Assert Assert.Single(messages); + // Act messages = await uberChat.GetChatMessagesAsync(uberAgent).ToArrayAsync(); + // Assert Assert.Empty(messages); // Agent still hasn't joined chat + // Act messages = await groupChat.GetChatMessagesAsync().ToArrayAsync(); + // Assert Assert.Single(messages); - // Invoke outer chat (outer chat captures final inner message) + // Act: Invoke outer chat (outer chat captures final inner message) messages = await uberChat.InvokeAsync(uberAgent).ToArrayAsync(); + // Assert Assert.Equal(1 + modeOffset, messages.Length); // New messages generated from inner chat + // Act messages = await uberChat.GetChatMessagesAsync().ToArrayAsync(); + // Assert Assert.Equal(2 + modeOffset, messages.Length); // Total messages on uber chat + // Act messages = await groupChat.GetChatMessagesAsync().ToArrayAsync(); + // Assert Assert.Equal(5, messages.Length); // Total messages on inner chat once synchronized + // Act messages = await 
uberChat.GetChatMessagesAsync(uberAgent).ToArrayAsync(); + // Assert Assert.Equal(5, messages.Length); // Total messages on inner chat once synchronized (agent equivalent) } diff --git a/dotnet/src/Agents/UnitTests/Core/AgentGroupChatTests.cs b/dotnet/src/Agents/UnitTests/Core/AgentGroupChatTests.cs index 7c3267e3ad73..1c417a9e02ad 100644 --- a/dotnet/src/Agents/UnitTests/Core/AgentGroupChatTests.cs +++ b/dotnet/src/Agents/UnitTests/Core/AgentGroupChatTests.cs @@ -23,12 +23,18 @@ public class AgentGroupChatTests [Fact] public void VerifyGroupAgentChatDefaultState() { + // Arrange AgentGroupChat chat = new(); + + // Assert Assert.Empty(chat.Agents); Assert.NotNull(chat.ExecutionSettings); Assert.False(chat.IsComplete); + // Act chat.IsComplete = true; + + // Assert Assert.True(chat.IsComplete); } @@ -38,18 +44,25 @@ public void VerifyGroupAgentChatDefaultState() [Fact] public async Task VerifyGroupAgentChatAgentMembershipAsync() { + // Arrange Agent agent1 = CreateMockAgent(); Agent agent2 = CreateMockAgent(); Agent agent3 = CreateMockAgent(); Agent agent4 = CreateMockAgent(); AgentGroupChat chat = new(agent1, agent2); + + // Assert Assert.Equal(2, chat.Agents.Count); + // Act chat.AddAgent(agent3); + // Assert Assert.Equal(3, chat.Agents.Count); + // Act ChatMessageContent[] messages = await chat.InvokeAsync(agent4).ToArrayAsync(); + // Assert Assert.Equal(4, chat.Agents.Count); } @@ -59,6 +72,7 @@ public async Task VerifyGroupAgentChatAgentMembershipAsync() [Fact] public async Task VerifyGroupAgentChatMultiTurnAsync() { + // Arrange Agent agent1 = CreateMockAgent(); Agent agent2 = CreateMockAgent(); Agent agent3 = CreateMockAgent(); @@ -78,10 +92,14 @@ public async Task VerifyGroupAgentChatMultiTurnAsync() IsComplete = true }; + // Act and Assert await Assert.ThrowsAsync(() => chat.InvokeAsync(CancellationToken.None).ToArrayAsync().AsTask()); + // Act chat.ExecutionSettings.TerminationStrategy.AutomaticReset = true; var messages = await 
chat.InvokeAsync(CancellationToken.None).ToArrayAsync(); + + // Assert Assert.Equal(9, messages.Length); Assert.False(chat.IsComplete); @@ -108,6 +126,7 @@ public async Task VerifyGroupAgentChatMultiTurnAsync() [Fact] public async Task VerifyGroupAgentChatFailedSelectionAsync() { + // Arrange AgentGroupChat chat = Create3AgentChat(); chat.ExecutionSettings = @@ -125,6 +144,7 @@ public async Task VerifyGroupAgentChatFailedSelectionAsync() // Remove max-limit in order to isolate the target behavior. chat.ExecutionSettings.TerminationStrategy.MaximumIterations = int.MaxValue; + // Act and Assert await Assert.ThrowsAsync(() => chat.InvokeAsync().ToArrayAsync().AsTask()); } @@ -134,6 +154,7 @@ public async Task VerifyGroupAgentChatFailedSelectionAsync() [Fact] public async Task VerifyGroupAgentChatMultiTurnTerminationAsync() { + // Arrange AgentGroupChat chat = Create3AgentChat(); chat.ExecutionSettings = @@ -147,7 +168,10 @@ public async Task VerifyGroupAgentChatMultiTurnTerminationAsync() } }; + // Act var messages = await chat.InvokeAsync(CancellationToken.None).ToArrayAsync(); + + // Assert Assert.Single(messages); Assert.True(chat.IsComplete); } @@ -158,6 +182,7 @@ public async Task VerifyGroupAgentChatMultiTurnTerminationAsync() [Fact] public async Task VerifyGroupAgentChatDiscreteTerminationAsync() { + // Arrange Agent agent1 = CreateMockAgent(); AgentGroupChat chat = @@ -175,7 +200,10 @@ public async Task VerifyGroupAgentChatDiscreteTerminationAsync() } }; + // Act var messages = await chat.InvokeAsync(agent1).ToArrayAsync(); + + // Assert Assert.Single(messages); Assert.True(chat.IsComplete); } diff --git a/dotnet/src/Agents/UnitTests/Core/Chat/AgentGroupChatSettingsTests.cs b/dotnet/src/Agents/UnitTests/Core/Chat/AgentGroupChatSettingsTests.cs index d17391ee24be..ecb5cd6eee33 100644 --- a/dotnet/src/Agents/UnitTests/Core/Chat/AgentGroupChatSettingsTests.cs +++ b/dotnet/src/Agents/UnitTests/Core/Chat/AgentGroupChatSettingsTests.cs @@ -16,7 +16,10 @@ public 
class AgentGroupChatSettingsTests [Fact] public void VerifyChatExecutionSettingsDefault() { + // Arrange AgentGroupChatSettings settings = new(); + + // Assert Assert.IsType(settings.TerminationStrategy); Assert.Equal(1, settings.TerminationStrategy.MaximumIterations); Assert.IsType(settings.SelectionStrategy); @@ -28,6 +31,7 @@ public void VerifyChatExecutionSettingsDefault() [Fact] public void VerifyChatExecutionContinuationStrategyDefault() { + // Arrange Mock strategyMock = new(); AgentGroupChatSettings settings = new() @@ -35,6 +39,7 @@ public void VerifyChatExecutionContinuationStrategyDefault() TerminationStrategy = strategyMock.Object }; + // Assert Assert.Equal(strategyMock.Object, settings.TerminationStrategy); } @@ -44,6 +49,7 @@ public void VerifyChatExecutionContinuationStrategyDefault() [Fact] public void VerifyChatExecutionSelectionStrategyDefault() { + // Arrange Mock strategyMock = new(); AgentGroupChatSettings settings = new() @@ -51,6 +57,7 @@ public void VerifyChatExecutionSelectionStrategyDefault() SelectionStrategy = strategyMock.Object }; + // Assert Assert.NotNull(settings.SelectionStrategy); Assert.Equal(strategyMock.Object, settings.SelectionStrategy); } diff --git a/dotnet/src/Agents/UnitTests/Core/Chat/AggregatorTerminationStrategyTests.cs b/dotnet/src/Agents/UnitTests/Core/Chat/AggregatorTerminationStrategyTests.cs index 6ad6fd75b18f..5af211c6cdf1 100644 --- a/dotnet/src/Agents/UnitTests/Core/Chat/AggregatorTerminationStrategyTests.cs +++ b/dotnet/src/Agents/UnitTests/Core/Chat/AggregatorTerminationStrategyTests.cs @@ -6,7 +6,6 @@ using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.Chat; -using Moq; using Xunit; namespace SemanticKernel.Agents.UnitTests.Core.Chat; @@ -22,7 +21,10 @@ public class AggregatorTerminationStrategyTests [Fact] public void VerifyAggregateTerminationStrategyInitialState() { + // Arrange AggregatorTerminationStrategy strategy = new(); + + // Assert 
Assert.Equal(AggregateTerminationCondition.All, strategy.Condition); } @@ -32,14 +34,16 @@ public void VerifyAggregateTerminationStrategyInitialState() [Fact] public async Task VerifyAggregateTerminationStrategyAnyAsync() { + // Arrange TerminationStrategy strategyMockTrue = new MockTerminationStrategy(terminationResult: true); TerminationStrategy strategyMockFalse = new MockTerminationStrategy(terminationResult: false); - Mock agentMock = new(); + MockAgent agentMock = new(); + // Act and Assert await VerifyResultAsync( expectedResult: true, - agentMock.Object, + agentMock, new(strategyMockTrue, strategyMockFalse) { Condition = AggregateTerminationCondition.Any, @@ -47,7 +51,7 @@ await VerifyResultAsync( await VerifyResultAsync( expectedResult: false, - agentMock.Object, + agentMock, new(strategyMockFalse, strategyMockFalse) { Condition = AggregateTerminationCondition.Any, @@ -55,7 +59,7 @@ await VerifyResultAsync( await VerifyResultAsync( expectedResult: true, - agentMock.Object, + agentMock, new(strategyMockTrue, strategyMockTrue) { Condition = AggregateTerminationCondition.Any, @@ -68,14 +72,16 @@ await VerifyResultAsync( [Fact] public async Task VerifyAggregateTerminationStrategyAllAsync() { + // Arrange TerminationStrategy strategyMockTrue = new MockTerminationStrategy(terminationResult: true); TerminationStrategy strategyMockFalse = new MockTerminationStrategy(terminationResult: false); - Mock agentMock = new(); + MockAgent agentMock = new(); + // Act and Assert await VerifyResultAsync( expectedResult: false, - agentMock.Object, + agentMock, new(strategyMockTrue, strategyMockFalse) { Condition = AggregateTerminationCondition.All, @@ -83,7 +89,7 @@ await VerifyResultAsync( await VerifyResultAsync( expectedResult: false, - agentMock.Object, + agentMock, new(strategyMockFalse, strategyMockFalse) { Condition = AggregateTerminationCondition.All, @@ -91,7 +97,7 @@ await VerifyResultAsync( await VerifyResultAsync( expectedResult: true, - agentMock.Object, + 
agentMock, new(strategyMockTrue, strategyMockTrue) { Condition = AggregateTerminationCondition.All, @@ -104,34 +110,39 @@ await VerifyResultAsync( [Fact] public async Task VerifyAggregateTerminationStrategyAgentAsync() { + // Arrange TerminationStrategy strategyMockTrue = new MockTerminationStrategy(terminationResult: true); TerminationStrategy strategyMockFalse = new MockTerminationStrategy(terminationResult: false); - Mock agentMockA = new(); - Mock agentMockB = new(); + MockAgent agentMockA = new(); + MockAgent agentMockB = new(); + // Act and Assert await VerifyResultAsync( expectedResult: false, - agentMockB.Object, + agentMockB, new(strategyMockTrue, strategyMockTrue) { - Agents = [agentMockA.Object], + Agents = [agentMockA], Condition = AggregateTerminationCondition.All, }); await VerifyResultAsync( expectedResult: true, - agentMockB.Object, + agentMockB, new(strategyMockTrue, strategyMockTrue) { - Agents = [agentMockB.Object], + Agents = [agentMockB], Condition = AggregateTerminationCondition.All, }); } private static async Task VerifyResultAsync(bool expectedResult, Agent agent, AggregatorTerminationStrategy strategyRoot) { + // Act var result = await strategyRoot.ShouldTerminateAsync(agent, []); + + // Assert Assert.Equal(expectedResult, result); } diff --git a/dotnet/src/Agents/UnitTests/Core/Chat/KernelFunctionSelectionStrategyTests.cs b/dotnet/src/Agents/UnitTests/Core/Chat/KernelFunctionSelectionStrategyTests.cs index 275ef0e0bf5e..a9f1d461ed85 100644 --- a/dotnet/src/Agents/UnitTests/Core/Chat/KernelFunctionSelectionStrategyTests.cs +++ b/dotnet/src/Agents/UnitTests/Core/Chat/KernelFunctionSelectionStrategyTests.cs @@ -5,7 +5,6 @@ using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.Chat; using Microsoft.SemanticKernel.Connectors.OpenAI; -using Moq; using Xunit; namespace SemanticKernel.Agents.UnitTests.Core.Chat; @@ -21,8 +20,9 @@ public class KernelFunctionSelectionStrategyTests [Fact] public async Task 
VerifyKernelFunctionSelectionStrategyDefaultsAsync() { - Mock mockAgent = new(); - KernelPlugin plugin = KernelPluginFactory.CreateFromObject(new TestPlugin(mockAgent.Object.Id)); + // Arrange + MockAgent mockAgent = new(); + KernelPlugin plugin = KernelPluginFactory.CreateFromObject(new TestPlugin(mockAgent.Id)); KernelFunctionSelectionStrategy strategy = new(plugin.Single(), new()) @@ -32,16 +32,40 @@ public async Task VerifyKernelFunctionSelectionStrategyDefaultsAsync() ResultParser = (result) => result.GetValue() ?? string.Empty, }; + // Assert Assert.Null(strategy.Arguments); Assert.NotNull(strategy.Kernel); Assert.NotNull(strategy.ResultParser); Assert.Equal("_a_", strategy.AgentsVariableName); Assert.Equal("_h_", strategy.HistoryVariableName); - Agent nextAgent = await strategy.NextAsync([mockAgent.Object], []); + // Act + Agent nextAgent = await strategy.NextAsync([mockAgent], []); + // Assert Assert.NotNull(nextAgent); - Assert.Equal(mockAgent.Object, nextAgent); + Assert.Equal(mockAgent, nextAgent); + } + + /// + /// Verify strategy mismatch. 
+ /// + [Fact] + public async Task VerifyKernelFunctionSelectionStrategyThrowsOnNullResultAsync() + { + // Arrange + MockAgent mockAgent = new(); + KernelPlugin plugin = KernelPluginFactory.CreateFromObject(new TestPlugin(mockAgent.Id)); + + KernelFunctionSelectionStrategy strategy = + new(plugin.Single(), new()) + { + Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Name } }, + ResultParser = (result) => "larry", + }; + + // Act and Assert + await Assert.ThrowsAsync(() => strategy.NextAsync([mockAgent], [])); } /// /// Verify default state and behavior @@ -49,21 +73,21 @@ public async Task VerifyKernelFunctionSelectionStrategyDefaultsAsync() [Fact] public async Task VerifyKernelFunctionSelectionStrategyInitialAgentAsync() { - Mock mockAgent1 = new(); - Mock mockAgent2 = new(); - KernelPlugin plugin = KernelPluginFactory.CreateFromObject(new TestPlugin(mockAgent2.Object.Id)); + MockAgent mockAgent1 = new(); + MockAgent mockAgent2 = new(); + KernelPlugin plugin = KernelPluginFactory.CreateFromObject(new TestPlugin(mockAgent2.Id)); KernelFunctionSelectionStrategy strategy = new(plugin.Single(), new()) { - InitialAgent = mockAgent1.Object, + InitialAgent = mockAgent1, ResultParser = (result) => result.GetValue() ?? 
string.Empty, }; - Agent nextAgent = await strategy.NextAsync([mockAgent2.Object], []); + Agent nextAgent = await strategy.NextAsync([mockAgent2], []); Assert.NotNull(nextAgent); - Assert.Equal(mockAgent1.Object, nextAgent); + Assert.Equal(mockAgent1, nextAgent); } /// @@ -72,25 +96,25 @@ public async Task VerifyKernelFunctionSelectionStrategyInitialAgentAsync() [Fact] public async Task VerifyKernelFunctionSelectionStrategyNullAgentAsync() { - Mock mockAgent = new(); + MockAgent mockAgent = new(); KernelPlugin plugin = KernelPluginFactory.CreateFromObject(new TestPlugin(null)); KernelFunctionSelectionStrategy strategy = new(plugin.Single(), new()) { - Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Object.Name } }, + Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Name } }, }; - await Assert.ThrowsAsync(() => strategy.NextAsync([mockAgent.Object], [])); + await Assert.ThrowsAsync(() => strategy.NextAsync([mockAgent], [])); strategy = new(plugin.Single(), new()) { - Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Object.Name } }, + Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Name } }, UseInitialAgentAsFallback = true }; - await Assert.ThrowsAsync(() => strategy.NextAsync([mockAgent.Object], [])); + await Assert.ThrowsAsync(() => strategy.NextAsync([mockAgent], [])); } /// @@ -99,25 +123,27 @@ public async Task VerifyKernelFunctionSelectionStrategyNullAgentAsync() [Fact] public async Task VerifyKernelFunctionSelectionStrategyBadAgentFallbackWithNoInitialAgentAsync() { - Mock mockAgent = new(); + // Arrange + MockAgent mockAgent = new(); KernelPlugin plugin = KernelPluginFactory.CreateFromObject(new TestPlugin("bad")); KernelFunctionSelectionStrategy strategy = new(plugin.Single(), new()) { - Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Object.Name } }, + Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Name 
} }, }; - await Assert.ThrowsAsync(() => strategy.NextAsync([mockAgent.Object], [])); + await Assert.ThrowsAsync(() => strategy.NextAsync([mockAgent], [])); strategy = new(plugin.Single(), new()) { - Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Object.Name } }, + Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Name } }, UseInitialAgentAsFallback = true }; - await Assert.ThrowsAsync(() => strategy.NextAsync([mockAgent.Object], [])); + // Act and Assert + await Assert.ThrowsAsync(() => strategy.NextAsync([mockAgent], [])); } /// @@ -126,21 +152,21 @@ public async Task VerifyKernelFunctionSelectionStrategyBadAgentFallbackWithNoIni [Fact] public async Task VerifyKernelFunctionSelectionStrategyBadAgentFallbackAsync() { - Mock mockAgent = new(); + MockAgent mockAgent = new(); KernelPlugin plugin = KernelPluginFactory.CreateFromObject(new TestPlugin("bad")); KernelFunctionSelectionStrategy strategy = new(plugin.Single(), new()) { - Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Object.Name } }, - InitialAgent = mockAgent.Object, + Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Name } }, + InitialAgent = mockAgent, UseInitialAgentAsFallback = true }; - Agent nextAgent = await strategy.NextAsync([mockAgent.Object], []); + Agent nextAgent = await strategy.NextAsync([mockAgent], []); Assert.NotNull(nextAgent); - Assert.Equal(mockAgent.Object, nextAgent); + Assert.Equal(mockAgent, nextAgent); } private sealed class TestPlugin(string? 
agentName) diff --git a/dotnet/src/Agents/UnitTests/Core/Chat/KernelFunctionTerminationStrategyTests.cs b/dotnet/src/Agents/UnitTests/Core/Chat/KernelFunctionTerminationStrategyTests.cs index 6f0b446e5e7a..7ee5cf838bc3 100644 --- a/dotnet/src/Agents/UnitTests/Core/Chat/KernelFunctionTerminationStrategyTests.cs +++ b/dotnet/src/Agents/UnitTests/Core/Chat/KernelFunctionTerminationStrategyTests.cs @@ -3,10 +3,8 @@ using System.Linq; using System.Threading.Tasks; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.Chat; using Microsoft.SemanticKernel.Connectors.OpenAI; -using Moq; using Xunit; namespace SemanticKernel.Agents.UnitTests.Core.Chat; @@ -22,17 +20,26 @@ public class KernelFunctionTerminationStrategyTests [Fact] public async Task VerifyKernelFunctionTerminationStrategyDefaultsAsync() { + // Arrange KernelPlugin plugin = KernelPluginFactory.CreateFromObject(new TestPlugin()); - KernelFunctionTerminationStrategy strategy = new(plugin.Single(), new()); + KernelFunctionTerminationStrategy strategy = + new(plugin.Single(), new()) + { + AgentVariableName = "agent", + HistoryVariableName = "history", + }; + // Assert Assert.Null(strategy.Arguments); Assert.NotNull(strategy.Kernel); Assert.NotNull(strategy.ResultParser); + Assert.NotEqual("agent", KernelFunctionTerminationStrategy.DefaultAgentVariableName); + Assert.NotEqual("history", KernelFunctionTerminationStrategy.DefaultHistoryVariableName); - Mock mockAgent = new(); - - bool isTerminating = await strategy.ShouldTerminateAsync(mockAgent.Object, []); + // Act + MockAgent mockAgent = new(); + bool isTerminating = await strategy.ShouldTerminateAsync(mockAgent, []); Assert.True(isTerminating); } @@ -52,9 +59,9 @@ public async Task VerifyKernelFunctionTerminationStrategyParsingAsync() ResultParser = (result) => string.Equals("test", result.GetValue(), StringComparison.OrdinalIgnoreCase) }; - Mock mockAgent = new(); + MockAgent mockAgent = new(); - bool 
isTerminating = await strategy.ShouldTerminateAsync(mockAgent.Object, []); + bool isTerminating = await strategy.ShouldTerminateAsync(mockAgent, []); Assert.True(isTerminating); } diff --git a/dotnet/src/Agents/UnitTests/Core/Chat/RegExTerminationStrategyTests.cs b/dotnet/src/Agents/UnitTests/Core/Chat/RegExTerminationStrategyTests.cs index a1b739ae1d1e..196a89ded6e3 100644 --- a/dotnet/src/Agents/UnitTests/Core/Chat/RegExTerminationStrategyTests.cs +++ b/dotnet/src/Agents/UnitTests/Core/Chat/RegExTerminationStrategyTests.cs @@ -2,10 +2,8 @@ using System.Text.RegularExpressions; using System.Threading.Tasks; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.Chat; using Microsoft.SemanticKernel.ChatCompletion; -using Moq; using Xunit; namespace SemanticKernel.Agents.UnitTests.Core.Chat; @@ -13,7 +11,7 @@ namespace SemanticKernel.Agents.UnitTests.Core.Chat; /// /// Unit testing of . /// -public class RegexTerminationStrategyTests +public partial class RegexTerminationStrategyTests { /// /// Verify abililty of strategy to match expression. 
@@ -21,10 +19,12 @@ public class RegexTerminationStrategyTests [Fact] public async Task VerifyExpressionTerminationStrategyAsync() { + // Arrange RegexTerminationStrategy strategy = new("test"); - Regex r = new("(?:^|\\W)test(?:$|\\W)"); + Regex r = MyRegex(); + // Act and Assert await VerifyResultAsync( expectedResult: false, new(r), @@ -38,9 +38,17 @@ await VerifyResultAsync( private static async Task VerifyResultAsync(bool expectedResult, RegexTerminationStrategy strategyRoot, string content) { + // Arrange ChatMessageContent message = new(AuthorRole.Assistant, content); - Mock agent = new(); - var result = await strategyRoot.ShouldTerminateAsync(agent.Object, [message]); + MockAgent agent = new(); + + // Act + var result = await strategyRoot.ShouldTerminateAsync(agent, [message]); + + // Assert Assert.Equal(expectedResult, result); } + + [GeneratedRegex("(?:^|\\W)test(?:$|\\W)")] + private static partial Regex MyRegex(); } diff --git a/dotnet/src/Agents/UnitTests/Core/Chat/SequentialSelectionStrategyTests.cs b/dotnet/src/Agents/UnitTests/Core/Chat/SequentialSelectionStrategyTests.cs index bb8fb4665b36..2d06fb6d0078 100644 --- a/dotnet/src/Agents/UnitTests/Core/Chat/SequentialSelectionStrategyTests.cs +++ b/dotnet/src/Agents/UnitTests/Core/Chat/SequentialSelectionStrategyTests.cs @@ -3,7 +3,6 @@ using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.Chat; -using Moq; using Xunit; namespace SemanticKernel.Agents.UnitTests.Core.Chat; @@ -19,24 +18,27 @@ public class SequentialSelectionStrategyTests [Fact] public async Task VerifySequentialSelectionStrategyTurnsAsync() { - Mock agent1 = new(); - Mock agent2 = new(); + // Arrange + MockAgent agent1 = new(); + MockAgent agent2 = new(); - Agent[] agents = [agent1.Object, agent2.Object]; + Agent[] agents = [agent1, agent2]; SequentialSelectionStrategy strategy = new(); - await VerifyNextAgentAsync(agent1.Object, agents, strategy); - await 
VerifyNextAgentAsync(agent2.Object, agents, strategy); - await VerifyNextAgentAsync(agent1.Object, agents, strategy); - await VerifyNextAgentAsync(agent2.Object, agents, strategy); - await VerifyNextAgentAsync(agent1.Object, agents, strategy); + // Act and Assert + await VerifyNextAgentAsync(agent1, agents, strategy); + await VerifyNextAgentAsync(agent2, agents, strategy); + await VerifyNextAgentAsync(agent1, agents, strategy); + await VerifyNextAgentAsync(agent2, agents, strategy); + await VerifyNextAgentAsync(agent1, agents, strategy); + // Arrange strategy.Reset(); - await VerifyNextAgentAsync(agent1.Object, agents, strategy); + await VerifyNextAgentAsync(agent1, agents, strategy); // Verify index does not exceed current bounds. - agents = [agent1.Object]; - await VerifyNextAgentAsync(agent1.Object, agents, strategy); + agents = [agent1]; + await VerifyNextAgentAsync(agent1, agents, strategy); } /// @@ -45,25 +47,18 @@ public async Task VerifySequentialSelectionStrategyTurnsAsync() [Fact] public async Task VerifySequentialSelectionStrategyInitialAgentAsync() { - Mock agent1 = new(); - Mock agent2 = new(); + MockAgent agent1 = new(); + MockAgent agent2 = new(); - Agent[] agents = [agent1.Object, agent2.Object]; + Agent[] agents = [agent1, agent2]; SequentialSelectionStrategy strategy = new() { - InitialAgent = agent2.Object + InitialAgent = agent2 }; - await VerifyNextAgentAsync(agent2.Object, agents, strategy); - await VerifyNextAgentAsync(agent1.Object, agents, strategy); - } - - private static async Task VerifyNextAgentAsync(Agent expectedAgent, Agent[] agents, SequentialSelectionStrategy strategy) - { - Agent? 
nextAgent = await strategy.NextAsync(agents, []); - Assert.NotNull(nextAgent); - Assert.Equal(expectedAgent.Id, nextAgent.Id); + await VerifyNextAgentAsync(agent2, agents, strategy); + await VerifyNextAgentAsync(agent1, agents, strategy); } /// @@ -72,7 +67,19 @@ private static async Task VerifyNextAgentAsync(Agent expectedAgent, Agent[] agen [Fact] public async Task VerifySequentialSelectionStrategyEmptyAsync() { + // Arrange SequentialSelectionStrategy strategy = new(); + + // Act and Assert await Assert.ThrowsAsync(() => strategy.NextAsync([], [])); } + + private static async Task VerifyNextAgentAsync(Agent expectedAgent, Agent[] agents, SequentialSelectionStrategy strategy) + { + // Act + Agent? nextAgent = await strategy.NextAsync(agents, []); + // Assert + Assert.NotNull(nextAgent); + Assert.Equal(expectedAgent.Id, nextAgent.Id); + } } diff --git a/dotnet/src/Agents/UnitTests/Core/ChatCompletionAgentTests.cs b/dotnet/src/Agents/UnitTests/Core/ChatCompletionAgentTests.cs index c8a1c0578613..01debd8ded5f 100644 --- a/dotnet/src/Agents/UnitTests/Core/ChatCompletionAgentTests.cs +++ b/dotnet/src/Agents/UnitTests/Core/ChatCompletionAgentTests.cs @@ -5,6 +5,7 @@ using Microsoft.Extensions.DependencyInjection; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; +using Microsoft.SemanticKernel.Agents.History; using Microsoft.SemanticKernel.ChatCompletion; using Moq; using Xunit; @@ -22,6 +23,7 @@ public class ChatCompletionAgentTests [Fact] public void VerifyChatCompletionAgentDefinition() { + // Arrange ChatCompletionAgent agent = new() { @@ -30,6 +32,7 @@ public void VerifyChatCompletionAgentDefinition() Name = "test name", }; + // Assert Assert.NotNull(agent.Id); Assert.Equal("test instructions", agent.Instructions); Assert.Equal("test description", agent.Description); @@ -43,7 +46,8 @@ public void VerifyChatCompletionAgentDefinition() [Fact] public async Task VerifyChatCompletionAgentInvocationAsync() { - var mockService = new Mock(); + // 
Arrange + Mock mockService = new(); mockService.Setup( s => s.GetChatMessageContentsAsync( It.IsAny(), @@ -51,16 +55,18 @@ public async Task VerifyChatCompletionAgentInvocationAsync() It.IsAny(), It.IsAny())).ReturnsAsync([new(AuthorRole.Assistant, "what?")]); - var agent = - new ChatCompletionAgent() + ChatCompletionAgent agent = + new() { Instructions = "test instructions", Kernel = CreateKernel(mockService.Object), Arguments = [], }; - var result = await agent.InvokeAsync([]).ToArrayAsync(); + // Act + ChatMessageContent[] result = await agent.InvokeAsync([]).ToArrayAsync(); + // Assert Assert.Single(result); mockService.Verify( @@ -79,13 +85,14 @@ public async Task VerifyChatCompletionAgentInvocationAsync() [Fact] public async Task VerifyChatCompletionAgentStreamingAsync() { + // Arrange StreamingChatMessageContent[] returnContent = [ new(AuthorRole.Assistant, "wh"), new(AuthorRole.Assistant, "at?"), ]; - var mockService = new Mock(); + Mock mockService = new(); mockService.Setup( s => s.GetStreamingChatMessageContentsAsync( It.IsAny(), @@ -93,16 +100,18 @@ public async Task VerifyChatCompletionAgentStreamingAsync() It.IsAny(), It.IsAny())).Returns(returnContent.ToAsyncEnumerable()); - var agent = - new ChatCompletionAgent() + ChatCompletionAgent agent = + new() { Instructions = "test instructions", Kernel = CreateKernel(mockService.Object), Arguments = [], }; - var result = await agent.InvokeStreamingAsync([]).ToArrayAsync(); + // Act + StreamingChatMessageContent[] result = await agent.InvokeStreamingAsync([]).ToArrayAsync(); + // Assert Assert.Equal(2, result.Length); mockService.Verify( @@ -115,6 +124,52 @@ public async Task VerifyChatCompletionAgentStreamingAsync() Times.Once); } + /// + /// Verify the invocation and response of . 
+ /// + [Fact] + public void VerifyChatCompletionServiceSelection() + { + // Arrange + Mock mockService = new(); + Kernel kernel = CreateKernel(mockService.Object); + + // Act + (IChatCompletionService service, PromptExecutionSettings? settings) = ChatCompletionAgent.GetChatCompletionService(kernel, null); + // Assert + Assert.Equal(mockService.Object, service); + Assert.Null(settings); + + // Act + (service, settings) = ChatCompletionAgent.GetChatCompletionService(kernel, []); + // Assert + Assert.Equal(mockService.Object, service); + Assert.Null(settings); + + // Act and Assert + Assert.Throws(() => ChatCompletionAgent.GetChatCompletionService(kernel, new KernelArguments(new PromptExecutionSettings() { ServiceId = "anything" }))); + } + + /// + /// Verify the invocation and response of . + /// + [Fact] + public void VerifyChatCompletionChannelKeys() + { + // Arrange + ChatCompletionAgent agent1 = new(); + ChatCompletionAgent agent2 = new(); + ChatCompletionAgent agent3 = new() { HistoryReducer = new ChatHistoryTruncationReducer(50) }; + ChatCompletionAgent agent4 = new() { HistoryReducer = new ChatHistoryTruncationReducer(50) }; + ChatCompletionAgent agent5 = new() { HistoryReducer = new ChatHistoryTruncationReducer(100) }; + + // Act ans Assert + Assert.Equal(agent1.GetChannelKeys(), agent2.GetChannelKeys()); + Assert.Equal(agent3.GetChannelKeys(), agent4.GetChannelKeys()); + Assert.NotEqual(agent1.GetChannelKeys(), agent3.GetChannelKeys()); + Assert.NotEqual(agent3.GetChannelKeys(), agent5.GetChannelKeys()); + } + private static Kernel CreateKernel(IChatCompletionService chatCompletionService) { var builder = Kernel.CreateBuilder(); diff --git a/dotnet/src/Agents/UnitTests/Core/ChatHistoryChannelTests.cs b/dotnet/src/Agents/UnitTests/Core/ChatHistoryChannelTests.cs index 6732da6628e8..92aca7fadb67 100644 --- a/dotnet/src/Agents/UnitTests/Core/ChatHistoryChannelTests.cs +++ b/dotnet/src/Agents/UnitTests/Core/ChatHistoryChannelTests.cs @@ -1,11 +1,9 @@ // 
Copyright (c) Microsoft. All rights reserved. -using System; -using System.Collections.Generic; using System.Linq; -using System.Threading; using System.Threading.Tasks; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; +using Moq; using Xunit; namespace SemanticKernel.Agents.UnitTests.Core; @@ -22,21 +20,11 @@ public class ChatHistoryChannelTests [Fact] public async Task VerifyAgentWithoutIChatHistoryHandlerAsync() { - TestAgent agent = new(); // Not a IChatHistoryHandler + // Arrange + Mock agent = new(); // Not a IChatHistoryHandler ChatHistoryChannel channel = new(); // Requires IChatHistoryHandler - await Assert.ThrowsAsync(() => channel.InvokeAsync(agent).ToArrayAsync().AsTask()); - } - - private sealed class TestAgent : KernelAgent - { - protected internal override Task CreateChannelAsync(CancellationToken cancellationToken) - { - throw new NotImplementedException(); - } - protected internal override IEnumerable GetChannelKeys() - { - throw new NotImplementedException(); - } + // Act & Assert + await Assert.ThrowsAsync(() => channel.InvokeAsync(agent.Object).ToArrayAsync().AsTask()); } } diff --git a/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryReducerExtensionsTests.cs b/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryReducerExtensionsTests.cs index a75533474147..d9042305d9fa 100644 --- a/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryReducerExtensionsTests.cs +++ b/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryReducerExtensionsTests.cs @@ -30,8 +30,10 @@ public class ChatHistoryReducerExtensionsTests [InlineData(100, 0, int.MaxValue, 100)] public void VerifyChatHistoryExtraction(int messageCount, int startIndex, int? endIndex = null, int? expectedCount = null) { + // Arrange ChatHistory history = [.. MockHistoryGenerator.CreateSimpleHistory(messageCount)]; + // Act ChatMessageContent[] extractedHistory = history.Extract(startIndex, endIndex).ToArray(); int finalIndex = endIndex ?? 
messageCount - 1; @@ -39,6 +41,7 @@ public void VerifyChatHistoryExtraction(int messageCount, int startIndex, int? e expectedCount ??= finalIndex - startIndex + 1; + // Assert Assert.Equal(expectedCount, extractedHistory.Length); if (extractedHistory.Length > 0) @@ -58,16 +61,19 @@ public void VerifyChatHistoryExtraction(int messageCount, int startIndex, int? e [InlineData(100, 0)] public void VerifyGetFinalSummaryIndex(int summaryCount, int regularCount) { + // Arrange ChatHistory summaries = [.. MockHistoryGenerator.CreateSimpleHistory(summaryCount)]; foreach (ChatMessageContent summary in summaries) { summary.Metadata = new Dictionary() { { "summary", true } }; } + // Act ChatHistory history = [.. summaries, .. MockHistoryGenerator.CreateSimpleHistory(regularCount)]; int finalSummaryIndex = history.LocateSummarizationBoundary("summary"); + // Assert Assert.Equal(summaryCount, finalSummaryIndex); } @@ -77,17 +83,22 @@ public void VerifyGetFinalSummaryIndex(int summaryCount, int regularCount) [Fact] public async Task VerifyChatHistoryNotReducedAsync() { + // Arrange ChatHistory history = []; + Mock mockReducer = new(); + mockReducer.Setup(r => r.ReduceAsync(It.IsAny>(), default)).ReturnsAsync((IEnumerable?)null); + // Act bool isReduced = await history.ReduceAsync(null, default); + // Assert Assert.False(isReduced); Assert.Empty(history); - Mock mockReducer = new(); - mockReducer.Setup(r => r.ReduceAsync(It.IsAny>(), default)).ReturnsAsync((IEnumerable?)null); + // Act isReduced = await history.ReduceAsync(mockReducer.Object, default); + // Assert Assert.False(isReduced); Assert.Empty(history); } @@ -98,13 +109,16 @@ public async Task VerifyChatHistoryNotReducedAsync() [Fact] public async Task VerifyChatHistoryReducedAsync() { + // Arrange Mock mockReducer = new(); mockReducer.Setup(r => r.ReduceAsync(It.IsAny>(), default)).ReturnsAsync((IEnumerable?)[]); ChatHistory history = [.. 
MockHistoryGenerator.CreateSimpleHistory(10)]; + // Act bool isReduced = await history.ReduceAsync(mockReducer.Object, default); + // Assert Assert.True(isReduced); Assert.Empty(history); } @@ -124,11 +138,13 @@ public async Task VerifyChatHistoryReducedAsync() [InlineData(900, 500, int.MaxValue)] public void VerifyLocateSafeReductionIndexNone(int messageCount, int targetCount, int? thresholdCount = null) { - // Shape of history doesn't matter since reduction is not expected + // Arrange: Shape of history doesn't matter since reduction is not expected ChatHistory sourceHistory = [.. MockHistoryGenerator.CreateHistoryWithUserInput(messageCount)]; + // Act int reductionIndex = sourceHistory.LocateSafeReductionIndex(targetCount, thresholdCount); + // Assert Assert.Equal(0, reductionIndex); } @@ -146,11 +162,13 @@ public void VerifyLocateSafeReductionIndexNone(int messageCount, int targetCount [InlineData(1000, 500, 499)] public void VerifyLocateSafeReductionIndexFound(int messageCount, int targetCount, int? thresholdCount = null) { - // Generate history with only assistant messages + // Arrange: Generate history with only assistant messages ChatHistory sourceHistory = [.. MockHistoryGenerator.CreateSimpleHistory(messageCount)]; + // Act int reductionIndex = sourceHistory.LocateSafeReductionIndex(targetCount, thresholdCount); + // Assert Assert.True(reductionIndex > 0); Assert.Equal(targetCount, messageCount - reductionIndex); } @@ -170,17 +188,20 @@ public void VerifyLocateSafeReductionIndexFound(int messageCount, int targetCoun [InlineData(1000, 500, 499)] public void VerifyLocateSafeReductionIndexFoundWithUser(int messageCount, int targetCount, int? thresholdCount = null) { - // Generate history with alternating user and assistant messages + // Arrange: Generate history with alternating user and assistant messages ChatHistory sourceHistory = [.. 
MockHistoryGenerator.CreateHistoryWithUserInput(messageCount)]; + // Act int reductionIndex = sourceHistory.LocateSafeReductionIndex(targetCount, thresholdCount); + // Assert Assert.True(reductionIndex > 0); - // The reduction length should align with a user message, if threshold is specified + // Act: The reduction length should align with a user message, if threshold is specified bool hasThreshold = thresholdCount > 0; int expectedCount = targetCount + (hasThreshold && sourceHistory[^targetCount].Role != AuthorRole.User ? 1 : 0); + // Assert Assert.Equal(expectedCount, messageCount - reductionIndex); } @@ -201,14 +222,16 @@ public void VerifyLocateSafeReductionIndexFoundWithUser(int messageCount, int ta [InlineData(9)] public void VerifyLocateSafeReductionIndexWithFunctionContent(int targetCount, int? thresholdCount = null) { - // Generate a history with function call on index 5 and 9 and + // Arrange: Generate a history with function call on index 5 and 9 and // function result on index 6 and 10 (total length: 14) ChatHistory sourceHistory = [.. 
MockHistoryGenerator.CreateHistoryWithFunctionContent()]; ChatHistoryTruncationReducer reducer = new(targetCount, thresholdCount); + // Act int reductionIndex = sourceHistory.LocateSafeReductionIndex(targetCount, thresholdCount); + // Assert Assert.True(reductionIndex > 0); // The reduction length avoid splitting function call and result, regardless of threshold @@ -216,7 +239,7 @@ public void VerifyLocateSafeReductionIndexWithFunctionContent(int targetCount, i if (sourceHistory[sourceHistory.Count - targetCount].Items.Any(i => i is FunctionCallContent)) { - expectedCount += 1; + expectedCount++; } else if (sourceHistory[sourceHistory.Count - targetCount].Items.Any(i => i is FunctionResultContent)) { diff --git a/dotnet/src/Agents/UnitTests/Core/History/ChatHistorySummarizationReducerTests.cs b/dotnet/src/Agents/UnitTests/Core/History/ChatHistorySummarizationReducerTests.cs index f464b6a8214a..53e93d0026c3 100644 --- a/dotnet/src/Agents/UnitTests/Core/History/ChatHistorySummarizationReducerTests.cs +++ b/dotnet/src/Agents/UnitTests/Core/History/ChatHistorySummarizationReducerTests.cs @@ -23,10 +23,12 @@ public class ChatHistorySummarizationReducerTests [InlineData(-1)] [InlineData(-1, int.MaxValue)] [InlineData(int.MaxValue, -1)] - public void VerifyChatHistoryConstructorArgumentValidation(int targetCount, int? thresholdCount = null) + public void VerifyConstructorArgumentValidation(int targetCount, int? thresholdCount = null) { + // Arrange Mock mockCompletionService = this.CreateMockCompletionService(); + // Act & Assert Assert.Throws(() => new ChatHistorySummarizationReducer(mockCompletionService.Object, targetCount, thresholdCount)); } @@ -34,15 +36,17 @@ public void VerifyChatHistoryConstructorArgumentValidation(int targetCount, int? /// Verify object state after initialization. 
/// [Fact] - public void VerifyChatHistoryInitializationState() + public void VerifyInitializationState() { + // Arrange Mock mockCompletionService = this.CreateMockCompletionService(); - ChatHistorySummarizationReducer reducer = new(mockCompletionService.Object, 10); + // Assert Assert.Equal(ChatHistorySummarizationReducer.DefaultSummarizationPrompt, reducer.SummarizationInstructions); Assert.True(reducer.FailOnError); + // Act reducer = new(mockCompletionService.Object, 10) { @@ -50,25 +54,62 @@ public void VerifyChatHistoryInitializationState() SummarizationInstructions = "instructions", }; + // Assert Assert.NotEqual(ChatHistorySummarizationReducer.DefaultSummarizationPrompt, reducer.SummarizationInstructions); Assert.False(reducer.FailOnError); } + /// + /// Validate equality override. + /// + [Fact] + public void VerifyEquality() + { + // Arrange + Mock mockCompletionService = this.CreateMockCompletionService(); + + ChatHistorySummarizationReducer reducer1 = new(mockCompletionService.Object, 3, 3); + ChatHistorySummarizationReducer reducer2 = new(mockCompletionService.Object, 3, 3); + ChatHistorySummarizationReducer reducer3 = new(mockCompletionService.Object, 3, 3) { UseSingleSummary = false }; + ChatHistorySummarizationReducer reducer4 = new(mockCompletionService.Object, 3, 3) { SummarizationInstructions = "override" }; + ChatHistorySummarizationReducer reducer5 = new(mockCompletionService.Object, 4, 3); + ChatHistorySummarizationReducer reducer6 = new(mockCompletionService.Object, 3, 5); + ChatHistorySummarizationReducer reducer7 = new(mockCompletionService.Object, 3); + ChatHistorySummarizationReducer reducer8 = new(mockCompletionService.Object, 3); + + // Assert + Assert.True(reducer1.Equals(reducer1)); + Assert.True(reducer1.Equals(reducer2)); + Assert.True(reducer7.Equals(reducer8)); + Assert.True(reducer3.Equals(reducer3)); + Assert.True(reducer4.Equals(reducer4)); + Assert.False(reducer1.Equals(reducer3)); + Assert.False(reducer1.Equals(reducer4)); + 
Assert.False(reducer1.Equals(reducer5)); + Assert.False(reducer1.Equals(reducer6)); + Assert.False(reducer1.Equals(reducer7)); + Assert.False(reducer1.Equals(reducer8)); + Assert.False(reducer1.Equals(null)); + } + /// /// Validate hash-code expresses reducer equivalency. /// [Fact] - public void VerifyChatHistoryHasCode() + public void VerifyHashCode() { + // Arrange HashSet reducers = []; Mock mockCompletionService = this.CreateMockCompletionService(); + // Act int hashCode1 = GenerateHashCode(3, 4); int hashCode2 = GenerateHashCode(33, 44); int hashCode3 = GenerateHashCode(3000, 4000); int hashCode4 = GenerateHashCode(3000, 4000); + // Assert Assert.NotEqual(hashCode1, hashCode2); Assert.NotEqual(hashCode2, hashCode3); Assert.Equal(hashCode3, hashCode4); @@ -90,12 +131,15 @@ int GenerateHashCode(int targetCount, int thresholdCount) [Fact] public async Task VerifyChatHistoryReductionSilentFailureAsync() { + // Arrange Mock mockCompletionService = this.CreateMockCompletionService(throwException: true); IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray(); - ChatHistorySummarizationReducer reducer = new(mockCompletionService.Object, 10) { FailOnError = false }; + + // Act IEnumerable? 
reducedHistory = await reducer.ReduceAsync(sourceHistory); + // Assert Assert.Null(reducedHistory); } @@ -105,10 +149,12 @@ public async Task VerifyChatHistoryReductionSilentFailureAsync() [Fact] public async Task VerifyChatHistoryReductionThrowsOnFailureAsync() { + // Arrange Mock mockCompletionService = this.CreateMockCompletionService(throwException: true); IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray(); - ChatHistorySummarizationReducer reducer = new(mockCompletionService.Object, 10); + + // Act and Assert await Assert.ThrowsAsync(() => reducer.ReduceAsync(sourceHistory)); } @@ -118,12 +164,15 @@ public async Task VerifyChatHistoryReductionThrowsOnFailureAsync() [Fact] public async Task VerifyChatHistoryNotReducedAsync() { + // Arrange Mock mockCompletionService = this.CreateMockCompletionService(); IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray(); - ChatHistorySummarizationReducer reducer = new(mockCompletionService.Object, 20); + + // Act IEnumerable? reducedHistory = await reducer.ReduceAsync(sourceHistory); + // Assert Assert.Null(reducedHistory); } @@ -133,12 +182,15 @@ public async Task VerifyChatHistoryNotReducedAsync() [Fact] public async Task VerifyChatHistoryReducedAsync() { + // Arrange Mock mockCompletionService = this.CreateMockCompletionService(); IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray(); - ChatHistorySummarizationReducer reducer = new(mockCompletionService.Object, 10); + + // Act IEnumerable? 
reducedHistory = await reducer.ReduceAsync(sourceHistory); + // Assert ChatMessageContent[] messages = VerifyReducedHistory(reducedHistory, 11); VerifySummarization(messages[0]); } @@ -149,19 +201,24 @@ public async Task VerifyChatHistoryReducedAsync() [Fact] public async Task VerifyChatHistoryRereducedAsync() { + // Arrange Mock mockCompletionService = this.CreateMockCompletionService(); IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray(); - ChatHistorySummarizationReducer reducer = new(mockCompletionService.Object, 10); + + // Act IEnumerable? reducedHistory = await reducer.ReduceAsync(sourceHistory); reducedHistory = await reducer.ReduceAsync([.. reducedHistory!, .. sourceHistory]); + // Assert ChatMessageContent[] messages = VerifyReducedHistory(reducedHistory, 11); VerifySummarization(messages[0]); + // Act reducer = new(mockCompletionService.Object, 10) { UseSingleSummary = false }; reducedHistory = await reducer.ReduceAsync([.. reducedHistory!, .. sourceHistory]); + // Assert messages = VerifyReducedHistory(reducedHistory, 12); VerifySummarization(messages[0]); VerifySummarization(messages[1]); diff --git a/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryTruncationReducerTests.cs b/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryTruncationReducerTests.cs index eebcf8fc6136..9d8b2e721fdf 100644 --- a/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryTruncationReducerTests.cs +++ b/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryTruncationReducerTests.cs @@ -21,24 +21,54 @@ public class ChatHistoryTruncationReducerTests [InlineData(-1)] [InlineData(-1, int.MaxValue)] [InlineData(int.MaxValue, -1)] - public void VerifyChatHistoryConstructorArgumentValidation(int targetCount, int? thresholdCount = null) + public void VerifyConstructorArgumentValidation(int targetCount, int? 
thresholdCount = null) { + // Act and Assert Assert.Throws(() => new ChatHistoryTruncationReducer(targetCount, thresholdCount)); } + /// + /// Validate equality override. + /// + [Fact] + public void VerifyEquality() + { + // Arrange + ChatHistoryTruncationReducer reducer1 = new(3, 3); + ChatHistoryTruncationReducer reducer2 = new(3, 3); + ChatHistoryTruncationReducer reducer3 = new(4, 3); + ChatHistoryTruncationReducer reducer4 = new(3, 5); + ChatHistoryTruncationReducer reducer5 = new(3); + ChatHistoryTruncationReducer reducer6 = new(3); + + // Assert + Assert.True(reducer1.Equals(reducer1)); + Assert.True(reducer1.Equals(reducer2)); + Assert.True(reducer5.Equals(reducer6)); + Assert.True(reducer3.Equals(reducer3)); + Assert.False(reducer1.Equals(reducer3)); + Assert.False(reducer1.Equals(reducer4)); + Assert.False(reducer1.Equals(reducer5)); + Assert.False(reducer1.Equals(reducer6)); + Assert.False(reducer1.Equals(null)); + } + /// /// Validate hash-code expresses reducer equivalency. /// [Fact] - public void VerifyChatHistoryHasCode() + public void VerifyHashCode() { + // Arrange HashSet reducers = []; + // Act int hashCode1 = GenerateHashCode(3, 4); int hashCode2 = GenerateHashCode(33, 44); int hashCode3 = GenerateHashCode(3000, 4000); int hashCode4 = GenerateHashCode(3000, 4000); + // Assert Assert.NotEqual(hashCode1, hashCode2); Assert.NotEqual(hashCode2, hashCode3); Assert.Equal(hashCode3, hashCode4); @@ -60,11 +90,14 @@ int GenerateHashCode(int targetCount, int thresholdCount) [Fact] public async Task VerifyChatHistoryNotReducedAsync() { + // Arrange IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(10).ToArray(); - ChatHistoryTruncationReducer reducer = new(20); + + // Act IEnumerable? 
reducedHistory = await reducer.ReduceAsync(sourceHistory); + // Assert Assert.Null(reducedHistory); } @@ -74,11 +107,14 @@ public async Task VerifyChatHistoryNotReducedAsync() [Fact] public async Task VerifyChatHistoryReducedAsync() { + // Arrange IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray(); - ChatHistoryTruncationReducer reducer = new(10); + + // Act IEnumerable? reducedHistory = await reducer.ReduceAsync(sourceHistory); + // Assert VerifyReducedHistory(reducedHistory, 10); } @@ -88,12 +124,15 @@ public async Task VerifyChatHistoryReducedAsync() [Fact] public async Task VerifyChatHistoryRereducedAsync() { + // Arrange IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray(); - ChatHistoryTruncationReducer reducer = new(10); + + // Act IEnumerable? reducedHistory = await reducer.ReduceAsync(sourceHistory); reducedHistory = await reducer.ReduceAsync([.. reducedHistory!, .. sourceHistory]); + // Assert VerifyReducedHistory(reducedHistory, 10); } diff --git a/dotnet/src/Agents/UnitTests/Extensions/ChatHistoryExtensionsTests.cs b/dotnet/src/Agents/UnitTests/Extensions/ChatHistoryExtensionsTests.cs index 14a938a7b169..d7f370e3734c 100644 --- a/dotnet/src/Agents/UnitTests/Extensions/ChatHistoryExtensionsTests.cs +++ b/dotnet/src/Agents/UnitTests/Extensions/ChatHistoryExtensionsTests.cs @@ -19,10 +19,12 @@ public class ChatHistoryExtensionsTests [Fact] public void VerifyChatHistoryOrdering() { + // Arrange ChatHistory history = []; history.AddUserMessage("Hi"); history.AddAssistantMessage("Hi"); + // Act and Assert VerifyRole(AuthorRole.User, history.First()); VerifyRole(AuthorRole.Assistant, history.Last()); @@ -36,10 +38,12 @@ public void VerifyChatHistoryOrdering() [Fact] public async Task VerifyChatHistoryOrderingAsync() { + // Arrange ChatHistory history = []; history.AddUserMessage("Hi"); history.AddAssistantMessage("Hi"); + // Act and Assert VerifyRole(AuthorRole.User, history.First()); 
VerifyRole(AuthorRole.Assistant, history.Last()); diff --git a/dotnet/src/Agents/UnitTests/Internal/BroadcastQueueTests.cs b/dotnet/src/Agents/UnitTests/Internal/BroadcastQueueTests.cs index 987c67fce804..720197a90c55 100644 --- a/dotnet/src/Agents/UnitTests/Internal/BroadcastQueueTests.cs +++ b/dotnet/src/Agents/UnitTests/Internal/BroadcastQueueTests.cs @@ -22,8 +22,10 @@ public class BroadcastQueueTests [Fact] public void VerifyBroadcastQueueDefaultConfiguration() { + // Arrange BroadcastQueue queue = new(); + // Assert Assert.True(queue.BlockDuration.TotalSeconds > 0); } @@ -33,7 +35,7 @@ public void VerifyBroadcastQueueDefaultConfiguration() [Fact] public async Task VerifyBroadcastQueueReceiveAsync() { - // Create queue and channel. + // Arrange: Create queue and channel. BroadcastQueue queue = new() { @@ -42,23 +44,31 @@ public async Task VerifyBroadcastQueueReceiveAsync() TestChannel channel = new(); ChannelReference reference = new(channel, "test"); - // Verify initial state + // Act: Verify initial state await VerifyReceivingStateAsync(receiveCount: 0, queue, channel, "test"); + + // Assert Assert.Empty(channel.ReceivedMessages); - // Verify empty invocation with no channels. + // Act: Verify empty invocation with no channels. queue.Enqueue([], []); await VerifyReceivingStateAsync(receiveCount: 0, queue, channel, "test"); + + // Assert Assert.Empty(channel.ReceivedMessages); - // Verify empty invocation of channel. + // Act: Verify empty invocation of channel. queue.Enqueue([reference], []); await VerifyReceivingStateAsync(receiveCount: 1, queue, channel, "test"); + + // Assert Assert.Empty(channel.ReceivedMessages); - // Verify expected invocation of channel. + // Act: Verify expected invocation of channel. 
queue.Enqueue([reference], [new ChatMessageContent(AuthorRole.User, "hi")]); await VerifyReceivingStateAsync(receiveCount: 2, queue, channel, "test"); + + // Assert Assert.NotEmpty(channel.ReceivedMessages); } @@ -68,7 +78,7 @@ public async Task VerifyBroadcastQueueReceiveAsync() [Fact] public async Task VerifyBroadcastQueueFailureAsync() { - // Create queue and channel. + // Arrange: Create queue and channel. BroadcastQueue queue = new() { @@ -77,9 +87,10 @@ public async Task VerifyBroadcastQueueFailureAsync() BadChannel channel = new(); ChannelReference reference = new(channel, "test"); - // Verify expected invocation of channel. + // Act: Verify expected invocation of channel. queue.Enqueue([reference], [new ChatMessageContent(AuthorRole.User, "hi")]); + // Assert await Assert.ThrowsAsync(() => queue.EnsureSynchronizedAsync(reference)); await Assert.ThrowsAsync(() => queue.EnsureSynchronizedAsync(reference)); await Assert.ThrowsAsync(() => queue.EnsureSynchronizedAsync(reference)); @@ -91,7 +102,7 @@ public async Task VerifyBroadcastQueueFailureAsync() [Fact] public async Task VerifyBroadcastQueueConcurrencyAsync() { - // Create queue and channel. + // Arrange: Create queue and channel. 
BroadcastQueue queue = new() { @@ -100,7 +111,7 @@ public async Task VerifyBroadcastQueueConcurrencyAsync() TestChannel channel = new(); ChannelReference reference = new(channel, "test"); - // Enqueue multiple channels + // Act: Enqueue multiple channels for (int count = 0; count < 10; ++count) { queue.Enqueue([new(channel, $"test{count}")], [new ChatMessageContent(AuthorRole.User, "hi")]); @@ -112,7 +123,7 @@ public async Task VerifyBroadcastQueueConcurrencyAsync() await queue.EnsureSynchronizedAsync(new ChannelReference(channel, $"test{count}")); } - // Verify result + // Assert Assert.NotEmpty(channel.ReceivedMessages); Assert.Equal(10, channel.ReceivedMessages.Count); } diff --git a/dotnet/src/Agents/UnitTests/Internal/KeyEncoderTests.cs b/dotnet/src/Agents/UnitTests/Internal/KeyEncoderTests.cs index 0a9715f25115..13cc3203d58c 100644 --- a/dotnet/src/Agents/UnitTests/Internal/KeyEncoderTests.cs +++ b/dotnet/src/Agents/UnitTests/Internal/KeyEncoderTests.cs @@ -17,21 +17,24 @@ public class KeyEncoderTests [Fact] public void VerifyKeyEncoderUniqueness() { + // Act this.VerifyHashEquivalancy([]); this.VerifyHashEquivalancy(nameof(KeyEncoderTests)); this.VerifyHashEquivalancy(nameof(KeyEncoderTests), "http://localhost", "zoo"); - // Verify "well-known" value + // Assert: Verify "well-known" value string localHash = KeyEncoder.GenerateHash([typeof(ChatHistoryChannel).FullName!]); Assert.Equal("Vdx37EnWT9BS+kkCkEgFCg9uHvHNw1+hXMA4sgNMKs4=", localHash); } private void VerifyHashEquivalancy(params string[] keys) { + // Act string hash1 = KeyEncoder.GenerateHash(keys); string hash2 = KeyEncoder.GenerateHash(keys); string hash3 = KeyEncoder.GenerateHash(keys.Concat(["another"])); + // Assert Assert.Equal(hash1, hash2); Assert.NotEqual(hash1, hash3); } diff --git a/dotnet/src/Agents/UnitTests/MockAgent.cs b/dotnet/src/Agents/UnitTests/MockAgent.cs index b8b7f295e02b..6e20c0434b93 100644 --- a/dotnet/src/Agents/UnitTests/MockAgent.cs +++ 
b/dotnet/src/Agents/UnitTests/MockAgent.cs @@ -11,7 +11,7 @@ namespace SemanticKernel.Agents.UnitTests; /// /// Mock definition of with a contract. /// -internal sealed class MockAgent : ChatHistoryKernelAgent +internal class MockAgent : ChatHistoryKernelAgent { public int InvokeCount { get; private set; } diff --git a/dotnet/src/Agents/UnitTests/OpenAI/AssertCollection.cs b/dotnet/src/Agents/UnitTests/OpenAI/AssertCollection.cs new file mode 100644 index 000000000000..cd51c736ac18 --- /dev/null +++ b/dotnet/src/Agents/UnitTests/OpenAI/AssertCollection.cs @@ -0,0 +1,46 @@ +// Copyright (c) Microsoft. All rights reserved. +using System; +using System.Collections.Generic; +using Xunit; + +namespace SemanticKernel.Agents.UnitTests.OpenAI; + +internal static class AssertCollection +{ + public static void Equal(IReadOnlyList? source, IReadOnlyList? target, Func? adapter = null) + { + if (source == null) + { + Assert.Null(target); + return; + } + + Assert.NotNull(target); + Assert.Equal(source.Count, target.Count); + + adapter ??= (x) => x; + + for (int i = 0; i < source.Count; i++) + { + Assert.Equal(adapter(source[i]), adapter(target[i])); + } + } + + public static void Equal(IReadOnlyDictionary? source, IReadOnlyDictionary? target) + { + if (source == null) + { + Assert.Null(target); + return; + } + + Assert.NotNull(target); + Assert.Equal(source.Count, target.Count); + + foreach ((TKey key, TValue value) in source) + { + Assert.True(target.TryGetValue(key, out TValue? 
targetValue)); + Assert.Equal(value, targetValue); + } + } +} diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Azure/AddHeaderRequestPolicyTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Azure/AddHeaderRequestPolicyTests.cs index b1e4d397eded..6288c6a5aed8 100644 --- a/dotnet/src/Agents/UnitTests/OpenAI/Azure/AddHeaderRequestPolicyTests.cs +++ b/dotnet/src/Agents/UnitTests/OpenAI/Azure/AddHeaderRequestPolicyTests.cs @@ -2,7 +2,7 @@ using System.Linq; using Azure.Core; using Azure.Core.Pipeline; -using Microsoft.SemanticKernel.Agents.OpenAI.Azure; +using Microsoft.SemanticKernel.Agents.OpenAI.Internal; using Xunit; namespace SemanticKernel.Agents.UnitTests.OpenAI.Azure; @@ -18,14 +18,17 @@ public class AddHeaderRequestPolicyTests [Fact] public void VerifyAddHeaderRequestPolicyExecution() { + // Arrange using HttpClientTransport clientTransport = new(); HttpPipeline pipeline = new(clientTransport); HttpMessage message = pipeline.CreateMessage(); - AddHeaderRequestPolicy policy = new(headerName: "testname", headerValue: "testvalue"); + + // Act policy.OnSendingRequest(message); + // Assert Assert.Single(message.Request.Headers); HttpHeader header = message.Request.Headers.Single(); Assert.Equal("testname", header.Name); diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Extensions/AuthorRoleExtensionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/AuthorRoleExtensionsTests.cs index 0b0a0707e49a..97dbf32903d6 100644 --- a/dotnet/src/Agents/UnitTests/OpenAI/Extensions/AuthorRoleExtensionsTests.cs +++ b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/AuthorRoleExtensionsTests.cs @@ -1,7 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. 
-using Azure.AI.OpenAI.Assistants; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI.Assistants; using Xunit; using KernelExtensions = Microsoft.SemanticKernel.Agents.OpenAI; @@ -29,7 +29,10 @@ public void VerifyToMessageRole() private void VerifyRoleConversion(AuthorRole inputRole, MessageRole expectedRole) { + // Arrange MessageRole convertedRole = inputRole.ToMessageRole(); + + // Assert Assert.Equal(expectedRole, convertedRole); } } diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelExtensionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelExtensionsTests.cs index 3f982f3a7b47..70c27ccb2152 100644 --- a/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelExtensionsTests.cs +++ b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelExtensionsTests.cs @@ -17,11 +17,15 @@ public class KernelExtensionsTests [Fact] public void VerifyGetKernelFunctionLookup() { + // Arrange Kernel kernel = new(); KernelPlugin plugin = KernelPluginFactory.CreateFromType(); kernel.Plugins.Add(plugin); + // Act KernelFunction function = kernel.GetKernelFunction($"{nameof(TestPlugin)}-{nameof(TestPlugin.TestFunction)}", '-'); + + // Assert Assert.NotNull(function); Assert.Equal(nameof(TestPlugin.TestFunction), function.Name); } @@ -32,10 +36,12 @@ public void VerifyGetKernelFunctionLookup() [Fact] public void VerifyGetKernelFunctionInvalid() { + // Arrange Kernel kernel = new(); KernelPlugin plugin = KernelPluginFactory.CreateFromType(); kernel.Plugins.Add(plugin); + // Act and Assert Assert.Throws(() => kernel.GetKernelFunction("a", '-')); Assert.Throws(() => kernel.GetKernelFunction("a-b", ':')); Assert.Throws(() => kernel.GetKernelFunction("a-b-c", '-')); diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelFunctionExtensionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelFunctionExtensionsTests.cs index eeb8a4d3b9d1..acf195840366 100644 --- 
a/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelFunctionExtensionsTests.cs +++ b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelFunctionExtensionsTests.cs @@ -1,9 +1,9 @@ // Copyright (c) Microsoft. All rights reserved. using System; using System.ComponentModel; -using Azure.AI.OpenAI.Assistants; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents.OpenAI; +using OpenAI.Assistants; using Xunit; namespace SemanticKernel.Agents.UnitTests.OpenAI.Extensions; @@ -19,18 +19,28 @@ public class KernelFunctionExtensionsTests [Fact] public void VerifyKernelFunctionToFunctionTool() { + // Arrange KernelPlugin plugin = KernelPluginFactory.CreateFromType(); + + // Assert Assert.Equal(2, plugin.FunctionCount); + // Arrange KernelFunction f1 = plugin[nameof(TestPlugin.TestFunction1)]; KernelFunction f2 = plugin[nameof(TestPlugin.TestFunction2)]; - FunctionToolDefinition definition1 = f1.ToToolDefinition("testplugin", "-"); - Assert.StartsWith($"testplugin-{nameof(TestPlugin.TestFunction1)}", definition1.Name, StringComparison.Ordinal); + // Act + FunctionToolDefinition definition1 = f1.ToToolDefinition("testplugin"); + + // Assert + Assert.StartsWith($"testplugin-{nameof(TestPlugin.TestFunction1)}", definition1.FunctionName, StringComparison.Ordinal); Assert.Equal("test description", definition1.Description); - FunctionToolDefinition definition2 = f2.ToToolDefinition("testplugin", "-"); - Assert.StartsWith($"testplugin-{nameof(TestPlugin.TestFunction2)}", definition2.Name, StringComparison.Ordinal); + // Act + FunctionToolDefinition definition2 = f2.ToToolDefinition("testplugin"); + + // Assert + Assert.StartsWith($"testplugin-{nameof(TestPlugin.TestFunction2)}", definition2.FunctionName, StringComparison.Ordinal); Assert.Equal("test description", definition2.Description); } diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantMessageFactoryTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantMessageFactoryTests.cs new file mode 
100644 index 000000000000..50dec2cb95ae --- /dev/null +++ b/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantMessageFactoryTests.cs @@ -0,0 +1,210 @@ +// Copyright (c) Microsoft. All rights reserved. +using System; +using System.Collections.Generic; +using System.Linq; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents.OpenAI.Internal; +using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI.Assistants; +using Xunit; + +namespace SemanticKernel.Agents.UnitTests.OpenAI.Internal; + +/// +/// Unit testing of . +/// +public class AssistantMessageFactoryTests +{ + /// + /// Verify options creation. + /// + [Fact] + public void VerifyAssistantMessageAdapterCreateOptionsDefault() + { + // Arrange (Setup message with null metadata) + ChatMessageContent message = new(AuthorRole.User, "test"); + + // Act: Create options + MessageCreationOptions options = AssistantMessageFactory.CreateOptions(message); + + // Assert + Assert.NotNull(options); + Assert.Empty(options.Metadata); + } + + /// + /// Verify options creation. + /// + [Fact] + public void VerifyAssistantMessageAdapterCreateOptionsWithMetadataEmpty() + { + // Arrange Setup message with empty metadata + ChatMessageContent message = + new(AuthorRole.User, "test") + { + Metadata = new Dictionary() + }; + + // Act: Create options + MessageCreationOptions options = AssistantMessageFactory.CreateOptions(message); + + // Assert + Assert.NotNull(options); + Assert.Empty(options.Metadata); + } + + /// + /// Verify options creation. 
+ /// + [Fact] + public void VerifyAssistantMessageAdapterCreateOptionsWithMetadata() + { + // Arrange: Setup message with metadata + ChatMessageContent message = + new(AuthorRole.User, "test") + { + Metadata = + new Dictionary() + { + { "a", 1 }, + { "b", "2" }, + } + }; + + // Act: Create options + MessageCreationOptions options = AssistantMessageFactory.CreateOptions(message); + + // Assert + Assert.NotNull(options); + Assert.NotEmpty(options.Metadata); + Assert.Equal(2, options.Metadata.Count); + Assert.Equal("1", options.Metadata["a"]); + Assert.Equal("2", options.Metadata["b"]); + } + + /// + /// Verify options creation. + /// + [Fact] + public void VerifyAssistantMessageAdapterCreateOptionsWithMetadataNull() + { + // Arrange: Setup message with null metadata value + ChatMessageContent message = + new(AuthorRole.User, "test") + { + Metadata = + new Dictionary() + { + { "a", null }, + { "b", "2" }, + } + }; + + // Act: Create options + MessageCreationOptions options = AssistantMessageFactory.CreateOptions(message); + + // Assert + Assert.NotNull(options); + Assert.NotEmpty(options.Metadata); + Assert.Equal(2, options.Metadata.Count); + Assert.Equal(string.Empty, options.Metadata["a"]); + Assert.Equal("2", options.Metadata["b"]); + } + + /// + /// Verify options creation. + /// + [Fact] + public void VerifyAssistantMessageAdapterGetMessageContentsWithText() + { + // Arrange + ChatMessageContent message = new(AuthorRole.User, items: [new TextContent("test")]); + + // Act + MessageContent[] contents = AssistantMessageFactory.GetMessageContents(message).ToArray(); + + // Assert + Assert.NotNull(contents); + Assert.Single(contents); + Assert.NotNull(contents.Single().Text); + } + + /// + /// Verify options creation. 
+ /// + [Fact] + public void VerifyAssistantMessageAdapterGetMessageWithImageUrl() + { + // Arrange + ChatMessageContent message = new(AuthorRole.User, items: [new ImageContent(new Uri("https://localhost/myimage.png"))]); + + // Act + MessageContent[] contents = AssistantMessageFactory.GetMessageContents(message).ToArray(); + + // Assert + Assert.NotNull(contents); + Assert.Single(contents); + Assert.NotNull(contents.Single().ImageUrl); + } + + /// + /// Verify options creation. + /// + [Fact(Skip = "API bug with data Uri construction")] + public void VerifyAssistantMessageAdapterGetMessageWithImageData() + { + // Arrange + ChatMessageContent message = new(AuthorRole.User, items: [new ImageContent(new byte[] { 1, 2, 3 }, "image/png")]); + + // Act + MessageContent[] contents = AssistantMessageFactory.GetMessageContents(message).ToArray(); + + // Assert + Assert.NotNull(contents); + Assert.Single(contents); + Assert.NotNull(contents.Single().ImageUrl); + } + + /// + /// Verify options creation. + /// + [Fact] + public void VerifyAssistantMessageAdapterGetMessageWithImageFile() + { + // Arrange + ChatMessageContent message = new(AuthorRole.User, items: [new FileReferenceContent("file-id")]); + + // Act + MessageContent[] contents = AssistantMessageFactory.GetMessageContents(message).ToArray(); + + // Assert + Assert.NotNull(contents); + Assert.Single(contents); + Assert.NotNull(contents.Single().ImageFileId); + } + + /// + /// Verify options creation. 
+ /// + [Fact] + public void VerifyAssistantMessageAdapterGetMessageWithAll() + { + // Arrange + ChatMessageContent message = + new( + AuthorRole.User, + items: + [ + new TextContent("test"), + new ImageContent(new Uri("https://localhost/myimage.png")), + new FileReferenceContent("file-id") + ]); + + // Act + MessageContent[] contents = AssistantMessageFactory.GetMessageContents(message).ToArray(); + + // Assert + Assert.NotNull(contents); + Assert.Equal(3, contents.Length); + } +} diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs new file mode 100644 index 000000000000..d6bcf91b8a94 --- /dev/null +++ b/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs @@ -0,0 +1,139 @@ +// Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using Microsoft.SemanticKernel.Agents.OpenAI; +using Microsoft.SemanticKernel.Agents.OpenAI.Internal; +using OpenAI.Assistants; +using Xunit; + +namespace SemanticKernel.Agents.UnitTests.OpenAI.Internal; + +/// +/// Unit testing of . +/// +public class AssistantRunOptionsFactoryTests +{ + /// + /// Verify run options generation with null . + /// + [Fact] + public void AssistantRunOptionsFactoryExecutionOptionsNullTest() + { + // Arrange + OpenAIAssistantDefinition definition = + new("gpt-anything") + { + Temperature = 0.5F, + }; + + // Act + RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, null); + + // Assert + Assert.NotNull(options); + Assert.Null(options.Temperature); + Assert.Null(options.NucleusSamplingFactor); + Assert.Empty(options.Metadata); + } + + /// + /// Verify run options generation with equivalent . 
+ /// + [Fact] + public void AssistantRunOptionsFactoryExecutionOptionsEquivalentTest() + { + // Arrange + OpenAIAssistantDefinition definition = + new("gpt-anything") + { + Temperature = 0.5F, + }; + + OpenAIAssistantInvocationOptions invocationOptions = + new() + { + Temperature = 0.5F, + }; + + // Act + RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, invocationOptions); + + // Assert + Assert.NotNull(options); + Assert.Null(options.Temperature); + Assert.Null(options.NucleusSamplingFactor); + } + + /// + /// Verify run options generation with override. + /// + [Fact] + public void AssistantRunOptionsFactoryExecutionOptionsOverrideTest() + { + // Arrange + OpenAIAssistantDefinition definition = + new("gpt-anything") + { + Temperature = 0.5F, + ExecutionOptions = + new() + { + TruncationMessageCount = 5, + }, + }; + + OpenAIAssistantInvocationOptions invocationOptions = + new() + { + Temperature = 0.9F, + TruncationMessageCount = 8, + EnableJsonResponse = true, + }; + + // Act + RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, invocationOptions); + + // Assert + Assert.NotNull(options); + Assert.Equal(0.9F, options.Temperature); + Assert.Equal(8, options.TruncationStrategy.LastMessages); + Assert.Equal(AssistantResponseFormat.JsonObject, options.ResponseFormat); + Assert.Null(options.NucleusSamplingFactor); + } + + /// + /// Verify run options generation with metadata. + /// + [Fact] + public void AssistantRunOptionsFactoryExecutionOptionsMetadataTest() + { + // Arrange + OpenAIAssistantDefinition definition = + new("gpt-anything") + { + Temperature = 0.5F, + ExecutionOptions = + new() + { + TruncationMessageCount = 5, + }, + }; + + OpenAIAssistantInvocationOptions invocationOptions = + new() + { + Metadata = new Dictionary + { + { "key1", "value" }, + { "key2", null! 
}, + }, + }; + + // Act + RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, invocationOptions); + + // Assert + Assert.Equal(2, options.Metadata.Count); + Assert.Equal("value", options.Metadata["key1"]); + Assert.Equal(string.Empty, options.Metadata["key2"]); + } +} diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs index 1d9a9ec9dfcf..ef67c48f1473 100644 --- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs +++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs @@ -4,12 +4,14 @@ using System.Linq; using System.Net; using System.Net.Http; +using System.Text; +using System.Text.Json; using System.Threading.Tasks; -using Azure.AI.OpenAI.Assistants; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI.Assistants; using Xunit; namespace SemanticKernel.Agents.UnitTests.OpenAI; @@ -30,100 +32,257 @@ public sealed class OpenAIAssistantAgentTests : IDisposable [Fact] public async Task VerifyOpenAIAssistantAgentCreationEmptyAsync() { - OpenAIAssistantDefinition definition = - new() - { - ModelId = "testmodel", - }; - - this.SetupResponse(HttpStatusCode.OK, ResponseContent.CreateAgentSimple); - - OpenAIAssistantAgent agent = - await OpenAIAssistantAgent.CreateAsync( - this._emptyKernel, - this.CreateTestConfiguration(targetAzure: true, useVersion: true), - definition); + // Arrange + OpenAIAssistantDefinition definition = new("testmodel"); - Assert.NotNull(agent); - Assert.NotNull(agent.Id); - Assert.Null(agent.Instructions); - Assert.Null(agent.Name); - Assert.Null(agent.Description); - Assert.False(agent.IsDeleted); + // Act and Assert + await this.VerifyAgentCreationAsync(definition); } /// /// Verify the invocation and response of - /// for an agent with optional properties defined. 
+ /// for an agent with name, instructions, and description. /// [Fact] public async Task VerifyOpenAIAssistantAgentCreationPropertiesAsync() { + // Arrange OpenAIAssistantDefinition definition = - new() + new("testmodel") { - ModelId = "testmodel", Name = "testname", Description = "testdescription", Instructions = "testinstructions", }; - this.SetupResponse(HttpStatusCode.OK, ResponseContent.CreateAgentFull); + // Act and Assert + await this.VerifyAgentCreationAsync(definition); + } - OpenAIAssistantAgent agent = - await OpenAIAssistantAgent.CreateAsync( - this._emptyKernel, - this.CreateTestConfiguration(), - definition); + /// + /// Verify the invocation and response of + /// for an agent with code-interpreter enabled. + /// + [Fact] + public async Task VerifyOpenAIAssistantAgentCreationWithCodeInterpreterAsync() + { + // Arrange + OpenAIAssistantDefinition definition = + new("testmodel") + { + EnableCodeInterpreter = true, + }; - Assert.NotNull(agent); - Assert.NotNull(agent.Id); - Assert.NotNull(agent.Instructions); - Assert.NotNull(agent.Name); - Assert.NotNull(agent.Description); - Assert.False(agent.IsDeleted); + // Act and Assert + await this.VerifyAgentCreationAsync(definition); } /// /// Verify the invocation and response of - /// for an agent that has all properties defined.. + /// for an agent with code-interpreter files. 
/// [Fact] - public async Task VerifyOpenAIAssistantAgentCreationEverythingAsync() + public async Task VerifyOpenAIAssistantAgentCreationWithCodeInterpreterFilesAsync() { + // Arrange OpenAIAssistantDefinition definition = - new() + new("testmodel") { - ModelId = "testmodel", EnableCodeInterpreter = true, - EnableRetrieval = true, - FileIds = ["#1", "#2"], - Metadata = new Dictionary() { { "a", "1" } }, + CodeInterpreterFileIds = ["file1", "file2"], }; - this.SetupResponse(HttpStatusCode.OK, ResponseContent.CreateAgentWithEverything); + // Act and Assert + await this.VerifyAgentCreationAsync(definition); + } - OpenAIAssistantAgent agent = - await OpenAIAssistantAgent.CreateAsync( - this._emptyKernel, - this.CreateTestConfiguration(), - definition); + /// + /// Verify the invocation and response of + /// for an agent with a file-search and no vector-store + /// + [Fact] + public async Task VerifyOpenAIAssistantAgentCreationWithFileSearchAsync() + { + // Arrange + OpenAIAssistantDefinition definition = + new("testmodel") + { + EnableFileSearch = true, + }; - Assert.NotNull(agent); - Assert.Equal(2, agent.Tools.Count); - Assert.True(agent.Tools.OfType().Any()); - Assert.True(agent.Tools.OfType().Any()); - Assert.NotEmpty(agent.FileIds); - Assert.NotEmpty(agent.Metadata); + // Act and Assert + await this.VerifyAgentCreationAsync(definition); + } + + /// + /// Verify the invocation and response of + /// for an agent with a vector-store-id (for file-search). + /// + [Fact] + public async Task VerifyOpenAIAssistantAgentCreationWithVectorStoreAsync() + { + // Arrange + OpenAIAssistantDefinition definition = + new("testmodel") + { + EnableFileSearch = true, + VectorStoreId = "#vs1", + }; + + // Act and Assert + await this.VerifyAgentCreationAsync(definition); + } + + /// + /// Verify the invocation and response of + /// for an agent with metadata. 
+ /// + [Fact] + public async Task VerifyOpenAIAssistantAgentCreationWithMetadataAsync() + { + // Arrange + OpenAIAssistantDefinition definition = + new("testmodel") + { + Metadata = new Dictionary() + { + { "a", "1" }, + { "b", "2" }, + }, + }; + + // Act and Assert + await this.VerifyAgentCreationAsync(definition); + } + + /// + /// Verify the invocation and response of + /// for an agent with json-response mode enabled. + /// + [Fact] + public async Task VerifyOpenAIAssistantAgentCreationWithJsonResponseAsync() + { + // Arrange + OpenAIAssistantDefinition definition = + new("testmodel") + { + EnableJsonResponse = true, + }; + + // Act and Assert + await this.VerifyAgentCreationAsync(definition); + } + + /// + /// Verify the invocation and response of + /// for an agent with temperature defined. + /// + [Fact] + public async Task VerifyOpenAIAssistantAgentCreationWithTemperatureAsync() + { + // Arrange + OpenAIAssistantDefinition definition = + new("testmodel") + { + Temperature = 2.0F, + }; + + // Act and Assert + await this.VerifyAgentCreationAsync(definition); + } + + /// + /// Verify the invocation and response of + /// for an agent with topP defined. + /// + [Fact] + public async Task VerifyOpenAIAssistantAgentCreationWithTopPAsync() + { + // Arrange + OpenAIAssistantDefinition definition = + new("testmodel") + { + TopP = 2.0F, + }; + + // Act and Assert + await this.VerifyAgentCreationAsync(definition); + } + + /// + /// Verify the invocation and response of + /// for an agent with empty execution settings. + /// + [Fact] + public async Task VerifyOpenAIAssistantAgentCreationWithEmptyExecutionOptionsAsync() + { + // Arrange + OpenAIAssistantDefinition definition = + new("testmodel") + { + ExecutionOptions = new OpenAIAssistantExecutionOptions(), + }; + + // Act and Assert + await this.VerifyAgentCreationAsync(definition); + } + + /// + /// Verify the invocation and response of + /// for an agent with populated execution settings. 
+ /// + [Fact] + public async Task VerifyOpenAIAssistantAgentCreationWithExecutionOptionsAsync() + { + // Arrange + OpenAIAssistantDefinition definition = + new("testmodel") + { + ExecutionOptions = + new() + { + MaxCompletionTokens = 100, + ParallelToolCallsEnabled = false, + } + }; + + // Act and Assert + await this.VerifyAgentCreationAsync(definition); + } + + /// + /// Verify the invocation and response of + /// for an agent with execution settings and meta-data. + /// + [Fact] + public async Task VerifyOpenAIAssistantAgentCreationWithEmptyExecutionOptionsAndMetadataAsync() + { + // Arrange + OpenAIAssistantDefinition definition = + new("testmodel") + { + ExecutionOptions = new(), + Metadata = new Dictionary() + { + { "a", "1" }, + { "b", "2" }, + }, + }; + + // Act and Assert + await this.VerifyAgentCreationAsync(definition); } /// /// Verify the invocation and response of . /// [Fact] - public async Task VerifyOpenAIAssistantAgentRetrieveAsync() + public async Task VerifyOpenAIAssistantAgentRetrievalAsync() { - this.SetupResponse(HttpStatusCode.OK, ResponseContent.CreateAgentSimple); + // Arrange + OpenAIAssistantDefinition definition = new("testmodel"); + + this.SetupResponse(HttpStatusCode.OK, ResponseContent.CreateAgentPayload(definition)); OpenAIAssistantAgent agent = await OpenAIAssistantAgent.RetrieveAsync( @@ -131,12 +290,8 @@ await OpenAIAssistantAgent.RetrieveAsync( this.CreateTestConfiguration(), "#id"); - Assert.NotNull(agent); - Assert.NotNull(agent.Id); - Assert.Null(agent.Instructions); - Assert.Null(agent.Name); - Assert.Null(agent.Description); - Assert.False(agent.IsDeleted); + // Act and Assert + ValidateAgentDefinition(agent, definition); } /// @@ -145,16 +300,50 @@ await OpenAIAssistantAgent.RetrieveAsync( [Fact] public async Task VerifyOpenAIAssistantAgentDeleteAsync() { + // Arrange OpenAIAssistantAgent agent = await this.CreateAgentAsync(); + // Assert Assert.False(agent.IsDeleted); + // Arrange this.SetupResponse(HttpStatusCode.OK, 
ResponseContent.DeleteAgent); + // Act await agent.DeleteAsync(); + // Assert Assert.True(agent.IsDeleted); + // Act await agent.DeleteAsync(); // Doesn't throw + // Assert Assert.True(agent.IsDeleted); + await Assert.ThrowsAsync(() => agent.AddChatMessageAsync("threadid", new(AuthorRole.User, "test"))); + await Assert.ThrowsAsync(() => agent.InvokeAsync("threadid").ToArrayAsync().AsTask()); + } + + /// + /// Verify the deletion of agent via . + /// + [Fact] + public async Task VerifyOpenAIAssistantAgentCreateThreadAsync() + { + // Arrange + OpenAIAssistantAgent agent = await this.CreateAgentAsync(); + + this.SetupResponse(HttpStatusCode.OK, ResponseContent.CreateThread); + + // Act + string threadId = await agent.CreateThreadAsync(); + // Assert + Assert.NotNull(threadId); + + // Arrange + this.SetupResponse(HttpStatusCode.OK, ResponseContent.CreateThread); + + // Act + threadId = await agent.CreateThreadAsync(new()); + // Assert + Assert.NotNull(threadId); } /// @@ -163,6 +352,7 @@ public async Task VerifyOpenAIAssistantAgentDeleteAsync() [Fact] public async Task VerifyOpenAIAssistantAgentChatTextMessageAsync() { + // Arrange OpenAIAssistantAgent agent = await this.CreateAgentAsync(); this.SetupResponses( @@ -174,7 +364,11 @@ public async Task VerifyOpenAIAssistantAgentChatTextMessageAsync() ResponseContent.GetTextMessage); AgentGroupChat chat = new(); + + // Act ChatMessageContent[] messages = await chat.InvokeAsync(agent).ToArrayAsync(); + + // Assert Assert.Single(messages); Assert.Single(messages[0].Items); Assert.IsType(messages[0].Items[0]); @@ -186,6 +380,7 @@ public async Task VerifyOpenAIAssistantAgentChatTextMessageAsync() [Fact] public async Task VerifyOpenAIAssistantAgentChatTextMessageWithAnnotationAsync() { + // Arrange OpenAIAssistantAgent agent = await this.CreateAgentAsync(); this.SetupResponses( @@ -197,7 +392,11 @@ public async Task VerifyOpenAIAssistantAgentChatTextMessageWithAnnotationAsync() ResponseContent.GetTextMessageWithAnnotation); 
AgentGroupChat chat = new(); + + // Act ChatMessageContent[] messages = await chat.InvokeAsync(agent).ToArrayAsync(); + + // Assert Assert.Single(messages); Assert.Equal(2, messages[0].Items.Count); Assert.NotNull(messages[0].Items.SingleOrDefault(c => c is TextContent)); @@ -210,6 +409,7 @@ public async Task VerifyOpenAIAssistantAgentChatTextMessageWithAnnotationAsync() [Fact] public async Task VerifyOpenAIAssistantAgentChatImageMessageAsync() { + // Arrange OpenAIAssistantAgent agent = await this.CreateAgentAsync(); this.SetupResponses( @@ -221,7 +421,11 @@ public async Task VerifyOpenAIAssistantAgentChatImageMessageAsync() ResponseContent.GetImageMessage); AgentGroupChat chat = new(); + + // Act ChatMessageContent[] messages = await chat.InvokeAsync(agent).ToArrayAsync(); + + // Assert Assert.Single(messages); Assert.Single(messages[0].Items); Assert.IsType(messages[0].Items[0]); @@ -233,7 +437,7 @@ public async Task VerifyOpenAIAssistantAgentChatImageMessageAsync() [Fact] public async Task VerifyOpenAIAssistantAgentGetMessagesAsync() { - // Create agent + // Arrange: Create agent OpenAIAssistantAgent agent = await this.CreateAgentAsync(); // Initialize agent channel @@ -246,18 +450,22 @@ public async Task VerifyOpenAIAssistantAgentGetMessagesAsync() ResponseContent.GetTextMessage); AgentGroupChat chat = new(); + + // Act ChatMessageContent[] messages = await chat.InvokeAsync(agent).ToArrayAsync(); + // Assert Assert.Single(messages); - // Setup messages + // Arrange: Setup messages this.SetupResponses( HttpStatusCode.OK, ResponseContent.ListMessagesPageMore, ResponseContent.ListMessagesPageMore, ResponseContent.ListMessagesPageFinal); - // Get messages and verify + // Act: Get messages messages = await chat.GetChatMessagesAsync(agent).ToArrayAsync(); + // Assert Assert.Equal(5, messages.Length); } @@ -267,7 +475,7 @@ public async Task VerifyOpenAIAssistantAgentGetMessagesAsync() [Fact] public async Task VerifyOpenAIAssistantAgentAddMessagesAsync() { - // Create 
agent + // Arrange: Create agent OpenAIAssistantAgent agent = await this.CreateAgentAsync(); // Initialize agent channel @@ -279,12 +487,18 @@ public async Task VerifyOpenAIAssistantAgentAddMessagesAsync() ResponseContent.MessageSteps, ResponseContent.GetTextMessage); AgentGroupChat chat = new(); + + // Act ChatMessageContent[] messages = await chat.InvokeAsync(agent).ToArrayAsync(); + // Assert Assert.Single(messages); + // Arrange chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, "hi")); + // Act messages = await chat.GetChatMessagesAsync().ToArrayAsync(); + // Assert Assert.Equal(2, messages.Length); } @@ -294,6 +508,7 @@ public async Task VerifyOpenAIAssistantAgentAddMessagesAsync() [Fact] public async Task VerifyOpenAIAssistantAgentListDefinitionAsync() { + // Arrange OpenAIAssistantAgent agent = await this.CreateAgentAsync(); this.SetupResponses( @@ -302,20 +517,24 @@ public async Task VerifyOpenAIAssistantAgentListDefinitionAsync() ResponseContent.ListAgentsPageMore, ResponseContent.ListAgentsPageFinal); + // Act var messages = await OpenAIAssistantAgent.ListDefinitionsAsync( this.CreateTestConfiguration()).ToArrayAsync(); + // Assert Assert.Equal(7, messages.Length); + // Arrange this.SetupResponses( HttpStatusCode.OK, ResponseContent.ListAgentsPageMore, - ResponseContent.ListAgentsPageMore); + ResponseContent.ListAgentsPageFinal); + // Act messages = await OpenAIAssistantAgent.ListDefinitionsAsync( - this.CreateTestConfiguration(), - maxResults: 4).ToArrayAsync(); + this.CreateTestConfiguration()).ToArrayAsync(); + // Assert Assert.Equal(4, messages.Length); } @@ -325,6 +544,7 @@ await OpenAIAssistantAgent.ListDefinitionsAsync( [Fact] public async Task VerifyOpenAIAssistantAgentWithFunctionCallAsync() { + // Arrange OpenAIAssistantAgent agent = await this.CreateAgentAsync(); KernelPlugin plugin = KernelPluginFactory.CreateFromType(); @@ -342,7 +562,11 @@ public async Task VerifyOpenAIAssistantAgentWithFunctionCallAsync() 
ResponseContent.GetTextMessage); AgentGroupChat chat = new(); + + // Act ChatMessageContent[] messages = await chat.InvokeAsync(agent).ToArrayAsync(); + + // Assert Assert.Single(messages); Assert.Single(messages[0].Items); Assert.IsType(messages[0].Items[0]); @@ -365,15 +589,95 @@ public OpenAIAssistantAgentTests() this._emptyKernel = new Kernel(); } - private Task CreateAgentAsync() + private async Task VerifyAgentCreationAsync(OpenAIAssistantDefinition definition) { - OpenAIAssistantDefinition definition = - new() + this.SetupResponse(HttpStatusCode.OK, ResponseContent.CreateAgentPayload(definition)); + + OpenAIAssistantAgent agent = + await OpenAIAssistantAgent.CreateAsync( + this._emptyKernel, + this.CreateTestConfiguration(), + definition); + + ValidateAgentDefinition(agent, definition); + } + + private static void ValidateAgentDefinition(OpenAIAssistantAgent agent, OpenAIAssistantDefinition sourceDefinition) + { + // Verify fundamental state + Assert.NotNull(agent); + Assert.NotNull(agent.Id); + Assert.False(agent.IsDeleted); + Assert.NotNull(agent.Definition); + Assert.Equal(sourceDefinition.ModelId, agent.Definition.ModelId); + + // Verify core properties + Assert.Equal(sourceDefinition.Instructions ?? string.Empty, agent.Instructions); + Assert.Equal(sourceDefinition.Name ?? string.Empty, agent.Name); + Assert.Equal(sourceDefinition.Description ?? 
string.Empty, agent.Description); + + // Verify options + Assert.Equal(sourceDefinition.Temperature, agent.Definition.Temperature); + Assert.Equal(sourceDefinition.TopP, agent.Definition.TopP); + Assert.Equal(sourceDefinition.ExecutionOptions?.MaxCompletionTokens, agent.Definition.ExecutionOptions?.MaxCompletionTokens); + Assert.Equal(sourceDefinition.ExecutionOptions?.MaxPromptTokens, agent.Definition.ExecutionOptions?.MaxPromptTokens); + Assert.Equal(sourceDefinition.ExecutionOptions?.ParallelToolCallsEnabled, agent.Definition.ExecutionOptions?.ParallelToolCallsEnabled); + Assert.Equal(sourceDefinition.ExecutionOptions?.TruncationMessageCount, agent.Definition.ExecutionOptions?.TruncationMessageCount); + + // Verify tool definitions + int expectedToolCount = 0; + + bool hasCodeInterpreter = false; + if (sourceDefinition.EnableCodeInterpreter) + { + hasCodeInterpreter = true; + ++expectedToolCount; + } + + Assert.Equal(hasCodeInterpreter, agent.Tools.OfType().Any()); + + bool hasFileSearch = false; + if (sourceDefinition.EnableFileSearch) + { + hasFileSearch = true; + ++expectedToolCount; + } + + Assert.Equal(hasFileSearch, agent.Tools.OfType().Any()); + + Assert.Equal(expectedToolCount, agent.Tools.Count); + + // Verify metadata + Assert.NotNull(agent.Definition.Metadata); + if (sourceDefinition.ExecutionOptions == null) + { + Assert.Equal(sourceDefinition.Metadata ?? new Dictionary(), agent.Definition.Metadata); + } + else // Additional metadata present when execution options are defined + { + Assert.Equal((sourceDefinition.Metadata?.Count ?? 0) + 1, agent.Definition.Metadata.Count); + + if (sourceDefinition.Metadata != null) { - ModelId = "testmodel", - }; + foreach (var (key, value) in sourceDefinition.Metadata) + { + string? 
targetValue = agent.Definition.Metadata[key]; + Assert.NotNull(targetValue); + Assert.Equal(value, targetValue); + } + } + } + + // Verify detail definition + Assert.Equal(sourceDefinition.VectorStoreId, agent.Definition.VectorStoreId); + Assert.Equal(sourceDefinition.CodeInterpreterFileIds, agent.Definition.CodeInterpreterFileIds); + } - this.SetupResponse(HttpStatusCode.OK, ResponseContent.CreateAgentSimple); + private Task CreateAgentAsync() + { + OpenAIAssistantDefinition definition = new("testmodel"); + + this.SetupResponse(HttpStatusCode.OK, ResponseContent.CreateAgentPayload(definition)); return OpenAIAssistantAgent.CreateAsync( @@ -382,14 +686,10 @@ private Task CreateAgentAsync() definition); } - private OpenAIAssistantConfiguration CreateTestConfiguration(bool targetAzure = false, bool useVersion = false) - { - return new(apiKey: "fakekey", endpoint: targetAzure ? "https://localhost" : null) - { - HttpClient = this._httpClient, - Version = useVersion ? AssistantsClientOptions.ServiceVersion.V2024_02_15_Preview : null, - }; - } + private OpenAIClientProvider CreateTestConfiguration(bool targetAzure = false) + => targetAzure ? 
+ OpenAIClientProvider.ForAzureOpenAI(apiKey: "fakekey", endpoint: new Uri("https://localhost"), this._httpClient) : + OpenAIClientProvider.ForOpenAI(apiKey: "fakekey", endpoint: null, this._httpClient); private void SetupResponse(HttpStatusCode statusCode, string content) { @@ -423,58 +723,114 @@ public void MyFunction(int index) private static class ResponseContent { - public const string CreateAgentSimple = - """ + public static string CreateAgentPayload(OpenAIAssistantDefinition definition) + { + StringBuilder builder = new(); + builder.AppendLine("{"); + builder.AppendLine(@" ""id"": ""asst_abc123"","); + builder.AppendLine(@" ""object"": ""assistant"","); + builder.AppendLine(@" ""created_at"": 1698984975,"); + builder.AppendLine(@$" ""name"": ""{definition.Name}"","); + builder.AppendLine(@$" ""description"": ""{definition.Description}"","); + builder.AppendLine(@$" ""instructions"": ""{definition.Instructions}"","); + builder.AppendLine(@$" ""model"": ""{definition.ModelId}"","); + + bool hasCodeInterpreter = definition.EnableCodeInterpreter; + bool hasCodeInterpreterFiles = (definition.CodeInterpreterFileIds?.Count ?? 0) > 0; + bool hasFileSearch = definition.EnableFileSearch; + if (!hasCodeInterpreter && !hasFileSearch) { - "id": "asst_abc123", - "object": "assistant", - "created_at": 1698984975, - "name": null, - "description": null, - "model": "gpt-4-turbo", - "instructions": null, - "tools": [], - "file_ids": [], - "metadata": {} + builder.AppendLine(@" ""tools"": [],"); } - """; + else + { + builder.AppendLine(@" ""tools"": ["); - public const string CreateAgentFull = - """ + if (hasCodeInterpreter) + { + builder.Append(@$" {{ ""type"": ""code_interpreter"" }}{(hasFileSearch ? 
"," : string.Empty)}"); + } + + if (hasFileSearch) + { + builder.AppendLine(@" { ""type"": ""file_search"" }"); + } + + builder.AppendLine(" ],"); + } + + if (!hasCodeInterpreterFiles && !hasFileSearch) { - "id": "asst_abc123", - "object": "assistant", - "created_at": 1698984975, - "name": "testname", - "description": "testdescription", - "model": "gpt-4-turbo", - "instructions": "testinstructions", - "tools": [], - "file_ids": [], - "metadata": {} + builder.AppendLine(@" ""tool_resources"": {},"); } - """; + else + { + builder.AppendLine(@" ""tool_resources"": {"); - public const string CreateAgentWithEverything = - """ + if (hasCodeInterpreterFiles) + { + string fileIds = string.Join(",", definition.CodeInterpreterFileIds!.Select(fileId => "\"" + fileId + "\"")); + builder.AppendLine(@$" ""code_interpreter"": {{ ""file_ids"": [{fileIds}] }}{(hasFileSearch ? "," : string.Empty)}"); + } + + if (hasFileSearch) + { + builder.AppendLine(@$" ""file_search"": {{ ""vector_store_ids"": [""{definition.VectorStoreId}""] }}"); + } + + builder.AppendLine(" },"); + } + + if (definition.Temperature.HasValue) { - "id": "asst_abc123", - "object": "assistant", - "created_at": 1698984975, - "name": null, - "description": null, - "model": "gpt-4-turbo", - "instructions": null, - "tools": [ + builder.AppendLine(@$" ""temperature"": {definition.Temperature},"); + } + + if (definition.TopP.HasValue) + { + builder.AppendLine(@$" ""top_p"": {definition.TopP},"); + } + + bool hasExecutionOptions = definition.ExecutionOptions != null; + int metadataCount = (definition.Metadata?.Count ?? 
0); + if (metadataCount == 0 && !hasExecutionOptions) + { + builder.AppendLine(@" ""metadata"": {}"); + } + else + { + int index = 0; + builder.AppendLine(@" ""metadata"": {"); + + if (hasExecutionOptions) { - "type": "code_interpreter" - }, + string serializedExecutionOptions = JsonSerializer.Serialize(definition.ExecutionOptions); + builder.AppendLine(@$" ""{OpenAIAssistantAgent.OptionsMetadataKey}"": ""{JsonEncodedText.Encode(serializedExecutionOptions)}""{(metadataCount > 0 ? "," : string.Empty)}"); + } + + if (metadataCount > 0) { - "type": "retrieval" + foreach (var (key, value) in definition.Metadata!) + { + builder.AppendLine(@$" ""{key}"": ""{value}""{(index < metadataCount - 1 ? "," : string.Empty)}"); + ++index; + } } - ], - "file_ids": ["#1", "#2"], - "metadata": {"a": "1"} + + builder.AppendLine(" }"); + } + + builder.AppendLine("}"); + + return builder.ToString(); + } + + public const string CreateAgentWithEverything = + """ + { + "tool_resources": { + "file_search": { "vector_store_ids": ["#vs"] } + }, } """; @@ -748,7 +1104,6 @@ private static class ResponseContent "model": "gpt-4-turbo", "instructions": "You are a helpful assistant designed to make me better at coding!", "tools": [], - "file_ids": [], "metadata": {} }, { @@ -760,7 +1115,6 @@ private static class ResponseContent "model": "gpt-4-turbo", "instructions": "You are a helpful assistant designed to make me better at coding!", "tools": [], - "file_ids": [], "metadata": {} }, { @@ -772,7 +1126,6 @@ private static class ResponseContent "model": "gpt-4-turbo", "instructions": null, "tools": [], - "file_ids": [], "metadata": {} } ], @@ -796,7 +1149,6 @@ private static class ResponseContent "model": "gpt-4-turbo", "instructions": "You are a helpful assistant designed to make me better at coding!", "tools": [], - "file_ids": [], "metadata": {} } ], diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantConfigurationTests.cs 
b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantConfigurationTests.cs deleted file mode 100644 index 3708ab50ab97..000000000000 --- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantConfigurationTests.cs +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. -using System; -using System.Net.Http; -using Azure.AI.OpenAI.Assistants; -using Microsoft.SemanticKernel.Agents.OpenAI; -using Xunit; - -namespace SemanticKernel.Agents.UnitTests.OpenAI; - -/// -/// Unit testing of . -/// -public class OpenAIAssistantConfigurationTests -{ - /// - /// Verify initial state. - /// - [Fact] - public void VerifyOpenAIAssistantConfigurationInitialState() - { - OpenAIAssistantConfiguration config = new(apiKey: "testkey"); - - Assert.Equal("testkey", config.ApiKey); - Assert.Null(config.Endpoint); - Assert.Null(config.HttpClient); - Assert.Null(config.Version); - } - - /// - /// Verify assignment. - /// - [Fact] - public void VerifyOpenAIAssistantConfigurationAssignment() - { - using HttpClient client = new(); - - OpenAIAssistantConfiguration config = - new(apiKey: "testkey", endpoint: "https://localhost") - { - HttpClient = client, - Version = AssistantsClientOptions.ServiceVersion.V2024_02_15_Preview, - }; - - Assert.Equal("testkey", config.ApiKey); - Assert.Equal("https://localhost", config.Endpoint); - Assert.NotNull(config.HttpClient); - Assert.Equal(AssistantsClientOptions.ServiceVersion.V2024_02_15_Preview, config.Version); - } - - /// - /// Verify secure endpoint. 
- /// - [Fact] - public void VerifyOpenAIAssistantConfigurationThrows() - { - using HttpClient client = new(); - - Assert.Throws( - () => new OpenAIAssistantConfiguration(apiKey: "testkey", endpoint: "http://localhost")); - } -} diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs index b17b61211c18..f8547f375f13 100644 --- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs +++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. using System.Collections.Generic; +using System.Text.Json; using Microsoft.SemanticKernel.Agents.OpenAI; using Xunit; @@ -16,17 +17,27 @@ public class OpenAIAssistantDefinitionTests [Fact] public void VerifyOpenAIAssistantDefinitionInitialState() { - OpenAIAssistantDefinition definition = new(); + // Arrange + OpenAIAssistantDefinition definition = new("testmodel"); - Assert.Null(definition.Id); + // Assert + Assert.Equal(string.Empty, definition.Id); + Assert.Equal("testmodel", definition.ModelId); Assert.Null(definition.Name); - Assert.Null(definition.ModelId); Assert.Null(definition.Instructions); Assert.Null(definition.Description); Assert.Null(definition.Metadata); - Assert.Null(definition.FileIds); + Assert.Null(definition.ExecutionOptions); + Assert.Null(definition.Temperature); + Assert.Null(definition.TopP); + Assert.False(definition.EnableFileSearch); + Assert.Null(definition.VectorStoreId); + Assert.Null(definition.CodeInterpreterFileIds); Assert.False(definition.EnableCodeInterpreter); - Assert.False(definition.EnableRetrieval); + Assert.False(definition.EnableJsonResponse); + + // Act and Assert + ValidateSerialization(definition); } /// @@ -35,28 +46,80 @@ public void VerifyOpenAIAssistantDefinitionInitialState() [Fact] public void VerifyOpenAIAssistantDefinitionAssignment() { + // Arrange OpenAIAssistantDefinition definition 
= - new() + new("testmodel") { Id = "testid", Name = "testname", - ModelId = "testmodel", Instructions = "testinstructions", Description = "testdescription", - FileIds = ["id"], + EnableFileSearch = true, + VectorStoreId = "#vs", Metadata = new Dictionary() { { "a", "1" } }, + Temperature = 2, + TopP = 0, + ExecutionOptions = + new() + { + MaxCompletionTokens = 1000, + MaxPromptTokens = 1000, + ParallelToolCallsEnabled = false, + TruncationMessageCount = 12, + }, + CodeInterpreterFileIds = ["file1"], EnableCodeInterpreter = true, - EnableRetrieval = true, + EnableJsonResponse = true, }; + // Assert Assert.Equal("testid", definition.Id); Assert.Equal("testname", definition.Name); Assert.Equal("testmodel", definition.ModelId); Assert.Equal("testinstructions", definition.Instructions); Assert.Equal("testdescription", definition.Description); + Assert.True(definition.EnableFileSearch); + Assert.Equal("#vs", definition.VectorStoreId); + Assert.Equal(2, definition.Temperature); + Assert.Equal(0, definition.TopP); + Assert.NotNull(definition.ExecutionOptions); + Assert.Equal(1000, definition.ExecutionOptions.MaxCompletionTokens); + Assert.Equal(1000, definition.ExecutionOptions.MaxPromptTokens); + Assert.Equal(12, definition.ExecutionOptions.TruncationMessageCount); + Assert.False(definition.ExecutionOptions.ParallelToolCallsEnabled); Assert.Single(definition.Metadata); - Assert.Single(definition.FileIds); + Assert.Single(definition.CodeInterpreterFileIds); Assert.True(definition.EnableCodeInterpreter); - Assert.True(definition.EnableRetrieval); + Assert.True(definition.EnableJsonResponse); + + // Act and Assert + ValidateSerialization(definition); + } + + private static void ValidateSerialization(OpenAIAssistantDefinition source) + { + string json = JsonSerializer.Serialize(source); + + OpenAIAssistantDefinition? 
target = JsonSerializer.Deserialize(json); + + Assert.NotNull(target); + Assert.Equal(source.Id, target.Id); + Assert.Equal(source.Name, target.Name); + Assert.Equal(source.ModelId, target.ModelId); + Assert.Equal(source.Instructions, target.Instructions); + Assert.Equal(source.Description, target.Description); + Assert.Equal(source.EnableFileSearch, target.EnableFileSearch); + Assert.Equal(source.VectorStoreId, target.VectorStoreId); + Assert.Equal(source.Temperature, target.Temperature); + Assert.Equal(source.TopP, target.TopP); + Assert.Equal(source.EnableFileSearch, target.EnableFileSearch); + Assert.Equal(source.VectorStoreId, target.VectorStoreId); + Assert.Equal(source.EnableCodeInterpreter, target.EnableCodeInterpreter); + Assert.Equal(source.ExecutionOptions?.MaxCompletionTokens, target.ExecutionOptions?.MaxCompletionTokens); + Assert.Equal(source.ExecutionOptions?.MaxPromptTokens, target.ExecutionOptions?.MaxPromptTokens); + Assert.Equal(source.ExecutionOptions?.TruncationMessageCount, target.ExecutionOptions?.TruncationMessageCount); + Assert.Equal(source.ExecutionOptions?.ParallelToolCallsEnabled, target.ExecutionOptions?.ParallelToolCallsEnabled); + AssertCollection.Equal(source.CodeInterpreterFileIds, target.CodeInterpreterFileIds); + AssertCollection.Equal(source.Metadata, target.Metadata); } } diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantInvocationOptionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantInvocationOptionsTests.cs new file mode 100644 index 000000000000..99cbe012f183 --- /dev/null +++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantInvocationOptionsTests.cs @@ -0,0 +1,100 @@ +// Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using System.Text.Json; +using Microsoft.SemanticKernel.Agents.OpenAI; +using Xunit; + +namespace SemanticKernel.Agents.UnitTests.OpenAI; + +/// +/// Unit testing of . 
+/// +public class OpenAIAssistantInvocationOptionsTests +{ + /// + /// Verify initial state. + /// + [Fact] + public void OpenAIAssistantInvocationOptionsInitialState() + { + // Arrange + OpenAIAssistantInvocationOptions options = new(); + + // Assert + Assert.Null(options.ModelName); + Assert.Null(options.Metadata); + Assert.Null(options.Temperature); + Assert.Null(options.TopP); + Assert.Null(options.ParallelToolCallsEnabled); + Assert.Null(options.MaxCompletionTokens); + Assert.Null(options.MaxPromptTokens); + Assert.Null(options.TruncationMessageCount); + Assert.Null(options.EnableJsonResponse); + Assert.False(options.EnableCodeInterpreter); + Assert.False(options.EnableFileSearch); + + // Act and Assert + ValidateSerialization(options); + } + + /// + /// Verify initialization. + /// + [Fact] + public void OpenAIAssistantInvocationOptionsAssignment() + { + // Arrange + OpenAIAssistantInvocationOptions options = + new() + { + ModelName = "testmodel", + Metadata = new Dictionary() { { "a", "1" } }, + MaxCompletionTokens = 1000, + MaxPromptTokens = 1000, + ParallelToolCallsEnabled = false, + TruncationMessageCount = 12, + Temperature = 2, + TopP = 0, + EnableCodeInterpreter = true, + EnableJsonResponse = true, + EnableFileSearch = true, + }; + + // Assert + Assert.Equal("testmodel", options.ModelName); + Assert.Equal(2, options.Temperature); + Assert.Equal(0, options.TopP); + Assert.Equal(1000, options.MaxCompletionTokens); + Assert.Equal(1000, options.MaxPromptTokens); + Assert.Equal(12, options.TruncationMessageCount); + Assert.False(options.ParallelToolCallsEnabled); + Assert.Single(options.Metadata); + Assert.True(options.EnableCodeInterpreter); + Assert.True(options.EnableJsonResponse); + Assert.True(options.EnableFileSearch); + + // Act and Assert + ValidateSerialization(options); + } + + private static void ValidateSerialization(OpenAIAssistantInvocationOptions source) + { + // Act + string json = JsonSerializer.Serialize(source); + + 
OpenAIAssistantInvocationOptions? target = JsonSerializer.Deserialize(json); + + // Assert + Assert.NotNull(target); + Assert.Equal(source.ModelName, target.ModelName); + Assert.Equal(source.Temperature, target.Temperature); + Assert.Equal(source.TopP, target.TopP); + Assert.Equal(source.MaxCompletionTokens, target.MaxCompletionTokens); + Assert.Equal(source.MaxPromptTokens, target.MaxPromptTokens); + Assert.Equal(source.TruncationMessageCount, target.TruncationMessageCount); + Assert.Equal(source.EnableCodeInterpreter, target.EnableCodeInterpreter); + Assert.Equal(source.EnableJsonResponse, target.EnableJsonResponse); + Assert.Equal(source.EnableFileSearch, target.EnableFileSearch); + AssertCollection.Equal(source.Metadata, target.Metadata); + } +} diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIClientProviderTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIClientProviderTests.cs new file mode 100644 index 000000000000..7799eb26c305 --- /dev/null +++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIClientProviderTests.cs @@ -0,0 +1,86 @@ +// Copyright (c) Microsoft. All rights reserved. +using System; +using System.Net.Http; +using Azure.Core; +using Microsoft.SemanticKernel.Agents.OpenAI; +using Moq; +using Xunit; + +namespace SemanticKernel.Agents.UnitTests.OpenAI; + +/// +/// Unit testing of . +/// +public class OpenAIClientProviderTests +{ + /// + /// Verify that provisioning of client for Azure OpenAI. + /// + [Fact] + public void VerifyOpenAIClientFactoryTargetAzureByKey() + { + // Arrange + OpenAIClientProvider provider = OpenAIClientProvider.ForAzureOpenAI("key", new Uri("https://localhost")); + + // Assert + Assert.NotNull(provider.Client); + } + + /// + /// Verify that provisioning of client for Azure OpenAI. 
+ /// + [Fact] + public void VerifyOpenAIClientFactoryTargetAzureByCredential() + { + // Arrange + Mock mockCredential = new(); + OpenAIClientProvider provider = OpenAIClientProvider.ForAzureOpenAI(mockCredential.Object, new Uri("https://localhost")); + + // Assert + Assert.NotNull(provider.Client); + } + + /// + /// Verify that provisioning of client for OpenAI. + /// + [Theory] + [InlineData(null)] + [InlineData("http://myproxy:9819")] + public void VerifyOpenAIClientFactoryTargetOpenAINoKey(string? endpoint) + { + // Arrange + OpenAIClientProvider provider = OpenAIClientProvider.ForOpenAI(endpoint != null ? new Uri(endpoint) : null); + + // Assert + Assert.NotNull(provider.Client); + } + + /// + /// Verify that provisioning of client for OpenAI. + /// + [Theory] + [InlineData("key", null)] + [InlineData("key", "http://myproxy:9819")] + public void VerifyOpenAIClientFactoryTargetOpenAIByKey(string key, string? endpoint) + { + // Arrange + OpenAIClientProvider provider = OpenAIClientProvider.ForOpenAI(key, endpoint != null ? new Uri(endpoint) : null); + + // Assert + Assert.NotNull(provider.Client); + } + + /// + /// Verify that the factory can create a client with http proxy. + /// + [Fact] + public void VerifyOpenAIClientFactoryWithHttpClient() + { + // Arrange + using HttpClient httpClient = new() { BaseAddress = new Uri("http://myproxy:9819") }; + OpenAIClientProvider provider = OpenAIClientProvider.ForOpenAI(httpClient: httpClient); + + // Assert + Assert.NotNull(provider.Client); + } +} diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIThreadCreationOptionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIThreadCreationOptionsTests.cs new file mode 100644 index 000000000000..1689bec1f828 --- /dev/null +++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIThreadCreationOptionsTests.cs @@ -0,0 +1,75 @@ +// Copyright (c) Microsoft. All rights reserved. 
+using System.Collections.Generic; +using System.Text.Json; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents.OpenAI; +using Microsoft.SemanticKernel.ChatCompletion; +using Xunit; + +namespace SemanticKernel.Agents.UnitTests.OpenAI; + +/// +/// Unit testing of . +/// +public class OpenAIThreadCreationOptionsTests +{ + /// + /// Verify initial state. + /// + [Fact] + public void OpenAIThreadCreationOptionsInitialState() + { + // Arrange + OpenAIThreadCreationOptions options = new(); + + // Assert + Assert.Null(options.Messages); + Assert.Null(options.Metadata); + Assert.Null(options.VectorStoreId); + Assert.Null(options.CodeInterpreterFileIds); + + // Act and Assert + ValidateSerialization(options); + } + + /// + /// Verify initialization. + /// + [Fact] + public void OpenAIThreadCreationOptionsAssignment() + { + // Arrange + OpenAIThreadCreationOptions options = + new() + { + Messages = [new ChatMessageContent(AuthorRole.User, "test")], + VectorStoreId = "#vs", + Metadata = new Dictionary() { { "a", "1" } }, + CodeInterpreterFileIds = ["file1"], + }; + + // Assert + Assert.Single(options.Messages); + Assert.Single(options.Metadata); + Assert.Equal("#vs", options.VectorStoreId); + Assert.Single(options.CodeInterpreterFileIds); + + // Act and Assert + ValidateSerialization(options); + } + + private static void ValidateSerialization(OpenAIThreadCreationOptions source) + { + // Act + string json = JsonSerializer.Serialize(source); + + OpenAIThreadCreationOptions? 
target = JsonSerializer.Deserialize(json); + + // Assert + Assert.NotNull(target); + Assert.Equal(source.VectorStoreId, target.VectorStoreId); + AssertCollection.Equal(source.CodeInterpreterFileIds, target.CodeInterpreterFileIds); + AssertCollection.Equal(source.Messages, target.Messages, m => m.Items.Count); // ChatMessageContent already validated for deep serialization + AssertCollection.Equal(source.Metadata, target.Metadata); + } +} diff --git a/dotnet/src/Agents/UnitTests/OpenAI/RunPollingOptionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/RunPollingOptionsTests.cs new file mode 100644 index 000000000000..e75a962dfc5e --- /dev/null +++ b/dotnet/src/Agents/UnitTests/OpenAI/RunPollingOptionsTests.cs @@ -0,0 +1,71 @@ +// Copyright (c) Microsoft. All rights reserved. +using System; +using Microsoft.SemanticKernel.Agents.OpenAI; +using Xunit; + +namespace SemanticKernel.Agents.UnitTests.OpenAI; + +/// +/// Unit testing of . +/// +public class RunPollingOptionsTests +{ + /// + /// Verify initial state. + /// + [Fact] + public void RunPollingOptionsInitialStateTest() + { + // Arrange + RunPollingOptions options = new(); + + // Assert + Assert.Equal(RunPollingOptions.DefaultPollingInterval, options.RunPollingInterval); + Assert.Equal(RunPollingOptions.DefaultPollingBackoff, options.RunPollingBackoff); + Assert.Equal(RunPollingOptions.DefaultMessageSynchronizationDelay, options.MessageSynchronizationDelay); + Assert.Equal(RunPollingOptions.DefaultPollingBackoffThreshold, options.RunPollingBackoffThreshold); + } + + /// s + /// Verify initialization. 
+ /// + [Fact] + public void RunPollingOptionsAssignmentTest() + { + // Arrange + RunPollingOptions options = + new() + { + RunPollingInterval = TimeSpan.FromSeconds(3), + RunPollingBackoff = TimeSpan.FromSeconds(4), + RunPollingBackoffThreshold = 8, + MessageSynchronizationDelay = TimeSpan.FromSeconds(5), + }; + + // Assert + Assert.Equal(3, options.RunPollingInterval.TotalSeconds); + Assert.Equal(4, options.RunPollingBackoff.TotalSeconds); + Assert.Equal(5, options.MessageSynchronizationDelay.TotalSeconds); + Assert.Equal(8, options.RunPollingBackoffThreshold); + } + + /// s + /// Verify initialization. + /// + [Fact] + public void RunPollingOptionsGetIntervalTest() + { + // Arrange + RunPollingOptions options = + new() + { + RunPollingInterval = TimeSpan.FromSeconds(3), + RunPollingBackoff = TimeSpan.FromSeconds(4), + RunPollingBackoffThreshold = 8, + }; + + // Assert + Assert.Equal(options.RunPollingInterval, options.GetPollingInterval(8)); + Assert.Equal(options.RunPollingBackoff, options.GetPollingInterval(9)); + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/.editorconfig b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/.editorconfig new file mode 100644 index 000000000000..394eef685f21 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/.editorconfig @@ -0,0 +1,6 @@ +# Suppressing errors for Test projects under dotnet folder +[*.cs] +dotnet_diagnostic.CA2007.severity = none # Do not directly await a Task +dotnet_diagnostic.VSTHRD111.severity = none # Use .ConfigureAwait(bool) is hidden by default, set to none to prevent IDE from changing on autosave +dotnet_diagnostic.CS1591.severity = none # Missing XML comment for publicly visible type or member +dotnet_diagnostic.IDE1006.severity = warning # Naming rule violations diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureOpenAITestHelper.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureOpenAITestHelper.cs new file 
mode 100644 index 000000000000..31a7654fcfc6 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureOpenAITestHelper.cs @@ -0,0 +1,30 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.IO; +using System.Net.Http; + +namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests; + +/// +/// Helper for AzureOpenAI test purposes. +/// +internal static class AzureOpenAITestHelper +{ + /// + /// Reads test response from file for mocking purposes. + /// + /// Name of the file with test response. + internal static string GetTestResponse(string fileName) + { + return File.ReadAllText($"./TestData/{fileName}"); + } + + /// + /// Reads test response from file and create . + /// + /// Name of the file with test response. + internal static StreamContent GetTestResponseAsStream(string fileName) + { + return new StreamContent(File.OpenRead($"./TestData/{fileName}")); + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Connectors.AzureOpenAI.UnitTests.csproj b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Connectors.AzureOpenAI.UnitTests.csproj new file mode 100644 index 000000000000..a0a695a6719c --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Connectors.AzureOpenAI.UnitTests.csproj @@ -0,0 +1,47 @@ + + + + + SemanticKernel.Connectors.AzureOpenAI.UnitTests + $(AssemblyName) + net8.0 + true + enable + false + $(NoWarn);SKEXP0001;SKEXP0010;CA2007,CA1806,CA1869,CA1861,IDE0300,VSTHRD111,IDE1006 + + + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + + + + + + + + + Always + + + + diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Extensions/AzureOpenAIKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Extensions/AzureOpenAIKernelBuilderExtensionsTests.cs new file mode 100644 index 
000000000000..d8e8cdac1658 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Extensions/AzureOpenAIKernelBuilderExtensionsTests.cs @@ -0,0 +1,189 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using Azure.AI.OpenAI; +using Azure.Core; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.AudioToText; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.Embeddings; +using Microsoft.SemanticKernel.TextGeneration; +using Microsoft.SemanticKernel.TextToAudio; +using Microsoft.SemanticKernel.TextToImage; + +namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Extensions; + +/// +/// Unit tests for the kernel builder extensions in the class. +/// +public sealed class AzureOpenAIKernelBuilderExtensionsTests +{ + #region Chat completion + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.TokenCredential)] + [InlineData(InitializationType.ClientInline)] + [InlineData(InitializationType.ClientInServiceProvider)] + public void KernelBuilderAddAzureOpenAIChatCompletionAddsValidService(InitializationType type) + { + // Arrange + var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); + var client = new AzureOpenAIClient(new Uri("http://localhost"), "key"); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + builder = type switch + { + InitializationType.ApiKey => builder.AddAzureOpenAIChatCompletion("deployment-name", "https://endpoint", "api-key"), + InitializationType.TokenCredential => builder.AddAzureOpenAIChatCompletion("deployment-name", "https://endpoint", credentials), + InitializationType.ClientInline => builder.AddAzureOpenAIChatCompletion("deployment-name", client), + InitializationType.ClientInServiceProvider => builder.AddAzureOpenAIChatCompletion("deployment-name"), + 
_ => builder + }; + + // Assert + var chatCompletionService = builder.Build().GetRequiredService(); + Assert.True(chatCompletionService is AzureOpenAIChatCompletionService); + + var textGenerationService = builder.Build().GetRequiredService(); + Assert.True(textGenerationService is AzureOpenAIChatCompletionService); + } + + #endregion + + #region Text embeddings + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.TokenCredential)] + [InlineData(InitializationType.ClientInline)] + [InlineData(InitializationType.ClientInServiceProvider)] + public void KernelBuilderAddAzureOpenAITextEmbeddingGenerationAddsValidService(InitializationType type) + { + // Arrange + var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); + var client = new AzureOpenAIClient(new Uri("http://localhost"), "key"); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + builder = type switch + { + InitializationType.ApiKey => builder.AddAzureOpenAITextEmbeddingGeneration("deployment-name", "https://endpoint", "api-key"), + InitializationType.TokenCredential => builder.AddAzureOpenAITextEmbeddingGeneration("deployment-name", "https://endpoint", credentials), + InitializationType.ClientInline => builder.AddAzureOpenAITextEmbeddingGeneration("deployment-name", client), + InitializationType.ClientInServiceProvider => builder.AddAzureOpenAITextEmbeddingGeneration("deployment-name"), + _ => builder + }; + + // Assert + var service = builder.Build().GetRequiredService(); + + Assert.NotNull(service); + Assert.True(service is AzureOpenAITextEmbeddingGenerationService); + } + + #endregion + + #region Text to audio + + [Fact] + public void KernelBuilderAddAzureOpenAITextToAudioAddsValidService() + { + // Arrange + var sut = Kernel.CreateBuilder(); + + // Act + var service = sut.AddAzureOpenAITextToAudio("deployment-name", "https://endpoint", "api-key") + .Build() + .GetRequiredService(); + + // Assert + 
Assert.IsType(service); + } + + #endregion + + #region Text to image + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.TokenCredential)] + [InlineData(InitializationType.ClientInline)] + [InlineData(InitializationType.ClientInServiceProvider)] + public void KernelBuilderExtensionsAddAzureOpenAITextToImageService(InitializationType type) + { + // Arrange + var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); + var client = new AzureOpenAIClient(new Uri("http://localhost"), "key"); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + builder = type switch + { + InitializationType.ApiKey => builder.AddAzureOpenAITextToImage("deployment-name", "https://endpoint", "api-key"), + InitializationType.TokenCredential => builder.AddAzureOpenAITextToImage("deployment-name", "https://endpoint", credentials), + InitializationType.ClientInline => builder.AddAzureOpenAITextToImage("deployment-name", client), + InitializationType.ClientInServiceProvider => builder.AddAzureOpenAITextToImage("deployment-name"), + _ => builder + }; + + // Assert + var service = builder.Build().GetRequiredService(); + + Assert.True(service is AzureOpenAITextToImageService); + } + + #endregion + + #region Audio to text + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.TokenCredential)] + [InlineData(InitializationType.ClientInline)] + [InlineData(InitializationType.ClientInServiceProvider)] + public void KernelBuilderAddAzureOpenAIAudioToTextAddsValidService(InitializationType type) + { + // Arrange + var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); + var client = new AzureOpenAIClient(new Uri("https://endpoint"), "key"); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + builder = type switch + { + InitializationType.ApiKey => builder.AddAzureOpenAIAudioToText("deployment-name", 
"https://endpoint", "api-key"), + InitializationType.TokenCredential => builder.AddAzureOpenAIAudioToText("deployment-name", "https://endpoint", credentials), + InitializationType.ClientInline => builder.AddAzureOpenAIAudioToText("deployment-name", client), + InitializationType.ClientInServiceProvider => builder.AddAzureOpenAIAudioToText("deployment-name"), + _ => builder + }; + + // Assert + var service = builder.Build().GetRequiredService(); + + Assert.IsType(service); + } + + #endregion + + public enum InitializationType + { + ApiKey, + TokenCredential, + ClientInline, + ClientInServiceProvider, + ClientEndpoint, + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Extensions/AzureOpenAIServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Extensions/AzureOpenAIServiceCollectionExtensionsTests.cs new file mode 100644 index 000000000000..2def01271aa6 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Extensions/AzureOpenAIServiceCollectionExtensionsTests.cs @@ -0,0 +1,189 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using Azure.AI.OpenAI; +using Azure.Core; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.AudioToText; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.Embeddings; +using Microsoft.SemanticKernel.TextGeneration; +using Microsoft.SemanticKernel.TextToAudio; +using Microsoft.SemanticKernel.TextToImage; + +namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Extensions; + +/// +/// Unit tests for the service collection extensions in the class. 
+/// +public sealed class AzureOpenAIServiceCollectionExtensionsTests +{ + #region Chat completion + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.TokenCredential)] + [InlineData(InitializationType.ClientInline)] + [InlineData(InitializationType.ClientInServiceProvider)] + public void ServiceCollectionAddAzureOpenAIChatCompletionAddsValidService(InitializationType type) + { + // Arrange + var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); + var client = new AzureOpenAIClient(new Uri("http://localhost"), "key"); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + IServiceCollection collection = type switch + { + InitializationType.ApiKey => builder.Services.AddAzureOpenAIChatCompletion("deployment-name", "https://endpoint", "api-key"), + InitializationType.TokenCredential => builder.Services.AddAzureOpenAIChatCompletion("deployment-name", "https://endpoint", credentials), + InitializationType.ClientInline => builder.Services.AddAzureOpenAIChatCompletion("deployment-name", client), + InitializationType.ClientInServiceProvider => builder.Services.AddAzureOpenAIChatCompletion("deployment-name"), + _ => builder.Services + }; + + // Assert + var chatCompletionService = builder.Build().GetRequiredService(); + Assert.True(chatCompletionService is AzureOpenAIChatCompletionService); + + var textGenerationService = builder.Build().GetRequiredService(); + Assert.True(textGenerationService is AzureOpenAIChatCompletionService); + } + + #endregion + + #region Text embeddings + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.TokenCredential)] + [InlineData(InitializationType.ClientInline)] + [InlineData(InitializationType.ClientInServiceProvider)] + public void ServiceCollectionAddAzureOpenAITextEmbeddingGenerationAddsValidService(InitializationType type) + { + // Arrange + var credentials = DelegatedTokenCredential.Create((_, _) => 
new AccessToken()); + var client = new AzureOpenAIClient(new Uri("http://localhost"), "key"); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + IServiceCollection collection = type switch + { + InitializationType.ApiKey => builder.Services.AddAzureOpenAITextEmbeddingGeneration("deployment-name", "https://endpoint", "api-key"), + InitializationType.TokenCredential => builder.Services.AddAzureOpenAITextEmbeddingGeneration("deployment-name", "https://endpoint", credentials), + InitializationType.ClientInline => builder.Services.AddAzureOpenAITextEmbeddingGeneration("deployment-name", client), + InitializationType.ClientInServiceProvider => builder.Services.AddAzureOpenAITextEmbeddingGeneration("deployment-name"), + _ => builder.Services + }; + + // Assert + var service = builder.Build().GetRequiredService(); + + Assert.NotNull(service); + Assert.True(service is AzureOpenAITextEmbeddingGenerationService); + } + + #endregion + + #region Text to audio + + [Fact] + public void ServiceCollectionAddAzureOpenAITextToAudioAddsValidService() + { + // Arrange + var sut = new ServiceCollection(); + + // Act + var service = sut.AddAzureOpenAITextToAudio("deployment-name", "https://endpoint", "api-key") + .BuildServiceProvider() + .GetRequiredService(); + + // Assert + Assert.IsType(service); + } + + #endregion + + #region Text to image + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.TokenCredential)] + [InlineData(InitializationType.ClientInline)] + [InlineData(InitializationType.ClientInServiceProvider)] + public void ServiceCollectionExtensionsAddAzureOpenAITextToImageService(InitializationType type) + { + // Arrange + var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); + var client = new AzureOpenAIClient(new Uri("http://localhost"), "key"); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + IServiceCollection collection 
= type switch + { + InitializationType.ApiKey => builder.Services.AddAzureOpenAITextToImage("deployment-name", "https://endpoint", "api-key"), + InitializationType.TokenCredential => builder.Services.AddAzureOpenAITextToImage("deployment-name", "https://endpoint", credentials), + InitializationType.ClientInline => builder.Services.AddAzureOpenAITextToImage("deployment-name", client), + InitializationType.ClientInServiceProvider => builder.Services.AddAzureOpenAITextToImage("deployment-name"), + _ => builder.Services + }; + + // Assert + var service = builder.Build().GetRequiredService(); + + Assert.True(service is AzureOpenAITextToImageService); + } + + #endregion + + #region Audio to text + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.TokenCredential)] + [InlineData(InitializationType.ClientInline)] + [InlineData(InitializationType.ClientInServiceProvider)] + public void ServiceCollectionAddAzureOpenAIAudioToTextAddsValidService(InitializationType type) + { + // Arrange + var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); + var client = new AzureOpenAIClient(new Uri("https://endpoint"), "key"); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + IServiceCollection collection = type switch + { + InitializationType.ApiKey => builder.Services.AddAzureOpenAIAudioToText("deployment-name", "https://endpoint", "api-key"), + InitializationType.TokenCredential => builder.Services.AddAzureOpenAIAudioToText("deployment-name", "https://endpoint", credentials), + InitializationType.ClientInline => builder.Services.AddAzureOpenAIAudioToText("deployment-name", client), + InitializationType.ClientInServiceProvider => builder.Services.AddAzureOpenAIAudioToText("deployment-name"), + _ => builder.Services + }; + + // Assert + var service = builder.Build().GetRequiredService(); + + Assert.True(service is AzureOpenAIAudioToTextService); + } + + #endregion + + public enum 
InitializationType + { + ApiKey, + TokenCredential, + ClientInline, + ClientInServiceProvider, + ClientEndpoint, + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AudioToText/AzureOpenAIAudioToTextServiceTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIAudioToTextServiceTests.cs similarity index 56% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AudioToText/AzureOpenAIAudioToTextServiceTests.cs rename to dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIAudioToTextServiceTests.cs index 6100c434c878..a7f2f6b5a83d 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AudioToText/AzureOpenAIAudioToTextServiceTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIAudioToTextServiceTests.cs @@ -2,16 +2,18 @@ using System; using System.Net.Http; +using System.Text; using System.Threading.Tasks; using Azure.AI.OpenAI; using Azure.Core; using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Services; using Moq; -using Xunit; -namespace SemanticKernel.Connectors.UnitTests.OpenAI.AudioToText; +namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Services; /// /// Unit tests for class. @@ -36,12 +38,12 @@ public void ConstructorWithApiKeyWorksCorrectly(bool includeLoggerFactory) { // Arrange & Act var service = includeLoggerFactory ? 
- new AzureOpenAIAudioToTextService("deployment-name", "https://endpoint", "api-key", "model-id", loggerFactory: this._mockLoggerFactory.Object) : - new AzureOpenAIAudioToTextService("deployment-name", "https://endpoint", "api-key", "model-id"); + new AzureOpenAIAudioToTextService("deployment", "https://endpoint", "api-key", "model-id", loggerFactory: this._mockLoggerFactory.Object) : + new AzureOpenAIAudioToTextService("deployment", "https://endpoint", "api-key", "model-id"); // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); + Assert.Equal("model-id", service.Attributes[AIServiceExtensions.ModelIdKey]); + Assert.Equal("deployment", service.Attributes[AzureClientCore.DeploymentNameKey]); } [Theory] @@ -56,8 +58,8 @@ public void ConstructorWithTokenCredentialWorksCorrectly(bool includeLoggerFacto new AzureOpenAIAudioToTextService("deployment", "https://endpoint", credentials, "model-id"); // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); + Assert.Equal("model-id", service.Attributes[AIServiceExtensions.ModelIdKey]); + Assert.Equal("deployment", service.Attributes[AzureClientCore.DeploymentNameKey]); } [Theory] @@ -66,14 +68,26 @@ public void ConstructorWithTokenCredentialWorksCorrectly(bool includeLoggerFacto public void ConstructorWithOpenAIClientWorksCorrectly(bool includeLoggerFactory) { // Arrange & Act - var client = new OpenAIClient("key"); + var client = new AzureOpenAIClient(new Uri("http://host"), "key"); var service = includeLoggerFactory ? 
new AzureOpenAIAudioToTextService("deployment", client, "model-id", loggerFactory: this._mockLoggerFactory.Object) : new AzureOpenAIAudioToTextService("deployment", client, "model-id"); // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); + Assert.Equal("model-id", service.Attributes[AIServiceExtensions.ModelIdKey]); + Assert.Equal("deployment", service.Attributes[AzureClientCore.DeploymentNameKey]); + } + + [Fact] + public void ItThrowsIfDeploymentNameIsNotProvided() + { + // Act & Assert + Assert.Throws(() => new AzureOpenAIAudioToTextService(" ", "http://host", "apikey")); + Assert.Throws(() => new AzureOpenAIAudioToTextService(" ", azureOpenAIClient: new(new Uri("http://host"), "apikey"))); + Assert.Throws(() => new AzureOpenAIAudioToTextService("", "http://host", "apikey")); + Assert.Throws(() => new AzureOpenAIAudioToTextService("", azureOpenAIClient: new(new Uri("http://host"), "apikey"))); + Assert.Throws(() => new AzureOpenAIAudioToTextService(null!, "http://host", "apikey")); + Assert.Throws(() => new AzureOpenAIAudioToTextService(null!, azureOpenAIClient: new(new Uri("http://host"), "apikey"))); } [Theory] @@ -81,7 +95,7 @@ public void ConstructorWithOpenAIClientWorksCorrectly(bool includeLoggerFactory) public async Task GetTextContentWithInvalidSettingsThrowsExceptionAsync(OpenAIAudioToTextExecutionSettings? 
settings, Type expectedExceptionType) { // Arrange - var service = new AzureOpenAIAudioToTextService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); + var service = new AzureOpenAIAudioToTextService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) { Content = new StringContent("Test audio-to-text response") @@ -95,6 +109,34 @@ public async Task GetTextContentWithInvalidSettingsThrowsExceptionAsync(OpenAIAu Assert.IsType(expectedExceptionType, exception); } + [Theory] + [InlineData("verbose_json")] + [InlineData("json")] + [InlineData("vtt")] + [InlineData("srt")] + public async Task ItRespectResultFormatExecutionSettingAsync(string format) + { + // Arrange + var service = new AzureOpenAIAudioToTextService("deployment", "https://endpoint", "api-key", httpClient: this._httpClient); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { + Content = new StringContent("Test audio-to-text response") + }; + + // Act + var settings = new OpenAIAudioToTextExecutionSettings("file.mp3") { ResponseFormat = format }; + var result = await service.GetTextContentsAsync(new AudioContent(new BinaryData("data"), mimeType: null), settings); + + // Assert + Assert.NotNull(this._messageHandlerStub.RequestContent); + Assert.NotNull(result); + + var multiPartData = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + var multiPartBreak = multiPartData.Substring(0, multiPartData.IndexOf("\r\n", StringComparison.OrdinalIgnoreCase)); + + Assert.Contains($"{format}\r\n{multiPartBreak}", multiPartData); + } + [Fact] public async Task GetTextContentByDefaultWorksCorrectlyAsync() { diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletion/AzureOpenAIChatCompletionServiceTests.cs 
b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs similarity index 65% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletion/AzureOpenAIChatCompletionServiceTests.cs rename to dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs index 22be8458c2cc..9302b75c39bf 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletion/AzureOpenAIChatCompletionServiceTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs @@ -10,16 +10,18 @@ using System.Text.Json; using System.Threading.Tasks; using Azure.AI.OpenAI; +using Azure.AI.OpenAI.Chat; using Azure.Core; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; using Microsoft.SemanticKernel.Connectors.OpenAI; using Moq; -using Xunit; +using OpenAI.Chat; -namespace SemanticKernel.Connectors.UnitTests.OpenAI.ChatCompletion; +namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Services; /// /// Unit tests for @@ -80,7 +82,7 @@ public void ConstructorWithTokenCredentialWorksCorrectly(bool includeLoggerFacto public void ConstructorWithOpenAIClientWorksCorrectly(bool includeLoggerFactory) { // Arrange & Act - var client = new OpenAIClient("key"); + var client = new AzureOpenAIClient(new Uri("http://host"), "key"); var service = includeLoggerFactory ? 
new AzureOpenAIChatCompletionService("deployment", client, "model-id", loggerFactory: this._mockLoggerFactory.Object) : new AzureOpenAIChatCompletionService("deployment", client, "model-id"); @@ -97,7 +99,7 @@ public async Task GetTextContentsWorksCorrectlyAsync() var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) { - Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) + Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) }); // Act @@ -107,73 +109,36 @@ public async Task GetTextContentsWorksCorrectlyAsync() Assert.True(result.Count > 0); Assert.Equal("Test chat response", result[0].Text); - var usage = result[0].Metadata?["Usage"] as CompletionsUsage; + var usage = result[0].Metadata?["Usage"] as ChatTokenUsage; Assert.NotNull(usage); - Assert.Equal(55, usage.PromptTokens); - Assert.Equal(100, usage.CompletionTokens); + Assert.Equal(55, usage.InputTokens); + Assert.Equal(100, usage.OutputTokens); Assert.Equal(155, usage.TotalTokens); } - [Fact] - public async Task GetChatMessageContentsWithEmptyChoicesThrowsExceptionAsync() - { - // Arrange - var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); - this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StringContent("{\"id\":\"response-id\",\"object\":\"chat.completion\",\"created\":1704208954,\"model\":\"gpt-4\",\"choices\":[],\"usage\":{\"prompt_tokens\":55,\"completion_tokens\":100,\"total_tokens\":155},\"system_fingerprint\":null}") - }); - - // Act & Assert - var exception = await Assert.ThrowsAsync(() => service.GetChatMessageContentsAsync([])); - - Assert.Equal("Chat completions not found", exception.Message); - } - - 
[Theory] - [InlineData(0)] - [InlineData(129)] - public async Task GetChatMessageContentsWithInvalidResultsPerPromptValueThrowsExceptionAsync(int resultsPerPrompt) - { - // Arrange - var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); - var settings = new OpenAIPromptExecutionSettings { ResultsPerPrompt = resultsPerPrompt }; - - // Act & Assert - var exception = await Assert.ThrowsAsync(() => service.GetChatMessageContentsAsync([], settings)); - - Assert.Contains("The value must be in range between", exception.Message, StringComparison.OrdinalIgnoreCase); - } - [Fact] public async Task GetChatMessageContentsHandlesSettingsCorrectlyAsync() { // Arrange var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); - var settings = new OpenAIPromptExecutionSettings() + var settings = new AzureOpenAIPromptExecutionSettings() { MaxTokens = 123, Temperature = 0.6, TopP = 0.5, FrequencyPenalty = 1.6, PresencePenalty = 1.2, - ResultsPerPrompt = 5, Seed = 567, TokenSelectionBiases = new Dictionary { { 2, 3 } }, StopSequences = ["stop_sequence"], Logprobs = true, TopLogprobs = 5, - AzureChatExtensionsOptions = new AzureChatExtensionsOptions + AzureChatDataSource = new AzureSearchChatDataSource() { - Extensions = - { - new AzureSearchChatExtensionConfiguration - { - SearchEndpoint = new Uri("http://test-search-endpoint"), - IndexName = "test-index-name" - } - } + Endpoint = new Uri("http://test-search-endpoint"), + IndexName = "test-index-name", + Authentication = DataSourceAuthentication.FromApiKey("api-key"), } }; @@ -185,7 +150,7 @@ public async Task GetChatMessageContentsHandlesSettingsCorrectlyAsync() this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) { - Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) + Content = new 
StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) }); // Act @@ -227,7 +192,6 @@ public async Task GetChatMessageContentsHandlesSettingsCorrectlyAsync() Assert.Equal(0.5, content.GetProperty("top_p").GetDouble()); Assert.Equal(1.6, content.GetProperty("frequency_penalty").GetDouble()); Assert.Equal(1.2, content.GetProperty("presence_penalty").GetDouble()); - Assert.Equal(5, content.GetProperty("n").GetInt32()); Assert.Equal(567, content.GetProperty("seed").GetInt32()); Assert.Equal(3, content.GetProperty("logit_bias").GetProperty("2").GetInt32()); Assert.Equal("stop_sequence", content.GetProperty("stop")[0].GetString()); @@ -249,18 +213,18 @@ public async Task GetChatMessageContentsHandlesResponseFormatCorrectlyAsync(obje { // Arrange var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); - var settings = new OpenAIPromptExecutionSettings + var settings = new AzureOpenAIPromptExecutionSettings { ResponseFormat = responseFormat }; this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) { - Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) + Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) }); // Act - var result = await service.GetChatMessageContentsAsync([], settings); + var result = await service.GetChatMessageContentsAsync(new ChatHistory("System message"), settings); // Assert var requestContent = this._messageHandlerStub.RequestContents[0]; @@ -279,28 +243,28 @@ public async Task GetChatMessageContentsWorksCorrectlyAsync(ToolCallBehavior beh // Arrange var kernel = Kernel.CreateBuilder().Build(); var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); - var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = behavior }; + var 
settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = behavior }; this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) { - Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) + Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) }); // Act - var result = await service.GetChatMessageContentsAsync([], settings, kernel); + var result = await service.GetChatMessageContentsAsync(new ChatHistory("System message"), settings, kernel); // Assert Assert.True(result.Count > 0); Assert.Equal("Test chat response", result[0].Content); - var usage = result[0].Metadata?["Usage"] as CompletionsUsage; + var usage = result[0].Metadata?["Usage"] as ChatTokenUsage; Assert.NotNull(usage); - Assert.Equal(55, usage.PromptTokens); - Assert.Equal(100, usage.CompletionTokens); + Assert.Equal(55, usage.InputTokens); + Assert.Equal(100, usage.OutputTokens); Assert.Equal(155, usage.TotalTokens); - Assert.Equal("stop", result[0].Metadata?["FinishReason"]); + Assert.Equal("Stop", result[0].Metadata?["FinishReason"]); } [Fact] @@ -325,15 +289,15 @@ public async Task GetChatMessageContentsWithFunctionCallAsync() kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2])); var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object); - var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; - using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_multiple_function_calls_test_response.json")) }; - using var response2 = new 
HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) }; + using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_multiple_function_calls_test_response.json")) }; + using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) }; this._messageHandlerStub.ResponsesToReturn = [response1, response2]; // Act - var result = await service.GetChatMessageContentsAsync([], settings, kernel); + var result = await service.GetChatMessageContentsAsync(new ChatHistory("System message"), settings, kernel); // Assert Assert.True(result.Count > 0); @@ -361,19 +325,19 @@ public async Task GetChatMessageContentsWithFunctionCallMaximumAutoInvokeAttempt kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("MyPlugin", [function])); var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object); - var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; var responses = new List(); for (var i = 0; i < ModelResponsesCount; i++) { - responses.Add(new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_single_function_call_test_response.json")) }); + responses.Add(new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_single_function_call_test_response.json")) }); } this._messageHandlerStub.ResponsesToReturn = responses; // Act - var result = await 
service.GetChatMessageContentsAsync([], settings, kernel); + var result = await service.GetChatMessageContentsAsync(new ChatHistory("System message"), settings, kernel); // Assert Assert.Equal(DefaultMaximumAutoInvokeAttempts, functionCallCount); @@ -398,15 +362,15 @@ public async Task GetChatMessageContentsWithRequiredFunctionCallAsync() kernel.Plugins.Add(plugin); var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object); - var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.RequireFunction(openAIFunction, autoInvoke: true) }; + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.RequireFunction(openAIFunction, autoInvoke: true) }; - using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_single_function_call_test_response.json")) }; - using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) }; + using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_single_function_call_test_response.json")) }; + using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) }; this._messageHandlerStub.ResponsesToReturn = [response1, response2]; // Act - var result = await service.GetChatMessageContentsAsync([], settings, kernel); + var result = await service.GetChatMessageContentsAsync(new ChatHistory("System message"), settings, kernel); // Assert Assert.Equal(1, functionCallCount); @@ -434,7 +398,7 @@ public async Task GetStreamingTextContentsWorksCorrectlyAsync() { // Arrange var service = new 
AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); - using var stream = new MemoryStream(Encoding.UTF8.GetBytes(OpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt"))); + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(AzureOpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt"))); this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) { @@ -448,7 +412,67 @@ public async Task GetStreamingTextContentsWorksCorrectlyAsync() Assert.Equal("Test chat streaming response", enumerator.Current.Text); await enumerator.MoveNextAsync(); - Assert.Equal("stop", enumerator.Current.Metadata?["FinishReason"]); + Assert.Equal("Stop", enumerator.Current.Metadata?["FinishReason"]); + } + + [Fact] + public async Task GetStreamingChatContentsWithAsynchronousFilterWorksCorrectlyAsync() + { + // Arrange + var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(AzureOpenAITestHelper.GetTestResponse("chat_completion_streaming_async_filter_response.txt"))); + + this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(stream) + }); + + // Act & Assert + var enumerator = service.GetStreamingChatMessageContentsAsync("Prompt").GetAsyncEnumerator(); + +#pragma warning disable AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. 
+ await enumerator.MoveNextAsync(); + var message = enumerator.Current; + + Assert.IsType(message.InnerContent); + var update = (StreamingChatCompletionUpdate)message.InnerContent; + var promptResults = update.GetContentFilterResultForPrompt(); + Assert.Equal(ContentFilterSeverity.Safe, promptResults.Hate.Severity); + Assert.Equal(ContentFilterSeverity.Safe, promptResults.Sexual.Severity); + Assert.Equal(ContentFilterSeverity.Safe, promptResults.Violence.Severity); + Assert.Equal(ContentFilterSeverity.Safe, promptResults.SelfHarm.Severity); + Assert.False(promptResults.Jailbreak.Detected); + + await enumerator.MoveNextAsync(); + message = enumerator.Current; + + await enumerator.MoveNextAsync(); + message = enumerator.Current; + + await enumerator.MoveNextAsync(); + message = enumerator.Current; + + await enumerator.MoveNextAsync(); + message = enumerator.Current; + + Assert.IsType(message.InnerContent); + update = (StreamingChatCompletionUpdate)message.InnerContent; + + var filterResults = update.GetContentFilterResultForResponse(); + Assert.Equal(ContentFilterSeverity.Safe, filterResults.Hate.Severity); + Assert.Equal(ContentFilterSeverity.Safe, filterResults.Sexual.Severity); + Assert.Equal(ContentFilterSeverity.Safe, filterResults.SelfHarm.Severity); + Assert.Equal(ContentFilterSeverity.Safe, filterResults.Violence.Severity); + + await enumerator.MoveNextAsync(); + message = enumerator.Current; + + Assert.IsType(message.InnerContent); + update = (StreamingChatCompletionUpdate)message.InnerContent; + filterResults = update.GetContentFilterResultForResponse(); + Assert.False(filterResults.ProtectedMaterialCode.Detected); + Assert.False(filterResults.ProtectedMaterialText.Detected); +#pragma warning restore AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. 
} [Fact] @@ -456,7 +480,7 @@ public async Task GetStreamingChatMessageContentsWorksCorrectlyAsync() { // Arrange var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); - using var stream = new MemoryStream(Encoding.UTF8.GetBytes(OpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt"))); + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(AzureOpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt"))); this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) { @@ -470,7 +494,7 @@ public async Task GetStreamingChatMessageContentsWorksCorrectlyAsync() Assert.Equal("Test chat streaming response", enumerator.Current.Content); await enumerator.MoveNextAsync(); - Assert.Equal("stop", enumerator.Current.Metadata?["FinishReason"]); + Assert.Equal("Stop", enumerator.Current.Metadata?["FinishReason"]); } [Fact] @@ -495,10 +519,10 @@ public async Task GetStreamingChatMessageContentsWithFunctionCallAsync() kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2])); var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object); - var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; - using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_streaming_multiple_function_calls_test_response.txt")) }; - using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt")) }; + using var response1 = new 
HttpResponseMessage(HttpStatusCode.OK) { Content = AzureOpenAITestHelper.GetTestResponseAsStream("chat_completion_streaming_multiple_function_calls_test_response.txt") }; + using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = AzureOpenAITestHelper.GetTestResponseAsStream("chat_completion_streaming_test_response.txt") }; this._messageHandlerStub.ResponsesToReturn = [response1, response2]; @@ -507,10 +531,10 @@ public async Task GetStreamingChatMessageContentsWithFunctionCallAsync() await enumerator.MoveNextAsync(); Assert.Equal("Test chat streaming response", enumerator.Current.Content); - Assert.Equal("tool_calls", enumerator.Current.Metadata?["FinishReason"]); + Assert.Equal("ToolCalls", enumerator.Current.Metadata?["FinishReason"]); await enumerator.MoveNextAsync(); - Assert.Equal("tool_calls", enumerator.Current.Metadata?["FinishReason"]); + Assert.Equal("ToolCalls", enumerator.Current.Metadata?["FinishReason"]); // Keep looping until the end of stream while (await enumerator.MoveNextAsync()) @@ -539,13 +563,13 @@ public async Task GetStreamingChatMessageContentsWithFunctionCallMaximumAutoInvo kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("MyPlugin", [function])); var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object); - var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; var responses = new List(); for (var i = 0; i < ModelResponsesCount; i++) { - responses.Add(new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_streaming_single_function_call_test_response.txt")) }); + responses.Add(new HttpResponseMessage(HttpStatusCode.OK) { Content = 
AzureOpenAITestHelper.GetTestResponseAsStream("chat_completion_streaming_single_function_call_test_response.txt") }); } this._messageHandlerStub.ResponsesToReturn = responses; @@ -578,10 +602,10 @@ public async Task GetStreamingChatMessageContentsWithRequiredFunctionCallAsync() kernel.Plugins.Add(plugin); var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object); - var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.RequireFunction(openAIFunction, autoInvoke: true) }; + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.RequireFunction(openAIFunction, autoInvoke: true) }; - using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_streaming_single_function_call_test_response.txt")) }; - using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt")) }; + using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = AzureOpenAITestHelper.GetTestResponseAsStream("chat_completion_streaming_single_function_call_test_response.txt") }; + using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = AzureOpenAITestHelper.GetTestResponseAsStream("chat_completion_streaming_test_response.txt") }; this._messageHandlerStub.ResponsesToReturn = [response1, response2]; @@ -591,7 +615,7 @@ public async Task GetStreamingChatMessageContentsWithRequiredFunctionCallAsync() // Function Tool Call Streaming (One Chunk) await enumerator.MoveNextAsync(); Assert.Equal("Test chat streaming response", enumerator.Current.Content); - Assert.Equal("tool_calls", enumerator.Current.Metadata?["FinishReason"]); + Assert.Equal("ToolCalls", enumerator.Current.Metadata?["FinishReason"]); // Chat 
Completion Streaming (1st Chunk) await enumerator.MoveNextAsync(); @@ -599,7 +623,7 @@ public async Task GetStreamingChatMessageContentsWithRequiredFunctionCallAsync() // Chat Completion Streaming (2nd Chunk) await enumerator.MoveNextAsync(); - Assert.Equal("stop", enumerator.Current.Metadata?["FinishReason"]); + Assert.Equal("Stop", enumerator.Current.Metadata?["FinishReason"]); Assert.Equal(1, functionCallCount); @@ -629,11 +653,11 @@ public async Task GetChatMessageContentsUsesPromptAndSettingsCorrectlyAsync() const string SystemMessage = "This is test system message"; var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); - var settings = new OpenAIPromptExecutionSettings() { ChatSystemPrompt = SystemMessage }; + var settings = new AzureOpenAIPromptExecutionSettings() { ChatSystemPrompt = SystemMessage }; this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) { - Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) + Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) }); IKernelBuilder builder = Kernel.CreateBuilder(); @@ -673,11 +697,11 @@ public async Task GetChatMessageContentsWithChatMessageContentItemCollectionAndS const string CollectionItemPrompt = "This is collection item prompt"; var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); - var settings = new OpenAIPromptExecutionSettings() { ChatSystemPrompt = SystemMessage }; + var settings = new AzureOpenAIPromptExecutionSettings() { ChatSystemPrompt = SystemMessage }; this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) { - Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) + Content = new 
StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) }); var chatHistory = new ChatHistory(); @@ -727,9 +751,9 @@ public async Task GetChatMessageContentsWithChatMessageContentItemCollectionAndS public async Task FunctionCallsShouldBePropagatedToCallersViaChatMessageItemsOfTypeFunctionCallContentAsync() { // Arrange - this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(System.Net.HttpStatusCode.OK) + this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) { - Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_multiple_function_calls_test_response.json")) + Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_multiple_function_calls_test_response.json")) }); var sut = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); @@ -737,7 +761,7 @@ public async Task FunctionCallsShouldBePropagatedToCallersViaChatMessageItemsOfT var chatHistory = new ChatHistory(); chatHistory.AddUserMessage("Fake prompt"); - var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; // Act var result = await sut.GetChatMessageContentAsync(chatHistory, settings); @@ -789,9 +813,9 @@ public async Task FunctionCallsShouldBePropagatedToCallersViaChatMessageItemsOfT public async Task FunctionCallsShouldBeReturnedToLLMAsync() { // Arrange - this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(System.Net.HttpStatusCode.OK) + this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) { - Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) + Content = new 
StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) }); var sut = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); @@ -807,7 +831,7 @@ public async Task FunctionCallsShouldBeReturnedToLLMAsync() new ChatMessageContent(AuthorRole.Assistant, items) ]; - var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; // Act await sut.GetChatMessageContentAsync(chatHistory, settings); @@ -847,9 +871,9 @@ public async Task FunctionCallsShouldBeReturnedToLLMAsync() public async Task FunctionResultsCanBeProvidedToLLMAsOneResultPerChatMessageAsync() { // Arrange - this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(System.Net.HttpStatusCode.OK) + this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) { - Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) + Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) }); var sut = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); @@ -866,7 +890,7 @@ public async Task FunctionResultsCanBeProvidedToLLMAsOneResultPerChatMessageAsyn ]) }; - var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; // Act await sut.GetChatMessageContentAsync(chatHistory, settings); @@ -895,9 +919,9 @@ public async Task FunctionResultsCanBeProvidedToLLMAsOneResultPerChatMessageAsyn public async Task FunctionResultsCanBeProvidedToLLMAsManyResultsInOneChatMessageAsync() { // Arrange - 
this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(System.Net.HttpStatusCode.OK) + this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) { - Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) + Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) }); var sut = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); @@ -911,7 +935,7 @@ public async Task FunctionResultsCanBeProvidedToLLMAsManyResultsInOneChatMessage ]) }; - var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; // Act await sut.GetChatMessageContentAsync(chatHistory, settings); @@ -936,6 +960,150 @@ public async Task FunctionResultsCanBeProvidedToLLMAsManyResultsInOneChatMessage Assert.Equal("2", assistantMessage2.GetProperty("tool_call_id").GetString()); } + [Fact] + public async Task GetChatMessageContentShouldSendMutatedChatHistoryToLLM() + { + // Arrange + static void MutateChatHistory(AutoFunctionInvocationContext context, Func next) + { + // Remove the function call messages from the chat history to reduce token count. + context.ChatHistory.RemoveRange(1, 2); // Remove the `Date` function call and function result messages. 
+ + next(context); + } + + var kernel = new Kernel(); + kernel.ImportPluginFromFunctions("MyPlugin", [KernelFunctionFactory.CreateFromMethod(() => "rainy", "GetCurrentWeather")]); + kernel.AutoFunctionInvocationFilters.Add(new AutoFunctionInvocationFilter(MutateChatHistory)); + + using var firstResponse = new HttpResponseMessage(System.Net.HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_single_function_call_test_response.json")) }; + this._messageHandlerStub.ResponsesToReturn.Add(firstResponse); + + using var secondResponse = new HttpResponseMessage(System.Net.HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_test_response.json")) }; + this._messageHandlerStub.ResponsesToReturn.Add(secondResponse); + + var sut = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + + var chatHistory = new ChatHistory + { + new ChatMessageContent(AuthorRole.User, "What time is it?"), + new ChatMessageContent(AuthorRole.Assistant, [ + new FunctionCallContent("Date", "TimePlugin", "2") + ]), + new ChatMessageContent(AuthorRole.Tool, [ + new FunctionResultContent("Date", "TimePlugin", "2", "rainy") + ]), + new ChatMessageContent(AuthorRole.Assistant, "08/06/2024 00:00:00"), + new ChatMessageContent(AuthorRole.User, "Given the current time of day and weather, what is the likely color of the sky in Boston?") + }; + + // Act + await sut.GetChatMessageContentAsync(chatHistory, new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }, kernel); + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContents[1]!); + Assert.NotNull(actualRequestContent); + + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + + var messages = optionsJson.GetProperty("messages"); + Assert.Equal(5, messages.GetArrayLength()); + + var userFirstPrompt = messages[0]; + 
Assert.Equal("user", userFirstPrompt.GetProperty("role").GetString()); + Assert.Equal("What time is it?", userFirstPrompt.GetProperty("content").ToString()); + + var assistantFirstResponse = messages[1]; + Assert.Equal("assistant", assistantFirstResponse.GetProperty("role").GetString()); + Assert.Equal("08/06/2024 00:00:00", assistantFirstResponse.GetProperty("content").GetString()); + + var userSecondPrompt = messages[2]; + Assert.Equal("user", userSecondPrompt.GetProperty("role").GetString()); + Assert.Equal("Given the current time of day and weather, what is the likely color of the sky in Boston?", userSecondPrompt.GetProperty("content").ToString()); + + var assistantSecondResponse = messages[3]; + Assert.Equal("assistant", assistantSecondResponse.GetProperty("role").GetString()); + Assert.Equal("1", assistantSecondResponse.GetProperty("tool_calls")[0].GetProperty("id").GetString()); + Assert.Equal("MyPlugin-GetCurrentWeather", assistantSecondResponse.GetProperty("tool_calls")[0].GetProperty("function").GetProperty("name").GetString()); + + var functionResult = messages[4]; + Assert.Equal("tool", functionResult.GetProperty("role").GetString()); + Assert.Equal("rainy", functionResult.GetProperty("content").GetString()); + } + + [Fact] + public async Task GetStreamingChatMessageContentsShouldSendMutatedChatHistoryToLLM() + { + // Arrange + static void MutateChatHistory(AutoFunctionInvocationContext context, Func next) + { + // Remove the function call messages from the chat history to reduce token count. + context.ChatHistory.RemoveRange(1, 2); // Remove the `Date` function call and function result messages. 
+ + next(context); + } + + var kernel = new Kernel(); + kernel.ImportPluginFromFunctions("MyPlugin", [KernelFunctionFactory.CreateFromMethod(() => "rainy", "GetCurrentWeather")]); + kernel.AutoFunctionInvocationFilters.Add(new AutoFunctionInvocationFilter(MutateChatHistory)); + + using var firstResponse = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_streaming_single_function_call_test_response.txt")) }; + this._messageHandlerStub.ResponsesToReturn.Add(firstResponse); + + using var secondResponse = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_streaming_test_response.txt")) }; + this._messageHandlerStub.ResponsesToReturn.Add(secondResponse); + + var sut = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + + var chatHistory = new ChatHistory + { + new ChatMessageContent(AuthorRole.User, "What time is it?"), + new ChatMessageContent(AuthorRole.Assistant, [ + new FunctionCallContent("Date", "TimePlugin", "2") + ]), + new ChatMessageContent(AuthorRole.Tool, [ + new FunctionResultContent("Date", "TimePlugin", "2", "rainy") + ]), + new ChatMessageContent(AuthorRole.Assistant, "08/06/2024 00:00:00"), + new ChatMessageContent(AuthorRole.User, "Given the current time of day and weather, what is the likely color of the sky in Boston?") + }; + + // Act + await foreach (var update in sut.GetStreamingChatMessageContentsAsync(chatHistory, new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }, kernel)) + { + } + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContents[1]!); + Assert.NotNull(actualRequestContent); + + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + + var messages = optionsJson.GetProperty("messages"); + Assert.Equal(5, messages.GetArrayLength()); + + var 
userFirstPrompt = messages[0]; + Assert.Equal("user", userFirstPrompt.GetProperty("role").GetString()); + Assert.Equal("What time is it?", userFirstPrompt.GetProperty("content").ToString()); + + var assistantFirstResponse = messages[1]; + Assert.Equal("assistant", assistantFirstResponse.GetProperty("role").GetString()); + Assert.Equal("08/06/2024 00:00:00", assistantFirstResponse.GetProperty("content").GetString()); + + var userSecondPrompt = messages[2]; + Assert.Equal("user", userSecondPrompt.GetProperty("role").GetString()); + Assert.Equal("Given the current time of day and weather, what is the likely color of the sky in Boston?", userSecondPrompt.GetProperty("content").ToString()); + + var assistantSecondResponse = messages[3]; + Assert.Equal("assistant", assistantSecondResponse.GetProperty("role").GetString()); + Assert.Equal("1", assistantSecondResponse.GetProperty("tool_calls")[0].GetProperty("id").GetString()); + Assert.Equal("MyPlugin-GetCurrentWeather", assistantSecondResponse.GetProperty("tool_calls")[0].GetProperty("function").GetProperty("name").GetString()); + + var functionResult = messages[4]; + Assert.Equal("tool", functionResult.GetProperty("role").GetString()); + Assert.Equal("rainy", functionResult.GetProperty("content").GetString()); + } + public void Dispose() { this._httpClient.Dispose(); @@ -950,10 +1118,27 @@ public void Dispose() public static TheoryData ResponseFormats => new() { - { new FakeChatCompletionsResponseFormat(), null }, { "json_object", "json_object" }, { "text", "text" } }; - private sealed class FakeChatCompletionsResponseFormat : ChatCompletionsResponseFormat; + private sealed class AutoFunctionInvocationFilter : IAutoFunctionInvocationFilter + { + private readonly Func, Task> _callback; + + public AutoFunctionInvocationFilter(Func, Task> callback) + { + this._callback = callback; + } + + public AutoFunctionInvocationFilter(Action> callback) + { + this._callback = (c, n) => { callback(c, n); return Task.CompletedTask; }; + 
} + + public async Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func next) + { + await this._callback(context, next); + } + } } diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAITextEmbeddingGenerationServiceTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAITextEmbeddingGenerationServiceTests.cs new file mode 100644 index 000000000000..4e8a12b9b69b --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAITextEmbeddingGenerationServiceTests.cs @@ -0,0 +1,103 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.ClientModel; +using System.IO; +using System.Net; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Azure.AI.OpenAI; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.Services; +using Moq; + +namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Services; + +/// +/// Unit tests for class. +/// +public class AzureOpenAITextEmbeddingGenerationServiceTests +{ + private readonly Mock _mockLoggerFactory; + + public AzureOpenAITextEmbeddingGenerationServiceTests() + { + this._mockLoggerFactory = new Mock(); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void ItCanBeInstantiatedAndPropertiesSetAsExpected(bool includeLoggerFactory) + { + // Arrange + var sut = includeLoggerFactory ? 
+ new AzureOpenAITextEmbeddingGenerationService("deployment-name", "https://endpoint", "api-key", modelId: "model", dimensions: 2, loggerFactory: this._mockLoggerFactory.Object) : + new AzureOpenAITextEmbeddingGenerationService("deployment-name", "https://endpoint", "api-key", modelId: "model", dimensions: 2); + var sutWithAzureOpenAIClient = new AzureOpenAITextEmbeddingGenerationService("deployment-name", new AzureOpenAIClient(new Uri("https://endpoint"), new ApiKeyCredential("apiKey")), modelId: "model", dimensions: 2, loggerFactory: this._mockLoggerFactory.Object); + + // Assert + Assert.NotNull(sut); + Assert.NotNull(sutWithAzureOpenAIClient); + Assert.Equal("model", sut.Attributes[AIServiceExtensions.ModelIdKey]); + Assert.Equal("model", sutWithAzureOpenAIClient.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + public async Task ItGetEmbeddingsAsyncReturnsEmptyWhenProvidedDataIsEmpty() + { + // Arrange + var sut = new AzureOpenAITextEmbeddingGenerationService("deployment-name", "https://endpoint", "api-key"); + + // Act + var result = await sut.GenerateEmbeddingsAsync([], null, CancellationToken.None); + + // Assert + Assert.Empty(result); + } + + [Fact] + public async Task GetEmbeddingsAsyncReturnsEmptyWhenProvidedDataIsWhitespace() + { + // Arrange + using HttpMessageHandlerStub handler = new() + { + ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(File.ReadAllText("./TestData/text-embeddings-response.txt")) + } + }; + using HttpClient client = new(handler); + + var sut = new AzureOpenAITextEmbeddingGenerationService("deployment-name", "https://endpoint", "api-key", httpClient: client); + + // Act + var result = await sut.GenerateEmbeddingsAsync(["test"], null, CancellationToken.None); + + // Assert + Assert.Single(result); + Assert.Equal(4, result[0].Length); + } + + [Fact] + public async Task ItThrowsIfNumberOfResultsDiffersFromInputsAsync() + { + // Arrange + using HttpMessageHandlerStub handler = 
new() + { + ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(File.ReadAllText("./TestData/text-embeddings-multiple-response.txt")) + } + }; + using HttpClient client = new(handler); + + var sut = new AzureOpenAITextEmbeddingGenerationService("deployment-name", "https://endpoint", "api-key", httpClient: client); + + // Act & Assert + await Assert.ThrowsAsync(async () => await sut.GenerateEmbeddingsAsync(["test"], null, CancellationToken.None)); + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAITextToAudioServiceTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAITextToAudioServiceTests.cs new file mode 100644 index 000000000000..c087b7a28d41 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAITextToAudioServiceTests.cs @@ -0,0 +1,215 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.IO; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Moq; + +namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Services; + +/// +/// Unit tests for class. 
+/// +public sealed class AzureOpenAITextToAudioServiceTests : IDisposable +{ + private readonly HttpMessageHandlerStub _messageHandlerStub; + private readonly HttpClient _httpClient; + private readonly Mock _mockLoggerFactory; + + public AzureOpenAITextToAudioServiceTests() + { + this._messageHandlerStub = new HttpMessageHandlerStub(); + this._httpClient = new HttpClient(this._messageHandlerStub, false); + this._mockLoggerFactory = new Mock(); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void ConstructorsAddRequiredMetadata(bool includeLoggerFactory) + { + // Arrange & Act + var service = includeLoggerFactory ? + new AzureOpenAITextToAudioService("deployment-name", "https://endpoint", "api-key", "model-id", loggerFactory: this._mockLoggerFactory.Object) : + new AzureOpenAITextToAudioService("deployment-name", "https://endpoint", "api-key", "model-id"); + + // Assert + Assert.Equal("model-id", service.Attributes["ModelId"]); + Assert.Equal("deployment-name", service.Attributes["DeploymentName"]); + } + + [Fact] + public void ItThrowsIfModelIdIsNotProvided() + { + // Act & Assert + Assert.Throws(() => new AzureOpenAITextToAudioService(null!, "https://endpoint", "api-key")); + Assert.Throws(() => new AzureOpenAITextToAudioService("", "https://endpoint", "api-key")); + Assert.Throws(() => new AzureOpenAITextToAudioService(" ", "https://endpoint", "api-key")); + } + + [Fact] + public async Task GetAudioContentWithInvalidSettingsThrowsExceptionAsync() + { + // Arrange + var settingsWithInvalidVoice = new OpenAITextToAudioExecutionSettings(""); + + var service = new AzureOpenAITextToAudioService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); + await using var stream = new MemoryStream(new byte[] { 0x00, 0x00, 0xFF, 0x7F }); + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(stream) + }; + + // Act & Assert + await Assert.ThrowsAsync(() => 
service.GetAudioContentsAsync("Some text", settingsWithInvalidVoice)); + } + + [Fact] + public async Task GetAudioContentByDefaultWorksCorrectlyAsync() + { + // Arrange + var expectedByteArray = new byte[] { 0x00, 0x00, 0xFF, 0x7F }; + + var service = new AzureOpenAITextToAudioService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); + await using var stream = new MemoryStream(expectedByteArray); + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(stream) + }; + + // Act + var result = await service.GetAudioContentsAsync("Some text", new OpenAITextToAudioExecutionSettings("Nova")); + + // Assert + var audioData = result[0].Data!.Value; + Assert.False(audioData.IsEmpty); + Assert.True(audioData.Span.SequenceEqual(expectedByteArray)); + } + + [Theory] + [InlineData("echo", "wav")] + [InlineData("fable", "opus")] + [InlineData("onyx", "flac")] + [InlineData("nova", "aac")] + [InlineData("shimmer", "pcm")] + public async Task GetAudioContentVoicesWorksCorrectlyAsync(string voice, string format) + { + // Arrange + byte[] expectedByteArray = [0x00, 0x00, 0xFF, 0x7F]; + + var service = new AzureOpenAITextToAudioService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); + await using var stream = new MemoryStream(expectedByteArray); + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(stream) + }; + + // Act + var result = await service.GetAudioContentsAsync("Some text", new OpenAITextToAudioExecutionSettings(voice) { ResponseFormat = format }); + + // Assert + var requestBody = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent!); + Assert.NotNull(requestBody); + Assert.Equal(voice, requestBody["voice"]?.ToString()); + Assert.Equal(format, requestBody["response_format"]?.ToString()); + + var audioData = result[0].Data!.Value; + 
Assert.False(audioData.IsEmpty); + Assert.True(audioData.Span.SequenceEqual(expectedByteArray)); + } + + [Fact] + public async Task GetAudioContentThrowsWhenVoiceIsNotSupportedAsync() + { + // Arrange + byte[] expectedByteArray = [0x00, 0x00, 0xFF, 0x7F]; + + var service = new AzureOpenAITextToAudioService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); + + // Act & Assert + await Assert.ThrowsAsync(async () => await service.GetAudioContentsAsync("Some text", new OpenAITextToAudioExecutionSettings("voice"))); + } + + [Fact] + public async Task GetAudioContentThrowsWhenFormatIsNotSupportedAsync() + { + // Arrange + byte[] expectedByteArray = [0x00, 0x00, 0xFF, 0x7F]; + + var service = new AzureOpenAITextToAudioService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); + + // Act & Assert + await Assert.ThrowsAsync(async () => await service.GetAudioContentsAsync("Some text", new OpenAITextToAudioExecutionSettings() { ResponseFormat = "not supported" })); + } + + [Theory] + [InlineData(true, "http://local-endpoint")] + [InlineData(false, "https://endpoint")] + public async Task GetAudioContentUsesValidBaseUrlAsync(bool useHttpClientBaseAddress, string expectedBaseAddress) + { + // Arrange + var expectedByteArray = new byte[] { 0x00, 0x00, 0xFF, 0x7F }; + + if (useHttpClientBaseAddress) + { + this._httpClient.BaseAddress = new Uri("http://local-endpoint/path"); + } + + var service = new AzureOpenAITextToAudioService("deployment-name", "https://endpoint/path", "api-key", "model-id", this._httpClient); + await using var stream = new MemoryStream(expectedByteArray); + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(stream) + }; + + // Act + var result = await service.GetAudioContentsAsync("Some text", new OpenAITextToAudioExecutionSettings("Nova")); + + // Assert + Assert.StartsWith(expectedBaseAddress, 
this._messageHandlerStub.RequestUri!.AbsoluteUri, StringComparison.InvariantCulture); + } + + [Theory] + [InlineData("model-1", "model-2", "deployment", "model-2")] + [InlineData("model-1", null, "deployment", "model-1")] + [InlineData(null, "model-2", "deployment", "model-2")] + [InlineData(null, null, "deployment", "deployment")] + public async Task GetAudioContentPrioritizesModelIdOverDeploymentNameAsync(string? modelInSettings, string? modelInConstructor, string deploymentName, string expectedModel) + { + // Arrange + var expectedByteArray = new byte[] { 0x00, 0x00, 0xFF, 0x7F }; + + var service = new AzureOpenAITextToAudioService(deploymentName, "https://endpoint", "api-key", modelInConstructor, this._httpClient); + await using var stream = new MemoryStream(expectedByteArray); + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(stream) + }; + + // Act + var result = await service.GetAudioContentsAsync("Some text", new OpenAITextToAudioExecutionSettings("Nova") { ModelId = modelInSettings }); + + // Assert + var requestBody = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent!); + Assert.Equal(expectedModel, requestBody?["model"]?.ToString()); + } + + public void Dispose() + { + this._httpClient.Dispose(); + this._messageHandlerStub.Dispose(); + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAITextToImageServiceTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAITextToImageServiceTests.cs new file mode 100644 index 000000000000..60aed7875b56 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAITextToImageServiceTests.cs @@ -0,0 +1,151 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.IO; +using System.Net.Http; +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Threading.Tasks; +using Azure.AI.OpenAI; +using Azure.Core; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.Services; +using Moq; + +namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Services; + +/// +/// Unit tests for class. +/// +public sealed class AzureOpenAITextToImageServiceTests : IDisposable +{ + private readonly HttpMessageHandlerStub _messageHandlerStub; + private readonly HttpClient _httpClient; + private readonly Mock _mockLoggerFactory; + + public AzureOpenAITextToImageServiceTests() + { + this._messageHandlerStub = new() + { + ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { + Content = new StringContent(File.ReadAllText("./TestData/text-to-image-response.txt")) + } + }; + this._httpClient = new HttpClient(this._messageHandlerStub, false); + this._mockLoggerFactory = new Mock(); + } + + [Fact] + public void ConstructorsAddRequiredMetadata() + { + // Case #1 + var sut = new AzureOpenAITextToImageService("deployment", "https://api-host/", "api-key", "model", loggerFactory: this._mockLoggerFactory.Object); + Assert.Equal("deployment", sut.Attributes[AzureClientCore.DeploymentNameKey]); + Assert.Equal("model", sut.Attributes[AIServiceExtensions.ModelIdKey]); + + // Case #2 + sut = new AzureOpenAITextToImageService("deployment", "https://api-hostapi/", new Mock().Object, "model", loggerFactory: this._mockLoggerFactory.Object); + Assert.Equal("deployment", sut.Attributes[AzureClientCore.DeploymentNameKey]); + Assert.Equal("model", sut.Attributes[AIServiceExtensions.ModelIdKey]); + + // Case #3 + sut = new AzureOpenAITextToImageService("deployment", new AzureOpenAIClient(new Uri("https://api-host/"), "api-key"), "model", loggerFactory: this._mockLoggerFactory.Object); + Assert.Equal("deployment", 
sut.Attributes[AzureClientCore.DeploymentNameKey]); + Assert.Equal("model", sut.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Theory] + [InlineData(256, 256, "dall-e-2")] + [InlineData(512, 512, "dall-e-2")] + [InlineData(1024, 1024, "dall-e-2")] + [InlineData(1024, 1024, "dall-e-3")] + [InlineData(1024, 1792, "dall-e-3")] + [InlineData(1792, 1024, "dall-e-3")] + [InlineData(123, 321, "custom-model-1")] + [InlineData(179, 124, "custom-model-2")] + public async Task GenerateImageWorksCorrectlyAsync(int width, int height, string modelId) + { + // Arrange + var sut = new AzureOpenAITextToImageService("deployment", "https://api-host", "api-key", modelId, this._httpClient, loggerFactory: this._mockLoggerFactory.Object); + + // Act + var result = await sut.GenerateImageAsync("description", width, height); + + // Assert + Assert.Equal("https://image-url/", result); + + var request = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent); // {"prompt":"description","model":"deployment","response_format":"url","size":"179x124"} + Assert.NotNull(request); + Assert.Equal("description", request["prompt"]?.ToString()); + Assert.Equal("deployment", request["model"]?.ToString()); + Assert.Equal("url", request["response_format"]?.ToString()); + Assert.Equal($"{width}x{height}", request["size"]?.ToString()); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public async Task ItShouldUseProvidedEndpoint(bool useTokeCredential) + { + // Arrange + var sut = useTokeCredential ? 
+ new AzureOpenAITextToImageService("deployment", endpoint: "https://api-host", new Mock().Object, "dall-e-3", this._httpClient) : + new AzureOpenAITextToImageService("deployment", endpoint: "https://api-host", "api-key", "dall-e-3", this._httpClient); + + // Act + var result = await sut.GenerateImageAsync("description", 1024, 1024); + + // Assert + Assert.StartsWith("https://api-host", this._messageHandlerStub.RequestUri?.AbsoluteUri); + } + + [Theory] + [InlineData(true, "")] + [InlineData(true, null)] + [InlineData(false, "")] + [InlineData(false, null)] + public async Task ItShouldUseHttpClientUriIfNoEndpointProvided(bool useTokeCredential, string? endpoint) + { + // Arrange + this._httpClient.BaseAddress = new Uri("https://api-host"); + + var sut = useTokeCredential ? + new AzureOpenAITextToImageService("deployment", endpoint: endpoint!, new Mock().Object, "dall-e-3", this._httpClient) : + new AzureOpenAITextToImageService("deployment", endpoint: endpoint!, "api-key", "dall-e-3", this._httpClient); + + // Act + var result = await sut.GenerateImageAsync("description", 1024, 1024); + + // Assert + Assert.StartsWith("https://api-host", this._messageHandlerStub.RequestUri?.AbsoluteUri); + } + + [Theory] + [InlineData(true, "")] + [InlineData(true, null)] + [InlineData(false, "")] + [InlineData(false, null)] + public void ItShouldThrowExceptionIfNoEndpointProvided(bool useTokeCredential, string? 
endpoint) + { + // Arrange + this._httpClient.BaseAddress = null; + + // Act & Assert + if (useTokeCredential) + { + Assert.Throws(() => new AzureOpenAITextToImageService("deployment", endpoint: endpoint!, new Mock().Object, "dall-e-3", this._httpClient)); + } + else + { + Assert.Throws(() => new AzureOpenAITextToImageService("deployment", endpoint: endpoint!, "api-key", "dall-e-3", this._httpClient)); + } + } + + public void Dispose() + { + this._httpClient.Dispose(); + this._messageHandlerStub.Dispose(); + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/AzureOpenAIPromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/AzureOpenAIPromptExecutionSettingsTests.cs new file mode 100644 index 000000000000..918cc9e3eb90 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/AzureOpenAIPromptExecutionSettingsTests.cs @@ -0,0 +1,294 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Text.Json; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.Connectors.OpenAI; + +namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Settings; + +/// +/// Unit tests for class. 
+/// +public class AzureOpenAIPromptExecutionSettingsTests +{ + [Fact] + public void ItCreatesOpenAIExecutionSettingsWithCorrectDefaults() + { + // Arrange + var maxTokensSettings = 128; + + // Act + AzureOpenAIPromptExecutionSettings executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(null, maxTokensSettings); + + // Assert + Assert.Null(executionSettings.Temperature); + Assert.Null(executionSettings.TopP); + Assert.Null(executionSettings.FrequencyPenalty); + Assert.Null(executionSettings.PresencePenalty); + Assert.Null(executionSettings.StopSequences); + Assert.Null(executionSettings.TokenSelectionBiases); + Assert.Null(executionSettings.TopLogprobs); + Assert.Null(executionSettings.Logprobs); + Assert.Null(executionSettings.AzureChatDataSource); + Assert.Equal(maxTokensSettings, executionSettings.MaxTokens); + } + + [Fact] + public void ItUsesExistingOpenAIExecutionSettings() + { + // Arrange + AzureOpenAIPromptExecutionSettings actualSettings = new() + { + Temperature = 0.7, + TopP = 0.7, + FrequencyPenalty = 0.7, + PresencePenalty = 0.7, + StopSequences = new string[] { "foo", "bar" }, + ChatSystemPrompt = "chat system prompt", + MaxTokens = 128, + Logprobs = true, + TopLogprobs = 5, + TokenSelectionBiases = new Dictionary() { { 1, 2 }, { 3, 4 } }, + }; + + // Act + AzureOpenAIPromptExecutionSettings executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(actualSettings); + + // Assert + Assert.Equal(actualSettings, executionSettings); + } + + [Fact] + public void ItCanUseOpenAIExecutionSettings() + { + // Arrange + PromptExecutionSettings actualSettings = new() + { + ExtensionData = new Dictionary() { + { "max_tokens", 1000 }, + { "temperature", 0 } + } + }; + + // Act + AzureOpenAIPromptExecutionSettings executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(actualSettings, null); + + // Assert + Assert.NotNull(executionSettings); + Assert.Equal(1000, executionSettings.MaxTokens); + 
Assert.Equal(0, executionSettings.Temperature); + } + + [Fact] + public void ItCreatesOpenAIExecutionSettingsFromExtraPropertiesSnakeCase() + { + // Arrange + PromptExecutionSettings actualSettings = new() + { + ExtensionData = new Dictionary() + { + { "temperature", 0.7 }, + { "top_p", 0.7 }, + { "frequency_penalty", 0.7 }, + { "presence_penalty", 0.7 }, + { "stop_sequences", new [] { "foo", "bar" } }, + { "chat_system_prompt", "chat system prompt" }, + { "max_tokens", 128 }, + { "token_selection_biases", new Dictionary() { { 1, 2 }, { 3, 4 } } }, + { "seed", 123456 }, + { "logprobs", true }, + { "top_logprobs", 5 }, + } + }; + + // Act + AzureOpenAIPromptExecutionSettings executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(actualSettings, null); + + // Assert + AssertExecutionSettings(executionSettings); + } + + [Fact] + public void ItCreatesOpenAIExecutionSettingsFromExtraPropertiesAsStrings() + { + // Arrange + PromptExecutionSettings actualSettings = new() + { + ExtensionData = new Dictionary() + { + { "temperature", "0.7" }, + { "top_p", "0.7" }, + { "frequency_penalty", "0.7" }, + { "presence_penalty", "0.7" }, + { "stop_sequences", new [] { "foo", "bar" } }, + { "chat_system_prompt", "chat system prompt" }, + { "max_tokens", "128" }, + { "token_selection_biases", new Dictionary() { { "1", "2" }, { "3", "4" } } }, + { "seed", 123456 }, + { "logprobs", true }, + { "top_logprobs", 5 } + } + }; + + // Act + AzureOpenAIPromptExecutionSettings executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(actualSettings, null); + + // Assert + AssertExecutionSettings(executionSettings); + } + + [Fact] + public void ItCreatesOpenAIExecutionSettingsFromJsonSnakeCase() + { + // Arrange + var json = """ + { + "temperature": 0.7, + "top_p": 0.7, + "frequency_penalty": 0.7, + "presence_penalty": 0.7, + "stop_sequences": [ "foo", "bar" ], + "chat_system_prompt": "chat system prompt", + "token_selection_biases": { "1": 2, "3": 4 }, 
+ "max_tokens": 128, + "seed": 123456, + "logprobs": true, + "top_logprobs": 5 + } + """; + var actualSettings = JsonSerializer.Deserialize(json); + + // Act + AzureOpenAIPromptExecutionSettings executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(actualSettings); + + // Assert + AssertExecutionSettings(executionSettings); + } + + [Theory] + [InlineData("", "")] + [InlineData("System prompt", "System prompt")] + public void ItUsesCorrectChatSystemPrompt(string chatSystemPrompt, string expectedChatSystemPrompt) + { + // Arrange & Act + var settings = new AzureOpenAIPromptExecutionSettings { ChatSystemPrompt = chatSystemPrompt }; + + // Assert + Assert.Equal(expectedChatSystemPrompt, settings.ChatSystemPrompt); + } + + [Fact] + public void PromptExecutionSettingsCloneWorksAsExpected() + { + // Arrange + string configPayload = """ + { + "max_tokens": 60, + "temperature": 0.5, + "top_p": 0.0, + "presence_penalty": 0.0, + "frequency_penalty": 0.0 + } + """; + var executionSettings = JsonSerializer.Deserialize(configPayload); + + // Act + var clone = executionSettings!.Clone(); + + // Assert + Assert.Equal(executionSettings.ModelId, clone.ModelId); + Assert.Equivalent(executionSettings.ExtensionData, clone.ExtensionData); + } + + [Fact] + public void PromptExecutionSettingsFreezeWorksAsExpected() + { + // Arrange + string configPayload = """ + { + "max_tokens": 60, + "temperature": 0.5, + "top_p": 0.0, + "presence_penalty": 0.0, + "frequency_penalty": 0.0, + "stop_sequences": [ "DONE" ], + "token_selection_biases": { "1": 2, "3": 4 } + } + """; + var executionSettings = JsonSerializer.Deserialize(configPayload); + + // Act + executionSettings!.Freeze(); + + // Assert + Assert.True(executionSettings.IsFrozen); + Assert.Throws(() => executionSettings.ModelId = "gpt-4"); + Assert.Throws(() => executionSettings.Temperature = 1); + Assert.Throws(() => executionSettings.TopP = 1); + Assert.Throws(() => executionSettings.StopSequences?.Add("STOP")); + 
Assert.Throws(() => executionSettings.TokenSelectionBiases?.Add(5, 6)); + + executionSettings!.Freeze(); // idempotent + Assert.True(executionSettings.IsFrozen); + } + + [Fact] + public void FromExecutionSettingsWithDataDoesNotIncludeEmptyStopSequences() + { + // Arrange + var executionSettings = new AzureOpenAIPromptExecutionSettings { StopSequences = [] }; + + // Act +#pragma warning disable CS0618 // AzureOpenAIChatCompletionWithData is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions + var executionSettingsWithData = AzureOpenAIPromptExecutionSettings.FromExecutionSettingsWithData(executionSettings); +#pragma warning restore CS0618 + // Assert + Assert.Null(executionSettingsWithData.StopSequences); + } + + [Fact] + public void ItCanCreateAzureOpenAIPromptExecutionSettingsFromOpenAIPromptExecutionSettings() + { + // Arrange + OpenAIPromptExecutionSettings originalSettings = new() + { + Temperature = 0.7, + TopP = 0.7, + FrequencyPenalty = 0.7, + PresencePenalty = 0.7, + StopSequences = new string[] { "foo", "bar" }, + ChatSystemPrompt = "chat system prompt", + TokenSelectionBiases = new Dictionary() { { 1, 2 }, { 3, 4 } }, + MaxTokens = 128, + Logprobs = true, + Seed = 123456, + TopLogprobs = 5, + ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions + }; + + // Act + AzureOpenAIPromptExecutionSettings executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings); + + // Assert + AssertExecutionSettings(executionSettings); + } + + private static void AssertExecutionSettings(AzureOpenAIPromptExecutionSettings executionSettings) + { + Assert.NotNull(executionSettings); + Assert.Equal(0.7, executionSettings.Temperature); + Assert.Equal(0.7, executionSettings.TopP); + Assert.Equal(0.7, executionSettings.FrequencyPenalty); + Assert.Equal(0.7, executionSettings.PresencePenalty); + Assert.Equal(new string[] { "foo", "bar" }, executionSettings.StopSequences); + Assert.Equal("chat system prompt", 
executionSettings.ChatSystemPrompt); + Assert.Equal(new Dictionary() { { 1, 2 }, { 3, 4 } }, executionSettings.TokenSelectionBiases); + Assert.Equal(128, executionSettings.MaxTokens); + Assert.Equal(123456, executionSettings.Seed); + Assert.Equal(true, executionSettings.Logprobs); + Assert.Equal(5, executionSettings.TopLogprobs); + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs new file mode 100644 index 000000000000..100b0b1901d8 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using Azure.AI.OpenAI.Chat; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.Connectors.OpenAI; + +namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Settings; + +/// +/// Unit tests for class. 
+/// +public class OpenAIPromptExecutionSettingsTests +{ + [Fact] + public void ItCanCreateOpenAIPromptExecutionSettingsFromAzureOpenAIPromptExecutionSettings() + { + // Arrange + AzureOpenAIPromptExecutionSettings originalSettings = new() + { + Temperature = 0.7, + TopP = 0.7, + FrequencyPenalty = 0.7, + PresencePenalty = 0.7, + StopSequences = new string[] { "foo", "bar" }, + ChatSystemPrompt = "chat system prompt", + TokenSelectionBiases = new Dictionary() { { 1, 2 }, { 3, 4 } }, + MaxTokens = 128, + Logprobs = true, + Seed = 123456, + TopLogprobs = 5, + AzureChatDataSource = new AzureSearchChatDataSource + { + Endpoint = new Uri("https://test-host"), + Authentication = DataSourceAuthentication.FromApiKey("api-key"), + IndexName = "index-name" + } + }; + + // Act + OpenAIPromptExecutionSettings executionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings); + + // Assert + AssertExecutionSettings(executionSettings); + } + + private static void AssertExecutionSettings(OpenAIPromptExecutionSettings executionSettings) + { + Assert.NotNull(executionSettings); + Assert.Equal(0.7, executionSettings.Temperature); + Assert.Equal(0.7, executionSettings.TopP); + Assert.Equal(0.7, executionSettings.FrequencyPenalty); + Assert.Equal(0.7, executionSettings.PresencePenalty); + Assert.Equal(new string[] { "foo", "bar" }, executionSettings.StopSequences); + Assert.Equal("chat system prompt", executionSettings.ChatSystemPrompt); + Assert.Equal(new Dictionary() { { 1, 2 }, { 3, 4 } }, executionSettings.TokenSelectionBiases); + Assert.Equal(128, executionSettings.MaxTokens); + Assert.Equal(123456, executionSettings.Seed); + Assert.Equal(true, executionSettings.Logprobs); + Assert.Equal(5, executionSettings.TopLogprobs); + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_multiple_function_calls_test_response.json 
b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_multiple_function_calls_test_response.json similarity index 100% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_multiple_function_calls_test_response.json rename to dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_multiple_function_calls_test_response.json diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_single_function_call_test_response.json b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_single_function_call_test_response.json similarity index 100% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_single_function_call_test_response.json rename to dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_single_function_call_test_response.json diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_async_filter_response.txt b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_async_filter_response.txt new file mode 100644 index 000000000000..078ad45af412 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_async_filter_response.txt @@ -0,0 +1,13 @@ +data: {"choices":[],"created":0,"id":"","model":"","object":"","prompt_filter_results":[{"prompt_index":0,"content_filter_results":{"hate":{"filtered":false,"severity":"safe"},"jailbreak":{"filtered":false,"detected":false},"self_harm":{"filtered":false,"severity":"safe"},"sexual":{"filtered":false,"severity":"safe"},"violence":{"filtered":false,"severity":"safe"}}}]} + +data: 
{"choices":[{"delta":{"content":"","role":"assistant"},"finish_reason":null,"index":0,"logprobs":null}],"created":1724860848,"id":"chatcmpl-123","model":"gpt-4o-2024-05-13","object":"chat.completion.chunk","system_fingerprint":"fp_abc28019ad"} + +data: {"choices":[{"delta":{"content":"Kindness"},"finish_reason":null,"index":0,"logprobs":null}],"created":1724860848,"id":"chatcmpl-123","model":"gpt-4o-2024-05-13","object":"chat.completion.chunk","system_fingerprint":"fp_abc28019ad"} + +data: {"choices":[{"delta":{},"finish_reason":"stop","index":0,"logprobs":null}],"created":1724860848,"id":"chatcmpl-123","model":"gpt-4o-2024-05-13","object":"chat.completion.chunk","system_fingerprint":"fp_abc28019ad"} + +data: {"choices":[{"content_filter_offsets":{"check_offset":1576,"start_offset":1576,"end_offset":2318},"content_filter_results":{"hate":{"filtered":false,"severity":"safe"},"self_harm":{"filtered":false,"severity":"safe"},"sexual":{"filtered":false,"severity":"safe"},"violence":{"filtered":false,"severity":"safe"}},"finish_reason":null,"index":0}],"created":0,"id":"","model":"","object":""} + +data: {"choices":[{"content_filter_offsets":{"check_offset":1576,"start_offset":1576,"end_offset":2318},"content_filter_results":{"protected_material_code":{"filtered":false,"detected":false},"protected_material_text":{"filtered":false,"detected":false}},"finish_reason":null,"index":0}],"created":0,"id":"","model":"","object":""} + +data: [DONE] \ No newline at end of file diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_streaming_multiple_function_calls_test_response.txt b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_multiple_function_calls_test_response.txt similarity index 100% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_streaming_multiple_function_calls_test_response.txt rename to 
dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_multiple_function_calls_test_response.txt diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_streaming_single_function_call_test_response.txt b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_single_function_call_test_response.txt similarity index 100% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_streaming_single_function_call_test_response.txt rename to dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_single_function_call_test_response.txt diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_streaming_test_response.txt b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_test_response.txt similarity index 100% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_streaming_test_response.txt rename to dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_test_response.txt diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_test_response.json b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_test_response.json similarity index 100% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_test_response.json rename to dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_test_response.json diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_with_data_streaming_test_response.txt b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_with_data_streaming_test_response.txt similarity index 100% rename from 
dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_with_data_streaming_test_response.txt rename to dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_with_data_streaming_test_response.txt diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_with_data_test_response.json b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_with_data_test_response.json similarity index 100% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_with_data_test_response.json rename to dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_with_data_test_response.json diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/filters_multiple_function_calls_test_response.json b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/filters_multiple_function_calls_test_response.json similarity index 92% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/filters_multiple_function_calls_test_response.json rename to dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/filters_multiple_function_calls_test_response.json index eb695f292c96..3ffa6b00cc3f 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/filters_multiple_function_calls_test_response.json +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/filters_multiple_function_calls_test_response.json @@ -11,7 +11,7 @@ "content": null, "tool_calls": [ { - "id": "tool-call-id-1", + "id": "1", "type": "function", "function": { "name": "MyPlugin-Function1", @@ -19,7 +19,7 @@ } }, { - "id": "tool-call-id-2", + "id": "2", "type": "function", "function": { "name": "MyPlugin-Function2", diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/filters_streaming_multiple_function_calls_test_response.txt 
b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/filters_streaming_multiple_function_calls_test_response.txt new file mode 100644 index 000000000000..c8aeb98e8b82 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/filters_streaming_multiple_function_calls_test_response.txt @@ -0,0 +1,5 @@ +data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":0,"id":"1","type":"function","function":{"name":"MyPlugin-Function1","arguments":"{\n\"parameter\": \"function1-value\"\n}"}}]},"finish_reason":"tool_calls"}]} + +data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":1,"id":"2","type":"function","function":{"name":"MyPlugin-Function2","arguments":"{\n\"parameter\": \"function2-value\"\n}"}}]},"finish_reason":"tool_calls"}]} + +data: [DONE] diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text-embeddings-multiple-response.txt b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text-embeddings-multiple-response.txt new file mode 100644 index 000000000000..46a9581cf0cc --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text-embeddings-multiple-response.txt @@ -0,0 +1,20 @@ +{ + "object": "list", + "data": [ + { + "object": "embedding", + "index": 0, + "embedding": "zcyMP83MDEAzM1NAzcyMQA==" + }, + { + "object": "embedding", + "index": 1, + "embedding": "zcyMP83MDEAzM1NAzcyMQA==" + } + ], + "model": "text-embedding-ada-002", + "usage": { + "prompt_tokens": 7, + "total_tokens": 7 + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text-embeddings-response.txt 
b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text-embeddings-response.txt new file mode 100644 index 000000000000..c715b851b78c --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text-embeddings-response.txt @@ -0,0 +1,15 @@ +{ + "object": "list", + "data": [ + { + "object": "embedding", + "index": 0, + "embedding": "zcyMP83MDEAzM1NAzcyMQA==" + } + ], + "model": "text-embedding-ada-002", + "usage": { + "prompt_tokens": 7, + "total_tokens": 7 + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text-to-image-response.txt b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text-to-image-response.txt new file mode 100644 index 000000000000..1d6f2150b1d5 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text-to-image-response.txt @@ -0,0 +1,9 @@ +{ + "created": 1702575371, + "data": [ + { + "revised_prompt": "A photo capturing the diversity of the Earth's landscapes.", + "url": "https://image-url/" + } + ] +} \ No newline at end of file diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/text_completion_streaming_test_response.txt b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text_completion_streaming_test_response.txt similarity index 100% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/text_completion_streaming_test_response.txt rename to dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text_completion_streaming_test_response.txt diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/text_completion_test_response.json b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text_completion_test_response.json similarity index 100% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/text_completion_test_response.json rename to 
dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text_completion_test_response.json diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Connectors.AzureOpenAI.csproj b/dotnet/src/Connectors/Connectors.AzureOpenAI/Connectors.AzureOpenAI.csproj new file mode 100644 index 000000000000..15d88496159b --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Connectors.AzureOpenAI.csproj @@ -0,0 +1,38 @@ + + + + + Microsoft.SemanticKernel.Connectors.AzureOpenAI + $(AssemblyName) + net8.0;netstandard2.0 + true + $(NoWarn);NU5104;SKEXP0001,SKEXP0010 + true + + + + rc + + + + + + + + Semantic Kernel - Azure OpenAI connectors + Semantic Kernel connectors for Azure OpenAI. Contains clients for chat completion, embedding and DALL-E text to image. + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.ChatCompletion.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.ChatCompletion.cs new file mode 100644 index 000000000000..1f68ada62532 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.ChatCompletion.cs @@ -0,0 +1,93 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics; +using Azure.AI.OpenAI; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Diagnostics; +using OpenAI.Chat; + +#pragma warning disable CA2208 // Instantiate argument exceptions correctly + +namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI; + +/// +/// Base class for AI clients that provides common functionality for interacting with Azure OpenAI services. +/// +internal partial class AzureClientCore +{ + /// + protected override OpenAIPromptExecutionSettings GetSpecializedExecutionSettings(PromptExecutionSettings? 
executionSettings) + => AzureOpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings); + + /// + protected override Activity? StartCompletionActivity(ChatHistory chatHistory, PromptExecutionSettings settings) + => ModelDiagnostics.StartCompletionActivity(this.Endpoint, this.DeploymentName, ModelProvider, chatHistory, settings); + + /// + protected override ChatCompletionOptions CreateChatCompletionOptions( + OpenAIPromptExecutionSettings executionSettings, + ChatHistory chatHistory, + ToolCallingConfig toolCallingConfig, + Kernel? kernel) + { + if (executionSettings is not AzureOpenAIPromptExecutionSettings azureSettings) + { + return base.CreateChatCompletionOptions(executionSettings, chatHistory, toolCallingConfig, kernel); + } + + var options = new ChatCompletionOptions + { + MaxTokens = executionSettings.MaxTokens, + Temperature = (float?)executionSettings.Temperature, + TopP = (float?)executionSettings.TopP, + FrequencyPenalty = (float?)executionSettings.FrequencyPenalty, + PresencePenalty = (float?)executionSettings.PresencePenalty, + Seed = executionSettings.Seed, + EndUserId = executionSettings.User, + TopLogProbabilityCount = executionSettings.TopLogprobs, + IncludeLogProbabilities = executionSettings.Logprobs, + }; + + var responseFormat = GetResponseFormat(executionSettings); + if (responseFormat is not null) + { + options.ResponseFormat = responseFormat; + } + + if (toolCallingConfig.Choice is not null) + { + options.ToolChoice = toolCallingConfig.Choice; + } + + if (toolCallingConfig.Tools is { Count: > 0 } tools) + { + options.Tools.AddRange(tools); + } + + if (azureSettings.AzureChatDataSource is not null) + { +#pragma warning disable AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. 
+ options.AddDataSource(azureSettings.AzureChatDataSource); +#pragma warning restore AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. + } + + if (executionSettings.TokenSelectionBiases is not null) + { + foreach (var keyValue in executionSettings.TokenSelectionBiases) + { + options.LogitBiases.Add(keyValue.Key, keyValue.Value); + } + } + + if (executionSettings.StopSequences is { Count: > 0 }) + { + foreach (var s in executionSettings.StopSequences) + { + options.StopSequences.Add(s); + } + } + + return options; + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAIClientCore.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.cs similarity index 50% rename from dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAIClientCore.cs rename to dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.cs index be0428faa799..e246f90667b6 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAIClientCore.cs +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.cs @@ -1,102 +1,135 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using System.ClientModel.Primitives; using System.Net.Http; -using Azure; +using System.Threading; using Azure.AI.OpenAI; using Azure.Core; using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Services; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Http; +using OpenAI; -namespace Microsoft.SemanticKernel.Connectors.OpenAI; +namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI; /// -/// Core implementation for Azure OpenAI clients, providing common functionality and properties. +/// Base class for AI clients that provides common functionality for interacting with Azure OpenAI services. 
/// -internal sealed class AzureOpenAIClientCore : ClientCore +internal partial class AzureClientCore : ClientCore { /// - /// Gets the key used to store the deployment name in the dictionary. + /// Gets the key used to store the deployment name in the dictionary. /// - public static string DeploymentNameKey => "DeploymentName"; + internal static string DeploymentNameKey => "DeploymentName"; /// - /// OpenAI / Azure OpenAI Client + /// Deployment name. /// - internal override OpenAIClient Client { get; } + internal string DeploymentName { get; set; } = string.Empty; /// - /// Initializes a new instance of the class using API Key authentication. + /// Initializes a new instance of the class. /// /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart /// Custom for HTTP requests. /// The to use for logging. If null, no logging will be performed. - internal AzureOpenAIClientCore( + internal AzureClientCore( string deploymentName, string endpoint, string apiKey, HttpClient? httpClient = null, - ILogger? logger = null) : base(logger) + ILogger? logger = null) { Verify.NotNullOrWhiteSpace(deploymentName); Verify.NotNullOrWhiteSpace(endpoint); Verify.StartsWith(endpoint, "https://", "The Azure OpenAI endpoint must start with 'https://'"); Verify.NotNullOrWhiteSpace(apiKey); - var options = GetOpenAIClientOptions(httpClient); + var options = GetAzureOpenAIClientOptions(httpClient); - this.DeploymentOrModelName = deploymentName; + this.Logger = logger ?? 
NullLogger.Instance; + this.DeploymentName = deploymentName; this.Endpoint = new Uri(endpoint); - this.Client = new OpenAIClient(this.Endpoint, new AzureKeyCredential(apiKey), options); + this.Client = new AzureOpenAIClient(this.Endpoint, apiKey, options); + + this.AddAttribute(DeploymentNameKey, deploymentName); } /// - /// Initializes a new instance of the class supporting AAD authentication. + /// Initializes a new instance of the class. /// /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart /// Token credential, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. /// Custom for HTTP requests. /// The to use for logging. If null, no logging will be performed. - internal AzureOpenAIClientCore( + internal AzureClientCore( string deploymentName, string endpoint, TokenCredential credential, HttpClient? httpClient = null, - ILogger? logger = null) : base(logger) + ILogger? logger = null) { Verify.NotNullOrWhiteSpace(deploymentName); Verify.NotNullOrWhiteSpace(endpoint); Verify.StartsWith(endpoint, "https://", "The Azure OpenAI endpoint must start with 'https://'"); - var options = GetOpenAIClientOptions(httpClient); + var options = GetAzureOpenAIClientOptions(httpClient); - this.DeploymentOrModelName = deploymentName; + this.Logger = logger ?? NullLogger.Instance; + this.DeploymentName = deploymentName; this.Endpoint = new Uri(endpoint); - this.Client = new OpenAIClient(this.Endpoint, credential, options); + this.Client = new AzureOpenAIClient(this.Endpoint, credential, options); + + this.AddAttribute(DeploymentNameKey, deploymentName); } /// - /// Initializes a new instance of the class using the specified OpenAIClient. + /// Initializes a new instance of the class.. 
/// Note: instances created this way might not have the default diagnostics settings, /// it's up to the caller to configure the client. /// /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Custom . + /// Custom . /// The to use for logging. If null, no logging will be performed. - internal AzureOpenAIClientCore( + internal AzureClientCore( string deploymentName, - OpenAIClient openAIClient, - ILogger? logger = null) : base(logger) + AzureOpenAIClient openAIClient, + ILogger? logger = null) { Verify.NotNullOrWhiteSpace(deploymentName); Verify.NotNull(openAIClient); - this.DeploymentOrModelName = deploymentName; + this.Logger = logger ?? NullLogger.Instance; + this.DeploymentName = deploymentName; this.Client = openAIClient; this.AddAttribute(DeploymentNameKey, deploymentName); } + + /// Gets options to use for an OpenAIClient + /// Custom for HTTP requests. + /// Optional API version. + /// An instance of . + internal static AzureOpenAIClientOptions GetAzureOpenAIClientOptions(HttpClient? httpClient, AzureOpenAIClientOptions.ServiceVersion? serviceVersion = null) + { + AzureOpenAIClientOptions options = serviceVersion is not null + ? new(serviceVersion.Value) { ApplicationId = HttpHeaderConstant.Values.UserAgent } + : new() { ApplicationId = HttpHeaderConstant.Values.UserAgent }; + + options.AddPolicy(CreateRequestHeaderPolicy(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(AzureClientCore))), PipelinePosition.PerCall); + + if (httpClient is not null) + { + options.Transport = new HttpClientPipelineTransport(httpClient); + options.RetryPolicy = new ClientRetryPolicy(maxRetries: 0); // Disable Azure SDK retry policy if and only if a custom HttpClient is provided. 
+ options.NetworkTimeout = Timeout.InfiniteTimeSpan; // Disable Azure SDK default timeout + } + + return options; + } } diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Extensions/AzureOpenAIKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Extensions/AzureOpenAIKernelBuilderExtensions.cs new file mode 100644 index 000000000000..86fbc7ac59df --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Extensions/AzureOpenAIKernelBuilderExtensions.cs @@ -0,0 +1,524 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; +using System.Net.Http; +using Azure; +using Azure.AI.OpenAI; +using Azure.Core; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.AudioToText; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.Embeddings; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.TextGeneration; +using Microsoft.SemanticKernel.TextToAudio; +using Microsoft.SemanticKernel.TextToImage; + +#pragma warning disable IDE0039 // Use local function + +namespace Microsoft.SemanticKernel; + +/// +/// Provides extension methods for to configure Azure OpenAI connectors. +/// +public static class AzureOpenAIKernelBuilderExtensions +{ + #region Chat Completion + + /// + /// Adds the to the . + /// + /// The instance to augment. 
+ /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The HttpClient to use with this service. + /// The same instance as . + public static IKernelBuilder AddAzureOpenAIChatCompletion( + this IKernelBuilder builder, + string deploymentName, + string endpoint, + string apiKey, + string? serviceId = null, + string? modelId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNullOrWhiteSpace(apiKey); + + Func factory = (serviceProvider, _) => + { + AzureOpenAIClient client = CreateAzureOpenAIClient( + endpoint, + new AzureKeyCredential(apiKey), + HttpClientProvider.GetHttpClient(httpClient, serviceProvider)); + + return new(deploymentName, client, modelId, serviceProvider.GetService()); + }; + + builder.Services.AddKeyedSingleton(serviceId, factory); + builder.Services.AddKeyedSingleton(serviceId, factory); + + return builder; + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The HttpClient to use with this service. + /// The same instance as . 
+ public static IKernelBuilder AddAzureOpenAIChatCompletion( + this IKernelBuilder builder, + string deploymentName, + string endpoint, + TokenCredential credentials, + string? serviceId = null, + string? modelId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNull(credentials); + + Func factory = (serviceProvider, _) => + { + AzureOpenAIClient client = CreateAzureOpenAIClient( + endpoint, + credentials, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider)); + + return new(deploymentName, client, modelId, serviceProvider.GetService()); + }; + + builder.Services.AddKeyedSingleton(serviceId, factory); + builder.Services.AddKeyedSingleton(serviceId, factory); + + return builder; + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The same instance as . + public static IKernelBuilder AddAzureOpenAIChatCompletion( + this IKernelBuilder builder, + string deploymentName, + AzureOpenAIClient? azureOpenAIClient = null, + string? serviceId = null, + string? modelId = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(deploymentName); + + Func factory = (serviceProvider, _) => + new(deploymentName, azureOpenAIClient ?? serviceProvider.GetRequiredService(), modelId, serviceProvider.GetService()); + + builder.Services.AddKeyedSingleton(serviceId, factory); + builder.Services.AddKeyedSingleton(serviceId, factory); + + return builder; + } + + #endregion + + #region Text Embedding + + /// + /// Adds the to the . + /// + /// The instance to augment. 
+ /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The HttpClient to use with this service. + /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IKernelBuilder AddAzureOpenAITextEmbeddingGeneration( + this IKernelBuilder builder, + string deploymentName, + string endpoint, + string apiKey, + string? serviceId = null, + string? modelId = null, + HttpClient? httpClient = null, + int? dimensions = null) + { + Verify.NotNull(builder); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextEmbeddingGenerationService( + deploymentName, + endpoint, + apiKey, + modelId, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + serviceProvider.GetService(), + dimensions)); + + return builder; + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The HttpClient to use with this service. 
+ /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IKernelBuilder AddAzureOpenAITextEmbeddingGeneration( + this IKernelBuilder builder, + string deploymentName, + string endpoint, + TokenCredential credential, + string? serviceId = null, + string? modelId = null, + HttpClient? httpClient = null, + int? dimensions = null) + { + Verify.NotNull(builder); + Verify.NotNull(credential); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextEmbeddingGenerationService( + deploymentName, + endpoint, + credential, + modelId, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + serviceProvider.GetService(), + dimensions)); + + return builder; + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IKernelBuilder AddAzureOpenAITextEmbeddingGeneration( + this IKernelBuilder builder, + string deploymentName, + AzureOpenAIClient? azureOpenAIClient = null, + string? serviceId = null, + string? modelId = null, + int? dimensions = null) + { + Verify.NotNull(builder); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextEmbeddingGenerationService( + deploymentName, + azureOpenAIClient ?? 
serviceProvider.GetRequiredService(), + modelId, + serviceProvider.GetService(), + dimensions)); + + return builder; + } + + #endregion + + #region Text-to-Audio + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The HttpClient to use with this service. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IKernelBuilder AddAzureOpenAITextToAudio( + this IKernelBuilder builder, + string deploymentName, + string endpoint, + string apiKey, + string? serviceId = null, + string? modelId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNullOrWhiteSpace(apiKey); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextToAudioService( + deploymentName, + endpoint, + apiKey, + modelId, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + serviceProvider.GetService())); + + return builder; + } + + #endregion + + #region Images + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. 
+ /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// Azure OpenAI API version + /// The same instance as . + [Experimental("SKEXP0010")] + public static IKernelBuilder AddAzureOpenAITextToImage( + this IKernelBuilder builder, + string deploymentName, + string endpoint, + TokenCredential credentials, + string? modelId = null, + string? serviceId = null, + string? apiVersion = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNull(credentials); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextToImageService( + deploymentName, + endpoint, + credentials, + modelId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService(), + apiVersion)); + + return builder; + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// Azure OpenAI API version + /// The HttpClient to use with this service. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IKernelBuilder AddAzureOpenAITextToImage( + this IKernelBuilder builder, + string deploymentName, + string endpoint, + string apiKey, + string? modelId = null, + string? serviceId = null, + string? apiVersion = null, + HttpClient? 
httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNullOrWhiteSpace(apiKey); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextToImageService( + deploymentName, + endpoint, + apiKey, + modelId, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + serviceProvider.GetService(), + apiVersion)); + + return builder; + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// The same instance as . + [Experimental("SKEXP0010")] + public static IKernelBuilder AddAzureOpenAITextToImage( + this IKernelBuilder builder, + string deploymentName, + AzureOpenAIClient? azureOpenAIClient = null, + string? modelId = null, + string? serviceId = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(deploymentName); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextToImageService( + deploymentName, + azureOpenAIClient ?? serviceProvider.GetRequiredService(), + modelId, + serviceProvider.GetService())); + + return builder; + } + + #endregion + + #region Audio-to-Text + + /// + /// Adds the to the . + /// + /// The instance to augment. 
+ /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The HttpClient to use with this service. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IKernelBuilder AddAzureOpenAIAudioToText( + this IKernelBuilder builder, + string deploymentName, + string endpoint, + string apiKey, + string? serviceId = null, + string? modelId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(deploymentName); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNullOrWhiteSpace(apiKey); + + Func factory = (serviceProvider, _) => + { + AzureOpenAIClient client = CreateAzureOpenAIClient( + endpoint, + new AzureKeyCredential(apiKey), + HttpClientProvider.GetHttpClient(httpClient, serviceProvider)); + return new(deploymentName, client, modelId, serviceProvider.GetService()); + }; + + builder.Services.AddKeyedSingleton(serviceId, factory); + + return builder; + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The HttpClient to use with this service. + /// The same instance as . 
+ [Experimental("SKEXP0010")] + public static IKernelBuilder AddAzureOpenAIAudioToText( + this IKernelBuilder builder, + string deploymentName, + string endpoint, + TokenCredential credentials, + string? serviceId = null, + string? modelId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(deploymentName); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNull(credentials); + + Func factory = (serviceProvider, _) => + { + AzureOpenAIClient client = CreateAzureOpenAIClient( + endpoint, + credentials, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider)); + return new(deploymentName, client, modelId, serviceProvider.GetService()); + }; + + builder.Services.AddKeyedSingleton(serviceId, factory); + + return builder; + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The same instance as . + [Experimental("SKEXP0010")] + public static IKernelBuilder AddAzureOpenAIAudioToText( + this IKernelBuilder builder, + string deploymentName, + AzureOpenAIClient? openAIClient = null, + string? serviceId = null, + string? modelId = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(deploymentName); + + Func factory = (serviceProvider, _) => + new(deploymentName, openAIClient ?? serviceProvider.GetRequiredService(), modelId, serviceProvider.GetService()); + + builder.Services.AddKeyedSingleton(serviceId, factory); + + return builder; + } + + #endregion + + private static AzureOpenAIClient CreateAzureOpenAIClient(string endpoint, AzureKeyCredential credentials, HttpClient? 
httpClient) => + new(new Uri(endpoint), credentials, AzureClientCore.GetAzureOpenAIClientOptions(httpClient)); + + private static AzureOpenAIClient CreateAzureOpenAIClient(string endpoint, TokenCredential credentials, HttpClient? httpClient) => + new(new Uri(endpoint), credentials, AzureClientCore.GetAzureOpenAIClientOptions(httpClient)); +} diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Extensions/AzureOpenAIServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Extensions/AzureOpenAIServiceCollectionExtensions.cs new file mode 100644 index 000000000000..13d44f785212 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Extensions/AzureOpenAIServiceCollectionExtensions.cs @@ -0,0 +1,496 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; +using System.Net.Http; +using Azure; +using Azure.AI.OpenAI; +using Azure.Core; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.AudioToText; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.Embeddings; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.TextGeneration; +using Microsoft.SemanticKernel.TextToAudio; +using Microsoft.SemanticKernel.TextToImage; + +#pragma warning disable IDE0039 // Use local function + +namespace Microsoft.SemanticKernel; + +/// +/// Provides extension methods for to configure Azure OpenAI connectors. +/// +public static class AzureOpenAIServiceCollectionExtensions +{ + #region Chat Completion + + /// + /// Adds the to the . + /// + /// The instance to augment. 
+ /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The same instance as . + public static IServiceCollection AddAzureOpenAIChatCompletion( + this IServiceCollection services, + string deploymentName, + string endpoint, + string apiKey, + string? serviceId = null, + string? modelId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNullOrWhiteSpace(apiKey); + + Func factory = (serviceProvider, _) => + { + AzureOpenAIClient client = CreateAzureOpenAIClient( + endpoint, + new AzureKeyCredential(apiKey), + HttpClientProvider.GetHttpClient(serviceProvider)); + + return new(deploymentName, client, modelId, serviceProvider.GetService()); + }; + + services.AddKeyedSingleton(serviceId, factory); + services.AddKeyedSingleton(serviceId, factory); + + return services; + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The same instance as . 
+ public static IServiceCollection AddAzureOpenAIChatCompletion( + this IServiceCollection services, + string deploymentName, + string endpoint, + TokenCredential credentials, + string? serviceId = null, + string? modelId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNull(credentials); + + Func factory = (serviceProvider, _) => + { + AzureOpenAIClient client = CreateAzureOpenAIClient( + endpoint, + credentials, + HttpClientProvider.GetHttpClient(serviceProvider)); + + return new(deploymentName, client, modelId, serviceProvider.GetService()); + }; + + services.AddKeyedSingleton(serviceId, factory); + services.AddKeyedSingleton(serviceId, factory); + + return services; + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The same instance as . + public static IServiceCollection AddAzureOpenAIChatCompletion( + this IServiceCollection services, + string deploymentName, + AzureOpenAIClient? azureOpenAIClient = null, + string? serviceId = null, + string? modelId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(deploymentName); + + Func factory = (serviceProvider, _) => + new(deploymentName, azureOpenAIClient ?? serviceProvider.GetRequiredService(), modelId, serviceProvider.GetService()); + + services.AddKeyedSingleton(serviceId, factory); + services.AddKeyedSingleton(serviceId, factory); + + return services; + } + + #endregion + + #region Text Embedding + + /// + /// Adds the to the . + /// + /// The instance to augment. 
+ /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IServiceCollection AddAzureOpenAITextEmbeddingGeneration( + this IServiceCollection services, + string deploymentName, + string endpoint, + string apiKey, + string? serviceId = null, + string? modelId = null, + int? dimensions = null) + { + Verify.NotNull(services); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextEmbeddingGenerationService( + deploymentName, + endpoint, + apiKey, + modelId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService(), + dimensions)); + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. + /// The same instance as . 
+ [Experimental("SKEXP0010")] + public static IServiceCollection AddAzureOpenAITextEmbeddingGeneration( + this IServiceCollection services, + string deploymentName, + string endpoint, + TokenCredential credential, + string? serviceId = null, + string? modelId = null, + int? dimensions = null) + { + Verify.NotNull(services); + Verify.NotNull(credential); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextEmbeddingGenerationService( + deploymentName, + endpoint, + credential, + modelId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService(), + dimensions)); + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IServiceCollection AddAzureOpenAITextEmbeddingGeneration( + this IServiceCollection services, + string deploymentName, + AzureOpenAIClient? azureOpenAIClient = null, + string? serviceId = null, + string? modelId = null, + int? dimensions = null) + { + Verify.NotNull(services); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextEmbeddingGenerationService( + deploymentName, + azureOpenAIClient ?? serviceProvider.GetRequiredService(), + modelId, + serviceProvider.GetService(), + dimensions)); + } + + #endregion + + #region Text-to-Audio + + /// + /// Adds the to the . + /// + /// The instance to augment. 
+ /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The HttpClient to use with this service. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IServiceCollection AddAzureOpenAITextToAudio( + this IServiceCollection services, + string deploymentName, + string endpoint, + string apiKey, + string? serviceId = null, + string? modelId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(deploymentName); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNullOrWhiteSpace(apiKey); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextToAudioService( + deploymentName, + endpoint, + apiKey, + modelId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService())); + } + + #endregion + + #region Images + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// Azure OpenAI API version + /// The same instance as . 
+ [Experimental("SKEXP0010")] + public static IServiceCollection AddAzureOpenAITextToImage( + this IServiceCollection services, + string deploymentName, + string endpoint, + TokenCredential credentials, + string? modelId = null, + string? serviceId = null, + string? apiVersion = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNull(credentials); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextToImageService( + deploymentName, + endpoint, + credentials, + modelId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService(), + apiVersion)); + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Maximum number of attempts to retrieve the text to image operation result. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IServiceCollection AddAzureOpenAITextToImage( + this IServiceCollection services, + string deploymentName, + string endpoint, + string apiKey, + string? serviceId = null, + string? modelId = null, + int maxRetryCount = 5) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNullOrWhiteSpace(apiKey); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextToImageService( + deploymentName, + endpoint, + apiKey, + modelId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService())); + } + + /// + /// Adds the to the . + /// + /// The instance to augment. 
+ /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// The same instance as . + [Experimental("SKEXP0010")] + public static IServiceCollection AddAzureOpenAITextToImage( + this IServiceCollection services, + string deploymentName, + AzureOpenAIClient? openAIClient = null, + string? modelId = null, + string? serviceId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(deploymentName); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextToImageService( + deploymentName, + openAIClient ?? serviceProvider.GetRequiredService(), + modelId, + serviceProvider.GetService())); + } + + #endregion + + #region Audio-to-Text + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The same instance as . + [Experimental("SKEXP0010")] + public static IServiceCollection AddAzureOpenAIAudioToText( + this IServiceCollection services, + string deploymentName, + string endpoint, + string apiKey, + string? serviceId = null, + string? 
modelId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(deploymentName); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNullOrWhiteSpace(apiKey); + + Func factory = (serviceProvider, _) => + { + AzureOpenAIClient client = CreateAzureOpenAIClient( + endpoint, + new AzureKeyCredential(apiKey), + HttpClientProvider.GetHttpClient(serviceProvider)); + return new(deploymentName, client, modelId, serviceProvider.GetService()); + }; + + services.AddKeyedSingleton(serviceId, factory); + + return services; + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The same instance as . + [Experimental("SKEXP0010")] + public static IServiceCollection AddAzureOpenAIAudioToText( + this IServiceCollection services, + string deploymentName, + string endpoint, + TokenCredential credentials, + string? serviceId = null, + string? modelId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(deploymentName); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNull(credentials); + + Func factory = (serviceProvider, _) => + { + AzureOpenAIClient client = CreateAzureOpenAIClient( + endpoint, + credentials, + HttpClientProvider.GetHttpClient(serviceProvider)); + return new(deploymentName, client, modelId, serviceProvider.GetService()); + }; + + services.AddKeyedSingleton(serviceId, factory); + + return services; + } + + /// + /// Adds the to the . + /// + /// The instance to augment. 
+ /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The same instance as . + [Experimental("SKEXP0010")] + public static IServiceCollection AddAzureOpenAIAudioToText( + this IServiceCollection services, + string deploymentName, + AzureOpenAIClient? openAIClient = null, + string? serviceId = null, + string? modelId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(deploymentName); + + Func factory = (serviceProvider, _) => + new(deploymentName, openAIClient ?? serviceProvider.GetRequiredService(), modelId, serviceProvider.GetService()); + + services.AddKeyedSingleton(serviceId, factory); + + return services; + } + + #endregion + + private static AzureOpenAIClient CreateAzureOpenAIClient(string endpoint, AzureKeyCredential credentials, HttpClient? httpClient) => + new(new Uri(endpoint), credentials, AzureClientCore.GetAzureOpenAIClientOptions(httpClient)); + + private static AzureOpenAIClient CreateAzureOpenAIClient(string endpoint, TokenCredential credentials, HttpClient? 
httpClient) => + new(new Uri(endpoint), credentials, AzureClientCore.GetAzureOpenAIClientOptions(httpClient)); +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AudioToText/AzureOpenAIAudioToTextService.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Services/AzureOpenAIAudioToTextService.cs similarity index 73% rename from dotnet/src/Connectors/Connectors.OpenAI/AudioToText/AzureOpenAIAudioToTextService.cs rename to dotnet/src/Connectors/Connectors.AzureOpenAI/Services/AzureOpenAIAudioToTextService.cs index 2e065876b779..b8dfccdf06bf 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/AudioToText/AzureOpenAIAudioToTextService.cs +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Services/AzureOpenAIAudioToTextService.cs @@ -11,22 +11,22 @@ using Microsoft.SemanticKernel.AudioToText; using Microsoft.SemanticKernel.Services; -namespace Microsoft.SemanticKernel.Connectors.OpenAI; +namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI; /// /// Azure OpenAI audio-to-text service. /// -[Experimental("SKEXP0001")] +[Experimental("SKEXP0010")] public sealed class AzureOpenAIAudioToTextService : IAudioToTextService { /// Core implementation shared by Azure OpenAI services. - private readonly AzureOpenAIClientCore _core; + private readonly AzureClientCore _client; /// - public IReadOnlyDictionary Attributes => this._core.Attributes; + public IReadOnlyDictionary Attributes => this._client.Attributes; /// - /// Creates an instance of the with API key auth. + /// Initializes a new instance of the class. /// /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart @@ -42,12 +42,12 @@ public AzureOpenAIAudioToTextService( HttpClient? httpClient = null, ILoggerFactory? 
loggerFactory = null) { - this._core = new(deploymentName, endpoint, apiKey, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAIAudioToTextService))); - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + this._client = new(deploymentName, endpoint, apiKey, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAIAudioToTextService))); + this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); } /// - /// Creates an instance of the with AAD auth. + /// Initializes a new instance of the class. /// /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart @@ -63,25 +63,25 @@ public AzureOpenAIAudioToTextService( HttpClient? httpClient = null, ILoggerFactory? loggerFactory = null) { - this._core = new(deploymentName, endpoint, credentials, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAIAudioToTextService))); - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + this._client = new(deploymentName, endpoint, credentials, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAIAudioToTextService))); + this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); } /// - /// Creates an instance of the using the specified . + /// Initializes a new instance of the class. /// /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Custom . + /// Custom . /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource /// The to use for logging. If null, no logging will be performed. public AzureOpenAIAudioToTextService( string deploymentName, - OpenAIClient openAIClient, + AzureOpenAIClient azureOpenAIClient, string? modelId = null, ILoggerFactory? 
loggerFactory = null) { - this._core = new(deploymentName, openAIClient, loggerFactory?.CreateLogger(typeof(AzureOpenAIAudioToTextService))); - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + this._client = new(deploymentName, azureOpenAIClient, loggerFactory?.CreateLogger(typeof(AzureOpenAIAudioToTextService))); + this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); } /// @@ -90,5 +90,5 @@ public Task> GetTextContentsAsync( PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) - => this._core.GetTextContentFromAudioAsync(content, executionSettings, cancellationToken); + => this._client.GetTextFromAudioContentsAsync(this._client.DeploymentName, content, executionSettings, cancellationToken); } diff --git a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletion/AzureOpenAIChatCompletionService.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Services/AzureOpenAIChatCompletionService.cs similarity index 72% rename from dotnet/src/Connectors/Connectors.OpenAI/ChatCompletion/AzureOpenAIChatCompletionService.cs rename to dotnet/src/Connectors/Connectors.AzureOpenAI/Services/AzureOpenAIChatCompletionService.cs index 04da5d2dc1e3..47cca54662bc 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletion/AzureOpenAIChatCompletionService.cs +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Services/AzureOpenAIChatCompletionService.cs @@ -11,7 +11,7 @@ using Microsoft.SemanticKernel.Services; using Microsoft.SemanticKernel.TextGeneration; -namespace Microsoft.SemanticKernel.Connectors.OpenAI; +namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI; /// /// Azure OpenAI chat completion service. @@ -19,10 +19,10 @@ namespace Microsoft.SemanticKernel.Connectors.OpenAI; public sealed class AzureOpenAIChatCompletionService : IChatCompletionService, ITextGenerationService { /// Core implementation shared by Azure OpenAI clients. 
- private readonly AzureOpenAIClientCore _core; + private readonly AzureClientCore _client; /// - /// Create an instance of the connector with API key auth. + /// Initializes a new instance of the class. /// /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart @@ -38,13 +38,13 @@ public AzureOpenAIChatCompletionService( HttpClient? httpClient = null, ILoggerFactory? loggerFactory = null) { - this._core = new(deploymentName, endpoint, apiKey, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAIChatCompletionService))); + this._client = new(deploymentName, endpoint, apiKey, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAIChatCompletionService))); - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); } /// - /// Create an instance of the connector with AAD auth. + /// Initializes a new instance of the class. /// /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart @@ -60,43 +60,43 @@ public AzureOpenAIChatCompletionService( HttpClient? httpClient = null, ILoggerFactory? loggerFactory = null) { - this._core = new(deploymentName, endpoint, credentials, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAIChatCompletionService))); - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + this._client = new(deploymentName, endpoint, credentials, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAIChatCompletionService))); + this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); } /// - /// Creates a new client instance using the specified . + /// Initializes a new instance of the class. 
/// /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Custom . + /// Custom . /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource /// The to use for logging. If null, no logging will be performed. public AzureOpenAIChatCompletionService( string deploymentName, - OpenAIClient openAIClient, + AzureOpenAIClient azureOpenAIClient, string? modelId = null, ILoggerFactory? loggerFactory = null) { - this._core = new(deploymentName, openAIClient, loggerFactory?.CreateLogger(typeof(AzureOpenAIChatCompletionService))); - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + this._client = new(deploymentName, azureOpenAIClient, loggerFactory?.CreateLogger(typeof(AzureOpenAIChatCompletionService))); + this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); } /// - public IReadOnlyDictionary Attributes => this._core.Attributes; + public IReadOnlyDictionary Attributes => this._client.Attributes; /// public Task> GetChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) - => this._core.GetChatMessageContentsAsync(chatHistory, executionSettings, kernel, cancellationToken); + => this._client.GetChatMessageContentsAsync(this._client.DeploymentName, chatHistory, executionSettings, kernel, cancellationToken); /// public IAsyncEnumerable GetStreamingChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? 
kernel = null, CancellationToken cancellationToken = default) - => this._core.GetStreamingChatMessageContentsAsync(chatHistory, executionSettings, kernel, cancellationToken); + => this._client.GetStreamingChatMessageContentsAsync(this._client.DeploymentName, chatHistory, executionSettings, kernel, cancellationToken); /// public Task> GetTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) - => this._core.GetChatAsTextContentsAsync(prompt, executionSettings, kernel, cancellationToken); + => this._client.GetChatAsTextContentsAsync(this._client.DeploymentName, prompt, executionSettings, kernel, cancellationToken); /// public IAsyncEnumerable GetStreamingTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) - => this._core.GetChatAsTextStreamingContentsAsync(prompt, executionSettings, kernel, cancellationToken); + => this._client.GetChatAsTextStreamingContentsAsync(this._client.DeploymentName, prompt, executionSettings, kernel, cancellationToken); } diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextEmbedding/AzureOpenAITextEmbeddingGenerationService.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Services/AzureOpenAITextEmbeddingGenerationService.cs similarity index 76% rename from dotnet/src/Connectors/Connectors.OpenAI/TextEmbedding/AzureOpenAITextEmbeddingGenerationService.cs rename to dotnet/src/Connectors/Connectors.AzureOpenAI/Services/AzureOpenAITextEmbeddingGenerationService.cs index 63fbdbdccb2b..bcbcfbb67087 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/TextEmbedding/AzureOpenAITextEmbeddingGenerationService.cs +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Services/AzureOpenAITextEmbeddingGenerationService.cs @@ -12,7 +12,7 @@ using Microsoft.SemanticKernel.Embeddings; using Microsoft.SemanticKernel.Services; -namespace 
Microsoft.SemanticKernel.Connectors.OpenAI; +namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI; /// /// Azure OpenAI text embedding service. @@ -20,11 +20,11 @@ namespace Microsoft.SemanticKernel.Connectors.OpenAI; [Experimental("SKEXP0010")] public sealed class AzureOpenAITextEmbeddingGenerationService : ITextEmbeddingGenerationService { - private readonly AzureOpenAIClientCore _core; + private readonly AzureClientCore _client; private readonly int? _dimensions; /// - /// Creates a new client instance using API Key auth. + /// Initializes a new instance of the class. /// /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart @@ -42,15 +42,15 @@ public AzureOpenAITextEmbeddingGenerationService( ILoggerFactory? loggerFactory = null, int? dimensions = null) { - this._core = new(deploymentName, endpoint, apiKey, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAITextEmbeddingGenerationService))); + this._client = new(deploymentName, endpoint, apiKey, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAITextEmbeddingGenerationService))); - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); this._dimensions = dimensions; } /// - /// Creates a new client instance supporting AAD auth. + /// Initializes a new instance of the class. /// /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart @@ -68,37 +68,37 @@ public AzureOpenAITextEmbeddingGenerationService( ILoggerFactory? loggerFactory = null, int? 
dimensions = null) { - this._core = new(deploymentName, endpoint, credential, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAITextEmbeddingGenerationService))); + this._client = new(deploymentName, endpoint, credential, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAITextEmbeddingGenerationService))); - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); this._dimensions = dimensions; } /// - /// Creates a new client. + /// Initializes a new instance of the class. /// /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Custom for HTTP requests. + /// Custom for HTTP requests. /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource /// The to use for logging. If null, no logging will be performed. /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. public AzureOpenAITextEmbeddingGenerationService( string deploymentName, - OpenAIClient openAIClient, + AzureOpenAIClient azureOpenAIClient, string? modelId = null, ILoggerFactory? loggerFactory = null, int? dimensions = null) { - this._core = new(deploymentName, openAIClient, loggerFactory?.CreateLogger(typeof(AzureOpenAITextEmbeddingGenerationService))); + this._client = new(deploymentName, azureOpenAIClient, loggerFactory?.CreateLogger(typeof(AzureOpenAITextEmbeddingGenerationService))); - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); this._dimensions = dimensions; } /// - public IReadOnlyDictionary Attributes => this._core.Attributes; + public IReadOnlyDictionary Attributes => this._client.Attributes; /// public Task>> GenerateEmbeddingsAsync( @@ -106,6 +106,6 @@ public Task>> GenerateEmbeddingsAsync( Kernel? 
kernel = null, CancellationToken cancellationToken = default) { - return this._core.GetEmbeddingsAsync(data, kernel, this._dimensions, cancellationToken); + return this._client.GetEmbeddingsAsync(this._client.DeploymentName, data, kernel, this._dimensions, cancellationToken); } } diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextToAudio/AzureOpenAITextToAudioService.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Services/AzureOpenAITextToAudioService.cs similarity index 60% rename from dotnet/src/Connectors/Connectors.OpenAI/TextToAudio/AzureOpenAITextToAudioService.cs rename to dotnet/src/Connectors/Connectors.AzureOpenAI/Services/AzureOpenAITextToAudioService.cs index 47aac090ab05..0b9f98302a0b 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/TextToAudio/AzureOpenAITextToAudioService.cs +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Services/AzureOpenAITextToAudioService.cs @@ -1,26 +1,33 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; using System.Net.Http; using System.Threading; using System.Threading.Tasks; +using Azure.AI.OpenAI; using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel.Services; using Microsoft.SemanticKernel.TextToAudio; -namespace Microsoft.SemanticKernel.Connectors.OpenAI; +namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI; /// /// Azure OpenAI text-to-audio service. /// -[Experimental("SKEXP0001")] +[Experimental("SKEXP0010")] public sealed class AzureOpenAITextToAudioService : ITextToAudioService { /// - /// Azure OpenAI text-to-audio client for HTTP operations. + /// Azure OpenAI text-to-audio client. /// - private readonly AzureOpenAITextToAudioClient _client; + private readonly AzureClientCore _client; + + /// + /// Azure OpenAI model id. + /// + private readonly string? 
_modelId; /// public IReadOnlyDictionary Attributes => this._client.Attributes; @@ -31,7 +38,7 @@ public sealed class AzureOpenAITextToAudioService : ITextToAudioService public static string DeploymentNameKey => "DeploymentName"; /// - /// Creates an instance of the connector with API key auth. + /// Initializes a new instance of the class. /// /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart @@ -47,10 +54,19 @@ public AzureOpenAITextToAudioService( HttpClient? httpClient = null, ILoggerFactory? loggerFactory = null) { - this._client = new(deploymentName, endpoint, apiKey, modelId, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAITextToAudioService))); + var url = !string.IsNullOrWhiteSpace(httpClient?.BaseAddress?.AbsoluteUri) ? httpClient!.BaseAddress!.AbsoluteUri : endpoint; + + var options = AzureClientCore.GetAzureOpenAIClientOptions( + httpClient, + AzureOpenAIClientOptions.ServiceVersion.V2024_05_01_Preview); // https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#text-to-speech + + var azureOpenAIClient = new AzureOpenAIClient(new Uri(url), apiKey, options); + + this._client = new(deploymentName, azureOpenAIClient, loggerFactory?.CreateLogger(typeof(AzureOpenAITextToAudioService))); - this._client.AddAttribute(DeploymentNameKey, deploymentName); this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + + this._modelId = modelId; } /// @@ -59,5 +75,13 @@ public Task> GetAudioContentsAsync( PromptExecutionSettings? executionSettings = null, Kernel? 
kernel = null, CancellationToken cancellationToken = default) - => this._client.GetAudioContentsAsync(text, executionSettings, cancellationToken); + => this._client.GetAudioContentsAsync(this.GetModelId(executionSettings), text, executionSettings, cancellationToken); + + private string GetModelId(PromptExecutionSettings? executionSettings) + { + return + !string.IsNullOrWhiteSpace(this._modelId) ? this._modelId! : + !string.IsNullOrWhiteSpace(executionSettings?.ModelId) ? executionSettings!.ModelId! : + this._client.DeploymentName; + } } diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Services/AzureOpenAITextToImageService.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Services/AzureOpenAITextToImageService.cs new file mode 100644 index 000000000000..b066cc4b3e66 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Services/AzureOpenAITextToImageService.cs @@ -0,0 +1,136 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Azure.AI.OpenAI; +using Azure.Core; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Services; +using Microsoft.SemanticKernel.TextToImage; + +namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI; + +/// +/// Azure OpenAI text to image service. +/// +[Experimental("SKEXP0010")] +public class AzureOpenAITextToImageService : ITextToImageService +{ + private readonly AzureClientCore _client; + + /// + public IReadOnlyDictionary Attributes => this._client.Attributes; + + /// + /// Initializes a new instance of the class. 
+ /// + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Custom for HTTP requests. + /// The to use for logging. If null, no logging will be performed. + /// Azure OpenAI service API version, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + public AzureOpenAITextToImageService( + string deploymentName, + string endpoint, + string apiKey, + string? modelId, + HttpClient? httpClient = null, + ILoggerFactory? loggerFactory = null, + string? apiVersion = null) + { + Verify.NotNullOrWhiteSpace(apiKey); + + var connectorEndpoint = !string.IsNullOrWhiteSpace(endpoint) ? endpoint! : httpClient?.BaseAddress?.AbsoluteUri; + if (connectorEndpoint is null) + { + throw new ArgumentException($"The {nameof(httpClient)}.{nameof(HttpClient.BaseAddress)} and {nameof(endpoint)} are both null or empty. Please ensure at least one is provided."); + } + + var options = AzureClientCore.GetAzureOpenAIClientOptions( + httpClient, + AzureOpenAIClientOptions.ServiceVersion.V2024_05_01_Preview); // DALL-E 3 is supported in the latest API releases - https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#image-generation + + var azureOpenAIClient = new AzureOpenAIClient(new Uri(connectorEndpoint), apiKey, options); + + this._client = new(deploymentName, azureOpenAIClient, loggerFactory?.CreateLogger(this.GetType())); + + if (modelId is not null) + { + this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + } + } + + /// + /// Initializes a new instance of the class. 
+ /// + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Custom for HTTP requests. + /// The to use for logging. If null, no logging will be performed. + /// Azure OpenAI service API version, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + public AzureOpenAITextToImageService( + string deploymentName, + string endpoint, + TokenCredential credential, + string? modelId, + HttpClient? httpClient = null, + ILoggerFactory? loggerFactory = null, + string? apiVersion = null) + { + Verify.NotNull(credential); + + var connectorEndpoint = !string.IsNullOrWhiteSpace(endpoint) ? endpoint! : httpClient?.BaseAddress?.AbsoluteUri; + if (connectorEndpoint is null) + { + throw new ArgumentException($"The {nameof(httpClient)}.{nameof(HttpClient.BaseAddress)} and {nameof(endpoint)} are both null or empty. Please ensure at least one is provided."); + } + + var options = AzureClientCore.GetAzureOpenAIClientOptions( + httpClient, + AzureOpenAIClientOptions.ServiceVersion.V2024_05_01_Preview); // DALL-E 3 is supported in the latest API releases - https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#image-generation + + var azureOpenAIClient = new AzureOpenAIClient(new Uri(connectorEndpoint), credential, options); + + this._client = new(deploymentName, azureOpenAIClient, loggerFactory?.CreateLogger(this.GetType())); + + if (modelId is not null) + { + this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + } + } + + /// + /// Initializes a new instance of the class. 
+ /// + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Custom . + /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// The to use for logging. If null, no logging will be performed. + public AzureOpenAITextToImageService( + string deploymentName, + AzureOpenAIClient azureOpenAIClient, + string? modelId, + ILoggerFactory? loggerFactory = null) + { + Verify.NotNull(azureOpenAIClient); + + this._client = new(deploymentName, azureOpenAIClient, loggerFactory?.CreateLogger(this.GetType())); + + if (modelId is not null) + { + this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + } + } + + /// + public Task GenerateImageAsync(string description, int width, int height, Kernel? kernel = null, CancellationToken cancellationToken = default) + => this._client.GenerateImageAsync(this._client.DeploymentName, description, width, height, cancellationToken); +} diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Settings/AzureOpenAIPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Settings/AzureOpenAIPromptExecutionSettings.cs new file mode 100644 index 000000000000..90a20d3435b7 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Settings/AzureOpenAIPromptExecutionSettings.cs @@ -0,0 +1,105 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; +using System.Text.Json; +using System.Text.Json.Serialization; +using Azure.AI.OpenAI.Chat; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Text; + +namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI; + +/// +/// Execution settings for an AzureOpenAI completion request. 
+/// +[JsonNumberHandling(JsonNumberHandling.AllowReadingFromString)] +public sealed class AzureOpenAIPromptExecutionSettings : OpenAIPromptExecutionSettings +{ + /// + /// An abstraction of additional settings for chat completion, see https://learn.microsoft.com/en-us/dotnet/api/azure.ai.openai.azurechatextensionsoptions. + /// This property is compatible only with Azure OpenAI. + /// + [Experimental("SKEXP0010")] + [JsonIgnore] + public AzureChatDataSource? AzureChatDataSource + { + get => this._azureChatDataSource; + + set + { + this.ThrowIfFrozen(); + this._azureChatDataSource = value; + } + } + + /// + public override PromptExecutionSettings Clone() + { + var settings = base.Clone(); + settings.AzureChatDataSource = this.AzureChatDataSource; + return settings; + } + + /// + /// Create a new settings object with the values from another settings object. + /// + /// Template configuration + /// Default max tokens + /// An instance of OpenAIPromptExecutionSettings + public static new AzureOpenAIPromptExecutionSettings FromExecutionSettings(PromptExecutionSettings? executionSettings, int? defaultMaxTokens = null) + { + if (executionSettings is null) + { + return new AzureOpenAIPromptExecutionSettings() + { + MaxTokens = defaultMaxTokens + }; + } + + if (executionSettings is AzureOpenAIPromptExecutionSettings settings) + { + return settings; + } + + if (executionSettings is OpenAIPromptExecutionSettings openAISettings) + { + return openAISettings.Clone(); + } + + // Having the object as the type of the value to serialize is important to ensure all properties of the settings are serialized. + // Otherwise, only the properties ServiceId and ModelId from the public API of the PromptExecutionSettings class will be serialized. 
+ var json = JsonSerializer.Serialize(executionSettings); + + var openAIExecutionSettings = JsonSerializer.Deserialize(json, JsonOptionsCache.ReadPermissive); + + return openAIExecutionSettings!; + } + + /// + /// Create a new settings object with the values from another settings object. + /// + /// Template configuration + /// Default max tokens + /// An instance of OpenAIPromptExecutionSettings + [Obsolete("This method is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] + public static AzureOpenAIPromptExecutionSettings FromExecutionSettingsWithData(PromptExecutionSettings? executionSettings, int? defaultMaxTokens = null) + { + var settings = FromExecutionSettings(executionSettings, defaultMaxTokens); + + if (settings.StopSequences?.Count == 0) + { + // Azure OpenAI WithData API does not allow to send empty array of stop sequences + // Gives back "Validation error at #/stop/str: Input should be a valid string\nValidation error at #/stop/list[str]: List should have at least 1 item after validation, not 0" + settings.StopSequences = null; + } + + return settings; + } + + #region private ================================================================================ + + private AzureChatDataSource? 
_azureChatDataSource; + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Connectors.OpenAI.UnitTests.csproj b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Connectors.OpenAI.UnitTests.csproj new file mode 100644 index 000000000000..e187080a2c35 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Connectors.OpenAI.UnitTests.csproj @@ -0,0 +1,88 @@ + + + + SemanticKernel.Connectors.OpenAI.UnitTests + $(AssemblyName) + net8.0 + true + enable + false + $(NoWarn);SKEXP0001;SKEXP0070;SKEXP0010;CS1591;IDE1006;RCS1261;CA1031;CA1308;CA1861;CA2007;CA2234;VSTHRD111 + + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + + + + + + + + + + Always + + + Always + + + Always + + + Always + + + Always + + + Always + + + Always + + + Always + + + Always + + + Always + + + Always + + + Always + + + Always + + + Always + + + + diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/AutoFunctionInvocationFilterTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/AutoFunctionInvocationFilterTests.cs similarity index 77% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/AutoFunctionInvocationFilterTests.cs rename to dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/AutoFunctionInvocationFilterTests.cs index 9a5103f83e6e..5df2fb54cdb5 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/AutoFunctionInvocationFilterTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/AutoFunctionInvocationFilterTests.cs @@ -2,10 +2,10 @@ using System; using System.Collections.Generic; +using System.IO; using System.Linq; using System.Net; using System.Net.Http; -using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.DependencyInjection; using Microsoft.SemanticKernel; @@ -13,7 +13,7 @@ 
using Microsoft.SemanticKernel.Connectors.OpenAI; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.OpenAI.FunctionCalling; +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Core; public sealed class AutoFunctionInvocationFilterTests : IDisposable { @@ -126,6 +126,7 @@ public async Task FiltersAreExecutedCorrectlyOnStreamingAsync() public async Task DifferentWaysOfAddingFiltersWorkCorrectlyAsync() { // Arrange + var function = KernelFunctionFactory.CreateFromMethod(() => "Result"); var executionOrder = new List(); var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function1"); @@ -149,10 +150,10 @@ public async Task DifferentWaysOfAddingFiltersWorkCorrectlyAsync() builder.Plugins.Add(plugin); - builder.AddOpenAIChatCompletion( - modelId: "test-model-id", - apiKey: "test-api-key", - httpClient: this._httpClient); + builder.Services.AddSingleton((serviceProvider) => + { + return new OpenAIChatCompletionService("model-id", "test-api-key", "organization-id", this._httpClient); + }); this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses(); @@ -182,6 +183,7 @@ public async Task DifferentWaysOfAddingFiltersWorkCorrectlyAsync() public async Task MultipleFiltersAreExecutedInOrderAsync(bool isStreaming) { // Arrange + var function = KernelFunctionFactory.CreateFromMethod(() => "Result"); var executionOrder = new List(); var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function1"); @@ -214,10 +216,10 @@ public async Task MultipleFiltersAreExecutedInOrderAsync(bool isStreaming) builder.Plugins.Add(plugin); - builder.AddOpenAIChatCompletion( - modelId: "test-model-id", - apiKey: "test-api-key", - httpClient: this._httpClient); + builder.Services.AddSingleton((serviceProvider) => + { + return new OpenAIChatCompletionService("model-id", "test-api-key", "organization-id", this._httpClient); + }); builder.Services.AddSingleton(filter1); 
builder.Services.AddSingleton(filter2); @@ -307,10 +309,12 @@ public async Task FilterCanHandleExceptionAsync() this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses(); - var chatCompletion = new OpenAIChatCompletionService(modelId: "test-model-id", apiKey: "test-api-key", httpClient: this._httpClient); + var chatCompletion = new OpenAIChatCompletionService("model-id", "test-api-key", "organization-id", this._httpClient); + var executionSettings = new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; var chatHistory = new ChatHistory(); + chatHistory.AddSystemMessage("System message"); // Act var result = await chatCompletion.GetChatMessageContentsAsync(chatHistory, executionSettings, kernel); @@ -345,7 +349,8 @@ public async Task FilterCanHandleExceptionOnStreamingAsync() this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingStreamingResponses(); - var chatCompletion = new OpenAIChatCompletionService(modelId: "test-model-id", apiKey: "test-api-key", httpClient: this._httpClient); + var chatCompletion = new OpenAIChatCompletionService("model-id", "test-api-key", "organization-id", this._httpClient); + var chatHistory = new ChatHistory(); var executionSettings = new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; @@ -385,8 +390,8 @@ public async Task FiltersCanSkipFunctionExecutionAsync() filterInvocations++; }); - using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("filters_multiple_function_calls_test_response.json")) }; - using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) }; + using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(File.ReadAllText("TestData/filters_multiple_function_calls_test_response.json")) }; 
+ using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(File.ReadAllText("TestData/chat_completion_test_response.json")) }; this._messageHandlerStub.ResponsesToReturn = [response1, response2]; @@ -568,131 +573,6 @@ public async Task PostFilterCanTerminateOperationOnStreamingAsync() Assert.Equal(AuthorRole.Tool, lastMessageContent.Role); } - [Fact] - public async Task FilterContextHasCancellationTokenAsync() - { - // Arrange - using var cancellationTokenSource = new CancellationTokenSource(); - int firstFunctionInvocations = 0; - int secondFunctionInvocations = 0; - - var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => - { - cancellationTokenSource.Cancel(); - firstFunctionInvocations++; - return parameter; - }, "Function1"); - - var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => - { - secondFunctionInvocations++; - return parameter; - }, "Function2"); - - var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]); - - var kernel = this.GetKernelWithFilter(plugin, async (context, next) => - { - Assert.Equal(cancellationTokenSource.Token, context.CancellationToken); - - await next(context); - - context.CancellationToken.ThrowIfCancellationRequested(); - }); - - using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("filters_multiple_function_calls_test_response.json")) }; - using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) }; - - this._messageHandlerStub.ResponsesToReturn = [response1, response2]; - - var arguments = new KernelArguments(new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }); - - // Act & Assert - var exception = await Assert.ThrowsAsync(() - => kernel.InvokePromptAsync("Test prompt", arguments, 
cancellationToken: cancellationTokenSource.Token)); - - Assert.Equal(1, firstFunctionInvocations); - Assert.Equal(0, secondFunctionInvocations); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public async Task FilterContextHasOperationRelatedInformationAsync(bool isStreaming) - { - // Arrange - List actualToolCallIds = []; - List actualChatMessageContents = []; - - var function = KernelFunctionFactory.CreateFromMethod(() => "Result"); - - var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function1"); - var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function2"); - - var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]); - - var filter = new AutoFunctionInvocationFilter(async (context, next) => - { - actualToolCallIds.Add(context.ToolCallId); - actualChatMessageContents.Add(context.ChatMessageContent); - - await next(context); - }); - - var builder = Kernel.CreateBuilder(); - - builder.Plugins.Add(plugin); - - builder.AddOpenAIChatCompletion( - modelId: "test-model-id", - apiKey: "test-api-key", - httpClient: this._httpClient); - - builder.Services.AddSingleton(filter); - - var kernel = builder.Build(); - - var arguments = new KernelArguments(new OpenAIPromptExecutionSettings - { - ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions - }); - - // Act - if (isStreaming) - { - using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("filters_streaming_multiple_function_calls_test_response.txt")) }; - using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt")) }; - - this._messageHandlerStub.ResponsesToReturn = [response1, response2]; - - await foreach (var item in kernel.InvokePromptStreamingAsync("Test prompt", arguments)) - { } - } - else - { - 
using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("filters_multiple_function_calls_test_response.json")) }; - using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) }; - - this._messageHandlerStub.ResponsesToReturn = [response1, response2]; - - await kernel.InvokePromptAsync("Test prompt", arguments); - } - - // Assert - Assert.Equal(["tool-call-id-1", "tool-call-id-2"], actualToolCallIds); - - foreach (var chatMessageContent in actualChatMessageContents) - { - var content = chatMessageContent as OpenAIChatMessageContent; - - Assert.NotNull(content); - - Assert.Equal("test-model-id", content.ModelId); - Assert.Equal(AuthorRole.Assistant, content.Role); - Assert.Equal(2, content.ToolCalls.Count); - } - } - public void Dispose() { this._httpClient.Dispose(); @@ -705,18 +585,18 @@ public void Dispose() private static List GetFunctionCallingResponses() { return [ - new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("filters_multiple_function_calls_test_response.json")) }, - new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("filters_multiple_function_calls_test_response.json")) }, - new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) } + new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/filters_multiple_function_calls_test_response.json")) }, + new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/filters_multiple_function_calls_test_response.json")) }, + new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_test_response.json")) } ]; } 
private static List GetFunctionCallingStreamingResponses() { return [ - new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("filters_streaming_multiple_function_calls_test_response.txt")) }, - new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("filters_streaming_multiple_function_calls_test_response.txt")) }, - new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt")) } + new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/filters_streaming_multiple_function_calls_test_response.txt")) }, + new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/filters_streaming_multiple_function_calls_test_response.txt")) }, + new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_streaming_test_response.txt")) } ]; } #pragma warning restore CA2000 @@ -731,10 +611,10 @@ private Kernel GetKernelWithFilter( builder.Plugins.Add(plugin); builder.Services.AddSingleton(filter); - builder.AddOpenAIChatCompletion( - modelId: "test-model-id", - apiKey: "test-api-key", - httpClient: this._httpClient); + builder.Services.AddSingleton((serviceProvider) => + { + return new OpenAIChatCompletionService("model-id", "test-api-key", "organization-id", this._httpClient); + }); return builder.Build(); } diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/ClientCoreTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/ClientCoreTests.cs new file mode 100644 index 000000000000..f41b204058ed --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/ClientCoreTests.cs @@ -0,0 +1,243 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.ClientModel; +using System.ClientModel.Primitives; +using System.Linq; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.Services; +using Moq; +using OpenAI; +using Xunit; + +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Core; +public partial class ClientCoreTests +{ + [Fact] + public void ItCanBeInstantiatedAndPropertiesSetAsExpected() + { + // Act + var logger = new Mock>().Object; + var openAIClient = new OpenAIClient("key"); + + var clientCoreModelConstructor = new ClientCore("model1", "apiKey"); + var clientCoreOpenAIClientConstructor = new ClientCore("model1", openAIClient, logger: logger); + + // Assert + Assert.NotNull(clientCoreModelConstructor); + Assert.NotNull(clientCoreOpenAIClientConstructor); + + Assert.Equal("model1", clientCoreModelConstructor.ModelId); + Assert.Equal("model1", clientCoreOpenAIClientConstructor.ModelId); + + Assert.NotNull(clientCoreModelConstructor.Client); + Assert.NotNull(clientCoreOpenAIClientConstructor.Client); + Assert.Equal(openAIClient, clientCoreOpenAIClientConstructor.Client); + Assert.Equal(NullLogger.Instance, clientCoreModelConstructor.Logger); + Assert.Equal(logger, clientCoreOpenAIClientConstructor.Logger); + } + + [Theory] + [InlineData(null, null)] + [InlineData("http://localhost", null)] + [InlineData(null, "http://localhost")] + [InlineData("http://localhost-1", "http://localhost-2")] + public void ItUsesEndpointAsExpected(string? clientBaseAddress, string? providedEndpoint) + { + // Arrange + Uri? endpoint = null; + HttpClient? 
client = null; + if (providedEndpoint is not null) + { + endpoint = new Uri(providedEndpoint); + } + + if (clientBaseAddress is not null) + { + client = new HttpClient { BaseAddress = new Uri(clientBaseAddress) }; + } + + // Act + var clientCore = new ClientCore("model", "apiKey", endpoint: endpoint, httpClient: client); + + // Assert + Assert.Equal(endpoint ?? client?.BaseAddress ?? new Uri("https://api.openai.com/"), clientCore.Endpoint); + Assert.True(clientCore.Attributes.ContainsKey(AIServiceExtensions.EndpointKey)); + Assert.Equal(endpoint?.ToString() ?? client?.BaseAddress?.ToString() ?? "https://api.openai.com/", clientCore.Attributes[AIServiceExtensions.EndpointKey]); + + client?.Dispose(); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public async Task ItAddOrganizationHeaderWhenProvidedAsync(bool organizationIdProvided) + { + using HttpMessageHandlerStub handler = new(); + using HttpClient client = new(handler); + handler.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK); + + // Act + var clientCore = new ClientCore( + modelId: "model", + apiKey: "test", + organizationId: (organizationIdProvided) ? 
"organization" : null, + httpClient: client); + + var pipelineMessage = clientCore.Client!.Pipeline.CreateMessage(); + pipelineMessage.Request.Method = "POST"; + pipelineMessage.Request.Uri = new Uri("http://localhost"); + pipelineMessage.Request.Content = BinaryContent.Create(new BinaryData("test")); + + // Assert + await clientCore.Client.Pipeline.SendAsync(pipelineMessage); + + if (organizationIdProvided) + { + Assert.True(handler.RequestHeaders!.Contains("OpenAI-Organization")); + Assert.Equal("organization", handler.RequestHeaders.GetValues("OpenAI-Organization").FirstOrDefault()); + } + else + { + Assert.False(handler.RequestHeaders!.Contains("OpenAI-Organization")); + } + } + + [Fact] + public async Task ItAddSemanticKernelHeadersOnEachRequestAsync() + { + using HttpMessageHandlerStub handler = new(); + using HttpClient client = new(handler); + handler.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK); + + // Act + var clientCore = new ClientCore(modelId: "model", apiKey: "test", httpClient: client); + + var pipelineMessage = clientCore.Client!.Pipeline.CreateMessage(); + pipelineMessage.Request.Method = "POST"; + pipelineMessage.Request.Uri = new Uri("http://localhost"); + pipelineMessage.Request.Content = BinaryContent.Create(new BinaryData("test")); + + // Assert + await clientCore.Client.Pipeline.SendAsync(pipelineMessage); + + Assert.True(handler.RequestHeaders!.Contains(HttpHeaderConstant.Names.SemanticKernelVersion)); + Assert.Equal(HttpHeaderConstant.Values.GetAssemblyVersion(typeof(ClientCore)), handler.RequestHeaders.GetValues(HttpHeaderConstant.Names.SemanticKernelVersion).FirstOrDefault()); + + Assert.True(handler.RequestHeaders.Contains("User-Agent")); + Assert.Contains(HttpHeaderConstant.Values.UserAgent, handler.RequestHeaders.GetValues("User-Agent").FirstOrDefault()); + } + + [Fact] + public async Task ItDoNotAddSemanticKernelHeadersWhenOpenAIClientIsProvidedAsync() + { + using HttpMessageHandlerStub handler = new(); + 
using HttpClient client = new(handler); + handler.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK); + + // Act + var clientCore = new ClientCore( + modelId: "model", + openAIClient: new OpenAIClient( + "test", + new OpenAIClientOptions() + { + Transport = new HttpClientPipelineTransport(client), + RetryPolicy = new ClientRetryPolicy(maxRetries: 0), + NetworkTimeout = Timeout.InfiniteTimeSpan + })); + + var pipelineMessage = clientCore.Client!.Pipeline.CreateMessage(); + pipelineMessage.Request.Method = "POST"; + pipelineMessage.Request.Uri = new Uri("http://localhost"); + pipelineMessage.Request.Content = BinaryContent.Create(new BinaryData("test")); + + // Assert + await clientCore.Client.Pipeline.SendAsync(pipelineMessage); + + Assert.False(handler.RequestHeaders!.Contains(HttpHeaderConstant.Names.SemanticKernelVersion)); + Assert.DoesNotContain(HttpHeaderConstant.Values.UserAgent, handler.RequestHeaders.GetValues("User-Agent").FirstOrDefault()); + } + + [Theory] + [InlineData(null)] + [InlineData("")] + [InlineData("value")] + public void ItAddAttributesButDoesNothingIfNullOrEmpty(string? 
value) + { + // Arrange + var clientCore = new ClientCore("model", "apikey"); + // Act + + clientCore.AddAttribute("key", value); + + // Assert + if (string.IsNullOrEmpty(value)) + { + Assert.False(clientCore.Attributes.ContainsKey("key")); + } + else + { + Assert.True(clientCore.Attributes.ContainsKey("key")); + Assert.Equal(value, clientCore.Attributes["key"]); + } + } + + [Fact] + public void ItAddModelIdAttributeAsExpected() + { + // Arrange + var expectedModelId = "modelId"; + + // Act + var clientCore = new ClientCore(expectedModelId, "apikey"); + var clientCoreBreakingGlass = new ClientCore(expectedModelId, new OpenAIClient(" ")); + + // Assert + Assert.True(clientCore.Attributes.ContainsKey(AIServiceExtensions.ModelIdKey)); + Assert.True(clientCoreBreakingGlass.Attributes.ContainsKey(AIServiceExtensions.ModelIdKey)); + Assert.Equal(expectedModelId, clientCore.Attributes[AIServiceExtensions.ModelIdKey]); + Assert.Equal(expectedModelId, clientCoreBreakingGlass.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + public void ItAddOrNotOrganizationIdAttributeWhenProvided() + { + // Arrange + var expectedOrganizationId = "organizationId"; + + // Act + var clientCore = new ClientCore("modelId", "apikey", expectedOrganizationId); + var clientCoreWithoutOrgId = new ClientCore("modelId", "apikey"); + + // Assert + Assert.True(clientCore.Attributes.ContainsKey(ClientCore.OrganizationKey)); + Assert.Equal(expectedOrganizationId, clientCore.Attributes[ClientCore.OrganizationKey]); + Assert.False(clientCoreWithoutOrgId.Attributes.ContainsKey(ClientCore.OrganizationKey)); + } + + [Fact] + public void ItThrowsWhenNotUsingCustomEndpointAndApiKeyIsNotProvided() + { + // Act & Assert + Assert.Throws(() => new ClientCore("modelId", " ")); + Assert.Throws(() => new ClientCore("modelId", "")); + Assert.Throws(() => new ClientCore("modelId", apiKey: null!)); + } + + [Fact] + public void ItDoesNotThrowWhenUsingCustomEndpointAndApiKeyIsNotProvided() + { + // Act & Assert 
+ ClientCore? clientCore = null; + clientCore = new ClientCore("modelId", " ", endpoint: new Uri("http://localhost")); + clientCore = new ClientCore("modelId", "", endpoint: new Uri("http://localhost")); + clientCore = new ClientCore("modelId", apiKey: null!, endpoint: new Uri("http://localhost")); + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIChatMessageContentTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/OpenAIChatMessageContentTests.cs similarity index 73% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIChatMessageContentTests.cs rename to dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/OpenAIChatMessageContentTests.cs index cf2d32d3b52e..e638dc803be0 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIChatMessageContentTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/OpenAIChatMessageContentTests.cs @@ -2,13 +2,12 @@ using System.Collections; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using Azure.AI.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; using Microsoft.SemanticKernel.Connectors.OpenAI; +using OpenAI.Chat; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.OpenAI.AzureSdk; +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Core; /// /// Unit tests for class. 
@@ -19,10 +18,10 @@ public sealed class OpenAIChatMessageContentTests public void ConstructorsWorkCorrectly() { // Arrange - List toolCalls = [new FakeChatCompletionsToolCall("id")]; + List toolCalls = [ChatToolCall.CreateFunctionToolCall("id", "name", "args")]; // Act - var content1 = new OpenAIChatMessageContent(new ChatRole("user"), "content1", "model-id1", toolCalls) { AuthorName = "Fred" }; + var content1 = new OpenAIChatMessageContent(ChatMessageRole.User, "content1", "model-id1", toolCalls) { AuthorName = "Fred" }; var content2 = new OpenAIChatMessageContent(AuthorRole.User, "content2", "model-id2", toolCalls); // Assert @@ -34,11 +33,9 @@ public void ConstructorsWorkCorrectly() public void GetOpenAIFunctionToolCallsReturnsCorrectList() { // Arrange - List toolCalls = [ - new ChatCompletionsFunctionToolCall("id1", "name", string.Empty), - new ChatCompletionsFunctionToolCall("id2", "name", string.Empty), - new FakeChatCompletionsToolCall("id3"), - new FakeChatCompletionsToolCall("id4")]; + List toolCalls = [ + ChatToolCall.CreateFunctionToolCall("id1", "name", string.Empty), + ChatToolCall.CreateFunctionToolCall("id2", "name", string.Empty)]; var content1 = new OpenAIChatMessageContent(AuthorRole.User, "content", "model-id", toolCalls); var content2 = new OpenAIChatMessageContent(AuthorRole.User, "content", "model-id", []); @@ -65,11 +62,9 @@ public void MetadataIsInitializedCorrectly(bool readOnlyMetadata) new CustomReadOnlyDictionary(new Dictionary { { "key", "value" } }) : new Dictionary { { "key", "value" } }; - List toolCalls = [ - new ChatCompletionsFunctionToolCall("id1", "name", string.Empty), - new ChatCompletionsFunctionToolCall("id2", "name", string.Empty), - new FakeChatCompletionsToolCall("id3"), - new FakeChatCompletionsToolCall("id4")]; + List toolCalls = [ + ChatToolCall.CreateFunctionToolCall("id1", "name", string.Empty), + ChatToolCall.CreateFunctionToolCall("id2", "name", string.Empty)]; // Act var content1 = new 
OpenAIChatMessageContent(AuthorRole.User, "content1", "model-id1", [], metadata); @@ -83,9 +78,9 @@ public void MetadataIsInitializedCorrectly(bool readOnlyMetadata) Assert.Equal(2, content2.Metadata.Count); Assert.Equal("value", content2.Metadata["key"]); - Assert.IsType>(content2.Metadata["ChatResponseMessage.FunctionToolCalls"]); + Assert.IsType>(content2.Metadata["ChatResponseMessage.FunctionToolCalls"]); - var actualToolCalls = content2.Metadata["ChatResponseMessage.FunctionToolCalls"] as List; + var actualToolCalls = content2.Metadata["ChatResponseMessage.FunctionToolCalls"] as List; Assert.NotNull(actualToolCalls); Assert.Equal(2, actualToolCalls.Count); @@ -97,7 +92,7 @@ private void AssertChatMessageContent( AuthorRole expectedRole, string expectedContent, string expectedModelId, - IReadOnlyList expectedToolCalls, + IReadOnlyList expectedToolCalls, OpenAIChatMessageContent actualContent, string? expectedName = null) { @@ -108,9 +103,6 @@ private void AssertChatMessageContent( Assert.Same(expectedToolCalls, actualContent.ToolCalls); } - private sealed class FakeChatCompletionsToolCall(string id) : ChatCompletionsToolCall(id) - { } - private sealed class CustomReadOnlyDictionary(IDictionary dictionary) : IReadOnlyDictionary // explicitly not implementing IDictionary<> { public TValue this[TKey key] => dictionary[key]; @@ -119,7 +111,7 @@ private sealed class CustomReadOnlyDictionary(IDictionary dictionary.Count; public bool ContainsKey(TKey key) => dictionary.ContainsKey(key); public IEnumerator> GetEnumerator() => dictionary.GetEnumerator(); - public bool TryGetValue(TKey key, [MaybeNullWhen(false)] out TValue value) => dictionary.TryGetValue(key, out value); + public bool TryGetValue(TKey key, out TValue value) => dictionary.TryGetValue(key, out value!); IEnumerator IEnumerable.GetEnumerator() => dictionary.GetEnumerator(); } } diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/OpenAIFunctionTests.cs 
b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/OpenAIFunctionTests.cs similarity index 83% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/OpenAIFunctionTests.cs rename to dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/OpenAIFunctionTests.cs index a9f94d81a673..1967ee882ec8 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/OpenAIFunctionTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/OpenAIFunctionTests.cs @@ -4,12 +4,12 @@ using System.ComponentModel; using System.Linq; using System.Text.Json; -using Azure.AI.OpenAI; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Connectors.OpenAI; +using OpenAI.Chat; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.OpenAI.FunctionCalling; +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Core; public sealed class OpenAIFunctionTests { @@ -52,11 +52,11 @@ public void ItCanConvertToFunctionDefinitionWithNoPluginName() OpenAIFunction sut = KernelFunctionFactory.CreateFromMethod(() => { }, "myfunc", "This is a description of the function.").Metadata.ToOpenAIFunction(); // Act - FunctionDefinition result = sut.ToFunctionDefinition(); + ChatTool result = sut.ToFunctionDefinition(); // Assert - Assert.Equal(sut.FunctionName, result.Name); - Assert.Equal(sut.Description, result.Description); + Assert.Equal(sut.FunctionName, result.FunctionName); + Assert.Equal(sut.Description, result.FunctionDescription); } [Fact] @@ -69,7 +69,7 @@ public void ItCanConvertToFunctionDefinitionWithNullParameters() var result = sut.ToFunctionDefinition(); // Assert - Assert.Equal("{\"type\":\"object\",\"required\":[],\"properties\":{}}", result.Parameters.ToString()); + Assert.Equal("{\"type\":\"object\",\"required\":[],\"properties\":{}}", result.FunctionParameters.ToString()); } [Fact] @@ -82,11 +82,11 @@ public void ItCanConvertToFunctionDefinitionWithPluginName() }).GetFunctionsMetadata()[0].ToOpenAIFunction(); // Act - 
FunctionDefinition result = sut.ToFunctionDefinition(); + ChatTool result = sut.ToFunctionDefinition(); // Assert - Assert.Equal("myplugin-myfunc", result.Name); - Assert.Equal(sut.Description, result.Description); + Assert.Equal("myplugin-myfunc", result.FunctionName); + Assert.Equal(sut.Description, result.FunctionDescription); } [Fact] @@ -104,15 +104,15 @@ public void ItCanConvertToFunctionDefinitionsWithParameterTypesAndReturnParamete OpenAIFunction sut = plugin.GetFunctionsMetadata()[0].ToOpenAIFunction(); - FunctionDefinition functionDefinition = sut.ToFunctionDefinition(); + ChatTool functionDefinition = sut.ToFunctionDefinition(); var exp = JsonSerializer.Serialize(KernelJsonSchema.Parse(expectedParameterSchema)); - var act = JsonSerializer.Serialize(KernelJsonSchema.Parse(functionDefinition.Parameters)); + var act = JsonSerializer.Serialize(KernelJsonSchema.Parse(functionDefinition.FunctionParameters)); Assert.NotNull(functionDefinition); - Assert.Equal("Tests-TestFunction", functionDefinition.Name); - Assert.Equal("My test function", functionDefinition.Description); - Assert.Equal(JsonSerializer.Serialize(KernelJsonSchema.Parse(expectedParameterSchema)), JsonSerializer.Serialize(KernelJsonSchema.Parse(functionDefinition.Parameters))); + Assert.Equal("Tests-TestFunction", functionDefinition.FunctionName); + Assert.Equal("My test function", functionDefinition.FunctionDescription); + Assert.Equal(JsonSerializer.Serialize(KernelJsonSchema.Parse(expectedParameterSchema)), JsonSerializer.Serialize(KernelJsonSchema.Parse(functionDefinition.FunctionParameters))); } [Fact] @@ -130,12 +130,12 @@ public void ItCanConvertToFunctionDefinitionsWithParameterTypesAndNoReturnParame OpenAIFunction sut = plugin.GetFunctionsMetadata()[0].ToOpenAIFunction(); - FunctionDefinition functionDefinition = sut.ToFunctionDefinition(); + ChatTool functionDefinition = sut.ToFunctionDefinition(); Assert.NotNull(functionDefinition); - Assert.Equal("Tests-TestFunction", 
functionDefinition.Name); - Assert.Equal("My test function", functionDefinition.Description); - Assert.Equal(JsonSerializer.Serialize(KernelJsonSchema.Parse(expectedParameterSchema)), JsonSerializer.Serialize(KernelJsonSchema.Parse(functionDefinition.Parameters))); + Assert.Equal("Tests-TestFunction", functionDefinition.FunctionName); + Assert.Equal("My test function", functionDefinition.FunctionDescription); + Assert.Equal(JsonSerializer.Serialize(KernelJsonSchema.Parse(expectedParameterSchema)), JsonSerializer.Serialize(KernelJsonSchema.Parse(functionDefinition.FunctionParameters))); } [Fact] @@ -147,8 +147,8 @@ public void ItCanConvertToFunctionDefinitionsWithNoParameterTypes() parameters: [new KernelParameterMetadata("param1")]).Metadata.ToOpenAIFunction(); // Act - FunctionDefinition result = f.ToFunctionDefinition(); - ParametersData pd = JsonSerializer.Deserialize(result.Parameters.ToString())!; + ChatTool result = f.ToFunctionDefinition(); + ParametersData pd = JsonSerializer.Deserialize(result.FunctionParameters.ToString())!; // Assert Assert.NotNull(pd.properties); @@ -167,8 +167,8 @@ public void ItCanConvertToFunctionDefinitionsWithNoParameterTypesButWithDescript parameters: [new KernelParameterMetadata("param1") { Description = "something neat" }]).Metadata.ToOpenAIFunction(); // Act - FunctionDefinition result = f.ToFunctionDefinition(); - ParametersData pd = JsonSerializer.Deserialize(result.Parameters.ToString())!; + ChatTool result = f.ToFunctionDefinition(); + ParametersData pd = JsonSerializer.Deserialize(result.FunctionParameters.ToString())!; // Assert Assert.NotNull(pd.properties); diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIFunctionToolCallTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/OpenAIFunctionToolCallTests.cs similarity index 86% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIFunctionToolCallTests.cs rename to 
dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/OpenAIFunctionToolCallTests.cs index 3b4d8b4ca0d4..0c3f6bfa2c4b 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIFunctionToolCallTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/OpenAIFunctionToolCallTests.cs @@ -2,11 +2,11 @@ using System.Collections.Generic; using System.Text; -using Azure.AI.OpenAI; using Microsoft.SemanticKernel.Connectors.OpenAI; +using OpenAI.Chat; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.OpenAI.AzureSdk; +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Core; /// /// Unit tests for class. @@ -19,7 +19,7 @@ public sealed class OpenAIFunctionToolCallTests public void FullyQualifiedNameReturnsValidName(string toolCallName, string expectedName) { // Arrange - var toolCall = new ChatCompletionsFunctionToolCall("id", toolCallName, string.Empty); + var toolCall = ChatToolCall.CreateFunctionToolCall("id", toolCallName, string.Empty); var openAIFunctionToolCall = new OpenAIFunctionToolCall(toolCall); // Act & Assert @@ -31,7 +31,7 @@ public void FullyQualifiedNameReturnsValidName(string toolCallName, string expec public void ToStringReturnsCorrectValue() { // Arrange - var toolCall = new ChatCompletionsFunctionToolCall("id", "MyPlugin_MyFunction", "{\n \"location\": \"San Diego\",\n \"max_price\": 300\n}"); + var toolCall = ChatToolCall.CreateFunctionToolCall("id", "MyPlugin_MyFunction", "{\n \"location\": \"San Diego\",\n \"max_price\": 300\n}"); var openAIFunctionToolCall = new OpenAIFunctionToolCall(toolCall); // Act & Assert @@ -47,7 +47,7 @@ public void ConvertToolCallUpdatesWithEmptyIndexesReturnsEmptyToolCalls() var functionArgumentBuildersByIndex = new Dictionary(); // Act - var toolCalls = OpenAIFunctionToolCall.ConvertToolCallUpdatesToChatCompletionsFunctionToolCalls( + var toolCalls = OpenAIFunctionToolCall.ConvertToolCallUpdatesToFunctionToolCalls( ref toolCallIdsByIndex, ref functionNamesByIndex, ref 
functionArgumentBuildersByIndex); @@ -65,7 +65,7 @@ public void ConvertToolCallUpdatesWithNotEmptyIndexesReturnsNotEmptyToolCalls() var functionArgumentBuildersByIndex = new Dictionary { { 3, new("test-argument") } }; // Act - var toolCalls = OpenAIFunctionToolCall.ConvertToolCallUpdatesToChatCompletionsFunctionToolCalls( + var toolCalls = OpenAIFunctionToolCall.ConvertToolCallUpdatesToFunctionToolCalls( ref toolCallIdsByIndex, ref functionNamesByIndex, ref functionArgumentBuildersByIndex); @@ -76,7 +76,7 @@ public void ConvertToolCallUpdatesWithNotEmptyIndexesReturnsNotEmptyToolCalls() var toolCall = toolCalls[0]; Assert.Equal("test-id", toolCall.Id); - Assert.Equal("test-function", toolCall.Name); - Assert.Equal("test-argument", toolCall.Arguments); + Assert.Equal("test-function", toolCall.FunctionName); + Assert.Equal("test-argument", toolCall.FunctionArguments); } } diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/OpenAIWithDataStreamingChatMessageContentTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/OpenAIWithDataStreamingChatMessageContentTests.cs new file mode 100644 index 000000000000..0b005900a53b --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/OpenAIWithDataStreamingChatMessageContentTests.cs @@ -0,0 +1,138 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.IO; +using System.Net; +using System.Net.Http; +using System.Text; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using OpenAI; +using Xunit; + +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Core; + +#pragma warning disable CS0618 // AzureOpenAIChatCompletionWithData is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions + +/// +/// Unit tests for class. 
+/// +public sealed class OpenAIStreamingChatMessageContentTests +{ + [Fact] + public async Task ConstructorWithStreamingUpdateAsync() + { + // Arrange + using var stream = File.OpenRead("TestData/chat_completion_streaming_test_response.txt"); + + using var messageHandlerStub = new HttpMessageHandlerStub(); + messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(stream) + }; + + using var httpClient = new HttpClient(messageHandlerStub); + var openAIClient = new OpenAIClient("key", new() { Transport = new HttpClientPipelineTransport(httpClient) }); + + // Act & Assert + var enumerator = openAIClient.GetChatClient("modelId").CompleteChatStreamingAsync("Test message").GetAsyncEnumerator(); + + await enumerator.MoveNextAsync(); + var update = enumerator.Current; + + // Act + var content = new OpenAIStreamingChatMessageContent(update!, 0, "model-id"); + + // Assert + Assert.Equal("Test chat streaming response", content.Content); + } + + [Fact] + public void ConstructorWithParameters() + { + // Act + var content = new OpenAIStreamingChatMessageContent( + authorRole: AuthorRole.User, + content: "test message", + choiceIndex: 0, + modelId: "testModel", + toolCallUpdates: [], + metadata: new Dictionary() { ["test-index"] = "test-value" }); + + // Assert + Assert.Equal("test message", content.Content); + Assert.Equal(AuthorRole.User, content.Role); + Assert.Equal(0, content.ChoiceIndex); + Assert.Equal("testModel", content.ModelId); + Assert.Empty(content.ToolCallUpdates!); + Assert.Equal("test-value", content.Metadata!["test-index"]); + Assert.Equal(Encoding.UTF8, content.Encoding); + } + + [Fact] + public void ToStringReturnsAsExpected() + { + // Act + var content = new OpenAIStreamingChatMessageContent( + authorRole: AuthorRole.User, + content: "test message", + choiceIndex: 0, + modelId: "testModel", + toolCallUpdates: [], + metadata: new Dictionary() { ["test-index"] = "test-value" }); + + // Assert + 
Assert.Equal("test message", content.ToString()); + } + + [Fact] + public void ToByteArrayReturnsAsExpected() + { + // Act + var content = new OpenAIStreamingChatMessageContent( + authorRole: AuthorRole.User, + content: "test message", + choiceIndex: 0, + modelId: "testModel", + toolCallUpdates: [], + metadata: new Dictionary() { ["test-index"] = "test-value" }); + + // Assert + Assert.Equal("test message", Encoding.UTF8.GetString(content.ToByteArray())); + } + + /* + [Theory] + [MemberData(nameof(InvalidChoices))] + public void ConstructorWithInvalidChoiceSetsNullContent(object choice) + { + // Arrange + var streamingChoice = choice as ChatWithDataStreamingChoice; + + // Act + var content = new AzureOpenAIWithDataStreamingChatMessageContent(streamingChoice!, 0, "model-id"); + + // Assert + Assert.Null(content.Content); + } + + public static IEnumerable ValidChoices + { + get + { + yield return new object[] { new ChatWithDataStreamingChoice { Messages = [new() { Delta = new() { Content = "Content 1" } }] }, "Content 1" }; + yield return new object[] { new ChatWithDataStreamingChoice { Messages = [new() { Delta = new() { Content = "Content 2", Role = "Assistant" } }] }, "Content 2" }; + } + } + + public static IEnumerable InvalidChoices + { + get + { + yield return new object[] { new ChatWithDataStreamingChoice { Messages = [new() { EndTurn = true }] } }; + yield return new object[] { new ChatWithDataStreamingChoice { Messages = [new() { Delta = new() { Content = "Content", Role = "tool" } }] } }; + } + }*/ +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatHistoryExtensionsTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/ChatHistoryExtensionsTests.cs similarity index 96% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatHistoryExtensionsTests.cs rename to dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/ChatHistoryExtensionsTests.cs index 722ee4d0817c..1010adbab869 100644 --- 
a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatHistoryExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/ChatHistoryExtensionsTests.cs @@ -8,7 +8,7 @@ using Microsoft.SemanticKernel.Connectors.OpenAI; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.OpenAI; +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Extensions; public class ChatHistoryExtensionsTests { [Fact] diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/KernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/KernelBuilderExtensionsTests.cs new file mode 100644 index 000000000000..2c84068dc1b5 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/KernelBuilderExtensionsTests.cs @@ -0,0 +1,163 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.AudioToText; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Embeddings; +using Microsoft.SemanticKernel.Services; +using Microsoft.SemanticKernel.TextGeneration; +using Microsoft.SemanticKernel.TextToAudio; +using Microsoft.SemanticKernel.TextToImage; +using OpenAI; +using Xunit; + +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Extensions; + +public class KernelBuilderExtensionsTests +{ + [Fact] + public void ItCanAddTextEmbeddingGenerationService() + { + // Arrange + var sut = Kernel.CreateBuilder(); + + // Act + var service = sut.AddOpenAITextEmbeddingGeneration("model", "key") + .Build() + .GetRequiredService(); + + // Assert + Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + public void ItCanAddTextEmbeddingGenerationServiceWithOpenAIClient() + { + // Arrange + var sut = Kernel.CreateBuilder(); + + // Act + var service = 
sut.AddOpenAITextEmbeddingGeneration("model", new OpenAIClient("key")) + .Build() + .GetRequiredService(); + + // Assert + Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + public void ItCanAddTextToImageService() + { + // Arrange + var sut = Kernel.CreateBuilder(); + + // Act + var service = sut.AddOpenAITextToImage("key", modelId: "model") + .Build() + .GetRequiredService(); + + // Assert + Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + public void ItCanAddTextToAudioService() + { + // Arrange + var sut = Kernel.CreateBuilder(); + + // Act + var service = sut.AddOpenAITextToAudio("model", "key") + .Build() + .GetRequiredService(); + + // Assert + Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + public void ItCanAddAudioToTextService() + { + // Arrange + var sut = Kernel.CreateBuilder(); + + // Act + var service = sut.AddOpenAIAudioToText("model", "key") + .Build() + .GetRequiredService(); + + // Assert + Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + public void ItCanAddAudioToTextServiceWithOpenAIClient() + { + // Arrange + var sut = Kernel.CreateBuilder(); + + // Act + var service = sut.AddOpenAIAudioToText("model", new OpenAIClient("key")) + .Build() + .GetRequiredService(); + + // Assert + Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + [Obsolete("This test is deprecated and will be removed in a future version.")] + public void ItCanAddFileService() + { + // Arrange + var sut = Kernel.CreateBuilder(); + + // Act + var service = sut.AddOpenAIFiles("key").Build() + .GetRequiredService(); + } + + #region Chat completion + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.OpenAIClientInline)] + [InlineData(InitializationType.OpenAIClientInServiceProvider)] + public void 
KernelBuilderAddOpenAIChatCompletionAddsValidService(InitializationType type) + { + // Arrange + var client = new OpenAIClient("key"); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + builder = type switch + { + InitializationType.ApiKey => builder.AddOpenAIChatCompletion("model-id", "api-key"), + InitializationType.OpenAIClientInline => builder.AddOpenAIChatCompletion("model-id", client), + InitializationType.OpenAIClientInServiceProvider => builder.AddOpenAIChatCompletion("model-id"), + _ => builder + }; + + // Assert + var chatCompletionService = builder.Build().GetRequiredService(); + Assert.True(chatCompletionService is OpenAIChatCompletionService); + + var textGenerationService = builder.Build().GetRequiredService(); + Assert.True(textGenerationService is OpenAIChatCompletionService); + } + + #endregion + + public enum InitializationType + { + ApiKey, + OpenAIClientInline, + OpenAIClientInServiceProvider, + OpenAIClientEndpoint, + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/KernelFunctionMetadataExtensionsTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/KernelFunctionMetadataExtensionsTests.cs similarity index 92% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/KernelFunctionMetadataExtensionsTests.cs rename to dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/KernelFunctionMetadataExtensionsTests.cs index b45fc64b60ba..e817d559aeaa 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/KernelFunctionMetadataExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/KernelFunctionMetadataExtensionsTests.cs @@ -9,12 +9,12 @@ #pragma warning disable CA1812 // Uninstantiated internal types -namespace SemanticKernel.Connectors.UnitTests.OpenAI.FunctionCalling; +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Extensions; public sealed class 
KernelFunctionMetadataExtensionsTests { [Fact] - public void ItCanConvertToOpenAIFunctionNoParameters() + public void ItCanConvertToAzureOpenAIFunctionNoParameters() { // Arrange var sut = new KernelFunctionMetadata("foo") @@ -44,7 +44,7 @@ public void ItCanConvertToOpenAIFunctionNoParameters() } [Fact] - public void ItCanConvertToOpenAIFunctionNoPluginName() + public void ItCanConvertToAzureOpenAIFunctionNoPluginName() { // Arrange var sut = new KernelFunctionMetadata("foo") @@ -76,7 +76,7 @@ public void ItCanConvertToOpenAIFunctionNoPluginName() [Theory] [InlineData(false)] [InlineData(true)] - public void ItCanConvertToOpenAIFunctionWithParameter(bool withSchema) + public void ItCanConvertToAzureOpenAIFunctionWithParameter(bool withSchema) { // Arrange var param1 = new KernelParameterMetadata("param1") @@ -118,7 +118,7 @@ public void ItCanConvertToOpenAIFunctionWithParameter(bool withSchema) } [Fact] - public void ItCanConvertToOpenAIFunctionWithParameterNoType() + public void ItCanConvertToAzureOpenAIFunctionWithParameterNoType() { // Arrange var param1 = new KernelParameterMetadata("param1") { Description = "This is param1" }; @@ -151,7 +151,7 @@ public void ItCanConvertToOpenAIFunctionWithParameterNoType() } [Fact] - public void ItCanConvertToOpenAIFunctionWithNoReturnParameterType() + public void ItCanConvertToAzureOpenAIFunctionWithNoReturnParameterType() { // Arrange var param1 = new KernelParameterMetadata("param1") @@ -180,7 +180,7 @@ public void ItCanConvertToOpenAIFunctionWithNoReturnParameterType() } [Fact] - public void ItCanCreateValidOpenAIFunctionManualForPlugin() + public void ItCanCreateValidAzureOpenAIFunctionManualForPlugin() { // Arrange var kernel = new Kernel(); @@ -197,12 +197,12 @@ public void ItCanCreateValidOpenAIFunctionManualForPlugin() Assert.NotNull(result); Assert.Equal( """{"type":"object","required":["parameter1","parameter2","parameter3"],"properties":{"parameter1":{"type":"string","description":"String 
parameter"},"parameter2":{"type":"string","enum":["Value1","Value2"],"description":"Enum parameter"},"parameter3":{"type":"string","format":"date-time","description":"DateTime parameter"}}}""", - result.Parameters.ToString() + result.FunctionParameters.ToString() ); } [Fact] - public void ItCanCreateValidOpenAIFunctionManualForPrompt() + public void ItCanCreateValidAzureOpenAIFunctionManualForPrompt() { // Arrange var promptTemplateConfig = new PromptTemplateConfig("Hello AI") @@ -232,7 +232,7 @@ public void ItCanCreateValidOpenAIFunctionManualForPrompt() Assert.NotNull(result); Assert.Equal( """{"type":"object","required":["parameter1","parameter2"],"properties":{"parameter1":{"type":"string","description":"String parameter"},"parameter2":{"enum":["Value1","Value2"],"description":"Enum parameter"}}}""", - result.Parameters.ToString() + result.FunctionParameters.ToString() ); } diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIPluginCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/OpenAIPluginCollectionExtensionsTests.cs similarity index 89% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIPluginCollectionExtensionsTests.cs rename to dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/OpenAIPluginCollectionExtensionsTests.cs index c3ee67df7515..a1381fd231f4 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIPluginCollectionExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/OpenAIPluginCollectionExtensionsTests.cs @@ -1,11 +1,11 @@ // Copyright (c) Microsoft. All rights reserved. -using Azure.AI.OpenAI; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Connectors.OpenAI; +using OpenAI.Chat; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.OpenAI.AzureSdk; +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Core; /// /// Unit tests for class. 
@@ -19,7 +19,7 @@ public void TryGetFunctionAndArgumentsWithNonExistingFunctionReturnsFalse() var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin"); var plugins = new KernelPluginCollection([plugin]); - var toolCall = new ChatCompletionsFunctionToolCall("id", "MyPlugin_MyFunction", string.Empty); + var toolCall = ChatToolCall.CreateFunctionToolCall("id", "MyPlugin_MyFunction", string.Empty); // Act var result = plugins.TryGetFunctionAndArguments(toolCall, out var actualFunction, out var actualArguments); @@ -38,7 +38,7 @@ public void TryGetFunctionAndArgumentsWithoutArgumentsReturnsTrue() var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function]); var plugins = new KernelPluginCollection([plugin]); - var toolCall = new ChatCompletionsFunctionToolCall("id", "MyPlugin-MyFunction", string.Empty); + var toolCall = ChatToolCall.CreateFunctionToolCall("id", "MyPlugin-MyFunction", string.Empty); // Act var result = plugins.TryGetFunctionAndArguments(toolCall, out var actualFunction, out var actualArguments); @@ -57,7 +57,7 @@ public void TryGetFunctionAndArgumentsWithArgumentsReturnsTrue() var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function]); var plugins = new KernelPluginCollection([plugin]); - var toolCall = new ChatCompletionsFunctionToolCall("id", "MyPlugin-MyFunction", "{\n \"location\": \"San Diego\",\n \"max_price\": 300\n,\n \"null_argument\": null\n}"); + var toolCall = ChatToolCall.CreateFunctionToolCall("id", "MyPlugin-MyFunction", "{\n \"location\": \"San Diego\",\n \"max_price\": 300\n,\n \"null_argument\": null\n}"); // Act var result = plugins.TryGetFunctionAndArguments(toolCall, out var actualFunction, out var actualArguments); diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/ServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/ServiceCollectionExtensionsTests.cs new file mode 100644 index 000000000000..f4b8ddf334e6 --- /dev/null +++ 
b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/ServiceCollectionExtensionsTests.cs @@ -0,0 +1,164 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.AudioToText; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Embeddings; +using Microsoft.SemanticKernel.Services; +using Microsoft.SemanticKernel.TextGeneration; +using Microsoft.SemanticKernel.TextToAudio; +using Microsoft.SemanticKernel.TextToImage; +using OpenAI; +using Xunit; + +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Extensions; + +public class ServiceCollectionExtensionsTests +{ + #region Chat completion + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.ClientInline)] + [InlineData(InitializationType.ClientInServiceProvider)] + public void ItCanAddChatCompletionService(InitializationType type) + { + // Arrange + var client = new OpenAIClient("key"); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + IServiceCollection collection = type switch + { + InitializationType.ApiKey => builder.Services.AddOpenAIChatCompletion("deployment-name", "https://endpoint", "api-key"), + InitializationType.ClientInline => builder.Services.AddOpenAIChatCompletion("deployment-name", client), + InitializationType.ClientInServiceProvider => builder.Services.AddOpenAIChatCompletion("deployment-name"), + _ => builder.Services + }; + + // Assert + var chatCompletionService = builder.Build().GetRequiredService(); + Assert.True(chatCompletionService is OpenAIChatCompletionService); + + var textGenerationService = builder.Build().GetRequiredService(); + Assert.True(textGenerationService is OpenAIChatCompletionService); + } + + #endregion + + [Fact] + public void ItCanAddTextEmbeddingGenerationService() + { + // Arrange 
+ var sut = new ServiceCollection(); + + // Act + var service = sut.AddOpenAITextEmbeddingGeneration("model", "key") + .BuildServiceProvider() + .GetRequiredService(); + + // Assert + Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + public void ItCanAddTextEmbeddingGenerationServiceWithOpenAIClient() + { + // Arrange + var sut = new ServiceCollection(); + + // Act + var service = sut.AddOpenAITextEmbeddingGeneration("model", new OpenAIClient("key")) + .BuildServiceProvider() + .GetRequiredService(); + + // Assert + Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + public void ItCanAddImageToTextService() + { + // Arrange + var sut = new ServiceCollection(); + + // Act + var service = sut.AddOpenAITextToImage("key", modelId: "model") + .BuildServiceProvider() + .GetRequiredService(); + + // Assert + Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + public void ItCanAddTextToAudioService() + { + // Arrange + var sut = new ServiceCollection(); + + // Act + var service = sut.AddOpenAITextToAudio("model", "key") + .BuildServiceProvider() + .GetRequiredService(); + + // Assert + Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + public void ItCanAddAudioToTextService() + { + // Arrange + var sut = new ServiceCollection(); + + // Act + var service = sut.AddOpenAIAudioToText("model", "key") + .BuildServiceProvider() + .GetRequiredService(); + + // Assert + Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + public void ItCanAddAudioToTextServiceWithOpenAIClient() + { + // Arrange + var sut = new ServiceCollection(); + + // Act + var service = sut.AddOpenAIAudioToText("model", new OpenAIClient("key")) + .BuildServiceProvider() + .GetRequiredService(); + + // Assert + Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + 
[Obsolete("This test is deprecated and will be removed in a future version.")] + public void ItCanAddFileService() + { + // Arrange + var sut = new ServiceCollection(); + + // Act + var service = sut.AddOpenAIFiles("key") + .BuildServiceProvider() + .GetRequiredService(); + } + + public enum InitializationType + { + ApiKey, + ClientInline, + ClientInServiceProvider, + ClientEndpoint, + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AudioToText/OpenAIAudioToTextServiceTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIAudioToTextServiceTests.cs similarity index 62% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AudioToText/OpenAIAudioToTextServiceTests.cs rename to dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIAudioToTextServiceTests.cs index 40959c7c67ed..3ab5c0b7f960 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AudioToText/OpenAIAudioToTextServiceTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIAudioToTextServiceTests.cs @@ -3,14 +3,14 @@ using System; using System.Net.Http; using System.Threading.Tasks; -using Azure.AI.OpenAI; using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Connectors.OpenAI; using Moq; +using OpenAI; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.OpenAI.AudioToText; +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Services; /// /// Unit tests for class. 
@@ -43,6 +43,18 @@ public void ConstructorWithApiKeyWorksCorrectly(bool includeLoggerFactory) Assert.Equal("model-id", service.Attributes["ModelId"]); } + [Fact] + public void ItThrowsIfModelIdIsNotProvided() + { + // Act & Assert + Assert.Throws(() => new OpenAIAudioToTextService(" ", "apikey")); + Assert.Throws(() => new OpenAIAudioToTextService(" ", openAIClient: new("apikey"))); + Assert.Throws(() => new OpenAIAudioToTextService("", "apikey")); + Assert.Throws(() => new OpenAIAudioToTextService("", openAIClient: new("apikey"))); + Assert.Throws(() => new OpenAIAudioToTextService(null!, "apikey")); + Assert.Throws(() => new OpenAIAudioToTextService(null!, openAIClient: new("apikey"))); + } + [Theory] [InlineData(true)] [InlineData(false)] @@ -77,6 +89,26 @@ public async Task GetTextContentByDefaultWorksCorrectlyAsync() Assert.Equal("Test audio-to-text response", result[0].Text); } + [Fact] + public async Task GetTextContentThrowsIfAudioCantBeReadAsync() + { + // Arrange + var service = new OpenAIAudioToTextService("model-id", "api-key", "organization", this._httpClient); + + // Act & Assert + await Assert.ThrowsAsync(async () => { await service.GetTextContentsAsync(new AudioContent(new Uri("http://remote-audio")), new OpenAIAudioToTextExecutionSettings("file.mp3")); }); + } + + [Fact] + public async Task GetTextContentThrowsIfFileNameIsInvalidAsync() + { + // Arrange + var service = new OpenAIAudioToTextService("model-id", "api-key", "organization", this._httpClient); + + // Act & Assert + await Assert.ThrowsAsync(async () => { await service.GetTextContentsAsync(new AudioContent(new BinaryData("data"), mimeType: null), new OpenAIAudioToTextExecutionSettings("invalid")); }); + } + public void Dispose() { this._httpClient.Dispose(); diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletion/OpenAIChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs similarity index 51% 
rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletion/OpenAIChatCompletionServiceTests.cs rename to dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs index 7d1c47388f91..f560c9924977 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletion/OpenAIChatCompletionServiceTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs @@ -2,23 +2,28 @@ using System; using System.Collections.Generic; +using System.Diagnostics; using System.Globalization; using System.IO; +using System.Linq; using System.Net; using System.Net.Http; using System.Text; using System.Text.Json; using System.Threading.Tasks; -using Azure.AI.OpenAI; +using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.ChatCompletion; using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Diagnostics; using Microsoft.SemanticKernel.TextGeneration; using Moq; +using OpenAI; +using OpenAI.Chat; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.OpenAI.ChatCompletion; +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Services; /// /// Unit tests for @@ -26,14 +31,17 @@ namespace SemanticKernel.Connectors.UnitTests.OpenAI.ChatCompletion; public sealed class OpenAIChatCompletionServiceTests : IDisposable { private readonly HttpMessageHandlerStub _messageHandlerStub; + private readonly MultipleHttpMessageHandlerStub _multiMessageHandlerStub; private readonly HttpClient _httpClient; private readonly OpenAIFunction _timepluginDate, _timepluginNow; private readonly OpenAIPromptExecutionSettings _executionSettings; private readonly Mock _mockLoggerFactory; + private readonly ChatHistory _chatHistoryForTest = [new ChatMessageContent(AuthorRole.User, "test")]; public OpenAIChatCompletionServiceTests() { this._messageHandlerStub = new HttpMessageHandlerStub(); 
+ this._multiMessageHandlerStub = new MultipleHttpMessageHandlerStub(); this._httpClient = new HttpClient(this._messageHandlerStub, false); this._mockLoggerFactory = new Mock(); @@ -68,12 +76,11 @@ public void ConstructorWithApiKeyWorksCorrectly(bool includeLoggerFactory) } [Theory] - [InlineData("http://localhost:1234/chat/completions", "http://localhost:1234/chat/completions")] // Uses full path when provided - [InlineData("http://localhost:1234/v2/chat/completions", "http://localhost:1234/v2/chat/completions")] // Uses full path when provided - [InlineData("http://localhost:1234", "http://localhost:1234/v1/chat/completions")] + [InlineData("http://localhost:1234/v1/chat/completions", "http://localhost:1234/v1/chat/completions")] // Uses full path when provided + [InlineData("http://localhost:1234/", "http://localhost:1234/v1/chat/completions")] [InlineData("http://localhost:8080", "http://localhost:8080/v1/chat/completions")] [InlineData("https://something:8080", "https://something:8080/v1/chat/completions")] // Accepts TLS Secured endpoints - public async Task ItUsesCustomEndpointsWhenProvidedAsync(string endpointProvided, string expectedEndpoint) + public async Task ItUsesCustomEndpointsWhenProvidedDirectlyAsync(string endpointProvided, string expectedEndpoint) { // Arrange var chatCompletion = new OpenAIChatCompletionService(modelId: "any", apiKey: null, httpClient: this._httpClient, endpoint: new Uri(endpointProvided)); @@ -81,7 +88,27 @@ public async Task ItUsesCustomEndpointsWhenProvidedAsync(string endpointProvided { Content = new StringContent(ChatCompletionResponse) }; // Act - await chatCompletion.GetChatMessageContentsAsync([], this._executionSettings); + await chatCompletion.GetChatMessageContentsAsync(this._chatHistoryForTest, this._executionSettings); + + // Assert + Assert.Equal(expectedEndpoint, this._messageHandlerStub.RequestUri!.ToString()); + } + + [Theory] + [InlineData("http://localhost:1234/v1/chat/completions", 
"http://localhost:1234/v1/chat/completions")] // Uses full path when provided + [InlineData("http://localhost:1234/", "http://localhost:1234/v1/chat/completions")] + [InlineData("http://localhost:8080", "http://localhost:8080/v1/chat/completions")] + [InlineData("https://something:8080", "https://something:8080/v1/chat/completions")] // Accepts TLS Secured endpoints + public async Task ItUsesCustomEndpointsWhenProvidedAsBaseAddressAsync(string endpointProvided, string expectedEndpoint) + { + // Arrange + this._httpClient.BaseAddress = new Uri(endpointProvided); + var chatCompletion = new OpenAIChatCompletionService(modelId: "any", apiKey: null, httpClient: this._httpClient, endpoint: new Uri(endpointProvided)); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { Content = new StringContent(ChatCompletionResponse) }; + + // Act + await chatCompletion.GetChatMessageContentsAsync(this._chatHistoryForTest, this._executionSettings); // Assert Assert.Equal(expectedEndpoint, this._messageHandlerStub.RequestUri!.ToString()); @@ -97,7 +124,7 @@ public async Task ItUsesHttpClientEndpointIfProvidedEndpointIsMissingAsync() { Content = new StringContent(ChatCompletionResponse) }; // Act - await chatCompletion.GetChatMessageContentsAsync([], this._executionSettings); + await chatCompletion.GetChatMessageContentsAsync(this._chatHistoryForTest, this._executionSettings); // Assert Assert.Equal("http://localhost:12312/v1/chat/completions", this._messageHandlerStub.RequestUri!.ToString()); @@ -112,7 +139,7 @@ public async Task ItUsesDefaultEndpointIfProvidedEndpointIsMissingAsync() { Content = new StringContent(ChatCompletionResponse) }; // Act - await chatCompletion.GetChatMessageContentsAsync([], this._executionSettings); + await chatCompletion.GetChatMessageContentsAsync(this._chatHistoryForTest, this._executionSettings); // Assert Assert.Equal("https://api.openai.com/v1/chat/completions", 
this._messageHandlerStub.RequestUri!.ToString()); @@ -143,7 +170,7 @@ public async Task ItCreatesCorrectFunctionToolCallsWhenUsingAutoAsync() { Content = new StringContent(ChatCompletionResponse) }; // Act - await chatCompletion.GetChatMessageContentsAsync([], this._executionSettings); + await chatCompletion.GetChatMessageContentsAsync([new ChatMessageContent(AuthorRole.User, "test")], this._executionSettings); // Assert var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); @@ -164,7 +191,7 @@ public async Task ItCreatesCorrectFunctionToolCallsWhenUsingNowAsync() this._executionSettings.ToolCallBehavior = ToolCallBehavior.RequireFunction(this._timepluginNow); // Act - await chatCompletion.GetChatMessageContentsAsync([], this._executionSettings); + await chatCompletion.GetChatMessageContentsAsync(this._chatHistoryForTest, this._executionSettings); // Assert var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); @@ -184,7 +211,7 @@ public async Task ItCreatesNoFunctionsWhenUsingNoneAsync() this._executionSettings.ToolCallBehavior = null; // Act - await chatCompletion.GetChatMessageContentsAsync([], this._executionSettings); + await chatCompletion.GetChatMessageContentsAsync(this._chatHistoryForTest, this._executionSettings); // Assert var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); @@ -220,7 +247,7 @@ public async Task ItGetChatMessageContentsShouldHaveModelIdDefinedAsync() // Arrange var chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) - { Content = new StringContent(AzureChatCompletionResponse, Encoding.UTF8, "application/json") }; + { Content = new StringContent(ChatCompletionResponse, Encoding.UTF8, "application/json") }; var chatHistory = new ChatHistory(); 
chatHistory.AddMessage(AuthorRole.User, "Hello"); @@ -239,7 +266,7 @@ public async Task ItGetTextContentsShouldHaveModelIdDefinedAsync() // Arrange var chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) - { Content = new StringContent(AzureChatCompletionResponse, Encoding.UTF8, "application/json") }; + { Content = new StringContent(ChatCompletionResponse, Encoding.UTF8, "application/json") }; var chatHistory = new ChatHistory(); chatHistory.AddMessage(AuthorRole.User, "Hello"); @@ -257,7 +284,7 @@ public async Task GetStreamingTextContentsWorksCorrectlyAsync() { // Arrange var service = new OpenAIChatCompletionService("model-id", "api-key", "organization", this._httpClient); - using var stream = new MemoryStream(Encoding.UTF8.GetBytes(OpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt"))); + using var stream = File.OpenRead("TestData/chat_completion_streaming_test_response.txt"); this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) { @@ -271,7 +298,7 @@ public async Task GetStreamingTextContentsWorksCorrectlyAsync() Assert.Equal("Test chat streaming response", enumerator.Current.Text); await enumerator.MoveNextAsync(); - Assert.Equal("stop", enumerator.Current.Metadata?["FinishReason"]); + Assert.Equal("Stop", enumerator.Current.Metadata?["FinishReason"]); } [Fact] @@ -279,7 +306,7 @@ public async Task GetStreamingChatMessageContentsWorksCorrectlyAsync() { // Arrange var service = new OpenAIChatCompletionService("model-id", "api-key", "organization", this._httpClient); - using var stream = new MemoryStream(Encoding.UTF8.GetBytes(OpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt"))); + using var stream = File.OpenRead("TestData/chat_completion_streaming_test_response.txt"); 
this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) { @@ -293,7 +320,7 @@ public async Task GetStreamingChatMessageContentsWorksCorrectlyAsync() Assert.Equal("Test chat streaming response", enumerator.Current.Content); await enumerator.MoveNextAsync(); - Assert.Equal("stop", enumerator.Current.Metadata?["FinishReason"]); + Assert.Equal("Stop", enumerator.Current.Metadata?["FinishReason"]); } [Fact] @@ -321,6 +348,197 @@ public async Task ItAddsSystemMessageAsync() Assert.Equal("user", messages[0].GetProperty("role").GetString()); } + [Fact] + public async Task GetStreamingChatMessageContentsWithFunctionCallAsync() + { + // Arrange + int functionCallCount = 0; + + var kernel = Kernel.CreateBuilder().Build(); + var function1 = KernelFunctionFactory.CreateFromMethod((string location) => + { + functionCallCount++; + return "Some weather"; + }, "GetCurrentWeather"); + + var function2 = KernelFunctionFactory.CreateFromMethod((string argument) => + { + functionCallCount++; + throw new ArgumentException("Some exception"); + }, "FunctionWithException"); + + kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2])); + + using var multiHttpClient = new HttpClient(this._multiMessageHandlerStub, false); + var service = new OpenAIChatCompletionService("model-id", "api-key", "organization-id", multiHttpClient, this._mockLoggerFactory.Object); + var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_streaming_multiple_function_calls_test_response.txt")) }; + using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_streaming_test_response.txt")) }; + + this._multiMessageHandlerStub.ResponsesToReturn = [response1, response2]; + + // Act 
& Assert + var enumerator = service.GetStreamingChatMessageContentsAsync([], settings, kernel).GetAsyncEnumerator(); + + await enumerator.MoveNextAsync(); + Assert.Equal("Test chat streaming response", enumerator.Current.Content); + Assert.Equal("ToolCalls", enumerator.Current.Metadata?["FinishReason"]); + + await enumerator.MoveNextAsync(); + Assert.Equal("ToolCalls", enumerator.Current.Metadata?["FinishReason"]); + + // Keep looping until the end of stream + while (await enumerator.MoveNextAsync()) + { + } + + Assert.Equal(2, functionCallCount); + } + + [Fact] + public async Task GetStreamingChatMessageContentsWithFunctionCallMaximumAutoInvokeAttemptsAsync() + { + // Arrange + const int DefaultMaximumAutoInvokeAttempts = 128; + const int ModelResponsesCount = 129; + + int functionCallCount = 0; + + var kernel = Kernel.CreateBuilder().Build(); + var function = KernelFunctionFactory.CreateFromMethod((string location) => + { + functionCallCount++; + return "Some weather"; + }, "GetCurrentWeather"); + + kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("MyPlugin", [function])); + using var multiHttpClient = new HttpClient(this._multiMessageHandlerStub, false); + var service = new OpenAIChatCompletionService("model-id", "api-key", httpClient: multiHttpClient, loggerFactory: this._mockLoggerFactory.Object); + var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + var responses = new List(); + + for (var i = 0; i < ModelResponsesCount; i++) + { + responses.Add(new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_streaming_single_function_call_test_response.txt")) }); + } + + this._multiMessageHandlerStub.ResponsesToReturn = responses; + + // Act & Assert + await foreach (var chunk in service.GetStreamingChatMessageContentsAsync([], settings, kernel)) + { + Assert.Equal("Test chat streaming response", chunk.Content); + } + + 
Assert.Equal(DefaultMaximumAutoInvokeAttempts, functionCallCount); + } + + [Fact] + public async Task GetStreamingChatMessageContentsWithRequiredFunctionCallAsync() + { + // Arrange + int functionCallCount = 0; + + var kernel = Kernel.CreateBuilder().Build(); + var function = KernelFunctionFactory.CreateFromMethod((string location) => + { + functionCallCount++; + return "Some weather"; + }, "GetCurrentWeather"); + + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function]); + var openAIFunction = plugin.GetFunctionsMetadata().First().ToOpenAIFunction(); + + kernel.Plugins.Add(plugin); + using var multiHttpClient = new HttpClient(this._multiMessageHandlerStub, false); + var service = new OpenAIChatCompletionService("model-id", "api-key", httpClient: multiHttpClient, loggerFactory: this._mockLoggerFactory.Object); + var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.RequireFunction(openAIFunction, autoInvoke: true) }; + + using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_streaming_single_function_call_test_response.txt")) }; + using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_streaming_test_response.txt")) }; + + this._multiMessageHandlerStub.ResponsesToReturn = [response1, response2]; + + // Act & Assert + var enumerator = service.GetStreamingChatMessageContentsAsync([], settings, kernel).GetAsyncEnumerator(); + + // Function Tool Call Streaming (One Chunk) + await enumerator.MoveNextAsync(); + Assert.Equal("Test chat streaming response", enumerator.Current.Content); + Assert.Equal("ToolCalls", enumerator.Current.Metadata?["FinishReason"]); + + // Chat Completion Streaming (1st Chunk) + await enumerator.MoveNextAsync(); + Assert.Null(enumerator.Current.Metadata?["FinishReason"]); + + // Chat Completion Streaming (2nd Chunk) + await 
enumerator.MoveNextAsync(); + Assert.Equal("Stop", enumerator.Current.Metadata?["FinishReason"]); + + Assert.Equal(1, functionCallCount); + + var requestContents = this._multiMessageHandlerStub.RequestContents; + + Assert.Equal(2, requestContents.Count); + + requestContents.ForEach(Assert.NotNull); + + var firstContent = Encoding.UTF8.GetString(requestContents[0]!); + var secondContent = Encoding.UTF8.GetString(requestContents[1]!); + + var firstContentJson = JsonSerializer.Deserialize(firstContent); + var secondContentJson = JsonSerializer.Deserialize(secondContent); + + Assert.Equal(1, firstContentJson.GetProperty("tools").GetArrayLength()); + Assert.Equal("MyPlugin-GetCurrentWeather", firstContentJson.GetProperty("tool_choice").GetProperty("function").GetProperty("name").GetString()); + + Assert.Equal("none", secondContentJson.GetProperty("tool_choice").GetString()); + } + + [Fact] + public async Task GetChatMessageContentsUsesPromptAndSettingsCorrectlyAsync() + { + // Arrange + const string Prompt = "This is test prompt"; + const string SystemMessage = "This is test system message"; + + var service = new OpenAIChatCompletionService("model-id", "api-key", httpClient: this._httpClient); + var settings = new OpenAIPromptExecutionSettings() { ChatSystemPrompt = SystemMessage }; + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(File.ReadAllText("TestData/chat_completion_test_response.json")) + }; + + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddTransient((sp) => service); + Kernel kernel = builder.Build(); + + // Act + var result = await kernel.InvokePromptAsync(Prompt, new(settings)); + + // Assert + Assert.Equal("Test chat response", result.ToString()); + + var requestContentByteArray = this._messageHandlerStub.RequestContent; + + Assert.NotNull(requestContentByteArray); + + var requestContent = 
JsonSerializer.Deserialize(Encoding.UTF8.GetString(requestContentByteArray)); + + var messages = requestContent.GetProperty("messages"); + + Assert.Equal(2, messages.GetArrayLength()); + + Assert.Equal(SystemMessage, messages[0].GetProperty("content").GetString()); + Assert.Equal("system", messages[0].GetProperty("role").GetString()); + + Assert.Equal(Prompt, messages[1].GetProperty("content").GetString()); + Assert.Equal("user", messages[1].GetProperty("role").GetString()); + } + [Fact] public async Task GetChatMessageContentsWithChatMessageContentItemCollectionAndSettingsCorrectlyAsync() { @@ -333,8 +551,8 @@ public async Task GetChatMessageContentsWithChatMessageContentItemCollectionAndS var chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); var settings = new OpenAIPromptExecutionSettings() { ChatSystemPrompt = SystemMessage }; - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) - { Content = new StringContent(ChatCompletionResponse) }; + using var response = new HttpResponseMessage(System.Net.HttpStatusCode.OK) { Content = new StringContent(ChatCompletionResponse) }; + this._messageHandlerStub.ResponseToReturn = response; var chatHistory = new ChatHistory(); chatHistory.AddUserMessage(Prompt); @@ -380,7 +598,7 @@ public async Task FunctionCallsShouldBePropagatedToCallersViaChatMessageItemsOfT // Arrange this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) { - Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_multiple_function_calls_test_response.json")) + Content = new StringContent(File.ReadAllText("TestData/chat_completion_multiple_function_calls_test_response.json")) }; var sut = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); @@ -587,10 +805,258 @@ public async Task 
FunctionResultsCanBeProvidedToLLMAsManyResultsInOneChatMessage Assert.Equal("2", assistantMessage2.GetProperty("tool_call_id").GetString()); } + [Theory] + [InlineData("string", "json_object")] + [InlineData("string", "text")] + [InlineData("string", "random")] + [InlineData("JsonElement.String", "\"json_object\"")] + [InlineData("JsonElement.String", "\"text\"")] + [InlineData("JsonElement.String", "\"random\"")] + [InlineData("ChatResponseFormat", "json_object")] + [InlineData("ChatResponseFormat", "text")] + public async Task GetChatMessageInResponseFormatsAsync(string formatType, string formatValue) + { + // Assert + object? format = null; + switch (formatType) + { + case "string": + format = formatValue; + break; + case "JsonElement.String": + format = JsonSerializer.Deserialize(formatValue); + break; + case "ChatResponseFormat": + format = formatValue == "text" ? ChatResponseFormat.Text : ChatResponseFormat.JsonObject; + break; + } + + var modelId = "gpt-4o"; + var sut = new OpenAIChatCompletionService(modelId, "apiKey", httpClient: this._httpClient); + OpenAIPromptExecutionSettings executionSettings = new() { ResponseFormat = format }; + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(File.ReadAllText("TestData/chat_completion_test_response.json")) + }; + + // Act + var result = await sut.GetChatMessageContentAsync(this._chatHistoryForTest, executionSettings); + + // Assert + Assert.NotNull(result); + } + + [Fact(Skip = "Not working running in the console")] + public async Task GetInvalidResponseThrowsExceptionAndIsCapturedByDiagnosticsAsync() + { + // Arrange + bool startedChatCompletionsActivity = false; + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { Content = new StringContent("Invalid JSON") }; + + var sut = new OpenAIChatCompletionService("model-id", "api-key", httpClient: this._httpClient); + + // Enable ModelDiagnostics 
+ using var listener = new ActivityListener() + { + ShouldListenTo = (activitySource) => true, //activitySource.Name == typeof(ModelDiagnostics).Namespace!, + ActivityStarted = (activity) => + { + if (activity.OperationName == "chat.completions model-id") + { + startedChatCompletionsActivity = true; + } + }, + Sample = (ref ActivityCreationOptions options) => ActivitySamplingResult.AllData, + }; + + ActivitySource.AddActivityListener(listener); + + Environment.SetEnvironmentVariable("SEMANTICKERNEL_EXPERIMENTAL_GENAI_ENABLE_OTEL_DIAGNOSTICS", "true"); + Environment.SetEnvironmentVariable("SEMANTICKERNEL_EXPERIMENTAL_GENAI_ENABLE_OTEL_DIAGNOSTICS_SENSITIVE", "true"); + + // Act & Assert + await Assert.ThrowsAnyAsync(async () => { await sut.GetChatMessageContentsAsync(this._chatHistoryForTest); }); + + Assert.True(ModelDiagnostics.HasListeners()); + Assert.True(ModelDiagnostics.IsSensitiveEventsEnabled()); + Assert.True(ModelDiagnostics.IsModelDiagnosticsEnabled()); + Assert.True(startedChatCompletionsActivity); + } + + [Fact] + public async Task GetChatMessageContentShouldSendMutatedChatHistoryToLLM() + { + // Arrange + static void MutateChatHistory(AutoFunctionInvocationContext context, Func next) + { + // Remove the function call messages from the chat history to reduce token count. + context.ChatHistory.RemoveRange(1, 2); // Remove the `Date` function call and function result messages. 
+ + next(context); + } + + var kernel = new Kernel(); + kernel.ImportPluginFromFunctions("MyPlugin", [KernelFunctionFactory.CreateFromMethod(() => "rainy", "GetCurrentWeather")]); + kernel.AutoFunctionInvocationFilters.Add(new AutoFunctionInvocationFilter(MutateChatHistory)); + + using var firstResponse = new HttpResponseMessage(System.Net.HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_single_function_call_test_response.json")) }; + this._messageHandlerStub.ResponseQueue.Enqueue(firstResponse); + + using var secondResponse = new HttpResponseMessage(System.Net.HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_test_response.json")) }; + this._messageHandlerStub.ResponseQueue.Enqueue(secondResponse); + + var sut = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + + var chatHistory = new ChatHistory + { + new ChatMessageContent(AuthorRole.User, "What time is it?"), + new ChatMessageContent(AuthorRole.Assistant, [ + new FunctionCallContent("Date", "TimePlugin", "2") + ]), + new ChatMessageContent(AuthorRole.Tool, [ + new FunctionResultContent("Date", "TimePlugin", "2", "rainy") + ]), + new ChatMessageContent(AuthorRole.Assistant, "08/06/2024 00:00:00"), + new ChatMessageContent(AuthorRole.User, "Given the current time of day and weather, what is the likely color of the sky in Boston?") + }; + + // Act + await sut.GetChatMessageContentAsync(chatHistory, new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }, kernel); + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + Assert.NotNull(actualRequestContent); + + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + + var messages = optionsJson.GetProperty("messages"); + Assert.Equal(5, messages.GetArrayLength()); + + var userFirstPrompt = messages[0]; + 
Assert.Equal("user", userFirstPrompt.GetProperty("role").GetString()); + Assert.Equal("What time is it?", userFirstPrompt.GetProperty("content").ToString()); + + var assistantFirstResponse = messages[1]; + Assert.Equal("assistant", assistantFirstResponse.GetProperty("role").GetString()); + Assert.Equal("08/06/2024 00:00:00", assistantFirstResponse.GetProperty("content").GetString()); + + var userSecondPrompt = messages[2]; + Assert.Equal("user", userSecondPrompt.GetProperty("role").GetString()); + Assert.Equal("Given the current time of day and weather, what is the likely color of the sky in Boston?", userSecondPrompt.GetProperty("content").ToString()); + + var assistantSecondResponse = messages[3]; + Assert.Equal("assistant", assistantSecondResponse.GetProperty("role").GetString()); + Assert.Equal("1", assistantSecondResponse.GetProperty("tool_calls")[0].GetProperty("id").GetString()); + Assert.Equal("MyPlugin-GetCurrentWeather", assistantSecondResponse.GetProperty("tool_calls")[0].GetProperty("function").GetProperty("name").GetString()); + + var functionResult = messages[4]; + Assert.Equal("tool", functionResult.GetProperty("role").GetString()); + Assert.Equal("rainy", functionResult.GetProperty("content").GetString()); + } + + [Fact] + public async Task GetStreamingChatMessageContentsShouldSendMutatedChatHistoryToLLM() + { + // Arrange + static void MutateChatHistory(AutoFunctionInvocationContext context, Func next) + { + // Remove the function call messages from the chat history to reduce token count. + context.ChatHistory.RemoveRange(1, 2); // Remove the `Date` function call and function result messages. 
+ + next(context); + } + + var kernel = new Kernel(); + kernel.ImportPluginFromFunctions("MyPlugin", [KernelFunctionFactory.CreateFromMethod(() => "rainy", "GetCurrentWeather")]); + kernel.AutoFunctionInvocationFilters.Add(new AutoFunctionInvocationFilter(MutateChatHistory)); + + using var firstResponse = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_streaming_single_function_call_test_response.txt")) }; + this._messageHandlerStub.ResponseQueue.Enqueue(firstResponse); + + using var secondResponse = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_streaming_test_response.txt")) }; + this._messageHandlerStub.ResponseQueue.Enqueue(secondResponse); + + var sut = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + + var chatHistory = new ChatHistory + { + new ChatMessageContent(AuthorRole.User, "What time is it?"), + new ChatMessageContent(AuthorRole.Assistant, [ + new FunctionCallContent("Date", "TimePlugin", "2") + ]), + new ChatMessageContent(AuthorRole.Tool, [ + new FunctionResultContent("Date", "TimePlugin", "2", "rainy") + ]), + new ChatMessageContent(AuthorRole.Assistant, "08/06/2024 00:00:00"), + new ChatMessageContent(AuthorRole.User, "Given the current time of day and weather, what is the likely color of the sky in Boston?") + }; + + // Act + await foreach (var update in sut.GetStreamingChatMessageContentsAsync(chatHistory, new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }, kernel)) + { + } + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + Assert.NotNull(actualRequestContent); + + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + + var messages = optionsJson.GetProperty("messages"); + Assert.Equal(5, messages.GetArrayLength()); + + var 
userFirstPrompt = messages[0]; + Assert.Equal("user", userFirstPrompt.GetProperty("role").GetString()); + Assert.Equal("What time is it?", userFirstPrompt.GetProperty("content").ToString()); + + var assistantFirstResponse = messages[1]; + Assert.Equal("assistant", assistantFirstResponse.GetProperty("role").GetString()); + Assert.Equal("08/06/2024 00:00:00", assistantFirstResponse.GetProperty("content").GetString()); + + var userSecondPrompt = messages[2]; + Assert.Equal("user", userSecondPrompt.GetProperty("role").GetString()); + Assert.Equal("Given the current time of day and weather, what is the likely color of the sky in Boston?", userSecondPrompt.GetProperty("content").ToString()); + + var assistantSecondResponse = messages[3]; + Assert.Equal("assistant", assistantSecondResponse.GetProperty("role").GetString()); + Assert.Equal("1", assistantSecondResponse.GetProperty("tool_calls")[0].GetProperty("id").GetString()); + Assert.Equal("MyPlugin-GetCurrentWeather", assistantSecondResponse.GetProperty("tool_calls")[0].GetProperty("function").GetProperty("name").GetString()); + + var functionResult = messages[4]; + Assert.Equal("tool", functionResult.GetProperty("role").GetString()); + Assert.Equal("rainy", functionResult.GetProperty("content").GetString()); + } + public void Dispose() { this._httpClient.Dispose(); this._messageHandlerStub.Dispose(); + this._multiMessageHandlerStub.Dispose(); + } + + private sealed class AutoFunctionInvocationFilter : IAutoFunctionInvocationFilter + { + private readonly Func, Task> _callback; + + public AutoFunctionInvocationFilter(Func, Task> callback) + { + Verify.NotNull(callback, nameof(callback)); + this._callback = callback; + } + + public AutoFunctionInvocationFilter(Action> callback) + { + Verify.NotNull(callback, nameof(callback)); + this._callback = (c, n) => { callback(c, n); return Task.CompletedTask; }; + } + + public async Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func next) + { + await 
this._callback(context, next); + } } private const string ChatCompletionResponse = """ @@ -605,12 +1071,17 @@ public void Dispose() "message": { "role": "assistant", "content": null, - "function_call": { - "name": "TimePlugin_Date", - "arguments": "{}" - } + "tool_calls":[{ + "id": "1", + "type": "function", + "function": { + "name": "TimePlugin-Date", + "arguments": "{}" + } + } + ] }, - "finish_reason": "stop" + "finish_reason": "tool_calls" } ], "usage": { @@ -620,68 +1091,4 @@ public void Dispose() } } """; - private const string AzureChatCompletionResponse = """ - { - "id": "chatcmpl-8S914omCBNQ0KU1NFtxmupZpzKWv2", - "object": "chat.completion", - "created": 1701718534, - "model": "gpt-3.5-turbo", - "prompt_filter_results": [ - { - "prompt_index": 0, - "content_filter_results": { - "hate": { - "filtered": false, - "severity": "safe" - }, - "self_harm": { - "filtered": false, - "severity": "safe" - }, - "sexual": { - "filtered": false, - "severity": "safe" - }, - "violence": { - "filtered": false, - "severity": "safe" - } - } - } - ], - "choices": [ - { - "index": 0, - "finish_reason": "stop", - "message": { - "role": "assistant", - "content": "Hello! How can I help you today? Please provide me with a question or topic you would like information on." 
- }, - "content_filter_results": { - "hate": { - "filtered": false, - "severity": "safe" - }, - "self_harm": { - "filtered": false, - "severity": "safe" - }, - "sexual": { - "filtered": false, - "severity": "safe" - }, - "violence": { - "filtered": false, - "severity": "safe" - } - } - } - ], - "usage": { - "prompt_tokens": 23, - "completion_tokens": 23, - "total_tokens": 46 - } - } - """; } diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/Files/OpenAIFileServiceTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIFileServiceTests.cs similarity index 98% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/Files/OpenAIFileServiceTests.cs rename to dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIFileServiceTests.cs index b9619fc1bc58..c763e729e381 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/Files/OpenAIFileServiceTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIFileServiceTests.cs @@ -12,11 +12,12 @@ using Moq; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.OpenAI.Files; +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Files; /// /// Unit tests for class. /// +[Obsolete("This class is deprecated and will be removed in a future version.")] public sealed class OpenAIFileServiceTests : IDisposable { private readonly HttpMessageHandlerStub _messageHandlerStub; diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAITextEmbeddingGenerationServiceTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAITextEmbeddingGenerationServiceTests.cs new file mode 100644 index 000000000000..0181d15d8449 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAITextEmbeddingGenerationServiceTests.cs @@ -0,0 +1,134 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.ClientModel; +using System.IO; +using System.Net; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Services; +using Moq; +using OpenAI; +using Xunit; + +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Services; + +/// +/// Unit tests for class. +/// +public class OpenAITextEmbeddingGenerationServiceTests +{ + [Fact] + public void ItCanBeInstantiatedAndPropertiesSetAsExpected() + { + // Arrange + var sut = new OpenAITextEmbeddingGenerationService("model", "apiKey", dimensions: 2); + var sutWithOpenAIClient = new OpenAITextEmbeddingGenerationService("model", new OpenAIClient(new ApiKeyCredential("apiKey")), dimensions: 2); + + // Assert + Assert.NotNull(sut); + Assert.NotNull(sutWithOpenAIClient); + Assert.Equal("model", sut.Attributes[AIServiceExtensions.ModelIdKey]); + Assert.Equal("model", sutWithOpenAIClient.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + public void ItThrowsIfModelIdIsNotProvided() + { + // Act & Assert + Assert.Throws(() => new OpenAITextEmbeddingGenerationService(" ", "apikey")); + Assert.Throws(() => new OpenAITextEmbeddingGenerationService(" ", openAIClient: new("apikey"))); + Assert.Throws(() => new OpenAITextEmbeddingGenerationService("", "apikey")); + Assert.Throws(() => new OpenAITextEmbeddingGenerationService("", openAIClient: new("apikey"))); + Assert.Throws(() => new OpenAITextEmbeddingGenerationService(null!, "apikey")); + Assert.Throws(() => new OpenAITextEmbeddingGenerationService(null!, openAIClient: new("apikey"))); + } + + [Fact] + public async Task ItGetEmbeddingsAsyncReturnsEmptyWhenProvidedDataIsEmpty() + { + // Arrange + var sut = new OpenAITextEmbeddingGenerationService("model", "apikey"); + + // Act + var result = await sut.GenerateEmbeddingsAsync([], null, CancellationToken.None); + + // 
Assert + Assert.Empty(result); + } + + [Fact] + public async Task GetEmbeddingsAsyncReturnsEmptyWhenProvidedDataIsWhitespace() + { + // Arrange + using HttpMessageHandlerStub handler = new() + { + ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(File.ReadAllText("./TestData/text-embeddings-response.txt")) + } + }; + using HttpClient client = new(handler); + + var sut = new OpenAITextEmbeddingGenerationService("model", "apikey", httpClient: client); + + // Act + var result = await sut.GenerateEmbeddingsAsync(["test"], null, CancellationToken.None); + + // Assert + Assert.Single(result); + Assert.Equal(4, result[0].Length); + } + + [Fact] + public async Task ItThrowsIfNumberOfResultsDiffersFromInputsAsync() + { + // Arrange + using HttpMessageHandlerStub handler = new() + { + ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(File.ReadAllText("./TestData/text-embeddings-multiple-response.txt")) + } + }; + using HttpClient client = new(handler); + + var sut = new OpenAITextEmbeddingGenerationService("model", "apikey", httpClient: client); + + // Act & Assert + await Assert.ThrowsAsync(async () => await sut.GenerateEmbeddingsAsync(["test"], null, CancellationToken.None)); + } + + [Fact] + public async Task GetEmbeddingsDoesLogActionAsync() + { + // Arrange + using HttpMessageHandlerStub handler = new() + { + ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(File.ReadAllText("./TestData/text-embeddings-response.txt")) + } + }; + using HttpClient client = new(handler); + + var modelId = "dall-e-2"; + var logger = new Mock>(); + logger.Setup(l => l.IsEnabled(It.IsAny())).Returns(true); + + var mockLoggerFactory = new Mock(); + mockLoggerFactory.Setup(x => x.CreateLogger(It.IsAny())).Returns(logger.Object); + + var sut = new OpenAITextEmbeddingGenerationService(modelId, "apiKey", httpClient: client, loggerFactory: 
mockLoggerFactory.Object); + + // Act + await sut.GenerateEmbeddingsAsync(["description"]); + + // Assert + logger.VerifyLog(LogLevel.Information, $"Action: {nameof(OpenAITextEmbeddingGenerationService.GenerateEmbeddingsAsync)}. OpenAI Model ID: {modelId}.", Times.Once()); + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToAudio/OpenAITextToAudioServiceTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAITextToAudioServiceTests.cs similarity index 53% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToAudio/OpenAITextToAudioServiceTests.cs rename to dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAITextToAudioServiceTests.cs index 588616f54348..e20d28385293 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToAudio/OpenAITextToAudioServiceTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAITextToAudioServiceTests.cs @@ -5,13 +5,14 @@ using System.Linq; using System.Net; using System.Net.Http; +using System.Text; using System.Threading.Tasks; using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel.Connectors.OpenAI; using Moq; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.OpenAI.TextToAudio; +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Services; /// /// Unit tests for class. 
@@ -42,6 +43,16 @@ public void ConstructorWithApiKeyWorksCorrectly(bool includeLoggerFactory) // Assert Assert.NotNull(service); Assert.Equal("model-id", service.Attributes["ModelId"]); + Assert.Equal("Organization", OpenAITextToAudioService.OrganizationKey); + } + + [Fact] + public void ItThrowsIfModelIdIsNotProvided() + { + // Act & Assert + Assert.Throws(() => new OpenAITextToAudioService(" ", "apikey")); + Assert.Throws(() => new OpenAITextToAudioService("", "apikey")); + Assert.Throws(() => new OpenAITextToAudioService(null!, "apikey")); } [Theory] @@ -50,7 +61,7 @@ public async Task GetAudioContentWithInvalidSettingsThrowsExceptionAsync(OpenAIT { // Arrange var service = new OpenAITextToAudioService("model-id", "api-key", "organization", this._httpClient); - await using var stream = new MemoryStream(new byte[] { 0x00, 0x00, 0xFF, 0x7F }); + using var stream = new MemoryStream([0x00, 0x00, 0xFF, 0x7F]); this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) { @@ -58,7 +69,7 @@ public async Task GetAudioContentWithInvalidSettingsThrowsExceptionAsync(OpenAIT }; // Act - var exception = await Record.ExceptionAsync(() => service.GetAudioContentsAsync("Some text", settings)); + var exception = await Assert.ThrowsAnyAsync(async () => await service.GetAudioContentsAsync("Some text", settings)); // Assert Assert.NotNull(exception); @@ -69,10 +80,10 @@ public async Task GetAudioContentWithInvalidSettingsThrowsExceptionAsync(OpenAIT public async Task GetAudioContentByDefaultWorksCorrectlyAsync() { // Arrange - var expectedByteArray = new byte[] { 0x00, 0x00, 0xFF, 0x7F }; + byte[] expectedByteArray = [0x00, 0x00, 0xFF, 0x7F]; var service = new OpenAITextToAudioService("model-id", "api-key", "organization", this._httpClient); - await using var stream = new MemoryStream(expectedByteArray); + using var stream = new MemoryStream(expectedByteArray); this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) { @@ 
-80,7 +91,7 @@ public async Task GetAudioContentByDefaultWorksCorrectlyAsync() }; // Act - var result = await service.GetAudioContentsAsync("Some text", new OpenAITextToAudioExecutionSettings("voice")); + var result = await service.GetAudioContentsAsync("Some text"); // Assert var audioData = result[0].Data!.Value; @@ -88,13 +99,68 @@ public async Task GetAudioContentByDefaultWorksCorrectlyAsync() Assert.True(audioData.Span.SequenceEqual(expectedByteArray)); } + [Theory] + [InlineData("echo", "wav")] + [InlineData("fable", "opus")] + [InlineData("onyx", "flac")] + [InlineData("nova", "aac")] + [InlineData("shimmer", "pcm")] + public async Task GetAudioContentVoicesWorksCorrectlyAsync(string voice, string format) + { + // Arrange + byte[] expectedByteArray = [0x00, 0x00, 0xFF, 0x7F]; + + var service = new OpenAITextToAudioService("model-id", "api-key", "organization", this._httpClient); + using var stream = new MemoryStream(expectedByteArray); + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(stream) + }; + + // Act + var result = await service.GetAudioContentsAsync("Some text", new OpenAITextToAudioExecutionSettings(voice) { ResponseFormat = format }); + + // Assert + var requestBody = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + var audioData = result[0].Data!.Value; + Assert.Contains($"\"voice\":\"{voice}\"", requestBody); + Assert.Contains($"\"response_format\":\"{format}\"", requestBody); + Assert.False(audioData.IsEmpty); + Assert.True(audioData.Span.SequenceEqual(expectedByteArray)); + } + + [Fact] + public async Task GetAudioContentThrowsWhenVoiceIsNotSupportedAsync() + { + // Arrange + byte[] expectedByteArray = [0x00, 0x00, 0xFF, 0x7F]; + + var service = new OpenAITextToAudioService("model-id", "api-key", "organization", this._httpClient); + + // Act & Assert + await Assert.ThrowsAsync(async () => await service.GetAudioContentsAsync("Some text", new 
OpenAITextToAudioExecutionSettings("voice"))); + } + + [Fact] + public async Task GetAudioContentThrowsWhenFormatIsNotSupportedAsync() + { + // Arrange + byte[] expectedByteArray = [0x00, 0x00, 0xFF, 0x7F]; + + var service = new OpenAITextToAudioService("model-id", "api-key", "organization", this._httpClient); + + // Act & Assert + await Assert.ThrowsAsync(async () => await service.GetAudioContentsAsync("Some text", new OpenAITextToAudioExecutionSettings() { ResponseFormat = "not supported" })); + } + [Theory] [InlineData(true, "http://local-endpoint")] [InlineData(false, "https://api.openai.com")] public async Task GetAudioContentUsesValidBaseUrlAsync(bool useHttpClientBaseAddress, string expectedBaseAddress) { // Arrange - var expectedByteArray = new byte[] { 0x00, 0x00, 0xFF, 0x7F }; + byte[] expectedByteArray = [0x00, 0x00, 0xFF, 0x7F]; if (useHttpClientBaseAddress) { @@ -102,7 +168,7 @@ public async Task GetAudioContentUsesValidBaseUrlAsync(bool useHttpClientBaseAdd } var service = new OpenAITextToAudioService("model-id", "api-key", "organization", this._httpClient); - await using var stream = new MemoryStream(expectedByteArray); + using var stream = new MemoryStream(expectedByteArray); this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) { @@ -110,7 +176,7 @@ public async Task GetAudioContentUsesValidBaseUrlAsync(bool useHttpClientBaseAdd }; // Act - var result = await service.GetAudioContentsAsync("Some text", new OpenAITextToAudioExecutionSettings("voice")); + var result = await service.GetAudioContentsAsync("Some text"); // Assert Assert.StartsWith(expectedBaseAddress, this._messageHandlerStub.RequestUri!.AbsoluteUri, StringComparison.InvariantCulture); @@ -124,6 +190,6 @@ public void Dispose() public static TheoryData ExecutionSettings => new() { - { new OpenAITextToAudioExecutionSettings(""), typeof(ArgumentException) }, + { new OpenAITextToAudioExecutionSettings("invalid"), typeof(NotSupportedException) }, }; } diff 
--git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAITextToImageServiceTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAITextToImageServiceTests.cs new file mode 100644 index 000000000000..1528986b9064 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAITextToImageServiceTests.cs @@ -0,0 +1,76 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.IO; +using System.Net.Http; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Services; +using Moq; +using Xunit; + +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Services; + +/// +/// Unit tests for class. +/// +public sealed class OpenAITextToImageServiceTests : IDisposable +{ + private readonly HttpMessageHandlerStub _messageHandlerStub; + private readonly HttpClient _httpClient; + private readonly Mock _mockLoggerFactory; + + public OpenAITextToImageServiceTests() + { + this._messageHandlerStub = new() + { + ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { + Content = new StringContent(File.ReadAllText("./TestData/text-to-image-response.txt")) + } + }; + this._httpClient = new HttpClient(this._messageHandlerStub, false); + this._mockLoggerFactory = new Mock(); + } + + [Fact] + public void ConstructorWorksCorrectly() + { + // Arrange & Act + var sut = new OpenAITextToImageService("apikey", "organization", "model"); + + // Assert + Assert.NotNull(sut); + Assert.Equal("organization", sut.Attributes[ClientCore.OrganizationKey]); + Assert.Equal("model", sut.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Theory] + [InlineData(256, 256, "dall-e-2")] + [InlineData(512, 512, "dall-e-2")] + [InlineData(1024, 1024, "dall-e-2")] + [InlineData(1024, 1024, "dall-e-3")] + [InlineData(1024, 1792, "dall-e-3")] + [InlineData(1792, 1024, "dall-e-3")] + [InlineData(123, 321, 
"custom-model-1")] + [InlineData(179, 124, "custom-model-2")] + public async Task GenerateImageWorksCorrectlyAsync(int width, int height, string modelId) + { + // Arrange + var sut = new OpenAITextToImageService("api-key", modelId: modelId, httpClient: this._httpClient); + Assert.Equal(modelId, sut.Attributes["ModelId"]); + + // Act + var result = await sut.GenerateImageAsync("description", width, height); + + // Assert + Assert.Equal("https://image-url/", result); + } + + public void Dispose() + { + this._httpClient.Dispose(); + this._messageHandlerStub.Dispose(); + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AudioToText/OpenAIAudioToTextExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIAudioToTextExecutionSettingsTests.cs similarity index 90% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AudioToText/OpenAIAudioToTextExecutionSettingsTests.cs rename to dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIAudioToTextExecutionSettingsTests.cs index 96dd9c1a290b..66390ddfd94d 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AudioToText/OpenAIAudioToTextExecutionSettingsTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIAudioToTextExecutionSettingsTests.cs @@ -6,7 +6,7 @@ using Microsoft.SemanticKernel.Connectors.OpenAI; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.OpenAI.AudioToText; +namespace SemanticKernel.Connectors.OpenAI.UniTests.Settings; /// /// Unit tests for class. 
@@ -28,7 +28,7 @@ public void ItReturnsValidOpenAIAudioToTextExecutionSettings() ModelId = "model_id", Language = "en", Prompt = "prompt", - ResponseFormat = "text", + ResponseFormat = "srt", Temperature = 0.2f }; @@ -49,7 +49,7 @@ public void ItCreatesOpenAIAudioToTextExecutionSettingsFromJson() "language": "en", "filename": "file.mp3", "prompt": "prompt", - "response_format": "text", + "response_format": "verbose_json", "temperature": 0.2 } """; @@ -65,7 +65,7 @@ public void ItCreatesOpenAIAudioToTextExecutionSettingsFromJson() Assert.Equal("en", settings.Language); Assert.Equal("file.mp3", settings.Filename); Assert.Equal("prompt", settings.Prompt); - Assert.Equal("text", settings.ResponseFormat); + Assert.Equal("verbose_json", settings.ResponseFormat); Assert.Equal(0.2f, settings.Temperature); } @@ -77,7 +77,7 @@ public void ItClonesAllProperties() ModelId = "model_id", Language = "en", Prompt = "prompt", - ResponseFormat = "text", + ResponseFormat = "json", Temperature = 0.2f, Filename = "something.mp3", }; @@ -88,7 +88,7 @@ public void ItClonesAllProperties() Assert.Equal("model_id", clone.ModelId); Assert.Equal("en", clone.Language); Assert.Equal("prompt", clone.Prompt); - Assert.Equal("text", clone.ResponseFormat); + Assert.Equal("json", clone.ResponseFormat); Assert.Equal(0.2f, clone.Temperature); Assert.Equal("something.mp3", clone.Filename); } @@ -101,7 +101,7 @@ public void ItFreezesAndPreventsMutation() ModelId = "model_id", Language = "en", Prompt = "prompt", - ResponseFormat = "text", + ResponseFormat = "vtt", Temperature = 0.2f, Filename = "something.mp3", }; @@ -112,7 +112,7 @@ public void ItFreezesAndPreventsMutation() Assert.Throws(() => settings.ModelId = "new_model"); Assert.Throws(() => settings.Language = "some_format"); Assert.Throws(() => settings.Prompt = "prompt"); - Assert.Throws(() => settings.ResponseFormat = "something"); + Assert.Throws(() => settings.ResponseFormat = "vtt"); Assert.Throws(() => settings.Temperature = 0.2f); 
Assert.Throws(() => settings.Filename = "something"); diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIPromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs similarity index 89% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIPromptExecutionSettingsTests.cs rename to dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs index b64649230d96..d297b2691d0f 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIPromptExecutionSettingsTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs @@ -7,10 +7,10 @@ using Microsoft.SemanticKernel.Connectors.OpenAI; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.OpenAI; +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Settings; /// -/// Unit tests of OpenAIPromptExecutionSettings +/// Unit tests of OpenAIPromptExecutionSettingsTests /// public class OpenAIPromptExecutionSettingsTests { @@ -23,16 +23,14 @@ public void ItCreatesOpenAIExecutionSettingsWithCorrectDefaults() // Assert Assert.NotNull(executionSettings); - Assert.Equal(1, executionSettings.Temperature); - Assert.Equal(1, executionSettings.TopP); - Assert.Equal(0, executionSettings.FrequencyPenalty); - Assert.Equal(0, executionSettings.PresencePenalty); - Assert.Equal(1, executionSettings.ResultsPerPrompt); + Assert.Null(executionSettings.Temperature); + Assert.Null(executionSettings.TopP); + Assert.Null(executionSettings.FrequencyPenalty); + Assert.Null(executionSettings.PresencePenalty); Assert.Null(executionSettings.StopSequences); Assert.Null(executionSettings.TokenSelectionBiases); Assert.Null(executionSettings.TopLogprobs); Assert.Null(executionSettings.Logprobs); - Assert.Null(executionSettings.AzureChatExtensionsOptions); Assert.Equal(128, executionSettings.MaxTokens); } @@ -46,7 +44,6 @@ public void 
ItUsesExistingOpenAIExecutionSettings() TopP = 0.7, FrequencyPenalty = 0.7, PresencePenalty = 0.7, - ResultsPerPrompt = 2, StopSequences = new string[] { "foo", "bar" }, ChatSystemPrompt = "chat system prompt", MaxTokens = 128, @@ -61,6 +58,7 @@ public void ItUsesExistingOpenAIExecutionSettings() // Assert Assert.NotNull(executionSettings); Assert.Equal(actualSettings, executionSettings); + Assert.Equal(128, executionSettings.MaxTokens); } [Fact] @@ -232,7 +230,6 @@ public void PromptExecutionSettingsFreezeWorksAsExpected() // Assert Assert.True(executionSettings.IsFrozen); Assert.Throws(() => executionSettings.ModelId = "gpt-4"); - Assert.Throws(() => executionSettings.ResultsPerPrompt = 2); Assert.Throws(() => executionSettings.Temperature = 1); Assert.Throws(() => executionSettings.TopP = 1); Assert.Throws(() => executionSettings.StopSequences?.Add("STOP")); @@ -246,14 +243,14 @@ public void PromptExecutionSettingsFreezeWorksAsExpected() public void FromExecutionSettingsWithDataDoesNotIncludeEmptyStopSequences() { // Arrange - var executionSettings = new OpenAIPromptExecutionSettings { StopSequences = [] }; + PromptExecutionSettings settings = new OpenAIPromptExecutionSettings { StopSequences = [] }; // Act -#pragma warning disable CS0618 // AzureOpenAIChatCompletionWithData is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions - var executionSettingsWithData = OpenAIPromptExecutionSettings.FromExecutionSettingsWithData(executionSettings); -#pragma warning restore CS0618 + var executionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(settings); + // Assert - Assert.Null(executionSettingsWithData.StopSequences); + Assert.NotNull(executionSettings.StopSequences); + Assert.Empty(executionSettings.StopSequences); } private static void AssertExecutionSettings(OpenAIPromptExecutionSettings executionSettings) @@ -263,7 +260,6 @@ private static void AssertExecutionSettings(OpenAIPromptExecutionSettings execut Assert.Equal(0.7, 
executionSettings.TopP); Assert.Equal(0.7, executionSettings.FrequencyPenalty); Assert.Equal(0.7, executionSettings.PresencePenalty); - Assert.Equal(2, executionSettings.ResultsPerPrompt); Assert.Equal(new string[] { "foo", "bar" }, executionSettings.StopSequences); Assert.Equal("chat system prompt", executionSettings.ChatSystemPrompt); Assert.Equal(new Dictionary() { { 1, 2 }, { 3, 4 } }, executionSettings.TokenSelectionBiases); diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToAudio/OpenAITextToAudioExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAITextToAudioExecutionSettingsTests.cs similarity index 98% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToAudio/OpenAITextToAudioExecutionSettingsTests.cs rename to dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAITextToAudioExecutionSettingsTests.cs index ea1b1adafae5..f30478e15acf 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToAudio/OpenAITextToAudioExecutionSettingsTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAITextToAudioExecutionSettingsTests.cs @@ -6,7 +6,7 @@ using Microsoft.SemanticKernel.Connectors.OpenAI; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.OpenAI.TextToAudio; +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Settings; /// /// Unit tests for class.
diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_invalid_streaming_test_response.txt b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_invalid_streaming_test_response.txt new file mode 100644 index 000000000000..be41c2eaf843 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_invalid_streaming_test_response.txt @@ -0,0 +1,5 @@ +data: {"id":"chatcmpl-96fqQVHGjG9Yzs4ZMB1K6nfy2oEoo","object":"chat.completion.chunk","created":1711377846,"model":"gpt-4-0125-preview","system_fingerprint":"fp_a7daf7c51e","choices":[{"index":0,"delta":{"content":"Test chat streaming response"},"logprobs":null,"finish_reason":null}]} + +data: {"id":}]} + +data: [DONE] diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_multiple_function_calls_test_response.json b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_multiple_function_calls_test_response.json new file mode 100644 index 000000000000..737b972309ba --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_multiple_function_calls_test_response.json @@ -0,0 +1,64 @@ +{ + "id": "response-id", + "object": "chat.completion", + "created": 1699896916, + "model": "gpt-3.5-turbo-0613", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": null, + "tool_calls": [ + { + "id": "1", + "type": "function", + "function": { + "name": "MyPlugin-GetCurrentWeather", + "arguments": "{\n\"location\": \"Boston, MA\"\n}" + } + }, + { + "id": "2", + "type": "function", + "function": { + "name": "MyPlugin-FunctionWithException", + "arguments": "{\n\"argument\": \"value\"\n}" + } + }, + { + "id": "3", + "type": "function", + "function": { + "name": "MyPlugin-NonExistentFunction", + "arguments": "{\n\"argument\": \"value\"\n}" + } + }, + { + "id": "4", + "type": "function", + "function": { + "name": 
"MyPlugin-InvalidArguments", + "arguments": "invalid_arguments_format" + } + }, + { + "id": "5", + "type": "function", + "function": { + "name": "MyPlugin-IntArguments", + "arguments": "{\n\"age\": 36\n}" + } + } + ] + }, + "logprobs": null, + "finish_reason": "tool_calls" + } + ], + "usage": { + "prompt_tokens": 82, + "completion_tokens": 17, + "total_tokens": 99 + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_single_function_call_test_response.json b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_single_function_call_test_response.json new file mode 100644 index 000000000000..6c93e434f259 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_single_function_call_test_response.json @@ -0,0 +1,32 @@ +{ + "id": "response-id", + "object": "chat.completion", + "created": 1699896916, + "model": "gpt-3.5-turbo-0613", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": null, + "tool_calls": [ + { + "id": "1", + "type": "function", + "function": { + "name": "MyPlugin-GetCurrentWeather", + "arguments": "{\n\"location\": \"Boston, MA\"\n}" + } + } + ] + }, + "logprobs": null, + "finish_reason": "tool_calls" + } + ], + "usage": { + "prompt_tokens": 82, + "completion_tokens": 17, + "total_tokens": 99 + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_streaming_multiple_function_calls_test_response.txt b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_streaming_multiple_function_calls_test_response.txt new file mode 100644 index 000000000000..ceb8f3e8b44b --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_streaming_multiple_function_calls_test_response.txt @@ -0,0 +1,9 @@ +data: 
{"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":0,"id":"1","type":"function","function":{"name":"MyPlugin-GetCurrentWeather","arguments":"{\n\"location\": \"Boston, MA\"\n}"}}]},"finish_reason":"tool_calls"}]} + +data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":1,"id":"2","type":"function","function":{"name":"MyPlugin-FunctionWithException","arguments":"{\n\"argument\": \"value\"\n}"}}]},"finish_reason":"tool_calls"}]} + +data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":2,"id":"3","type":"function","function":{"name":"MyPlugin-NonExistentFunction","arguments":"{\n\"argument\": \"value\"\n}"}}]},"finish_reason":"tool_calls"}]} + +data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":3,"id":"4","type":"function","function":{"name":"MyPlugin-InvalidArguments","arguments":"invalid_arguments_format"}}]},"finish_reason":"tool_calls"}]} + +data: [DONE] diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_streaming_single_function_call_test_response.txt b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_streaming_single_function_call_test_response.txt new file mode 100644 index 000000000000..6835039941ce --- /dev/null +++ 
b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_streaming_single_function_call_test_response.txt @@ -0,0 +1,3 @@ +data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":0,"id":"1","type":"function","function":{"name":"MyPlugin-GetCurrentWeather","arguments":"{\n\"location\": \"Boston, MA\"\n}"}}]},"finish_reason":"tool_calls"}]} + +data: [DONE] diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_streaming_test_response.txt b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_streaming_test_response.txt new file mode 100644 index 000000000000..e5e8d1b19afd --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_streaming_test_response.txt @@ -0,0 +1,5 @@ +data: {"id":"chatcmpl-96fqQVHGjG9Yzs4ZMB1K6nfy2oEoo","object":"chat.completion.chunk","created":1711377846,"model":"gpt-4-0125-preview","system_fingerprint":"fp_a7daf7c51e","choices":[{"index":0,"delta":{"content":"Test chat streaming response"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-96fqQVHGjG9Yzs4ZMB1K6nfy2oEoo","object":"chat.completion.chunk","created":1711377846,"model":"gpt-4-0125-preview","system_fingerprint":"fp_a7daf7c51e","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]} + +data: [DONE] diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_test_response.json b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_test_response.json new file mode 100644 index 000000000000..b601bac8b55b --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_test_response.json @@ -0,0 +1,22 @@ +{ + "id": "response-id", + "object": "chat.completion", + "created": 1704208954, + 
"model": "gpt-4", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "Test chat response" + }, + "finish_reason": "stop" + } + ], + "usage": { + "prompt_tokens": 55, + "completion_tokens": 100, + "total_tokens": 155 + }, + "system_fingerprint": null +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_with_data_streaming_test_response.txt b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_with_data_streaming_test_response.txt new file mode 100644 index 000000000000..5e17403da9fc --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_with_data_streaming_test_response.txt @@ -0,0 +1 @@ +data: {"id":"response-id","model":"","created":1684304924,"object":"chat.completion","choices":[{"index":0,"messages":[{"delta":{"role":"assistant","content":"Test chat with data streaming response"},"end_turn":false}]}]} diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_with_data_test_response.json b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_with_data_test_response.json new file mode 100644 index 000000000000..1d1d4e78b5bd --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_with_data_test_response.json @@ -0,0 +1,28 @@ +{ + "id": "response-id", + "model": "", + "created": 1684304924, + "object": "chat.completion", + "choices": [ + { + "index": 0, + "messages": [ + { + "role": "tool", + "content": "{\"citations\": [{\"content\": \"\\OpenAI AI services are cloud-based artificial intelligence (AI) services...\", \"id\": null, \"title\": \"What is OpenAI AI services\", \"filepath\": null, \"url\": null, \"metadata\": {\"chunking\": \"original document size=250. 
Scores=0.4314117431640625 and 1.72564697265625.Org Highlight count=4.\"}, \"chunk_id\": \"0\"}], \"intent\": \"[\\\"Learn about OpenAI AI services.\\\"]\"}", + "end_turn": false + }, + { + "role": "assistant", + "content": "Test chat with data response", + "end_turn": true + } + ] + } + ], + "usage": { + "prompt_tokens": 55, + "completion_tokens": 100, + "total_tokens": 155 + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/filters_multiple_function_calls_test_response.json b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/filters_multiple_function_calls_test_response.json new file mode 100644 index 000000000000..3ffa6b00cc3f --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/filters_multiple_function_calls_test_response.json @@ -0,0 +1,40 @@ +{ + "id": "response-id", + "object": "chat.completion", + "created": 1699896916, + "model": "gpt-3.5-turbo-0613", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": null, + "tool_calls": [ + { + "id": "1", + "type": "function", + "function": { + "name": "MyPlugin-Function1", + "arguments": "{\n\"parameter\": \"function1-value\"\n}" + } + }, + { + "id": "2", + "type": "function", + "function": { + "name": "MyPlugin-Function2", + "arguments": "{\n\"parameter\": \"function2-value\"\n}" + } + } + ] + }, + "logprobs": null, + "finish_reason": "tool_calls" + } + ], + "usage": { + "prompt_tokens": 82, + "completion_tokens": 17, + "total_tokens": 99 + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/filters_streaming_multiple_function_calls_test_response.txt b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/filters_streaming_multiple_function_calls_test_response.txt new file mode 100644 index 000000000000..c8aeb98e8b82 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/filters_streaming_multiple_function_calls_test_response.txt @@ -0,0 +1,5 @@ +data: 
{"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":0,"id":"1","type":"function","function":{"name":"MyPlugin-Function1","arguments":"{\n\"parameter\": \"function1-value\"\n}"}}]},"finish_reason":"tool_calls"}]} + +data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":1,"id":"2","type":"function","function":{"name":"MyPlugin-Function2","arguments":"{\n\"parameter\": \"function2-value\"\n}"}}]},"finish_reason":"tool_calls"}]} + +data: [DONE] diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/text-embeddings-multiple-response.txt b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/text-embeddings-multiple-response.txt new file mode 100644 index 000000000000..46a9581cf0cc --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/text-embeddings-multiple-response.txt @@ -0,0 +1,20 @@ +{ + "object": "list", + "data": [ + { + "object": "embedding", + "index": 0, + "embedding": "zcyMP83MDEAzM1NAzcyMQA==" + }, + { + "object": "embedding", + "index": 1, + "embedding": "zcyMP83MDEAzM1NAzcyMQA==" + } + ], + "model": "text-embedding-ada-002", + "usage": { + "prompt_tokens": 7, + "total_tokens": 7 + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/text-embeddings-response.txt b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/text-embeddings-response.txt new file mode 100644 index 000000000000..c715b851b78c --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/text-embeddings-response.txt @@ -0,0 +1,15 @@ +{ + "object": "list", + "data": [ + { + "object": "embedding", + "index": 0, + "embedding": 
"zcyMP83MDEAzM1NAzcyMQA==" + } + ], + "model": "text-embedding-ada-002", + "usage": { + "prompt_tokens": 7, + "total_tokens": 7 + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/text-to-image-response.txt b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/text-to-image-response.txt new file mode 100644 index 000000000000..7d8f7327a5ec --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/text-to-image-response.txt @@ -0,0 +1,8 @@ +{ + "created": 1702575371, + "data": [ + { + "url": "https://image-url/" + } + ] +} \ No newline at end of file diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ToolCallBehaviorTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/ToolCallBehaviorTests.cs similarity index 73% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ToolCallBehaviorTests.cs rename to dotnet/src/Connectors/Connectors.OpenAI.UnitTests/ToolCallBehaviorTests.cs index d39480ebfe8d..76b6c47360b6 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ToolCallBehaviorTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/ToolCallBehaviorTests.cs @@ -2,13 +2,13 @@ using System.Collections.Generic; using System.Linq; -using Azure.AI.OpenAI; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Connectors.OpenAI; +using OpenAI.Chat; using Xunit; using static Microsoft.SemanticKernel.Connectors.OpenAI.ToolCallBehavior; -namespace SemanticKernel.Connectors.UnitTests.OpenAI; +namespace SemanticKernel.Connectors.OpenAI.UnitTests; /// /// Unit tests for @@ -24,6 +24,7 @@ public void EnableKernelFunctionsReturnsCorrectKernelFunctionsInstance() // Assert Assert.IsType(behavior); Assert.Equal(0, behavior.MaximumAutoInvokeAttempts); + Assert.Equal($"{nameof(KernelFunctions)}(autoInvoke:{behavior.MaximumAutoInvokeAttempts != 0})", behavior.ToString()); } [Fact] @@ -47,6 +48,7 @@ public void EnableFunctionsReturnsEnabledFunctionsInstance() // Assert 
Assert.IsType(behavior); + Assert.Contains($"{nameof(EnabledFunctions)}(autoInvoke:{behavior.MaximumAutoInvokeAttempts != 0})", behavior.ToString()); } [Fact] @@ -57,6 +59,7 @@ public void RequireFunctionReturnsRequiredFunctionInstance() // Assert Assert.IsType(behavior); + Assert.Contains($"{nameof(RequiredFunction)}(autoInvoke:{behavior.MaximumAutoInvokeAttempts != 0})", behavior.ToString()); } [Fact] @@ -64,13 +67,13 @@ public void KernelFunctionsConfigureOptionsWithNullKernelDoesNotAddTools() { // Arrange var kernelFunctions = new KernelFunctions(autoInvoke: false); - var chatCompletionsOptions = new ChatCompletionsOptions(); // Act - kernelFunctions.ConfigureOptions(null, chatCompletionsOptions); + var options = kernelFunctions.ConfigureOptions(null); // Assert - Assert.Empty(chatCompletionsOptions.Tools); + Assert.Null(options.Choice); + Assert.Null(options.Tools); } [Fact] @@ -78,15 +81,14 @@ public void KernelFunctionsConfigureOptionsWithoutFunctionsDoesNotAddTools() { // Arrange var kernelFunctions = new KernelFunctions(autoInvoke: false); - var chatCompletionsOptions = new ChatCompletionsOptions(); var kernel = Kernel.CreateBuilder().Build(); // Act - kernelFunctions.ConfigureOptions(kernel, chatCompletionsOptions); + var options = kernelFunctions.ConfigureOptions(kernel); // Assert - Assert.Null(chatCompletionsOptions.ToolChoice); - Assert.Empty(chatCompletionsOptions.Tools); + Assert.Null(options.Choice); + Assert.Null(options.Tools); } [Fact] @@ -94,7 +96,6 @@ public void KernelFunctionsConfigureOptionsWithFunctionsAddsTools() { // Arrange var kernelFunctions = new KernelFunctions(autoInvoke: false); - var chatCompletionsOptions = new ChatCompletionsOptions(); var kernel = Kernel.CreateBuilder().Build(); var plugin = this.GetTestPlugin(); @@ -102,12 +103,12 @@ public void KernelFunctionsConfigureOptionsWithFunctionsAddsTools() kernel.Plugins.Add(plugin); // Act - kernelFunctions.ConfigureOptions(kernel, chatCompletionsOptions); + var options = 
kernelFunctions.ConfigureOptions(kernel); // Assert - Assert.Equal(ChatCompletionsToolChoice.Auto, chatCompletionsOptions.ToolChoice); + Assert.Equal(ChatToolChoice.Auto, options.Choice); - this.AssertTools(chatCompletionsOptions); + this.AssertTools(options.Tools); } [Fact] @@ -115,14 +116,13 @@ public void EnabledFunctionsConfigureOptionsWithoutFunctionsDoesNotAddTools() { // Arrange var enabledFunctions = new EnabledFunctions([], autoInvoke: false); - var chatCompletionsOptions = new ChatCompletionsOptions(); // Act - enabledFunctions.ConfigureOptions(null, chatCompletionsOptions); + var options = enabledFunctions.ConfigureOptions(null); // Assert - Assert.Null(chatCompletionsOptions.ToolChoice); - Assert.Empty(chatCompletionsOptions.Tools); + Assert.Null(options.Choice); + Assert.Null(options.Tools); } [Fact] @@ -131,10 +131,9 @@ public void EnabledFunctionsConfigureOptionsWithAutoInvokeAndNullKernelThrowsExc // Arrange var functions = this.GetTestPlugin().GetFunctionsMetadata().Select(function => function.ToOpenAIFunction()); var enabledFunctions = new EnabledFunctions(functions, autoInvoke: true); - var chatCompletionsOptions = new ChatCompletionsOptions(); // Act & Assert - var exception = Assert.Throws(() => enabledFunctions.ConfigureOptions(null, chatCompletionsOptions)); + var exception = Assert.Throws(() => enabledFunctions.ConfigureOptions(null)); Assert.Equal($"Auto-invocation with {nameof(EnabledFunctions)} is not supported when no kernel is provided.", exception.Message); } @@ -144,11 +143,10 @@ public void EnabledFunctionsConfigureOptionsWithAutoInvokeAndEmptyKernelThrowsEx // Arrange var functions = this.GetTestPlugin().GetFunctionsMetadata().Select(function => function.ToOpenAIFunction()); var enabledFunctions = new EnabledFunctions(functions, autoInvoke: true); - var chatCompletionsOptions = new ChatCompletionsOptions(); var kernel = Kernel.CreateBuilder().Build(); // Act & Assert - var exception = Assert.Throws(() => 
enabledFunctions.ConfigureOptions(kernel, chatCompletionsOptions)); + var exception = Assert.Throws(() => enabledFunctions.ConfigureOptions(kernel)); Assert.Equal($"The specified {nameof(EnabledFunctions)} function MyPlugin-MyFunction is not available in the kernel.", exception.Message); } @@ -161,18 +159,17 @@ public void EnabledFunctionsConfigureOptionsWithKernelAndPluginsAddsTools(bool a var plugin = this.GetTestPlugin(); var functions = plugin.GetFunctionsMetadata().Select(function => function.ToOpenAIFunction()); var enabledFunctions = new EnabledFunctions(functions, autoInvoke); - var chatCompletionsOptions = new ChatCompletionsOptions(); var kernel = Kernel.CreateBuilder().Build(); kernel.Plugins.Add(plugin); // Act - enabledFunctions.ConfigureOptions(kernel, chatCompletionsOptions); + var options = enabledFunctions.ConfigureOptions(kernel); // Assert - Assert.Equal(ChatCompletionsToolChoice.Auto, chatCompletionsOptions.ToolChoice); + Assert.Equal(ChatToolChoice.Auto, options.Choice); - this.AssertTools(chatCompletionsOptions); + this.AssertTools(options.Tools); } [Fact] @@ -181,10 +178,9 @@ public void RequiredFunctionsConfigureOptionsWithAutoInvokeAndNullKernelThrowsEx // Arrange var function = this.GetTestPlugin().GetFunctionsMetadata().Select(function => function.ToOpenAIFunction()).First(); var requiredFunction = new RequiredFunction(function, autoInvoke: true); - var chatCompletionsOptions = new ChatCompletionsOptions(); // Act & Assert - var exception = Assert.Throws(() => requiredFunction.ConfigureOptions(null, chatCompletionsOptions)); + var exception = Assert.Throws(() => requiredFunction.ConfigureOptions(null)); Assert.Equal($"Auto-invocation with {nameof(RequiredFunction)} is not supported when no kernel is provided.", exception.Message); } @@ -194,11 +190,10 @@ public void RequiredFunctionsConfigureOptionsWithAutoInvokeAndEmptyKernelThrowsE // Arrange var function = this.GetTestPlugin().GetFunctionsMetadata().Select(function => 
function.ToOpenAIFunction()).First(); var requiredFunction = new RequiredFunction(function, autoInvoke: true); - var chatCompletionsOptions = new ChatCompletionsOptions(); var kernel = Kernel.CreateBuilder().Build(); // Act & Assert - var exception = Assert.Throws(() => requiredFunction.ConfigureOptions(kernel, chatCompletionsOptions)); + var exception = Assert.Throws(() => requiredFunction.ConfigureOptions(kernel)); Assert.Equal($"The specified {nameof(RequiredFunction)} function MyPlugin-MyFunction is not available in the kernel.", exception.Message); } @@ -208,18 +203,17 @@ public void RequiredFunctionConfigureOptionsAddsTools() // Arrange var plugin = this.GetTestPlugin(); var function = plugin.GetFunctionsMetadata()[0].ToOpenAIFunction(); - var chatCompletionsOptions = new ChatCompletionsOptions(); var requiredFunction = new RequiredFunction(function, autoInvoke: true); var kernel = new Kernel(); kernel.Plugins.Add(plugin); // Act - requiredFunction.ConfigureOptions(kernel, chatCompletionsOptions); + var options = requiredFunction.ConfigureOptions(kernel); // Assert - Assert.NotNull(chatCompletionsOptions.ToolChoice); + Assert.NotNull(options.Choice); - this.AssertTools(chatCompletionsOptions); + this.AssertTools(options.Tools); } private KernelPlugin GetTestPlugin() @@ -234,16 +228,15 @@ private KernelPlugin GetTestPlugin() return KernelPluginFactory.CreateFromFunctions("MyPlugin", [function]); } - private void AssertTools(ChatCompletionsOptions chatCompletionsOptions) + private void AssertTools(IList? 
tools) { - Assert.Single(chatCompletionsOptions.Tools); - - var tool = chatCompletionsOptions.Tools[0] as ChatCompletionsFunctionToolDefinition; + Assert.NotNull(tools); + var tool = Assert.Single(tools); Assert.NotNull(tool); - Assert.Equal("MyPlugin-MyFunction", tool.Name); - Assert.Equal("Test Function", tool.Description); - Assert.Equal("{\"type\":\"object\",\"required\":[],\"properties\":{\"parameter1\":{\"type\":\"string\"},\"parameter2\":{\"type\":\"string\"}}}", tool.Parameters.ToString()); + Assert.Equal("MyPlugin-MyFunction", tool.FunctionName); + Assert.Equal("Test Function", tool.FunctionDescription); + Assert.Equal("{\"type\":\"object\",\"required\":[],\"properties\":{\"parameter1\":{\"type\":\"string\"},\"parameter2\":{\"type\":\"string\"}}}", tool.FunctionParameters.ToString()); } } diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AddHeaderRequestPolicy.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AddHeaderRequestPolicy.cs deleted file mode 100644 index 89ecb3bef22b..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AddHeaderRequestPolicy.cs +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using Azure.Core; -using Azure.Core.Pipeline; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// Helper class to inject headers into Azure SDK HTTP pipeline -/// -internal sealed class AddHeaderRequestPolicy(string headerName, string headerValue) : HttpPipelineSynchronousPolicy -{ - private readonly string _headerName = headerName; - private readonly string _headerValue = headerValue; - - public override void OnSendingRequest(HttpMessage message) - { - message.Request.Headers.Add(this._headerName, this._headerValue); - } -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAITextToAudioClient.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAITextToAudioClient.cs deleted file mode 100644 index dd02ddd0ebee..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAITextToAudioClient.cs +++ /dev/null @@ -1,141 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Http; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// Azure OpenAI text-to-audio client for HTTP operations. -/// -[Experimental("SKEXP0001")] -internal sealed class AzureOpenAITextToAudioClient -{ - private readonly ILogger _logger; - private readonly HttpClient _httpClient; - - private readonly string _deploymentName; - private readonly string _endpoint; - private readonly string _apiKey; - private readonly string? _modelId; - - /// - /// Storage for AI service attributes. - /// - internal Dictionary Attributes { get; } = []; - - /// - /// Creates an instance of the with API key auth. 
- /// - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. - internal AzureOpenAITextToAudioClient( - string deploymentName, - string endpoint, - string apiKey, - string? modelId = null, - HttpClient? httpClient = null, - ILogger? logger = null) - { - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.StartsWith(endpoint, "https://", "The Azure OpenAI endpoint must start with 'https://'"); - Verify.NotNullOrWhiteSpace(apiKey); - - this._deploymentName = deploymentName; - this._endpoint = endpoint; - this._apiKey = apiKey; - this._modelId = modelId; - - this._httpClient = HttpClientProvider.GetHttpClient(httpClient); - this._logger = logger ?? NullLogger.Instance; - } - - internal async Task> GetAudioContentsAsync( - string text, - PromptExecutionSettings? executionSettings, - CancellationToken cancellationToken) - { - OpenAITextToAudioExecutionSettings? 
audioExecutionSettings = OpenAITextToAudioExecutionSettings.FromExecutionSettings(executionSettings); - - Verify.NotNullOrWhiteSpace(audioExecutionSettings?.Voice); - - var modelId = this.GetModelId(audioExecutionSettings); - - using var request = this.GetRequest(text, modelId, audioExecutionSettings); - using var response = await this.SendRequestAsync(request, cancellationToken).ConfigureAwait(false); - var data = await response.Content.ReadAsByteArrayAndTranslateExceptionAsync().ConfigureAwait(false); - - return [new(data, modelId)]; - } - - internal void AddAttribute(string key, string? value) - { - if (!string.IsNullOrEmpty(value)) - { - this.Attributes.Add(key, value); - } - } - - #region private - - private async Task SendRequestAsync( - HttpRequestMessage request, - CancellationToken cancellationToken) - { - request.Headers.Add("User-Agent", HttpHeaderConstant.Values.UserAgent); - request.Headers.Add("Api-Key", this._apiKey); - request.Headers.Add(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(AzureOpenAITextToAudioClient))); - - try - { - return await this._httpClient.SendWithSuccessCheckAsync(request, cancellationToken).ConfigureAwait(false); - } - catch (HttpOperationException ex) - { - this._logger.LogError( - "Error occurred on text-to-audio request execution: {ExceptionMessage}", ex.Message); - - throw; - } - } - - private HttpRequestMessage GetRequest(string text, string modelId, OpenAITextToAudioExecutionSettings executionSettings) - { - const string DefaultApiVersion = "2024-02-15-preview"; - - var baseUrl = !string.IsNullOrWhiteSpace(this._httpClient.BaseAddress?.AbsoluteUri) ? 
- this._httpClient.BaseAddress!.AbsoluteUri : - this._endpoint; - - var requestUrl = $"openai/deployments/{this._deploymentName}/audio/speech?api-version={DefaultApiVersion}"; - - var payload = new TextToAudioRequest(modelId, text, executionSettings.Voice) - { - ResponseFormat = executionSettings.ResponseFormat, - Speed = executionSettings.Speed - }; - - return HttpRequest.CreatePostRequest($"{baseUrl.TrimEnd('/')}/{requestUrl}", payload); - } - - private string GetModelId(OpenAITextToAudioExecutionSettings executionSettings) - { - return - !string.IsNullOrWhiteSpace(this._modelId) ? this._modelId! : - !string.IsNullOrWhiteSpace(executionSettings.ModelId) ? executionSettings.ModelId! : - this._deploymentName; - } - - #endregion -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAIWithDataChatMessageContent.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAIWithDataChatMessageContent.cs deleted file mode 100644 index 594b420bc5f2..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAIWithDataChatMessageContent.cs +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Linq; -using Microsoft.SemanticKernel.ChatCompletion; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// OpenAI specialized with data chat message content -/// -[Experimental("SKEXP0010")] -[Obsolete("This class is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] -public sealed class AzureOpenAIWithDataChatMessageContent : ChatMessageContent -{ - /// - /// Content from data source, including citations. - /// For more information see . - /// - public string? ToolContent { get; set; } - - /// - /// Initializes a new instance of the class. 
- /// - /// Azure Chat With Data Choice - /// The model ID used to generate the content - /// Additional metadata - internal AzureOpenAIWithDataChatMessageContent(ChatWithDataChoice chatChoice, string? modelId, IReadOnlyDictionary? metadata = null) - : base(default, string.Empty, modelId, chatChoice, System.Text.Encoding.UTF8, CreateMetadataDictionary(metadata)) - { - // An assistant message content must be present, otherwise the chat is not valid. - var chatMessage = chatChoice.Messages.FirstOrDefault(m => string.Equals(m.Role, AuthorRole.Assistant.Label, StringComparison.OrdinalIgnoreCase)) ?? - throw new ArgumentException("Chat is not valid. Chat message does not contain any messages with 'assistant' role."); - - this.Content = chatMessage.Content; - this.Role = new AuthorRole(chatMessage.Role); - - this.ToolContent = chatChoice.Messages.FirstOrDefault(message => message.Role.Equals(AuthorRole.Tool.Label, StringComparison.OrdinalIgnoreCase))?.Content; - ((Dictionary)this.Metadata!).Add(nameof(this.ToolContent), this.ToolContent); - } - - private static Dictionary CreateMetadataDictionary(IReadOnlyDictionary? metadata) - { - Dictionary newDictionary; - if (metadata is null) - { - // There's no existing metadata to clone; just allocate a new dictionary. - newDictionary = new Dictionary(1); - } - else if (metadata is IDictionary origMutable) - { - // Efficiently clone the old dictionary to a new one. - newDictionary = new Dictionary(origMutable); - } - else - { - // There's metadata to clone but we have to do so one item at a time. 
- newDictionary = new Dictionary(metadata.Count + 1); - foreach (var kvp in metadata) - { - newDictionary[kvp.Key] = kvp.Value; - } - } - - return newDictionary; - } -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAIWithDataStreamingChatMessageContent.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAIWithDataStreamingChatMessageContent.cs deleted file mode 100644 index ebe57f446293..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAIWithDataStreamingChatMessageContent.cs +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Linq; -using System.Text; -using Microsoft.SemanticKernel.ChatCompletion; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// Azure Open AI WithData Specialized streaming chat message content. -/// -/// -/// Represents a chat message content chunk that was streamed from the remote model. -/// -[Experimental("SKEXP0010")] -[Obsolete("This class is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] -public sealed class AzureOpenAIWithDataStreamingChatMessageContent : StreamingChatMessageContent -{ - /// - public string? FunctionName { get; set; } - - /// - public string? FunctionArgument { get; set; } - - /// - /// Create a new instance of the class. - /// - /// Azure message update representation from WithData apis - /// Index of the choice - /// The model ID used to generate the content - /// Additional metadata - internal AzureOpenAIWithDataStreamingChatMessageContent(ChatWithDataStreamingChoice choice, int choiceIndex, string modelId, IReadOnlyDictionary? 
metadata = null) : - base(AuthorRole.Assistant, null, choice, choiceIndex, modelId, Encoding.UTF8, metadata) - { - var message = choice.Messages.FirstOrDefault(this.IsValidMessage); - var messageContent = message?.Delta?.Content; - - this.Content = messageContent; - } - - private bool IsValidMessage(ChatWithDataStreamingMessage message) - { - return !message.EndTurn && - (message.Delta.Role is null || !message.Delta.Role.Equals(AuthorRole.Tool.Label, StringComparison.Ordinal)); - } -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ClientCore.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ClientCore.cs deleted file mode 100644 index 6cfcf4e3e459..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ClientCore.cs +++ /dev/null @@ -1,1591 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.Diagnostics.Metrics; -using System.Linq; -using System.Net.Http; -using System.Runtime.CompilerServices; -using System.Text; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Azure; -using Azure.AI.OpenAI; -using Azure.Core; -using Azure.Core.Pipeline; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Http; - -#pragma warning disable CA2208 // Instantiate argument exceptions correctly - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// Base class for AI clients that provides common functionality for interacting with OpenAI services. -/// -internal abstract class ClientCore -{ - private const string ModelProvider = "openai"; - private const int MaxResultsPerPrompt = 128; - - /// - /// The maximum number of auto-invokes that can be in-flight at any given time as part of the current - /// asynchronous chain of execution. 
- /// - /// - /// This is a fail-safe mechanism. If someone accidentally manages to set up execution settings in such a way that - /// auto-invocation is invoked recursively, and in particular where a prompt function is able to auto-invoke itself, - /// we could end up in an infinite loop. This const is a backstop against that happening. We should never come close - /// to this limit, but if we do, auto-invoke will be disabled for the current flow in order to prevent runaway execution. - /// With the current setup, the way this could possibly happen is if a prompt function is configured with built-in - /// execution settings that opt-in to auto-invocation of everything in the kernel, in which case the invocation of that - /// prompt function could advertize itself as a candidate for auto-invocation. We don't want to outright block that, - /// if that's something a developer has asked to do (e.g. it might be invoked with different arguments than its parent - /// was invoked with), but we do want to limit it. This limit is arbitrary and can be tweaked in the future and/or made - /// configurable should need arise. - /// - private const int MaxInflightAutoInvokes = 128; - - /// Singleton tool used when tool call count drops to 0 but we need to supply tools to keep the service happy. - private static readonly ChatCompletionsFunctionToolDefinition s_nonInvocableFunctionTool = new() { Name = "NonInvocableTool" }; - - /// Tracking for . - private static readonly AsyncLocal s_inflightAutoInvokes = new(); - - internal ClientCore(ILogger? logger = null) - { - this.Logger = logger ?? NullLogger.Instance; - } - - /// - /// Model Id or Deployment Name - /// - internal string DeploymentOrModelName { get; set; } = string.Empty; - - /// - /// OpenAI / Azure OpenAI Client - /// - internal abstract OpenAIClient Client { get; } - - internal Uri? 
Endpoint { get; set; } = null; - - /// - /// Logger instance - /// - internal ILogger Logger { get; set; } - - /// - /// Storage for AI service attributes. - /// - internal Dictionary Attributes { get; } = []; - - /// - /// Instance of for metrics. - /// - private static readonly Meter s_meter = new("Microsoft.SemanticKernel.Connectors.OpenAI"); - - /// - /// Instance of to keep track of the number of prompt tokens used. - /// - private static readonly Counter s_promptTokensCounter = - s_meter.CreateCounter( - name: "semantic_kernel.connectors.openai.tokens.prompt", - unit: "{token}", - description: "Number of prompt tokens used"); - - /// - /// Instance of to keep track of the number of completion tokens used. - /// - private static readonly Counter s_completionTokensCounter = - s_meter.CreateCounter( - name: "semantic_kernel.connectors.openai.tokens.completion", - unit: "{token}", - description: "Number of completion tokens used"); - - /// - /// Instance of to keep track of the total number of tokens used. - /// - private static readonly Counter s_totalTokensCounter = - s_meter.CreateCounter( - name: "semantic_kernel.connectors.openai.tokens.total", - unit: "{token}", - description: "Number of tokens used"); - - /// - /// Creates completions for the prompt and settings. - /// - /// The prompt to complete. - /// Execution settings for the completion API. - /// The containing services, plugins, and other state for use throughout the operation. - /// The to monitor for cancellation requests. The default is . - /// Completions generated by the remote model - internal async Task> GetTextResultsAsync( - string prompt, - PromptExecutionSettings? executionSettings, - Kernel? 
kernel, - CancellationToken cancellationToken = default) - { - OpenAIPromptExecutionSettings textExecutionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings, OpenAIPromptExecutionSettings.DefaultTextMaxTokens); - - ValidateMaxTokens(textExecutionSettings.MaxTokens); - - var options = CreateCompletionsOptions(prompt, textExecutionSettings, this.DeploymentOrModelName); - - Completions? responseData = null; - List responseContent; - using (var activity = ModelDiagnostics.StartCompletionActivity(this.Endpoint, this.DeploymentOrModelName, ModelProvider, prompt, textExecutionSettings)) - { - try - { - responseData = (await RunRequestAsync(() => this.Client.GetCompletionsAsync(options, cancellationToken)).ConfigureAwait(false)).Value; - if (responseData.Choices.Count == 0) - { - throw new KernelException("Text completions not found"); - } - } - catch (Exception ex) when (activity is not null) - { - activity.SetError(ex); - if (responseData != null) - { - // Capture available metadata even if the operation failed. - activity - .SetResponseId(responseData.Id) - .SetPromptTokenUsage(responseData.Usage.PromptTokens) - .SetCompletionTokenUsage(responseData.Usage.CompletionTokens); - } - throw; - } - - responseContent = responseData.Choices.Select(choice => new TextContent(choice.Text, this.DeploymentOrModelName, choice, Encoding.UTF8, GetTextChoiceMetadata(responseData, choice))).ToList(); - activity?.SetCompletionResponse(responseContent, responseData.Usage.PromptTokens, responseData.Usage.CompletionTokens); - } - - this.LogUsage(responseData.Usage); - - return responseContent; - } - - internal async IAsyncEnumerable GetStreamingTextContentsAsync( - string prompt, - PromptExecutionSettings? executionSettings, - Kernel? 
kernel, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - OpenAIPromptExecutionSettings textExecutionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings, OpenAIPromptExecutionSettings.DefaultTextMaxTokens); - - ValidateMaxTokens(textExecutionSettings.MaxTokens); - - var options = CreateCompletionsOptions(prompt, textExecutionSettings, this.DeploymentOrModelName); - - using var activity = ModelDiagnostics.StartCompletionActivity(this.Endpoint, this.DeploymentOrModelName, ModelProvider, prompt, textExecutionSettings); - - StreamingResponse response; - try - { - response = await RunRequestAsync(() => this.Client.GetCompletionsStreamingAsync(options, cancellationToken)).ConfigureAwait(false); - } - catch (Exception ex) when (activity is not null) - { - activity.SetError(ex); - throw; - } - - var responseEnumerator = response.ConfigureAwait(false).GetAsyncEnumerator(); - List? streamedContents = activity is not null ? [] : null; - try - { - while (true) - { - try - { - if (!await responseEnumerator.MoveNextAsync()) - { - break; - } - } - catch (Exception ex) when (activity is not null) - { - activity.SetError(ex); - throw; - } - - Completions completions = responseEnumerator.Current; - foreach (Choice choice in completions.Choices) - { - var openAIStreamingTextContent = new OpenAIStreamingTextContent( - choice.Text, choice.Index, this.DeploymentOrModelName, choice, GetTextChoiceMetadata(completions, choice)); - streamedContents?.Add(openAIStreamingTextContent); - yield return openAIStreamingTextContent; - } - } - } - finally - { - activity?.EndStreaming(streamedContents); - await responseEnumerator.DisposeAsync(); - } - } - - private static Dictionary GetTextChoiceMetadata(Completions completions, Choice choice) - { - return new Dictionary(8) - { - { nameof(completions.Id), completions.Id }, - { nameof(completions.Created), completions.Created }, - { nameof(completions.PromptFilterResults), 
completions.PromptFilterResults }, - { nameof(completions.Usage), completions.Usage }, - { nameof(choice.ContentFilterResults), choice.ContentFilterResults }, - - // Serialization of this struct behaves as an empty object {}, need to cast to string to avoid it. - { nameof(choice.FinishReason), choice.FinishReason?.ToString() }, - - { nameof(choice.LogProbabilityModel), choice.LogProbabilityModel }, - { nameof(choice.Index), choice.Index }, - }; - } - - private static Dictionary GetChatChoiceMetadata(ChatCompletions completions, ChatChoice chatChoice) - { - return new Dictionary(12) - { - { nameof(completions.Id), completions.Id }, - { nameof(completions.Created), completions.Created }, - { nameof(completions.PromptFilterResults), completions.PromptFilterResults }, - { nameof(completions.SystemFingerprint), completions.SystemFingerprint }, - { nameof(completions.Usage), completions.Usage }, - { nameof(chatChoice.ContentFilterResults), chatChoice.ContentFilterResults }, - - // Serialization of this struct behaves as an empty object {}, need to cast to string to avoid it. - { nameof(chatChoice.FinishReason), chatChoice.FinishReason?.ToString() }, - - { nameof(chatChoice.FinishDetails), chatChoice.FinishDetails }, - { nameof(chatChoice.LogProbabilityInfo), chatChoice.LogProbabilityInfo }, - { nameof(chatChoice.Index), chatChoice.Index }, - { nameof(chatChoice.Enhancements), chatChoice.Enhancements }, - }; - } - - private static Dictionary GetResponseMetadata(StreamingChatCompletionsUpdate completions) - { - return new Dictionary(4) - { - { nameof(completions.Id), completions.Id }, - { nameof(completions.Created), completions.Created }, - { nameof(completions.SystemFingerprint), completions.SystemFingerprint }, - - // Serialization of this struct behaves as an empty object {}, need to cast to string to avoid it. 
- { nameof(completions.FinishReason), completions.FinishReason?.ToString() }, - }; - } - - private static Dictionary GetResponseMetadata(AudioTranscription audioTranscription) - { - return new Dictionary(3) - { - { nameof(audioTranscription.Language), audioTranscription.Language }, - { nameof(audioTranscription.Duration), audioTranscription.Duration }, - { nameof(audioTranscription.Segments), audioTranscription.Segments } - }; - } - - /// - /// Generates an embedding from the given . - /// - /// List of strings to generate embeddings for - /// The containing services, plugins, and other state for use throughout the operation. - /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. - /// The to monitor for cancellation requests. The default is . - /// List of embeddings - internal async Task>> GetEmbeddingsAsync( - IList data, - Kernel? kernel, - int? dimensions, - CancellationToken cancellationToken) - { - var result = new List>(data.Count); - - if (data.Count > 0) - { - var embeddingsOptions = new EmbeddingsOptions(this.DeploymentOrModelName, data) - { - Dimensions = dimensions - }; - - var response = await RunRequestAsync(() => this.Client.GetEmbeddingsAsync(embeddingsOptions, cancellationToken)).ConfigureAwait(false); - var embeddings = response.Value.Data; - - if (embeddings.Count != data.Count) - { - throw new KernelException($"Expected {data.Count} text embedding(s), but received {embeddings.Count}"); - } - - for (var i = 0; i < embeddings.Count; i++) - { - result.Add(embeddings[i].Embedding); - } - } - - return result; - } - - internal async Task> GetTextContentFromAudioAsync( - AudioContent content, - PromptExecutionSettings? 
executionSettings, - CancellationToken cancellationToken) - { - Verify.NotNull(content.Data); - var audioData = content.Data.Value; - if (audioData.IsEmpty) - { - throw new ArgumentException("Audio data cannot be empty", nameof(content)); - } - - OpenAIAudioToTextExecutionSettings? audioExecutionSettings = OpenAIAudioToTextExecutionSettings.FromExecutionSettings(executionSettings); - - Verify.ValidFilename(audioExecutionSettings?.Filename); - - var audioOptions = new AudioTranscriptionOptions - { - AudioData = BinaryData.FromBytes(audioData), - DeploymentName = this.DeploymentOrModelName, - Filename = audioExecutionSettings.Filename, - Language = audioExecutionSettings.Language, - Prompt = audioExecutionSettings.Prompt, - ResponseFormat = audioExecutionSettings.ResponseFormat, - Temperature = audioExecutionSettings.Temperature - }; - - AudioTranscription responseData = (await RunRequestAsync(() => this.Client.GetAudioTranscriptionAsync(audioOptions, cancellationToken)).ConfigureAwait(false)).Value; - - return [new(responseData.Text, this.DeploymentOrModelName, metadata: GetResponseMetadata(responseData))]; - } - - /// - /// Generate a new chat message - /// - /// Chat history - /// Execution settings for the completion API. - /// The containing services, plugins, and other state for use throughout the operation. - /// Async cancellation token - /// Generated chat message in string format - internal async Task> GetChatMessageContentsAsync( - ChatHistory chat, - PromptExecutionSettings? executionSettings, - Kernel? kernel, - CancellationToken cancellationToken = default) - { - Verify.NotNull(chat); - - // Convert the incoming execution settings to OpenAI settings. 
- OpenAIPromptExecutionSettings chatExecutionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings); - bool autoInvoke = kernel is not null && chatExecutionSettings.ToolCallBehavior?.MaximumAutoInvokeAttempts > 0 && s_inflightAutoInvokes.Value < MaxInflightAutoInvokes; - ValidateMaxTokens(chatExecutionSettings.MaxTokens); - ValidateAutoInvoke(autoInvoke, chatExecutionSettings.ResultsPerPrompt); - - // Create the Azure SDK ChatCompletionOptions instance from all available information. - var chatOptions = this.CreateChatCompletionsOptions(chatExecutionSettings, chat, kernel, this.DeploymentOrModelName); - - for (int requestIndex = 1; ; requestIndex++) - { - // Make the request. - ChatCompletions? responseData = null; - List responseContent; - using (var activity = ModelDiagnostics.StartCompletionActivity(this.Endpoint, this.DeploymentOrModelName, ModelProvider, chat, chatExecutionSettings)) - { - try - { - responseData = (await RunRequestAsync(() => this.Client.GetChatCompletionsAsync(chatOptions, cancellationToken)).ConfigureAwait(false)).Value; - this.LogUsage(responseData.Usage); - if (responseData.Choices.Count == 0) - { - throw new KernelException("Chat completions not found"); - } - } - catch (Exception ex) when (activity is not null) - { - activity.SetError(ex); - if (responseData != null) - { - // Capture available metadata even if the operation failed. - activity - .SetResponseId(responseData.Id) - .SetPromptTokenUsage(responseData.Usage.PromptTokens) - .SetCompletionTokenUsage(responseData.Usage.CompletionTokens); - } - throw; - } - - responseContent = responseData.Choices.Select(chatChoice => this.GetChatMessage(chatChoice, responseData)).ToList(); - activity?.SetCompletionResponse(responseContent, responseData.Usage.PromptTokens, responseData.Usage.CompletionTokens); - } - - // If we don't want to attempt to invoke any functions, just return the result. 
- // Or if we are auto-invoking but we somehow end up with other than 1 choice even though only 1 was requested, similarly bail. - if (!autoInvoke || responseData.Choices.Count != 1) - { - return responseContent; - } - - Debug.Assert(kernel is not null); - - // Get our single result and extract the function call information. If this isn't a function call, or if it is - // but we're unable to find the function or extract the relevant information, just return the single result. - // Note that we don't check the FinishReason and instead check whether there are any tool calls, as the service - // may return a FinishReason of "stop" even if there are tool calls to be made, in particular if a required tool - // is specified. - ChatChoice resultChoice = responseData.Choices[0]; - OpenAIChatMessageContent result = this.GetChatMessage(resultChoice, responseData); - if (result.ToolCalls.Count == 0) - { - return [result]; - } - - if (this.Logger.IsEnabled(LogLevel.Debug)) - { - this.Logger.LogDebug("Tool requests: {Requests}", result.ToolCalls.Count); - } - if (this.Logger.IsEnabled(LogLevel.Trace)) - { - this.Logger.LogTrace("Function call requests: {Requests}", string.Join(", ", result.ToolCalls.OfType().Select(ftc => $"{ftc.Name}({ftc.Arguments})"))); - } - - // Add the original assistant message to the chatOptions; this is required for the service - // to understand the tool call responses. Also add the result message to the caller's chat - // history: if they don't want it, they can remove it, but this makes the data available, - // including metadata like usage. - chatOptions.Messages.Add(GetRequestMessage(resultChoice.Message)); - chat.Add(result); - - // We must send back a response for every tool call, regardless of whether we successfully executed it or not. - // If we successfully execute it, we'll add the result. If we don't, we'll add an error. 
- for (int toolCallIndex = 0; toolCallIndex < result.ToolCalls.Count; toolCallIndex++) - { - ChatCompletionsToolCall toolCall = result.ToolCalls[toolCallIndex]; - - // We currently only know about function tool calls. If it's anything else, we'll respond with an error. - if (toolCall is not ChatCompletionsFunctionToolCall functionToolCall) - { - AddResponseMessage(chatOptions, chat, result: null, "Error: Tool call was not a function call.", toolCall, this.Logger); - continue; - } - - // Parse the function call arguments. - OpenAIFunctionToolCall? openAIFunctionToolCall; - try - { - openAIFunctionToolCall = new(functionToolCall); - } - catch (JsonException) - { - AddResponseMessage(chatOptions, chat, result: null, "Error: Function call arguments were invalid JSON.", toolCall, this.Logger); - continue; - } - - // Make sure the requested function is one we requested. If we're permitting any kernel function to be invoked, - // then we don't need to check this, as it'll be handled when we look up the function in the kernel to be able - // to invoke it. If we're permitting only a specific list of functions, though, then we need to explicitly check. - if (chatExecutionSettings.ToolCallBehavior?.AllowAnyRequestedKernelFunction is not true && - !IsRequestableTool(chatOptions, openAIFunctionToolCall)) - { - AddResponseMessage(chatOptions, chat, result: null, "Error: Function call request for a function that wasn't defined.", toolCall, this.Logger); - continue; - } - - // Find the function in the kernel and populate the arguments. - if (!kernel!.Plugins.TryGetFunctionAndArguments(openAIFunctionToolCall, out KernelFunction? function, out KernelArguments? functionArgs)) - { - AddResponseMessage(chatOptions, chat, result: null, "Error: Requested function could not be found.", toolCall, this.Logger); - continue; - } - - // Now, invoke the function, and add the resulting tool call message to the chat options. 
- FunctionResult functionResult = new(function) { Culture = kernel.Culture }; - AutoFunctionInvocationContext invocationContext = new(kernel, function, functionResult, chat, result) - { - ToolCallId = toolCall.Id, - Arguments = functionArgs, - RequestSequenceIndex = requestIndex - 1, - FunctionSequenceIndex = toolCallIndex, - FunctionCount = result.ToolCalls.Count, - CancellationToken = cancellationToken - }; - - s_inflightAutoInvokes.Value++; - try - { - invocationContext = await OnAutoFunctionInvocationAsync(kernel, invocationContext, async (context) => - { - // Check if filter requested termination. - if (context.Terminate) - { - return; - } - - // Note that we explicitly do not use executionSettings here; those pertain to the all-up operation and not necessarily to any - // further calls made as part of this function invocation. In particular, we must not use function calling settings naively here, - // as the called function could in turn telling the model about itself as a possible candidate for invocation. - context.Result = await function.InvokeAsync(kernel, invocationContext.Arguments, cancellationToken: cancellationToken).ConfigureAwait(false); - }).ConfigureAwait(false); - } -#pragma warning disable CA1031 // Do not catch general exception types - catch (Exception e) -#pragma warning restore CA1031 // Do not catch general exception types - { - AddResponseMessage(chatOptions, chat, null, $"Error: Exception while invoking function. {e.Message}", toolCall, this.Logger); - continue; - } - finally - { - s_inflightAutoInvokes.Value--; - } - - // Apply any changes from the auto function invocation filters context to final result. - functionResult = invocationContext.Result; - - object functionResultValue = functionResult.GetValue() ?? 
string.Empty; - var stringResult = ProcessFunctionResult(functionResultValue, chatExecutionSettings.ToolCallBehavior); - - AddResponseMessage(chatOptions, chat, stringResult, errorMessage: null, functionToolCall, this.Logger); - - // If filter requested termination, returning latest function result. - if (invocationContext.Terminate) - { - if (this.Logger.IsEnabled(LogLevel.Debug)) - { - this.Logger.LogDebug("Filter requested termination of automatic function invocation."); - } - - return [chat.Last()]; - } - } - - // Update tool use information for the next go-around based on having completed another iteration. - Debug.Assert(chatExecutionSettings.ToolCallBehavior is not null); - - // Set the tool choice to none. If we end up wanting to use tools, we'll reset it to the desired value. - chatOptions.ToolChoice = ChatCompletionsToolChoice.None; - chatOptions.Tools.Clear(); - - if (requestIndex >= chatExecutionSettings.ToolCallBehavior!.MaximumUseAttempts) - { - // Don't add any tools as we've reached the maximum attempts limit. - if (this.Logger.IsEnabled(LogLevel.Debug)) - { - this.Logger.LogDebug("Maximum use ({MaximumUse}) reached; removing the tool.", chatExecutionSettings.ToolCallBehavior!.MaximumUseAttempts); - } - } - else - { - // Regenerate the tool list as necessary. The invocation of the function(s) could have augmented - // what functions are available in the kernel. - chatExecutionSettings.ToolCallBehavior.ConfigureOptions(kernel, chatOptions); - } - - // Having already sent tools and with tool call information in history, the service can become unhappy ("[] is too short - 'tools'") - // if we don't send any tools in subsequent requests, even if we say not to use any. - if (chatOptions.ToolChoice == ChatCompletionsToolChoice.None) - { - Debug.Assert(chatOptions.Tools.Count == 0); - chatOptions.Tools.Add(s_nonInvocableFunctionTool); - } - - // Disable auto invocation if we've exceeded the allowed limit. 
- if (requestIndex >= chatExecutionSettings.ToolCallBehavior!.MaximumAutoInvokeAttempts) - { - autoInvoke = false; - if (this.Logger.IsEnabled(LogLevel.Debug)) - { - this.Logger.LogDebug("Maximum auto-invoke ({MaximumAutoInvoke}) reached.", chatExecutionSettings.ToolCallBehavior!.MaximumAutoInvokeAttempts); - } - } - } - } - - internal async IAsyncEnumerable GetStreamingChatMessageContentsAsync( - ChatHistory chat, - PromptExecutionSettings? executionSettings, - Kernel? kernel, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - Verify.NotNull(chat); - - OpenAIPromptExecutionSettings chatExecutionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings); - - ValidateMaxTokens(chatExecutionSettings.MaxTokens); - - bool autoInvoke = kernel is not null && chatExecutionSettings.ToolCallBehavior?.MaximumAutoInvokeAttempts > 0 && s_inflightAutoInvokes.Value < MaxInflightAutoInvokes; - ValidateAutoInvoke(autoInvoke, chatExecutionSettings.ResultsPerPrompt); - - var chatOptions = this.CreateChatCompletionsOptions(chatExecutionSettings, chat, kernel, this.DeploymentOrModelName); - - StringBuilder? contentBuilder = null; - Dictionary? toolCallIdsByIndex = null; - Dictionary? functionNamesByIndex = null; - Dictionary? functionArgumentBuildersByIndex = null; - - for (int requestIndex = 1; ; requestIndex++) - { - // Reset state - contentBuilder?.Clear(); - toolCallIdsByIndex?.Clear(); - functionNamesByIndex?.Clear(); - functionArgumentBuildersByIndex?.Clear(); - - // Stream the response. - IReadOnlyDictionary? metadata = null; - string? streamedName = null; - ChatRole? streamedRole = default; - CompletionsFinishReason finishReason = default; - ChatCompletionsFunctionToolCall[]? toolCalls = null; - FunctionCallContent[]? functionCallContents = null; - - using (var activity = ModelDiagnostics.StartCompletionActivity(this.Endpoint, this.DeploymentOrModelName, ModelProvider, chat, chatExecutionSettings)) - { - // Make the request. 
- StreamingResponse response; - try - { - response = await RunRequestAsync(() => this.Client.GetChatCompletionsStreamingAsync(chatOptions, cancellationToken)).ConfigureAwait(false); - } - catch (Exception ex) when (activity is not null) - { - activity.SetError(ex); - throw; - } - - var responseEnumerator = response.ConfigureAwait(false).GetAsyncEnumerator(); - List? streamedContents = activity is not null ? [] : null; - try - { - while (true) - { - try - { - if (!await responseEnumerator.MoveNextAsync()) - { - break; - } - } - catch (Exception ex) when (activity is not null) - { - activity.SetError(ex); - throw; - } - - StreamingChatCompletionsUpdate update = responseEnumerator.Current; - metadata = GetResponseMetadata(update); - streamedRole ??= update.Role; - streamedName ??= update.AuthorName; - finishReason = update.FinishReason ?? default; - - // If we're intending to invoke function calls, we need to consume that function call information. - if (autoInvoke) - { - if (update.ContentUpdate is { Length: > 0 } contentUpdate) - { - (contentBuilder ??= new()).Append(contentUpdate); - } - - OpenAIFunctionToolCall.TrackStreamingToolingUpdate(update.ToolCallUpdate, ref toolCallIdsByIndex, ref functionNamesByIndex, ref functionArgumentBuildersByIndex); - } - - AuthorRole? role = null; - if (streamedRole.HasValue) - { - role = new AuthorRole(streamedRole.Value.ToString()); - } - - OpenAIStreamingChatMessageContent openAIStreamingChatMessageContent = - new(update, update.ChoiceIndex ?? 
0, this.DeploymentOrModelName, metadata) - { - AuthorName = streamedName, - Role = role, - }; - - if (update.ToolCallUpdate is StreamingFunctionToolCallUpdate functionCallUpdate) - { - openAIStreamingChatMessageContent.Items.Add(new StreamingFunctionCallUpdateContent( - callId: functionCallUpdate.Id, - name: functionCallUpdate.Name, - arguments: functionCallUpdate.ArgumentsUpdate, - functionCallIndex: functionCallUpdate.ToolCallIndex)); - } - - streamedContents?.Add(openAIStreamingChatMessageContent); - yield return openAIStreamingChatMessageContent; - } - - // Translate all entries into ChatCompletionsFunctionToolCall instances. - toolCalls = OpenAIFunctionToolCall.ConvertToolCallUpdatesToChatCompletionsFunctionToolCalls( - ref toolCallIdsByIndex, ref functionNamesByIndex, ref functionArgumentBuildersByIndex); - - // Translate all entries into FunctionCallContent instances for diagnostics purposes. - functionCallContents = this.GetFunctionCallContents(toolCalls).ToArray(); - } - finally - { - activity?.EndStreaming(streamedContents, ModelDiagnostics.IsSensitiveEventsEnabled() ? functionCallContents : null); - await responseEnumerator.DisposeAsync(); - } - } - - // If we don't have a function to invoke, we're done. - // Note that we don't check the FinishReason and instead check whether there are any tool calls, as the service - // may return a FinishReason of "stop" even if there are tool calls to be made, in particular if a required tool - // is specified. - if (!autoInvoke || - toolCallIdsByIndex is not { Count: > 0 }) - { - yield break; - } - - // Get any response content that was streamed. - string content = contentBuilder?.ToString() ?? 
string.Empty; - - // Log the requests - if (this.Logger.IsEnabled(LogLevel.Trace)) - { - this.Logger.LogTrace("Function call requests: {Requests}", string.Join(", ", toolCalls.Select(fcr => $"{fcr.Name}({fcr.Arguments})"))); - } - else if (this.Logger.IsEnabled(LogLevel.Debug)) - { - this.Logger.LogDebug("Function call requests: {Requests}", toolCalls.Length); - } - - // Add the original assistant message to the chatOptions; this is required for the service - // to understand the tool call responses. - chatOptions.Messages.Add(GetRequestMessage(streamedRole ?? default, content, streamedName, toolCalls)); - - var chatMessageContent = this.GetChatMessage(streamedRole ?? default, content, toolCalls, functionCallContents, metadata, streamedName); - chat.Add(chatMessageContent); - - // Respond to each tooling request. - for (int toolCallIndex = 0; toolCallIndex < toolCalls.Length; toolCallIndex++) - { - ChatCompletionsFunctionToolCall toolCall = toolCalls[toolCallIndex]; - - // We currently only know about function tool calls. If it's anything else, we'll respond with an error. - if (string.IsNullOrEmpty(toolCall.Name)) - { - AddResponseMessage(chatOptions, chat, result: null, "Error: Tool call was not a function call.", toolCall, this.Logger); - continue; - } - - // Parse the function call arguments. - OpenAIFunctionToolCall? openAIFunctionToolCall; - try - { - openAIFunctionToolCall = new(toolCall); - } - catch (JsonException) - { - AddResponseMessage(chatOptions, chat, result: null, "Error: Function call arguments were invalid JSON.", toolCall, this.Logger); - continue; - } - - // Make sure the requested function is one we requested. If we're permitting any kernel function to be invoked, - // then we don't need to check this, as it'll be handled when we look up the function in the kernel to be able - // to invoke it. If we're permitting only a specific list of functions, though, then we need to explicitly check. 
- if (chatExecutionSettings.ToolCallBehavior?.AllowAnyRequestedKernelFunction is not true && - !IsRequestableTool(chatOptions, openAIFunctionToolCall)) - { - AddResponseMessage(chatOptions, chat, result: null, "Error: Function call request for a function that wasn't defined.", toolCall, this.Logger); - continue; - } - - // Find the function in the kernel and populate the arguments. - if (!kernel!.Plugins.TryGetFunctionAndArguments(openAIFunctionToolCall, out KernelFunction? function, out KernelArguments? functionArgs)) - { - AddResponseMessage(chatOptions, chat, result: null, "Error: Requested function could not be found.", toolCall, this.Logger); - continue; - } - - // Now, invoke the function, and add the resulting tool call message to the chat options. - FunctionResult functionResult = new(function) { Culture = kernel.Culture }; - AutoFunctionInvocationContext invocationContext = new(kernel, function, functionResult, chat, chatMessageContent) - { - ToolCallId = toolCall.Id, - Arguments = functionArgs, - RequestSequenceIndex = requestIndex - 1, - FunctionSequenceIndex = toolCallIndex, - FunctionCount = toolCalls.Length, - CancellationToken = cancellationToken - }; - - s_inflightAutoInvokes.Value++; - try - { - invocationContext = await OnAutoFunctionInvocationAsync(kernel, invocationContext, async (context) => - { - // Check if filter requested termination. - if (context.Terminate) - { - return; - } - - // Note that we explicitly do not use executionSettings here; those pertain to the all-up operation and not necessarily to any - // further calls made as part of this function invocation. In particular, we must not use function calling settings naively here, - // as the called function could in turn telling the model about itself as a possible candidate for invocation. 
- context.Result = await function.InvokeAsync(kernel, invocationContext.Arguments, cancellationToken: cancellationToken).ConfigureAwait(false); - }).ConfigureAwait(false); - } -#pragma warning disable CA1031 // Do not catch general exception types - catch (Exception e) -#pragma warning restore CA1031 // Do not catch general exception types - { - AddResponseMessage(chatOptions, chat, result: null, $"Error: Exception while invoking function. {e.Message}", toolCall, this.Logger); - continue; - } - finally - { - s_inflightAutoInvokes.Value--; - } - - // Apply any changes from the auto function invocation filters context to final result. - functionResult = invocationContext.Result; - - object functionResultValue = functionResult.GetValue() ?? string.Empty; - var stringResult = ProcessFunctionResult(functionResultValue, chatExecutionSettings.ToolCallBehavior); - - AddResponseMessage(chatOptions, chat, stringResult, errorMessage: null, toolCall, this.Logger); - - // If filter requested termination, returning latest function result and breaking request iteration loop. - if (invocationContext.Terminate) - { - if (this.Logger.IsEnabled(LogLevel.Debug)) - { - this.Logger.LogDebug("Filter requested termination of automatic function invocation."); - } - - var lastChatMessage = chat.Last(); - - yield return new OpenAIStreamingChatMessageContent(lastChatMessage.Role, lastChatMessage.Content); - yield break; - } - } - - // Update tool use information for the next go-around based on having completed another iteration. - Debug.Assert(chatExecutionSettings.ToolCallBehavior is not null); - - // Set the tool choice to none. If we end up wanting to use tools, we'll reset it to the desired value. - chatOptions.ToolChoice = ChatCompletionsToolChoice.None; - chatOptions.Tools.Clear(); - - if (requestIndex >= chatExecutionSettings.ToolCallBehavior!.MaximumUseAttempts) - { - // Don't add any tools as we've reached the maximum attempts limit. 
- if (this.Logger.IsEnabled(LogLevel.Debug)) - { - this.Logger.LogDebug("Maximum use ({MaximumUse}) reached; removing the tool.", chatExecutionSettings.ToolCallBehavior!.MaximumUseAttempts); - } - } - else - { - // Regenerate the tool list as necessary. The invocation of the function(s) could have augmented - // what functions are available in the kernel. - chatExecutionSettings.ToolCallBehavior.ConfigureOptions(kernel, chatOptions); - } - - // Having already sent tools and with tool call information in history, the service can become unhappy ("[] is too short - 'tools'") - // if we don't send any tools in subsequent requests, even if we say not to use any. - if (chatOptions.ToolChoice == ChatCompletionsToolChoice.None) - { - Debug.Assert(chatOptions.Tools.Count == 0); - chatOptions.Tools.Add(s_nonInvocableFunctionTool); - } - - // Disable auto invocation if we've exceeded the allowed limit. - if (requestIndex >= chatExecutionSettings.ToolCallBehavior!.MaximumAutoInvokeAttempts) - { - autoInvoke = false; - if (this.Logger.IsEnabled(LogLevel.Debug)) - { - this.Logger.LogDebug("Maximum auto-invoke ({MaximumAutoInvoke}) reached.", chatExecutionSettings.ToolCallBehavior!.MaximumAutoInvokeAttempts); - } - } - } - } - - /// Checks if a tool call is for a function that was defined. - private static bool IsRequestableTool(ChatCompletionsOptions options, OpenAIFunctionToolCall ftc) - { - IList tools = options.Tools; - for (int i = 0; i < tools.Count; i++) - { - if (tools[i] is ChatCompletionsFunctionToolDefinition def && - string.Equals(def.Name, ftc.FullyQualifiedName, StringComparison.OrdinalIgnoreCase)) - { - return true; - } - } - - return false; - } - - internal async IAsyncEnumerable GetChatAsTextStreamingContentsAsync( - string prompt, - PromptExecutionSettings? executionSettings, - Kernel? 
kernel, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - OpenAIPromptExecutionSettings chatSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings); - ChatHistory chat = CreateNewChat(prompt, chatSettings); - - await foreach (var chatUpdate in this.GetStreamingChatMessageContentsAsync(chat, executionSettings, kernel, cancellationToken).ConfigureAwait(false)) - { - yield return new StreamingTextContent(chatUpdate.Content, chatUpdate.ChoiceIndex, chatUpdate.ModelId, chatUpdate, Encoding.UTF8, chatUpdate.Metadata); - } - } - - internal async Task> GetChatAsTextContentsAsync( - string text, - PromptExecutionSettings? executionSettings, - Kernel? kernel, - CancellationToken cancellationToken = default) - { - OpenAIPromptExecutionSettings chatSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings); - - ChatHistory chat = CreateNewChat(text, chatSettings); - return (await this.GetChatMessageContentsAsync(chat, chatSettings, kernel, cancellationToken).ConfigureAwait(false)) - .Select(chat => new TextContent(chat.Content, chat.ModelId, chat.Content, Encoding.UTF8, chat.Metadata)) - .ToList(); - } - - internal void AddAttribute(string key, string? value) - { - if (!string.IsNullOrEmpty(value)) - { - this.Attributes.Add(key, value); - } - } - - /// Gets options to use for an OpenAIClient - /// Custom for HTTP requests. - /// Optional API version. - /// An instance of . - internal static OpenAIClientOptions GetOpenAIClientOptions(HttpClient? httpClient, OpenAIClientOptions.ServiceVersion? serviceVersion = null) - { - OpenAIClientOptions options = serviceVersion is not null ? 
- new(serviceVersion.Value) : - new(); - - options.Diagnostics.ApplicationId = HttpHeaderConstant.Values.UserAgent; - options.AddPolicy(new AddHeaderRequestPolicy(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(ClientCore))), HttpPipelinePosition.PerCall); - - if (httpClient is not null) - { - options.Transport = new HttpClientTransport(httpClient); - options.RetryPolicy = new RetryPolicy(maxRetries: 0); // Disable Azure SDK retry policy if and only if a custom HttpClient is provided. - options.Retry.NetworkTimeout = Timeout.InfiniteTimeSpan; // Disable Azure SDK default timeout - } - - return options; - } - - /// - /// Create a new empty chat instance - /// - /// Optional chat instructions for the AI service - /// Execution settings - /// Chat object - private static ChatHistory CreateNewChat(string? text = null, OpenAIPromptExecutionSettings? executionSettings = null) - { - var chat = new ChatHistory(); - - // If settings is not provided, create a new chat with the text as the system prompt - AuthorRole textRole = AuthorRole.System; - - if (!string.IsNullOrWhiteSpace(executionSettings?.ChatSystemPrompt)) - { - chat.AddSystemMessage(executionSettings!.ChatSystemPrompt!); - textRole = AuthorRole.User; - } - - if (!string.IsNullOrWhiteSpace(text)) - { - chat.AddMessage(textRole, text!); - } - - return chat; - } - - private static CompletionsOptions CreateCompletionsOptions(string text, OpenAIPromptExecutionSettings executionSettings, string deploymentOrModelName) - { - if (executionSettings.ResultsPerPrompt is < 1 or > MaxResultsPerPrompt) - { - throw new ArgumentOutOfRangeException($"{nameof(executionSettings)}.{nameof(executionSettings.ResultsPerPrompt)}", executionSettings.ResultsPerPrompt, $"The value must be in range between 1 and {MaxResultsPerPrompt}, inclusive."); - } - - var options = new CompletionsOptions - { - Prompts = { text.Replace("\r\n", "\n") }, // normalize line endings - MaxTokens = 
executionSettings.MaxTokens, - Temperature = (float?)executionSettings.Temperature, - NucleusSamplingFactor = (float?)executionSettings.TopP, - FrequencyPenalty = (float?)executionSettings.FrequencyPenalty, - PresencePenalty = (float?)executionSettings.PresencePenalty, - Echo = false, - ChoicesPerPrompt = executionSettings.ResultsPerPrompt, - GenerationSampleCount = executionSettings.ResultsPerPrompt, - LogProbabilityCount = executionSettings.TopLogprobs, - User = executionSettings.User, - DeploymentName = deploymentOrModelName - }; - - if (executionSettings.TokenSelectionBiases is not null) - { - foreach (var keyValue in executionSettings.TokenSelectionBiases) - { - options.TokenSelectionBiases.Add(keyValue.Key, keyValue.Value); - } - } - - if (executionSettings.StopSequences is { Count: > 0 }) - { - foreach (var s in executionSettings.StopSequences) - { - options.StopSequences.Add(s); - } - } - - return options; - } - - private ChatCompletionsOptions CreateChatCompletionsOptions( - OpenAIPromptExecutionSettings executionSettings, - ChatHistory chatHistory, - Kernel? 
kernel, - string deploymentOrModelName) - { - if (executionSettings.ResultsPerPrompt is < 1 or > MaxResultsPerPrompt) - { - throw new ArgumentOutOfRangeException($"{nameof(executionSettings)}.{nameof(executionSettings.ResultsPerPrompt)}", executionSettings.ResultsPerPrompt, $"The value must be in range between 1 and {MaxResultsPerPrompt}, inclusive."); - } - - if (this.Logger.IsEnabled(LogLevel.Trace)) - { - this.Logger.LogTrace("ChatHistory: {ChatHistory}, Settings: {Settings}", - JsonSerializer.Serialize(chatHistory), - JsonSerializer.Serialize(executionSettings)); - } - - var options = new ChatCompletionsOptions - { - MaxTokens = executionSettings.MaxTokens, - Temperature = (float?)executionSettings.Temperature, - NucleusSamplingFactor = (float?)executionSettings.TopP, - FrequencyPenalty = (float?)executionSettings.FrequencyPenalty, - PresencePenalty = (float?)executionSettings.PresencePenalty, - ChoiceCount = executionSettings.ResultsPerPrompt, - DeploymentName = deploymentOrModelName, - Seed = executionSettings.Seed, - User = executionSettings.User, - LogProbabilitiesPerToken = executionSettings.TopLogprobs, - EnableLogProbabilities = executionSettings.Logprobs, - AzureExtensionsOptions = executionSettings.AzureChatExtensionsOptions - }; - - switch (executionSettings.ResponseFormat) - { - case ChatCompletionsResponseFormat formatObject: - // If the response format is an Azure SDK ChatCompletionsResponseFormat, just pass it along. - options.ResponseFormat = formatObject; - break; - - case string formatString: - // If the response format is a string, map the ones we know about, and ignore the rest. - switch (formatString) - { - case "json_object": - options.ResponseFormat = ChatCompletionsResponseFormat.JsonObject; - break; - - case "text": - options.ResponseFormat = ChatCompletionsResponseFormat.Text; - break; - } - break; - - case JsonElement formatElement: - // This is a workaround for a type mismatch when deserializing a JSON into an object? type property. 
- // Handling only string formatElement. - if (formatElement.ValueKind == JsonValueKind.String) - { - string formatString = formatElement.GetString() ?? ""; - switch (formatString) - { - case "json_object": - options.ResponseFormat = ChatCompletionsResponseFormat.JsonObject; - break; - - case "text": - options.ResponseFormat = ChatCompletionsResponseFormat.Text; - break; - } - } - break; - } - - executionSettings.ToolCallBehavior?.ConfigureOptions(kernel, options); - if (executionSettings.TokenSelectionBiases is not null) - { - foreach (var keyValue in executionSettings.TokenSelectionBiases) - { - options.TokenSelectionBiases.Add(keyValue.Key, keyValue.Value); - } - } - - if (executionSettings.StopSequences is { Count: > 0 }) - { - foreach (var s in executionSettings.StopSequences) - { - options.StopSequences.Add(s); - } - } - - if (!string.IsNullOrWhiteSpace(executionSettings.ChatSystemPrompt) && !chatHistory.Any(m => m.Role == AuthorRole.System)) - { - options.Messages.AddRange(GetRequestMessages(new ChatMessageContent(AuthorRole.System, executionSettings!.ChatSystemPrompt), executionSettings.ToolCallBehavior)); - } - - foreach (var message in chatHistory) - { - options.Messages.AddRange(GetRequestMessages(message, executionSettings.ToolCallBehavior)); - } - - return options; - } - - private static ChatRequestMessage GetRequestMessage(ChatRole chatRole, string contents, string? name, ChatCompletionsFunctionToolCall[]? 
tools) - { - if (chatRole == ChatRole.User) - { - return new ChatRequestUserMessage(contents) { Name = name }; - } - - if (chatRole == ChatRole.System) - { - return new ChatRequestSystemMessage(contents) { Name = name }; - } - - if (chatRole == ChatRole.Assistant) - { - var msg = new ChatRequestAssistantMessage(contents) { Name = name }; - if (tools is not null) - { - foreach (ChatCompletionsFunctionToolCall tool in tools) - { - msg.ToolCalls.Add(tool); - } - } - return msg; - } - - throw new NotImplementedException($"Role {chatRole} is not implemented"); - } - - private static List GetRequestMessages(ChatMessageContent message, ToolCallBehavior? toolCallBehavior) - { - if (message.Role == AuthorRole.System) - { - return [new ChatRequestSystemMessage(message.Content) { Name = message.AuthorName }]; - } - - if (message.Role == AuthorRole.Tool) - { - // Handling function results represented by the TextContent type. - // Example: new ChatMessageContent(AuthorRole.Tool, content, metadata: new Dictionary(1) { { OpenAIChatMessageContent.ToolIdProperty, toolCall.Id } }) - if (message.Metadata?.TryGetValue(OpenAIChatMessageContent.ToolIdProperty, out object? toolId) is true && - toolId?.ToString() is string toolIdString) - { - return [new ChatRequestToolMessage(message.Content, toolIdString)]; - } - - // Handling function results represented by the FunctionResultContent type. - // Example: new ChatMessageContent(AuthorRole.Tool, items: new ChatMessageContentItemCollection { new FunctionResultContent(functionCall, result) }) - List? toolMessages = null; - foreach (var item in message.Items) - { - if (item is not FunctionResultContent resultContent) - { - continue; - } - - toolMessages ??= []; - - if (resultContent.Result is Exception ex) - { - toolMessages.Add(new ChatRequestToolMessage($"Error: Exception while invoking function. {ex.Message}", resultContent.CallId)); - continue; - } - - var stringResult = ProcessFunctionResult(resultContent.Result ?? 
string.Empty, toolCallBehavior); - - toolMessages.Add(new ChatRequestToolMessage(stringResult ?? string.Empty, resultContent.CallId)); - } - - if (toolMessages is not null) - { - return toolMessages; - } - - throw new NotSupportedException("No function result provided in the tool message."); - } - - if (message.Role == AuthorRole.User) - { - if (message.Items is { Count: 1 } && message.Items.FirstOrDefault() is TextContent textContent) - { - return [new ChatRequestUserMessage(textContent.Text) { Name = message.AuthorName }]; - } - - return [new ChatRequestUserMessage(message.Items.Select(static (KernelContent item) => (ChatMessageContentItem)(item switch - { - TextContent textContent => new ChatMessageTextContentItem(textContent.Text), - ImageContent imageContent => GetImageContentItem(imageContent), - _ => throw new NotSupportedException($"Unsupported chat message content type '{item.GetType()}'.") - }))) - { Name = message.AuthorName }]; - } - - if (message.Role == AuthorRole.Assistant) - { - var asstMessage = new ChatRequestAssistantMessage(message.Content) { Name = message.AuthorName }; - - // Handling function calls supplied via either: - // ChatCompletionsToolCall.ToolCalls collection items or - // ChatMessageContent.Metadata collection item with 'ChatResponseMessage.FunctionToolCalls' key. - IEnumerable? tools = (message as OpenAIChatMessageContent)?.ToolCalls; - if (tools is null && message.Metadata?.TryGetValue(OpenAIChatMessageContent.FunctionToolCallsProperty, out object? 
toolCallsObject) is true) - { - tools = toolCallsObject as IEnumerable; - if (tools is null && toolCallsObject is JsonElement { ValueKind: JsonValueKind.Array } array) - { - int length = array.GetArrayLength(); - var ftcs = new List(length); - for (int i = 0; i < length; i++) - { - JsonElement e = array[i]; - if (e.TryGetProperty("Id", out JsonElement id) && - e.TryGetProperty("Name", out JsonElement name) && - e.TryGetProperty("Arguments", out JsonElement arguments) && - id.ValueKind == JsonValueKind.String && - name.ValueKind == JsonValueKind.String && - arguments.ValueKind == JsonValueKind.String) - { - ftcs.Add(new ChatCompletionsFunctionToolCall(id.GetString()!, name.GetString()!, arguments.GetString()!)); - } - } - tools = ftcs; - } - } - - if (tools is not null) - { - asstMessage.ToolCalls.AddRange(tools); - } - - // Handling function calls supplied via ChatMessageContent.Items collection elements of the FunctionCallContent type. - HashSet? functionCallIds = null; - foreach (var item in message.Items) - { - if (item is not FunctionCallContent callRequest) - { - continue; - } - - functionCallIds ??= new HashSet(asstMessage.ToolCalls.Select(t => t.Id)); - - if (callRequest.Id is null || functionCallIds.Contains(callRequest.Id)) - { - continue; - } - - var argument = JsonSerializer.Serialize(callRequest.Arguments); - - asstMessage.ToolCalls.Add(new ChatCompletionsFunctionToolCall(callRequest.Id, FunctionName.ToFullyQualifiedName(callRequest.FunctionName, callRequest.PluginName, OpenAIFunction.NameSeparator), argument ?? 
string.Empty)); - } - - return [asstMessage]; - } - - throw new NotSupportedException($"Role {message.Role} is not supported."); - } - - private static ChatMessageImageContentItem GetImageContentItem(ImageContent imageContent) - { - if (imageContent.Data is { IsEmpty: false } data) - { - return new ChatMessageImageContentItem(BinaryData.FromBytes(data), imageContent.MimeType); - } - - if (imageContent.Uri is not null) - { - return new ChatMessageImageContentItem(imageContent.Uri); - } - - throw new ArgumentException($"{nameof(ImageContent)} must have either Data or a Uri."); - } - - private static ChatRequestMessage GetRequestMessage(ChatResponseMessage message) - { - if (message.Role == ChatRole.System) - { - return new ChatRequestSystemMessage(message.Content); - } - - if (message.Role == ChatRole.Assistant) - { - var msg = new ChatRequestAssistantMessage(message.Content); - if (message.ToolCalls is { Count: > 0 } tools) - { - foreach (ChatCompletionsToolCall tool in tools) - { - msg.ToolCalls.Add(tool); - } - } - - return msg; - } - - if (message.Role == ChatRole.User) - { - return new ChatRequestUserMessage(message.Content); - } - - throw new NotSupportedException($"Role {message.Role} is not supported."); - } - - private OpenAIChatMessageContent GetChatMessage(ChatChoice chatChoice, ChatCompletions responseData) - { - var message = new OpenAIChatMessageContent(chatChoice.Message, this.DeploymentOrModelName, GetChatChoiceMetadata(responseData, chatChoice)); - - message.Items.AddRange(this.GetFunctionCallContents(chatChoice.Message.ToolCalls)); - - return message; - } - - private OpenAIChatMessageContent GetChatMessage(ChatRole chatRole, string content, ChatCompletionsFunctionToolCall[] toolCalls, FunctionCallContent[]? functionCalls, IReadOnlyDictionary? metadata, string? 
authorName) - { - var message = new OpenAIChatMessageContent(chatRole, content, this.DeploymentOrModelName, toolCalls, metadata) - { - AuthorName = authorName, - }; - - if (functionCalls is not null) - { - message.Items.AddRange(functionCalls); - } - - return message; - } - - private IEnumerable GetFunctionCallContents(IEnumerable toolCalls) - { - List? result = null; - - foreach (var toolCall in toolCalls) - { - // Adding items of 'FunctionCallContent' type to the 'Items' collection even though the function calls are available via the 'ToolCalls' property. - // This allows consumers to work with functions in an LLM-agnostic way. - if (toolCall is ChatCompletionsFunctionToolCall functionToolCall) - { - Exception? exception = null; - KernelArguments? arguments = null; - try - { - arguments = JsonSerializer.Deserialize(functionToolCall.Arguments); - if (arguments is not null) - { - // Iterate over copy of the names to avoid mutating the dictionary while enumerating it - var names = arguments.Names.ToArray(); - foreach (var name in names) - { - arguments[name] = arguments[name]?.ToString(); - } - } - } - catch (JsonException ex) - { - exception = new KernelException("Error: Function call arguments were invalid JSON.", ex); - - if (this.Logger.IsEnabled(LogLevel.Debug)) - { - this.Logger.LogDebug(ex, "Failed to deserialize function arguments ({FunctionName}/{FunctionId}).", functionToolCall.Name, functionToolCall.Id); - } - } - - var functionName = FunctionName.Parse(functionToolCall.Name, OpenAIFunction.NameSeparator); - - var functionCallContent = new FunctionCallContent( - functionName: functionName.Name, - pluginName: functionName.PluginName, - id: functionToolCall.Id, - arguments: arguments) - { - InnerContent = functionToolCall, - Exception = exception - }; - - result ??= []; - result.Add(functionCallContent); - } - } - - return result ?? 
Enumerable.Empty(); - } - - private static void AddResponseMessage(ChatCompletionsOptions chatOptions, ChatHistory chat, string? result, string? errorMessage, ChatCompletionsToolCall toolCall, ILogger logger) - { - // Log any error - if (errorMessage is not null && logger.IsEnabled(LogLevel.Debug)) - { - Debug.Assert(result is null); - logger.LogDebug("Failed to handle tool request ({ToolId}). {Error}", toolCall.Id, errorMessage); - } - - // Add the tool response message to the chat options - result ??= errorMessage ?? string.Empty; - chatOptions.Messages.Add(new ChatRequestToolMessage(result, toolCall.Id)); - - // Add the tool response message to the chat history. - var message = new ChatMessageContent(role: AuthorRole.Tool, content: result, metadata: new Dictionary { { OpenAIChatMessageContent.ToolIdProperty, toolCall.Id } }); - - if (toolCall is ChatCompletionsFunctionToolCall functionCall) - { - // Add an item of type FunctionResultContent to the ChatMessageContent.Items collection in addition to the function result stored as a string in the ChatMessageContent.Content property. - // This will enable migration to the new function calling model and facilitate the deprecation of the current one in the future. - var functionName = FunctionName.Parse(functionCall.Name, OpenAIFunction.NameSeparator); - message.Items.Add(new FunctionResultContent(functionName.Name, functionName.PluginName, functionCall.Id, result)); - } - - chat.Add(message); - } - - private static void ValidateMaxTokens(int? maxTokens) - { - if (maxTokens.HasValue && maxTokens < 1) - { - throw new ArgumentException($"MaxTokens {maxTokens} is not valid, the value must be greater than zero"); - } - } - - private static void ValidateAutoInvoke(bool autoInvoke, int resultsPerPrompt) - { - if (autoInvoke && resultsPerPrompt != 1) - { - // We can remove this restriction in the future if valuable. 
However, multiple results per prompt is rare, - // and limiting this significantly curtails the complexity of the implementation. - throw new ArgumentException($"Auto-invocation of tool calls may only be used with a {nameof(OpenAIPromptExecutionSettings.ResultsPerPrompt)} of 1."); - } - } - - private static async Task RunRequestAsync(Func> request) - { - try - { - return await request.Invoke().ConfigureAwait(false); - } - catch (RequestFailedException e) - { - throw e.ToHttpOperationException(); - } - } - - /// - /// Captures usage details, including token information. - /// - /// Instance of with usage details. - private void LogUsage(CompletionsUsage usage) - { - if (usage is null) - { - this.Logger.LogDebug("Token usage information unavailable."); - return; - } - - if (this.Logger.IsEnabled(LogLevel.Information)) - { - this.Logger.LogInformation( - "Prompt tokens: {PromptTokens}. Completion tokens: {CompletionTokens}. Total tokens: {TotalTokens}.", - usage.PromptTokens, usage.CompletionTokens, usage.TotalTokens); - } - - s_promptTokensCounter.Add(usage.PromptTokens); - s_completionTokensCounter.Add(usage.CompletionTokens); - s_totalTokensCounter.Add(usage.TotalTokens); - } - - /// - /// Processes the function result. - /// - /// The result of the function call. - /// The ToolCallBehavior object containing optional settings like JsonSerializerOptions.TypeInfoResolver. - /// A string representation of the function result. - private static string? ProcessFunctionResult(object functionResult, ToolCallBehavior? toolCallBehavior) - { - if (functionResult is string stringResult) - { - return stringResult; - } - - // This is an optimization to use ChatMessageContent content directly - // without unnecessary serialization of the whole message content class. 
- if (functionResult is ChatMessageContent chatMessageContent) - { - return chatMessageContent.ToString(); - } - - // For polymorphic serialization of unknown in advance child classes of the KernelContent class, - // a corresponding JsonTypeInfoResolver should be provided via the JsonSerializerOptions.TypeInfoResolver property. - // For more details about the polymorphic serialization, see the article at: - // https://learn.microsoft.com/en-us/dotnet/standard/serialization/system-text-json/polymorphism?pivots=dotnet-8-0 -#pragma warning disable CS0618 // Type or member is obsolete - return JsonSerializer.Serialize(functionResult, toolCallBehavior?.ToolCallResultSerializerOptions); -#pragma warning restore CS0618 // Type or member is obsolete - } - - /// - /// Executes auto function invocation filters and/or function itself. - /// This method can be moved to when auto function invocation logic will be extracted to common place. - /// - private static async Task OnAutoFunctionInvocationAsync( - Kernel kernel, - AutoFunctionInvocationContext context, - Func functionCallCallback) - { - await InvokeFilterOrFunctionAsync(kernel.AutoFunctionInvocationFilters, functionCallCallback, context).ConfigureAwait(false); - - return context; - } - - /// - /// This method will execute auto function invocation filters and function recursively. - /// If there are no registered filters, just function will be executed. - /// If there are registered filters, filter on position will be executed. - /// Second parameter of filter is callback. It can be either filter on + 1 position or function if there are no remaining filters to execute. - /// Function will be always executed as last step after all filters. - /// - private static async Task InvokeFilterOrFunctionAsync( - IList? 
autoFunctionInvocationFilters, - Func functionCallCallback, - AutoFunctionInvocationContext context, - int index = 0) - { - if (autoFunctionInvocationFilters is { Count: > 0 } && index < autoFunctionInvocationFilters.Count) - { - await autoFunctionInvocationFilters[index].OnAutoFunctionInvocationAsync(context, - (context) => InvokeFilterOrFunctionAsync(autoFunctionInvocationFilters, functionCallCallback, context, index + 1)).ConfigureAwait(false); - } - else - { - await functionCallCallback(context).ConfigureAwait(false); - } - } -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/CustomHostPipelinePolicy.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/CustomHostPipelinePolicy.cs deleted file mode 100644 index e0f5733dd5c0..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/CustomHostPipelinePolicy.cs +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using Azure.Core; -using Azure.Core.Pipeline; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI.Core.AzureSdk; - -internal sealed class CustomHostPipelinePolicy : HttpPipelineSynchronousPolicy -{ - private readonly Uri _endpoint; - - internal CustomHostPipelinePolicy(Uri endpoint) - { - this._endpoint = endpoint; - } - - public override void OnSendingRequest(HttpMessage message) - { - // Update current host to provided endpoint - message.Request?.Uri.Reset(this._endpoint); - } -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIClientCore.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIClientCore.cs deleted file mode 100644 index 32cc0ab22f19..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIClientCore.cs +++ /dev/null @@ -1,106 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Net.Http; -using System.Runtime.CompilerServices; -using Azure.AI.OpenAI; -using Azure.Core; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Connectors.OpenAI.Core.AzureSdk; -using Microsoft.SemanticKernel.Services; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// Core implementation for OpenAI clients, providing common functionality and properties. -/// -internal sealed class OpenAIClientCore : ClientCore -{ - private const string DefaultPublicEndpoint = "https://api.openai.com/v1"; - - /// - /// Gets the attribute name used to store the organization in the dictionary. - /// - public static string OrganizationKey => "Organization"; - - /// - /// OpenAI / Azure OpenAI Client - /// - internal override OpenAIClient Client { get; } - - /// - /// Initializes a new instance of the class. - /// - /// Model name. - /// OpenAI API Key. - /// OpenAI compatible API endpoint. - /// OpenAI Organization Id (usually optional). - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. - internal OpenAIClientCore( - string modelId, - string? apiKey = null, - Uri? endpoint = null, - string? organization = null, - HttpClient? httpClient = null, - ILogger? logger = null) : base(logger) - { - Verify.NotNullOrWhiteSpace(modelId); - - this.DeploymentOrModelName = modelId; - - var options = GetOpenAIClientOptions(httpClient); - - if (!string.IsNullOrWhiteSpace(organization)) - { - options.AddPolicy(new AddHeaderRequestPolicy("OpenAI-Organization", organization!), HttpPipelinePosition.PerCall); - } - - // Accepts the endpoint if provided, otherwise uses the default OpenAI endpoint. - var providedEndpoint = endpoint ?? httpClient?.BaseAddress; - if (providedEndpoint is null) - { - Verify.NotNullOrWhiteSpace(apiKey); // For Public OpenAI Endpoint a key must be provided. 
- this.Endpoint = new Uri(DefaultPublicEndpoint); - } - else - { - options.AddPolicy(new CustomHostPipelinePolicy(providedEndpoint), Azure.Core.HttpPipelinePosition.PerRetry); - this.Endpoint = providedEndpoint; - } - - this.Client = new OpenAIClient(apiKey ?? string.Empty, options); - } - - /// - /// Initializes a new instance of the class using the specified OpenAIClient. - /// Note: instances created this way might not have the default diagnostics settings, - /// it's up to the caller to configure the client. - /// - /// Azure OpenAI model ID or deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Custom . - /// The to use for logging. If null, no logging will be performed. - internal OpenAIClientCore( - string modelId, - OpenAIClient openAIClient, - ILogger? logger = null) : base(logger) - { - Verify.NotNullOrWhiteSpace(modelId); - Verify.NotNull(openAIClient); - - this.DeploymentOrModelName = modelId; - this.Client = openAIClient; - } - - /// - /// Logs OpenAI action details. - /// - /// Caller member name. Populated automatically by runtime. - internal void LogActionDetails([CallerMemberName] string? callerMemberName = default) - { - if (this.Logger.IsEnabled(LogLevel.Information)) - { - this.Logger.LogInformation("Action: {Action}. OpenAI Model ID: {ModelId}.", callerMemberName, this.DeploymentOrModelName); - } - } -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIStreamingTextContent.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIStreamingTextContent.cs deleted file mode 100644 index 126e1615a747..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIStreamingTextContent.cs +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Text; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// Azure OpenAI and OpenAI Specialized streaming text content. 
-/// -/// -/// Represents a text content chunk that was streamed from the remote model. -/// -public sealed class OpenAIStreamingTextContent : StreamingTextContent -{ - /// - /// Create a new instance of the class. - /// - /// Text update - /// Index of the choice - /// The model ID used to generate the content - /// Inner chunk object - /// Metadata information - internal OpenAIStreamingTextContent( - string text, - int choiceIndex, - string modelId, - object? innerContentObject = null, - IReadOnlyDictionary? metadata = null) - : base( - text, - choiceIndex, - modelId, - innerContentObject, - Encoding.UTF8, - metadata) - { - } - - /// - public override byte[] ToByteArray() - { - return this.Encoding.GetBytes(this.ToString()); - } - - /// - public override string ToString() - { - return this.Text ?? string.Empty; - } -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAITextToAudioClient.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAITextToAudioClient.cs deleted file mode 100644 index 7f3daaa2d941..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAITextToAudioClient.cs +++ /dev/null @@ -1,128 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Http; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// OpenAI text-to-audio client for HTTP operations. -/// -[Experimental("SKEXP0001")] -internal sealed class OpenAITextToAudioClient -{ - private readonly ILogger _logger; - private readonly HttpClient _httpClient; - - private readonly string _modelId; - private readonly string _apiKey; - private readonly string? _organization; - - /// - /// Storage for AI service attributes. 
- /// - internal Dictionary Attributes { get; } = []; - - /// - /// Creates an instance of the with API key auth. - /// - /// Model name - /// OpenAI API Key - /// OpenAI Organization Id (usually optional) - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. - internal OpenAITextToAudioClient( - string modelId, - string apiKey, - string? organization = null, - HttpClient? httpClient = null, - ILogger? logger = null) - { - Verify.NotNullOrWhiteSpace(modelId); - Verify.NotNullOrWhiteSpace(apiKey); - - this._modelId = modelId; - this._apiKey = apiKey; - this._organization = organization; - - this._httpClient = HttpClientProvider.GetHttpClient(httpClient); - this._logger = logger ?? NullLogger.Instance; - } - - internal async Task> GetAudioContentsAsync( - string text, - PromptExecutionSettings? executionSettings, - CancellationToken cancellationToken) - { - OpenAITextToAudioExecutionSettings? audioExecutionSettings = OpenAITextToAudioExecutionSettings.FromExecutionSettings(executionSettings); - - Verify.NotNullOrWhiteSpace(audioExecutionSettings?.Voice); - - using var request = this.GetRequest(text, audioExecutionSettings); - using var response = await this.SendRequestAsync(request, cancellationToken).ConfigureAwait(false); - var data = await response.Content.ReadAsByteArrayAndTranslateExceptionAsync().ConfigureAwait(false); - - return [new(data, this._modelId)]; - } - - internal void AddAttribute(string key, string? 
value) - { - if (!string.IsNullOrEmpty(value)) - { - this.Attributes.Add(key, value); - } - } - - #region private - - private async Task SendRequestAsync( - HttpRequestMessage request, - CancellationToken cancellationToken) - { - request.Headers.Add("User-Agent", HttpHeaderConstant.Values.UserAgent); - request.Headers.Add("Authorization", $"Bearer {this._apiKey}"); - request.Headers.Add(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(OpenAITextToAudioClient))); - - if (!string.IsNullOrWhiteSpace(this._organization)) - { - request.Headers.Add("OpenAI-Organization", this._organization); - } - - try - { - return await this._httpClient.SendWithSuccessCheckAsync(request, cancellationToken).ConfigureAwait(false); - } - catch (HttpOperationException ex) - { - this._logger.LogError( - "Error occurred on text-to-audio request execution: {ExceptionMessage}", ex.Message); - - throw; - } - } - - private HttpRequestMessage GetRequest(string text, OpenAITextToAudioExecutionSettings executionSettings) - { - const string DefaultBaseUrl = "https://api.openai.com"; - - var baseUrl = !string.IsNullOrWhiteSpace(this._httpClient.BaseAddress?.AbsoluteUri) ? 
- this._httpClient.BaseAddress!.AbsoluteUri : - DefaultBaseUrl; - - var payload = new TextToAudioRequest(this._modelId, text, executionSettings.Voice) - { - ResponseFormat = executionSettings.ResponseFormat, - Speed = executionSettings.Speed - }; - - return HttpRequest.CreatePostRequest($"{baseUrl.TrimEnd('/')}/v1/audio/speech", payload); - } - - #endregion -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/AzureOpenAIChatCompletionWithDataConfig.cs b/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/AzureOpenAIChatCompletionWithDataConfig.cs deleted file mode 100644 index 7f49e74c5fa4..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/AzureOpenAIChatCompletionWithDataConfig.cs +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Diagnostics.CodeAnalysis; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// Required configuration for Azure OpenAI chat completion with data. -/// More information: -/// -[Experimental("SKEXP0010")] -[Obsolete("This class is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] -public class AzureOpenAIChatCompletionWithDataConfig -{ - /// - /// Azure OpenAI model ID or deployment name, see - /// - public string CompletionModelId { get; set; } = string.Empty; - - /// - /// Azure OpenAI deployment URL, see - /// - public string CompletionEndpoint { get; set; } = string.Empty; - - /// - /// Azure OpenAI API key, see - /// - public string CompletionApiKey { get; set; } = string.Empty; - - /// - /// Azure OpenAI Completion API version (e.g. 2024-02-01) - /// - public string CompletionApiVersion { get; set; } = string.Empty; - - /// - /// Data source endpoint URL. - /// For Azure AI Search, see - /// - public string DataSourceEndpoint { get; set; } = string.Empty; - - /// - /// Data source API key. 
- /// For Azure AI Search keys, see - /// - public string DataSourceApiKey { get; set; } = string.Empty; - - /// - /// Data source index name. - /// For Azure AI Search indexes, see - /// - public string DataSourceIndex { get; set; } = string.Empty; -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/AzureOpenAIChatCompletionWithDataService.cs b/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/AzureOpenAIChatCompletionWithDataService.cs deleted file mode 100644 index 793209704bbf..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/AzureOpenAIChatCompletionWithDataService.cs +++ /dev/null @@ -1,305 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Globalization; -using System.IO; -using System.Linq; -using System.Net.Http; -using System.Runtime.CompilerServices; -using System.Text; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Http; -using Microsoft.SemanticKernel.Services; -using Microsoft.SemanticKernel.Text; -using Microsoft.SemanticKernel.TextGeneration; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// Azure OpenAI Chat Completion with data service. -/// More information: -/// -[Experimental("SKEXP0010")] -[Obsolete("This class is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] -public sealed class AzureOpenAIChatCompletionWithDataService : IChatCompletionService, ITextGenerationService -{ - /// - /// Initializes a new instance of the class. - /// - /// Instance of class with completion configuration. - /// Custom for HTTP requests. - /// Instance of to use for logging. 
- public AzureOpenAIChatCompletionWithDataService( - AzureOpenAIChatCompletionWithDataConfig config, - HttpClient? httpClient = null, - ILoggerFactory? loggerFactory = null) - { - this.ValidateConfig(config); - - this._config = config; - - this._httpClient = HttpClientProvider.GetHttpClient(httpClient); - this._logger = loggerFactory?.CreateLogger(this.GetType()) ?? NullLogger.Instance; - this._attributes.Add(AIServiceExtensions.ModelIdKey, config.CompletionModelId); - } - - /// - public IReadOnlyDictionary Attributes => this._attributes; - - /// - public Task> GetChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) - => this.InternalGetChatMessageContentsAsync(chatHistory, executionSettings, kernel, cancellationToken); - - /// - public IAsyncEnumerable GetStreamingChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) - => this.InternalGetChatStreamingContentsAsync(chatHistory, executionSettings, kernel, cancellationToken); - - /// - public async Task> GetTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) - { - return (await this.GetChatMessageContentsAsync(prompt, executionSettings, kernel, cancellationToken).ConfigureAwait(false)) - .Select(chat => new TextContent(chat.Content, chat.ModelId, chat, Encoding.UTF8, chat.Metadata)) - .ToList(); - } - - /// - public async IAsyncEnumerable GetStreamingTextContentsAsync( - string prompt, - PromptExecutionSettings? executionSettings = null, - Kernel? 
kernel = null, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - await foreach (var streamingChatContent in this.InternalGetChatStreamingContentsAsync(new ChatHistory(prompt), executionSettings, kernel, cancellationToken).ConfigureAwait(false)) - { - yield return new StreamingTextContent(streamingChatContent.Content, streamingChatContent.ChoiceIndex, streamingChatContent.ModelId, streamingChatContent, Encoding.UTF8, streamingChatContent.Metadata); - } - } - - #region private ================================================================================ - - private const string DefaultApiVersion = "2024-02-01"; - - private readonly AzureOpenAIChatCompletionWithDataConfig _config; - - private readonly HttpClient _httpClient; - private readonly ILogger _logger; - private readonly Dictionary _attributes = []; - private void ValidateConfig(AzureOpenAIChatCompletionWithDataConfig config) - { - Verify.NotNull(config); - - Verify.NotNullOrWhiteSpace(config.CompletionModelId); - Verify.NotNullOrWhiteSpace(config.CompletionEndpoint); - Verify.NotNullOrWhiteSpace(config.CompletionApiKey); - Verify.NotNullOrWhiteSpace(config.DataSourceEndpoint); - Verify.NotNullOrWhiteSpace(config.DataSourceApiKey); - Verify.NotNullOrWhiteSpace(config.DataSourceIndex); - } - - private async Task> InternalGetChatMessageContentsAsync( - ChatHistory chat, - PromptExecutionSettings? executionSettings, - Kernel? 
kernel, - CancellationToken cancellationToken = default) - { - var openAIExecutionSettings = OpenAIPromptExecutionSettings.FromExecutionSettingsWithData(executionSettings, OpenAIPromptExecutionSettings.DefaultTextMaxTokens); - - using var request = this.GetRequest(chat, openAIExecutionSettings, isStreamEnabled: false); - using var response = await this.SendRequestAsync(request, cancellationToken).ConfigureAwait(false); - - var body = await response.Content.ReadAsStringWithExceptionMappingAsync().ConfigureAwait(false); - - var chatWithDataResponse = this.DeserializeResponse(body); - IReadOnlyDictionary metadata = GetResponseMetadata(chatWithDataResponse); - - return chatWithDataResponse.Choices.Select(choice => new AzureOpenAIWithDataChatMessageContent(choice, this.GetModelId(), metadata)).ToList(); - } - - private static Dictionary GetResponseMetadata(ChatWithDataResponse chatResponse) - { - return new Dictionary(5) - { - { nameof(chatResponse.Id), chatResponse.Id }, - { nameof(chatResponse.Model), chatResponse.Model }, - { nameof(chatResponse.Created), chatResponse.Created }, - { nameof(chatResponse.Object), chatResponse.Object }, - { nameof(chatResponse.Usage), chatResponse.Usage }, - }; - } - - private static Dictionary GetResponseMetadata(ChatWithDataStreamingResponse chatResponse) - { - return new Dictionary(4) - { - { nameof(chatResponse.Id), chatResponse.Id }, - { nameof(chatResponse.Model), chatResponse.Model }, - { nameof(chatResponse.Created), chatResponse.Created }, - { nameof(chatResponse.Object), chatResponse.Object }, - }; - } - - private async Task SendRequestAsync( - HttpRequestMessage request, - CancellationToken cancellationToken = default) - { - request.Headers.Add("User-Agent", HttpHeaderConstant.Values.UserAgent); - request.Headers.Add("Api-Key", this._config.CompletionApiKey); - request.Headers.Add(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(AzureOpenAIChatCompletionWithDataService))); - 
- try - { - return await this._httpClient.SendWithSuccessCheckAsync(request, cancellationToken).ConfigureAwait(false); - } - catch (HttpOperationException ex) - { - this._logger.LogError( - "Error occurred on chat completion with data request execution: {ExceptionMessage}", ex.Message); - - throw; - } - } - - private async IAsyncEnumerable InternalGetChatStreamingContentsAsync( - ChatHistory chatHistory, - PromptExecutionSettings? executionSettings = null, - Kernel? kernel = null, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - OpenAIPromptExecutionSettings chatRequestSettings = OpenAIPromptExecutionSettings.FromExecutionSettingsWithData(executionSettings); - - using var request = this.GetRequest(chatHistory, chatRequestSettings, isStreamEnabled: true); - using var response = await this.SendRequestAsync(request, cancellationToken).ConfigureAwait(false); - - const string ServerEventPayloadPrefix = "data:"; - - using var stream = await response.Content.ReadAsStreamAndTranslateExceptionAsync().ConfigureAwait(false); - using var reader = new StreamReader(stream); - - while (!reader.EndOfStream) - { - var body = await reader.ReadLineAsync( -#if NET - cancellationToken -#endif - ).ConfigureAwait(false); - - if (string.IsNullOrWhiteSpace(body)) - { - continue; - } - - if (body.StartsWith(ServerEventPayloadPrefix, StringComparison.Ordinal)) - { - body = body.Substring(ServerEventPayloadPrefix.Length); - } - - var chatWithDataResponse = this.DeserializeResponse(body); - IReadOnlyDictionary metadata = GetResponseMetadata(chatWithDataResponse); - - foreach (var choice in chatWithDataResponse.Choices) - { - yield return new AzureOpenAIWithDataStreamingChatMessageContent(choice, choice.Index, this.GetModelId()!, metadata); - } - } - } - - private T DeserializeResponse(string body) - { - var response = JsonSerializer.Deserialize(body, JsonOptionsCache.ReadPermissive); - - if (response is null) - { - const string ErrorMessage = "Error occurred on 
chat completion with data response deserialization"; - - this._logger.LogError(ErrorMessage); - - throw new KernelException(ErrorMessage); - } - - return response; - } - - private HttpRequestMessage GetRequest( - ChatHistory chat, - OpenAIPromptExecutionSettings executionSettings, - bool isStreamEnabled) - { - var payload = new ChatWithDataRequest - { - Temperature = executionSettings.Temperature, - TopP = executionSettings.TopP, - IsStreamEnabled = isStreamEnabled, - StopSequences = executionSettings.StopSequences, - MaxTokens = executionSettings.MaxTokens, - PresencePenalty = executionSettings.PresencePenalty, - FrequencyPenalty = executionSettings.FrequencyPenalty, - TokenSelectionBiases = executionSettings.TokenSelectionBiases ?? new Dictionary(), - DataSources = this.GetDataSources(), - Messages = this.GetMessages(chat) - }; - - return HttpRequest.CreatePostRequest(this.GetRequestUri(), payload); - } - - private List GetDataSources() - { - return - [ - new() - { - Parameters = new ChatWithDataSourceParameters - { - Endpoint = this._config.DataSourceEndpoint, - ApiKey = this._config.DataSourceApiKey, - IndexName = this._config.DataSourceIndex - } - } - ]; - } - - private List GetMessages(ChatHistory chat) - { - // The system role as the unique message is not allowed in the With Data APIs. - // This avoids the error: Invalid message request body. Learn how to use Completions extension API, please refer to https://learn.microsoft.com/azure/ai-services/openai/reference#completions-extensions - if (chat.Count == 1 && chat[0].Role == AuthorRole.System) - { - // Converts a system message to a user message if is the unique message in the chat. - chat[0].Role = AuthorRole.User; - } - - return chat - .Select(message => new ChatWithDataMessage - { - Role = message.Role.Label, - Content = message.Content ?? 
string.Empty - }) - .ToList(); - } - - private string GetRequestUri() - { - const string EndpointUriFormat = "{0}/openai/deployments/{1}/extensions/chat/completions?api-version={2}"; - - var apiVersion = this._config.CompletionApiVersion; - - if (string.IsNullOrWhiteSpace(apiVersion)) - { - apiVersion = DefaultApiVersion; - } - - return string.Format( - CultureInfo.InvariantCulture, - EndpointUriFormat, - this._config.CompletionEndpoint.TrimEnd('/'), - this._config.CompletionModelId, - apiVersion); - } - #endregion -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataMessage.cs b/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataMessage.cs deleted file mode 100644 index ce3a5e5465e3..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataMessage.cs +++ /dev/null @@ -1,18 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Diagnostics.CodeAnalysis; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -[Experimental("SKEXP0010")] -[Obsolete("This class is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] -internal sealed class ChatWithDataMessage -{ - [JsonPropertyName("role")] - public string Role { get; set; } = string.Empty; - - [JsonPropertyName("content")] - public string Content { get; set; } = string.Empty; -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataRequest.cs b/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataRequest.cs deleted file mode 100644 index 214b917a8a13..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataRequest.cs +++ /dev/null @@ -1,71 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -[Experimental("SKEXP0010")] -[Obsolete("This class is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] -internal sealed class ChatWithDataRequest -{ - [JsonPropertyName("temperature")] - public double Temperature { get; set; } = 0; - - [JsonPropertyName("top_p")] - public double TopP { get; set; } = 0; - - [JsonPropertyName("stream")] - public bool IsStreamEnabled { get; set; } - - [JsonPropertyName("stop")] - public IList? StopSequences { get; set; } = Array.Empty(); - - [JsonPropertyName("max_tokens")] - public int? MaxTokens { get; set; } - - [JsonPropertyName("presence_penalty")] - public double PresencePenalty { get; set; } = 0; - - [JsonPropertyName("frequency_penalty")] - public double FrequencyPenalty { get; set; } = 0; - - [JsonPropertyName("logit_bias")] - public IDictionary TokenSelectionBiases { get; set; } = new Dictionary(); - - [JsonPropertyName("dataSources")] - public IList DataSources { get; set; } = Array.Empty(); - - [JsonPropertyName("messages")] - public IList Messages { get; set; } = Array.Empty(); -} - -[Experimental("SKEXP0010")] -[Obsolete("This class is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] -internal sealed class ChatWithDataSource -{ - [JsonPropertyName("type")] - // The current API only supports "AzureCognitiveSearch" as name otherwise an error is returned. 
- // Validation error at #/dataSources/0: Input tag 'AzureAISearch' found using 'type' does not match any of - // the expected tags: 'AzureCognitiveSearch', 'Elasticsearch', 'AzureCosmosDB', 'Pinecone', 'AzureMLIndex', 'Microsoft365' - public string Type { get; set; } = "AzureCognitiveSearch"; - - [JsonPropertyName("parameters")] - public ChatWithDataSourceParameters Parameters { get; set; } = new ChatWithDataSourceParameters(); -} - -[Experimental("SKEXP0010")] -[Obsolete("This class is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] -internal sealed class ChatWithDataSourceParameters -{ - [JsonPropertyName("endpoint")] - public string Endpoint { get; set; } = string.Empty; - - [JsonPropertyName("key")] - public string ApiKey { get; set; } = string.Empty; - - [JsonPropertyName("indexName")] - public string IndexName { get; set; } = string.Empty; -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataResponse.cs b/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataResponse.cs deleted file mode 100644 index 4ba5e7761319..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataResponse.cs +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -#pragma warning disable CA1812 // Avoid uninstantiated internal classes - -[Experimental("SKEXP0010")] -[Obsolete("This class is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] -[method: JsonConstructor] -internal sealed class ChatWithDataResponse(ChatWithDataUsage usage) -{ - [JsonPropertyName("id")] - public string Id { get; set; } = string.Empty; - - [JsonPropertyName("created")] - public int Created { get; set; } = default; - - [JsonPropertyName("choices")] - public IList Choices { get; set; } = Array.Empty(); - - [JsonPropertyName("usage")] - public ChatWithDataUsage Usage { get; set; } = usage; - - [JsonPropertyName("model")] - public string Model { get; set; } = string.Empty; - - [JsonPropertyName("object")] - public string Object { get; set; } = string.Empty; -} - -[Experimental("SKEXP0010")] -[Obsolete("This class is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] -[SuppressMessage("Performance", "CA1812:Avoid uninstantiated internal classes", Justification = "Used for JSON deserialization")] -internal sealed class ChatWithDataChoice -{ - [JsonPropertyName("messages")] - public IList Messages { get; set; } = Array.Empty(); -} - -[Experimental("SKEXP0010")] -[Obsolete("This class is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] -internal sealed class ChatWithDataUsage -{ - [JsonPropertyName("prompt_tokens")] - public int PromptTokens { get; set; } - - [JsonPropertyName("completion_tokens")] - public int CompletionTokens { get; set; } - - [JsonPropertyName("total_tokens")] - public int TotalTokens { get; set; } -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataStreamingResponse.cs 
b/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataStreamingResponse.cs deleted file mode 100644 index 9455553d9642..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataStreamingResponse.cs +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -[Experimental("SKEXP0010")] -[Obsolete("This class is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] -[SuppressMessage("Performance", "CA1812:Avoid uninstantiated internal classes", Justification = "Used for JSON deserialization")] -internal sealed class ChatWithDataStreamingResponse -{ - [JsonPropertyName("id")] - public string Id { get; set; } = string.Empty; - - [JsonPropertyName("created")] - public int Created { get; set; } = default; - - [JsonPropertyName("model")] - public string Model { get; set; } = string.Empty; - - [JsonPropertyName("object")] - public string Object { get; set; } = string.Empty; - - [JsonPropertyName("choices")] - public IList Choices { get; set; } = Array.Empty(); -} - -[Experimental("SKEXP0010")] -[Obsolete("This class is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] -[SuppressMessage("Performance", "CA1812:Avoid uninstantiated internal classes", Justification = "Used for JSON deserialization")] -internal sealed class ChatWithDataStreamingChoice -{ - [JsonPropertyName("messages")] - public IList Messages { get; set; } = Array.Empty(); - - [JsonPropertyName("index")] - public int Index { get; set; } = 0; -} - -[Experimental("SKEXP0010")] -[Obsolete("This class is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] -[SuppressMessage("Performance", "CA1812:Avoid uninstantiated internal classes", Justification = "Used 
for JSON deserialization")] -internal sealed class ChatWithDataStreamingMessage -{ - [JsonPropertyName("delta")] - public ChatWithDataStreamingDelta Delta { get; set; } = new(); - - [JsonPropertyName("end_turn")] - public bool EndTurn { get; set; } -} - -[Experimental("SKEXP0010")] -[Obsolete("This class is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] -internal sealed class ChatWithDataStreamingDelta -{ - [JsonPropertyName("role")] - public string? Role { get; set; } - - [JsonPropertyName("content")] - public string Content { get; set; } = string.Empty; -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Connectors.OpenAI.csproj b/dotnet/src/Connectors/Connectors.OpenAI/Connectors.OpenAI.csproj index f873d8d9cd29..30b637922494 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/Connectors.OpenAI.csproj +++ b/dotnet/src/Connectors/Connectors.OpenAI/Connectors.OpenAI.csproj @@ -10,18 +10,24 @@ true + + rc + + + - Semantic Kernel - OpenAI and Azure OpenAI connectors - Semantic Kernel connectors for OpenAI and Azure OpenAI. Contains clients for text generation, chat completion, embedding and DALL-E text to image. + Semantic Kernel - OpenAI connector + Semantic Kernel connectors for OpenAI. Contains clients for chat completion, embedding and DALL-E text to image. - + + @@ -29,6 +35,7 @@ - + + diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.AudioToText.cs b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.AudioToText.cs new file mode 100644 index 000000000000..48ddee6955c8 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.AudioToText.cs @@ -0,0 +1,83 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using OpenAI.Audio; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Base class for AI clients that provides common functionality for interacting with OpenAI services. +/// +internal partial class ClientCore +{ + /// + /// Generates an image with the provided configuration. + /// + /// Model identifier + /// Input audio to generate the text + /// Audio-to-text execution settings for the prompt + /// The to monitor for cancellation requests. The default is . + /// Url of the generated image + internal async Task> GetTextFromAudioContentsAsync( + string targetModel, + AudioContent input, + PromptExecutionSettings? executionSettings, + CancellationToken cancellationToken) + { + if (!input.CanRead) + { + throw new ArgumentException("The input audio content is not readable.", nameof(input)); + } + + OpenAIAudioToTextExecutionSettings audioExecutionSettings = OpenAIAudioToTextExecutionSettings.FromExecutionSettings(executionSettings)!; + AudioTranscriptionOptions? audioOptions = AudioOptionsFromExecutionSettings(audioExecutionSettings); + + Verify.ValidFilename(audioExecutionSettings?.Filename); + + using var memoryStream = new MemoryStream(input.Data!.Value.ToArray()); + + AudioTranscription responseData = (await RunRequestAsync(() => this.Client!.GetAudioClient(targetModel).TranscribeAudioAsync(memoryStream, audioExecutionSettings?.Filename, audioOptions)).ConfigureAwait(false)).Value; + + return [new(responseData.Text, targetModel, metadata: GetResponseMetadata(responseData))]; + } + + /// + /// Converts to type. + /// + /// Instance of . + /// Instance of . 
+ private static AudioTranscriptionOptions AudioOptionsFromExecutionSettings(OpenAIAudioToTextExecutionSettings executionSettings) + => new() + { + Granularities = AudioTimestampGranularities.Default, + Language = executionSettings.Language, + Prompt = executionSettings.Prompt, + Temperature = executionSettings.Temperature, + ResponseFormat = ConvertResponseFormat(executionSettings.ResponseFormat) + }; + + private static AudioTranscriptionFormat? ConvertResponseFormat(string? responseFormat) + { + return responseFormat switch + { + "json" => AudioTranscriptionFormat.Simple, + "verbose_json" => AudioTranscriptionFormat.Verbose, + "vtt" => AudioTranscriptionFormat.Vtt, + "srt" => AudioTranscriptionFormat.Srt, + null => null, + _ => throw new NotSupportedException($"The audio transcription format '{responseFormat}' is not supported."), + }; + } + + private static Dictionary GetResponseMetadata(AudioTranscription audioTranscription) + => new(3) + { + [nameof(audioTranscription.Language)] = audioTranscription.Language, + [nameof(audioTranscription.Duration)] = audioTranscription.Duration, + [nameof(audioTranscription.Segments)] = audioTranscription.Segments + }; +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs new file mode 100644 index 000000000000..1c2269a9f966 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs @@ -0,0 +1,1183 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.ClientModel; +using System.Collections.Generic; +using System.Diagnostics; +using System.Diagnostics.Metrics; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Diagnostics; +using OpenAI.Chat; +using OpenAIChatCompletion = OpenAI.Chat.ChatCompletion; + +#pragma warning disable CA2208 // Instantiate argument exceptions correctly + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Base class for AI clients that provides common functionality for interacting with OpenAI services. +/// +internal partial class ClientCore +{ + protected const string ModelProvider = "openai"; + protected record ToolCallingConfig(IList? Tools, ChatToolChoice? Choice, bool AutoInvoke); + + /// + /// The maximum number of auto-invokes that can be in-flight at any given time as part of the current + /// asynchronous chain of execution. + /// + /// + /// This is a fail-safe mechanism. If someone accidentally manages to set up execution settings in such a way that + /// auto-invocation is invoked recursively, and in particular where a prompt function is able to auto-invoke itself, + /// we could end up in an infinite loop. This const is a backstop against that happening. We should never come close + /// to this limit, but if we do, auto-invoke will be disabled for the current flow in order to prevent runaway execution. + /// With the current setup, the way this could possibly happen is if a prompt function is configured with built-in + /// execution settings that opt-in to auto-invocation of everything in the kernel, in which case the invocation of that + /// prompt function could advertize itself as a candidate for auto-invocation. 
We don't want to outright block that, + /// if that's something a developer has asked to do (e.g. it might be invoked with different arguments than its parent + /// was invoked with), but we do want to limit it. This limit is arbitrary and can be tweaked in the future and/or made + /// configurable should need arise. + /// + protected const int MaxInflightAutoInvokes = 128; + + /// Singleton tool used when tool call count drops to 0 but we need to supply tools to keep the service happy. + protected static readonly ChatTool s_nonInvocableFunctionTool = ChatTool.CreateFunctionTool("NonInvocableTool"); + + /// Tracking for . + protected static readonly AsyncLocal s_inflightAutoInvokes = new(); + + /// + /// Instance of for metrics. + /// + protected static readonly Meter s_meter = new("Microsoft.SemanticKernel.Connectors.OpenAI"); + + /// + /// Instance of to keep track of the number of prompt tokens used. + /// + protected static readonly Counter s_promptTokensCounter = + s_meter.CreateCounter( + name: "semantic_kernel.connectors.openai.tokens.prompt", + unit: "{token}", + description: "Number of prompt tokens used"); + + /// + /// Instance of to keep track of the number of completion tokens used. + /// + protected static readonly Counter s_completionTokensCounter = + s_meter.CreateCounter( + name: "semantic_kernel.connectors.openai.tokens.completion", + unit: "{token}", + description: "Number of completion tokens used"); + + /// + /// Instance of to keep track of the total number of tokens used. 
+ /// + protected static readonly Counter s_totalTokensCounter = + s_meter.CreateCounter( + name: "semantic_kernel.connectors.openai.tokens.total", + unit: "{token}", + description: "Number of tokens used"); + + protected virtual Dictionary GetChatCompletionMetadata(OpenAIChatCompletion completions) + { + return new Dictionary + { + { nameof(completions.Id), completions.Id }, + { nameof(completions.CreatedAt), completions.CreatedAt }, + { nameof(completions.SystemFingerprint), completions.SystemFingerprint }, + { nameof(completions.Usage), completions.Usage }, + + // Serialization of this struct behaves as an empty object {}, need to cast to string to avoid it. + { nameof(completions.FinishReason), completions.FinishReason.ToString() }, + { nameof(completions.ContentTokenLogProbabilities), completions.ContentTokenLogProbabilities }, + }; + } + + protected static Dictionary GetChatCompletionMetadata(StreamingChatCompletionUpdate completionUpdate) + { + return new Dictionary + { + { nameof(completionUpdate.Id), completionUpdate.Id }, + { nameof(completionUpdate.CreatedAt), completionUpdate.CreatedAt }, + { nameof(completionUpdate.SystemFingerprint), completionUpdate.SystemFingerprint }, + + // Serialization of this struct behaves as an empty object {}, need to cast to string to avoid it. + { nameof(completionUpdate.FinishReason), completionUpdate.FinishReason?.ToString() }, + }; + } + + /// + /// Generate a new chat message + /// + /// Model identifier + /// Chat history + /// Execution settings for the completion API. + /// The containing services, plugins, and other state for use throughout the operation. + /// Async cancellation token + /// Generated chat message in string format + internal async Task> GetChatMessageContentsAsync( + string targetModel, + ChatHistory chat, + PromptExecutionSettings? executionSettings, + Kernel? 
kernel, + CancellationToken cancellationToken = default) + { + Verify.NotNull(chat); + + if (this.Logger!.IsEnabled(LogLevel.Trace)) + { + this.Logger.LogTrace("ChatHistory: {ChatHistory}, Settings: {Settings}", + JsonSerializer.Serialize(chat), + JsonSerializer.Serialize(executionSettings)); + } + + // Convert the incoming execution settings to OpenAI settings. + OpenAIPromptExecutionSettings chatExecutionSettings = this.GetSpecializedExecutionSettings(executionSettings); + + ValidateMaxTokens(chatExecutionSettings.MaxTokens); + + for (int requestIndex = 0; ; requestIndex++) + { + var chatForRequest = CreateChatCompletionMessages(chatExecutionSettings, chat); + + var toolCallingConfig = this.GetToolCallingConfiguration(kernel, chatExecutionSettings, requestIndex); + + var chatOptions = this.CreateChatCompletionOptions(chatExecutionSettings, chat, toolCallingConfig, kernel); + + // Make the request. + OpenAIChatCompletion? chatCompletion = null; + OpenAIChatMessageContent chatMessageContent; + using (var activity = this.StartCompletionActivity(chat, chatExecutionSettings)) + { + try + { + chatCompletion = (await RunRequestAsync(() => this.Client!.GetChatClient(targetModel).CompleteChatAsync(chatForRequest, chatOptions, cancellationToken)).ConfigureAwait(false)).Value; + + this.LogUsage(chatCompletion.Usage); + } + catch (Exception ex) when (activity is not null) + { + activity.SetError(ex); + if (chatCompletion != null) + { + // Capture available metadata even if the operation failed. + activity + .SetResponseId(chatCompletion.Id) + .SetPromptTokenUsage(chatCompletion.Usage.InputTokens) + .SetCompletionTokenUsage(chatCompletion.Usage.OutputTokens); + } + throw; + } + + chatMessageContent = this.CreateChatMessageContent(chatCompletion, targetModel); + activity?.SetCompletionResponse([chatMessageContent], chatCompletion.Usage.InputTokens, chatCompletion.Usage.OutputTokens); + } + + // If we don't want to attempt to invoke any functions, just return the result. 
+ if (!toolCallingConfig.AutoInvoke) + { + return [chatMessageContent]; + } + + // Get our single result and extract the function call information. If this isn't a function call, or if it is + // but we're unable to find the function or extract the relevant information, just return the single result. + // Note that we don't check the FinishReason and instead check whether there are any tool calls, as the service + // may return a FinishReason of "stop" even if there are tool calls to be made, in particular if a required tool + // is specified. + if (chatCompletion.ToolCalls.Count == 0) + { + return [chatMessageContent]; + } + + if (this.Logger.IsEnabled(LogLevel.Debug)) + { + this.Logger.LogDebug("Tool requests: {Requests}", chatCompletion.ToolCalls.Count); + } + if (this.Logger.IsEnabled(LogLevel.Trace)) + { + this.Logger.LogTrace("Function call requests: {Requests}", string.Join(", ", chatCompletion.ToolCalls.OfType().Select(ftc => $"{ftc.FunctionName}({ftc.FunctionArguments})"))); + } + + // Add the result message to the caller's chat history; + // this is required for the service to understand the tool call responses. + chat.Add(chatMessageContent); + + // We must send back a response for every tool call, regardless of whether we successfully executed it or not. + // If we successfully execute it, we'll add the result. If we don't, we'll add an error. + for (int toolCallIndex = 0; toolCallIndex < chatMessageContent.ToolCalls.Count; toolCallIndex++) + { + ChatToolCall functionToolCall = chatMessageContent.ToolCalls[toolCallIndex]; + + // We currently only know about function tool calls. If it's anything else, we'll respond with an error. + if (functionToolCall.Kind != ChatToolCallKind.Function) + { + AddResponseMessage(chat, result: null, "Error: Tool call was not a function call.", functionToolCall, this.Logger); + continue; + } + + // Parse the function call arguments. + OpenAIFunctionToolCall? 
openAIFunctionToolCall; + try + { + openAIFunctionToolCall = new(functionToolCall); + } + catch (JsonException) + { + AddResponseMessage(chat, result: null, "Error: Function call arguments were invalid JSON.", functionToolCall, this.Logger); + continue; + } + + // Make sure the requested function is one we requested. If we're permitting any kernel function to be invoked, + // then we don't need to check this, as it'll be handled when we look up the function in the kernel to be able + // to invoke it. If we're permitting only a specific list of functions, though, then we need to explicitly check. + if (chatExecutionSettings.ToolCallBehavior?.AllowAnyRequestedKernelFunction is not true && + !IsRequestableTool(chatOptions, openAIFunctionToolCall)) + { + AddResponseMessage(chat, result: null, "Error: Function call request for a function that wasn't defined.", functionToolCall, this.Logger); + continue; + } + + // Find the function in the kernel and populate the arguments. + if (!kernel!.Plugins.TryGetFunctionAndArguments(openAIFunctionToolCall, out KernelFunction? function, out KernelArguments? functionArgs)) + { + AddResponseMessage(chat, result: null, "Error: Requested function could not be found.", functionToolCall, this.Logger); + continue; + } + + // Now, invoke the function, and add the resulting tool call message to the chat options. + FunctionResult functionResult = new(function) { Culture = kernel.Culture }; + AutoFunctionInvocationContext invocationContext = new(kernel, function, functionResult, chat, chatMessageContent) + { + Arguments = functionArgs, + RequestSequenceIndex = requestIndex, + FunctionSequenceIndex = toolCallIndex, + FunctionCount = chatMessageContent.ToolCalls.Count + }; + + s_inflightAutoInvokes.Value++; + try + { + invocationContext = await OnAutoFunctionInvocationAsync(kernel, invocationContext, async (context) => + { + // Check if filter requested termination. 
+ if (context.Terminate) + { + return; + } + + // Note that we explicitly do not use executionSettings here; those pertain to the all-up operation and not necessarily to any + // further calls made as part of this function invocation. In particular, we must not use function calling settings naively here, + // as the called function could in turn telling the model about itself as a possible candidate for invocation. + context.Result = await function.InvokeAsync(kernel, invocationContext.Arguments, cancellationToken: cancellationToken).ConfigureAwait(false); + }).ConfigureAwait(false); + } +#pragma warning disable CA1031 // Do not catch general exception types + catch (Exception e) +#pragma warning restore CA1031 // Do not catch general exception types + { + AddResponseMessage(chat, null, $"Error: Exception while invoking function. {e.Message}", functionToolCall, this.Logger); + continue; + } + finally + { + s_inflightAutoInvokes.Value--; + } + + // Apply any changes from the auto function invocation filters context to final result. + functionResult = invocationContext.Result; + + object functionResultValue = functionResult.GetValue() ?? string.Empty; + var stringResult = ProcessFunctionResult(functionResultValue, chatExecutionSettings.ToolCallBehavior); + + AddResponseMessage(chat, stringResult, errorMessage: null, functionToolCall, this.Logger); + + // If filter requested termination, returning latest function result. + if (invocationContext.Terminate) + { + if (this.Logger.IsEnabled(LogLevel.Debug)) + { + this.Logger.LogDebug("Filter requested termination of automatic function invocation."); + } + + return [chat.Last()]; + } + } + } + } + + internal async IAsyncEnumerable GetStreamingChatMessageContentsAsync( + string targetModel, + ChatHistory chat, + PromptExecutionSettings? executionSettings, + Kernel? 
kernel, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + Verify.NotNull(chat); + + if (this.Logger!.IsEnabled(LogLevel.Trace)) + { + this.Logger.LogTrace("ChatHistory: {ChatHistory}, Settings: {Settings}", + JsonSerializer.Serialize(chat), + JsonSerializer.Serialize(executionSettings)); + } + + OpenAIPromptExecutionSettings chatExecutionSettings = this.GetSpecializedExecutionSettings(executionSettings); + + ValidateMaxTokens(chatExecutionSettings.MaxTokens); + + StringBuilder? contentBuilder = null; + Dictionary? toolCallIdsByIndex = null; + Dictionary? functionNamesByIndex = null; + Dictionary? functionArgumentBuildersByIndex = null; + + for (int requestIndex = 0; ; requestIndex++) + { + var chatForRequest = CreateChatCompletionMessages(chatExecutionSettings, chat); + + var toolCallingConfig = this.GetToolCallingConfiguration(kernel, chatExecutionSettings, requestIndex); + + var chatOptions = this.CreateChatCompletionOptions(chatExecutionSettings, chat, toolCallingConfig, kernel); + + // Reset state + contentBuilder?.Clear(); + toolCallIdsByIndex?.Clear(); + functionNamesByIndex?.Clear(); + functionArgumentBuildersByIndex?.Clear(); + + // Stream the response. + IReadOnlyDictionary? metadata = null; + string? streamedName = null; + ChatMessageRole? streamedRole = default; + ChatFinishReason finishReason = default; + ChatToolCall[]? toolCalls = null; + FunctionCallContent[]? functionCallContents = null; + + using (var activity = this.StartCompletionActivity(chat, chatExecutionSettings)) + { + // Make the request. + AsyncCollectionResult response; + try + { + response = RunRequest(() => this.Client!.GetChatClient(targetModel).CompleteChatStreamingAsync(chatForRequest, chatOptions, cancellationToken)); + } + catch (Exception ex) when (activity is not null) + { + activity.SetError(ex); + throw; + } + + var responseEnumerator = response.ConfigureAwait(false).GetAsyncEnumerator(); + List? streamedContents = activity is not null ? 
[] : null; + try + { + while (true) + { + try + { + if (!await responseEnumerator.MoveNextAsync()) + { + break; + } + } + catch (Exception ex) when (activity is not null) + { + activity.SetError(ex); + throw; + } + + StreamingChatCompletionUpdate chatCompletionUpdate = responseEnumerator.Current; + metadata = GetChatCompletionMetadata(chatCompletionUpdate); + streamedRole ??= chatCompletionUpdate.Role; + //streamedName ??= update.AuthorName; + finishReason = chatCompletionUpdate.FinishReason ?? default; + + // If we're intending to invoke function calls, we need to consume that function call information. + if (toolCallingConfig.AutoInvoke) + { + foreach (var contentPart in chatCompletionUpdate.ContentUpdate) + { + if (contentPart.Kind == ChatMessageContentPartKind.Text) + { + (contentBuilder ??= new()).Append(contentPart.Text); + } + } + + OpenAIFunctionToolCall.TrackStreamingToolingUpdate(chatCompletionUpdate.ToolCallUpdates, ref toolCallIdsByIndex, ref functionNamesByIndex, ref functionArgumentBuildersByIndex); + } + + var openAIStreamingChatMessageContent = new OpenAIStreamingChatMessageContent(chatCompletionUpdate, 0, targetModel, metadata); + + if (openAIStreamingChatMessageContent.ToolCallUpdates is not null) + { + foreach (var functionCallUpdate in openAIStreamingChatMessageContent.ToolCallUpdates!) + { + // Using the code below to distinguish and skip non - function call related updates. + // The Kind property of updates can't be reliably used because it's only initialized for the first update. 
+ if (string.IsNullOrEmpty(functionCallUpdate.Id) && + string.IsNullOrEmpty(functionCallUpdate.FunctionName) && + string.IsNullOrEmpty(functionCallUpdate.FunctionArgumentsUpdate)) + { + continue; + } + + openAIStreamingChatMessageContent.Items.Add(new StreamingFunctionCallUpdateContent( + callId: functionCallUpdate.Id, + name: functionCallUpdate.FunctionName, + arguments: functionCallUpdate.FunctionArgumentsUpdate, + functionCallIndex: functionCallUpdate.Index)); + } + } + streamedContents?.Add(openAIStreamingChatMessageContent); + yield return openAIStreamingChatMessageContent; + } + + // Translate all entries into ChatCompletionsFunctionToolCall instances. + toolCalls = OpenAIFunctionToolCall.ConvertToolCallUpdatesToFunctionToolCalls( + ref toolCallIdsByIndex, ref functionNamesByIndex, ref functionArgumentBuildersByIndex); + + // Translate all entries into FunctionCallContent instances for diagnostics purposes. + functionCallContents = this.GetFunctionCallContents(toolCalls).ToArray(); + } + finally + { + activity?.EndStreaming(streamedContents, ModelDiagnostics.IsSensitiveEventsEnabled() ? functionCallContents : null); + await responseEnumerator.DisposeAsync(); + } + } + + // If we don't have a function to invoke, we're done. + // Note that we don't check the FinishReason and instead check whether there are any tool calls, as the service + // may return a FinishReason of "stop" even if there are tool calls to be made, in particular if a required tool + // is specified. + if (!toolCallingConfig.AutoInvoke || + toolCallIdsByIndex is not { Count: > 0 }) + { + yield break; + } + + // Get any response content that was streamed. + string content = contentBuilder?.ToString() ?? 
string.Empty; + + // Log the requests + if (this.Logger.IsEnabled(LogLevel.Trace)) + { + this.Logger.LogTrace("Function call requests: {Requests}", string.Join(", ", toolCalls.Select(fcr => $"{fcr.FunctionName}({fcr.FunctionName})"))); + } + else if (this.Logger.IsEnabled(LogLevel.Debug)) + { + this.Logger.LogDebug("Function call requests: {Requests}", toolCalls.Length); + } + + // Add the result message to the caller's chat history; this is required for the service to understand the tool call responses. + var chatMessageContent = this.CreateChatMessageContent(streamedRole ?? default, content, toolCalls, functionCallContents, metadata, streamedName); + chat.Add(chatMessageContent); + + // Respond to each tooling request. + for (int toolCallIndex = 0; toolCallIndex < toolCalls.Length; toolCallIndex++) + { + ChatToolCall toolCall = toolCalls[toolCallIndex]; + + // We currently only know about function tool calls. If it's anything else, we'll respond with an error. + if (string.IsNullOrEmpty(toolCall.FunctionName)) + { + AddResponseMessage(chat, result: null, "Error: Tool call was not a function call.", toolCall, this.Logger); + continue; + } + + // Parse the function call arguments. + OpenAIFunctionToolCall? openAIFunctionToolCall; + try + { + openAIFunctionToolCall = new(toolCall); + } + catch (JsonException) + { + AddResponseMessage(chat, result: null, "Error: Function call arguments were invalid JSON.", toolCall, this.Logger); + continue; + } + + // Make sure the requested function is one we requested. If we're permitting any kernel function to be invoked, + // then we don't need to check this, as it'll be handled when we look up the function in the kernel to be able + // to invoke it. If we're permitting only a specific list of functions, though, then we need to explicitly check. 
+ if (chatExecutionSettings.ToolCallBehavior?.AllowAnyRequestedKernelFunction is not true && + !IsRequestableTool(chatOptions, openAIFunctionToolCall)) + { + AddResponseMessage(chat, result: null, "Error: Function call request for a function that wasn't defined.", toolCall, this.Logger); + continue; + } + + // Find the function in the kernel and populate the arguments. + if (!kernel!.Plugins.TryGetFunctionAndArguments(openAIFunctionToolCall, out KernelFunction? function, out KernelArguments? functionArgs)) + { + AddResponseMessage(chat, result: null, "Error: Requested function could not be found.", toolCall, this.Logger); + continue; + } + + // Now, invoke the function, and add the resulting tool call message to the chat options. + FunctionResult functionResult = new(function) { Culture = kernel.Culture }; + AutoFunctionInvocationContext invocationContext = new(kernel, function, functionResult, chat, chatMessageContent) + { + Arguments = functionArgs, + RequestSequenceIndex = requestIndex, + FunctionSequenceIndex = toolCallIndex, + FunctionCount = toolCalls.Length + }; + + s_inflightAutoInvokes.Value++; + try + { + invocationContext = await OnAutoFunctionInvocationAsync(kernel, invocationContext, async (context) => + { + // Check if filter requested termination. + if (context.Terminate) + { + return; + } + + // Note that we explicitly do not use executionSettings here; those pertain to the all-up operation and not necessarily to any + // further calls made as part of this function invocation. In particular, we must not use function calling settings naively here, + // as the called function could in turn telling the model about itself as a possible candidate for invocation. 
+ context.Result = await function.InvokeAsync(kernel, invocationContext.Arguments, cancellationToken: cancellationToken).ConfigureAwait(false); + }).ConfigureAwait(false); + } +#pragma warning disable CA1031 // Do not catch general exception types + catch (Exception e) +#pragma warning restore CA1031 // Do not catch general exception types + { + AddResponseMessage(chat, result: null, $"Error: Exception while invoking function. {e.Message}", toolCall, this.Logger); + continue; + } + finally + { + s_inflightAutoInvokes.Value--; + } + + // Apply any changes from the auto function invocation filters context to final result. + functionResult = invocationContext.Result; + + object functionResultValue = functionResult.GetValue() ?? string.Empty; + var stringResult = ProcessFunctionResult(functionResultValue, chatExecutionSettings.ToolCallBehavior); + + AddResponseMessage(chat, stringResult, errorMessage: null, toolCall, this.Logger); + + // If filter requested termination, returning latest function result and breaking request iteration loop. + if (invocationContext.Terminate) + { + if (this.Logger.IsEnabled(LogLevel.Debug)) + { + this.Logger.LogDebug("Filter requested termination of automatic function invocation."); + } + + var lastChatMessage = chat.Last(); + + yield return new OpenAIStreamingChatMessageContent(lastChatMessage.Role, lastChatMessage.Content); + yield break; + } + } + } + } + + internal async IAsyncEnumerable GetChatAsTextStreamingContentsAsync( + string targetModel, + string prompt, + PromptExecutionSettings? executionSettings, + Kernel? 
kernel, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + OpenAIPromptExecutionSettings chatSettings = this.GetSpecializedExecutionSettings(executionSettings); + ChatHistory chat = CreateNewChat(prompt, chatSettings); + + await foreach (var chatUpdate in this.GetStreamingChatMessageContentsAsync(targetModel, chat, executionSettings, kernel, cancellationToken).ConfigureAwait(false)) + { + yield return new StreamingTextContent(chatUpdate.Content, chatUpdate.ChoiceIndex, chatUpdate.ModelId, chatUpdate, Encoding.UTF8, chatUpdate.Metadata); + } + } + + internal async Task> GetChatAsTextContentsAsync( + string model, + string text, + PromptExecutionSettings? executionSettings, + Kernel? kernel, + CancellationToken cancellationToken = default) + { + OpenAIPromptExecutionSettings chatSettings = this.GetSpecializedExecutionSettings(executionSettings); + + ChatHistory chat = CreateNewChat(text, chatSettings); + return (await this.GetChatMessageContentsAsync(model, chat, chatSettings, kernel, cancellationToken).ConfigureAwait(false)) + .Select(chat => new TextContent(chat.Content, chat.ModelId, chat.Content, Encoding.UTF8, chat.Metadata)) + .ToList(); + } + + /// + /// Returns a specialized execution settings object for the OpenAI chat completion service. + /// + /// Potential execution settings infer specialized. + /// Specialized settings + protected virtual OpenAIPromptExecutionSettings GetSpecializedExecutionSettings(PromptExecutionSettings? executionSettings) + => OpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings); + + /// + /// Start a chat completion activity for a given model. + /// The activity will be tagged with the a set of attributes specified by the semantic conventions. + /// + protected virtual Activity? 
StartCompletionActivity(ChatHistory chatHistory, PromptExecutionSettings settings) + => ModelDiagnostics.StartCompletionActivity(this.Endpoint, this.ModelId, ModelProvider, chatHistory, settings); + + protected virtual ChatCompletionOptions CreateChatCompletionOptions( + OpenAIPromptExecutionSettings executionSettings, + ChatHistory chatHistory, + ToolCallingConfig toolCallingConfig, + Kernel? kernel) + { + var options = new ChatCompletionOptions + { + MaxTokens = executionSettings.MaxTokens, + Temperature = (float?)executionSettings.Temperature, + TopP = (float?)executionSettings.TopP, + FrequencyPenalty = (float?)executionSettings.FrequencyPenalty, + PresencePenalty = (float?)executionSettings.PresencePenalty, + Seed = executionSettings.Seed, + EndUserId = executionSettings.User, + TopLogProbabilityCount = executionSettings.TopLogprobs, + IncludeLogProbabilities = executionSettings.Logprobs, + }; + + var responseFormat = GetResponseFormat(executionSettings); + if (responseFormat is not null) + { + options.ResponseFormat = responseFormat; + } + + if (toolCallingConfig.Choice is not null) + { + options.ToolChoice = toolCallingConfig.Choice; + } + + if (toolCallingConfig.Tools is { Count: > 0 } tools) + { + options.Tools.AddRange(tools); + } + + if (executionSettings.TokenSelectionBiases is not null) + { + foreach (var keyValue in executionSettings.TokenSelectionBiases) + { + options.LogitBiases.Add(keyValue.Key, keyValue.Value); + } + } + + if (executionSettings.StopSequences is { Count: > 0 }) + { + foreach (var s in executionSettings.StopSequences) + { + options.StopSequences.Add(s); + } + } + + return options; + } + + /// + /// Retrieves the response format based on the provided settings. + /// + /// Execution settings. + /// Chat response format + protected static ChatResponseFormat? 
GetResponseFormat(OpenAIPromptExecutionSettings executionSettings) + { + switch (executionSettings.ResponseFormat) + { + case ChatResponseFormat formatObject: + // If the response format is an OpenAI SDK ChatCompletionsResponseFormat, just pass it along. + return formatObject; + case string formatString: + // If the response format is a string, map the ones we know about, and ignore the rest. + switch (formatString) + { + case "json_object": + return ChatResponseFormat.JsonObject; + + case "text": + return ChatResponseFormat.Text; + } + break; + + case JsonElement formatElement: + // This is a workaround for a type mismatch when deserializing a JSON into an object? type property. + // Handling only string formatElement. + if (formatElement.ValueKind == JsonValueKind.String) + { + string formatString = formatElement.GetString() ?? ""; + switch (formatString) + { + case "json_object": + return ChatResponseFormat.JsonObject; + + case "text": + return ChatResponseFormat.Text; + } + } + break; + } + + return null; + } + + /// Checks if a tool call is for a function that was defined. + private static bool IsRequestableTool(ChatCompletionOptions options, OpenAIFunctionToolCall ftc) + { + IList tools = options.Tools; + for (int i = 0; i < tools.Count; i++) + { + if (tools[i].Kind == ChatToolKind.Function && + string.Equals(tools[i].FunctionName, ftc.FullyQualifiedName, StringComparison.OrdinalIgnoreCase)) + { + return true; + } + } + + return false; + } + + /// + /// Create a new empty chat instance + /// + /// Optional chat instructions for the AI service + /// Execution settings + /// Chat object + private static ChatHistory CreateNewChat(string? text = null, OpenAIPromptExecutionSettings? 
executionSettings = null) + { + var chat = new ChatHistory(); + + // If settings is not provided, create a new chat with the text as the system prompt + AuthorRole textRole = AuthorRole.System; + + if (!string.IsNullOrWhiteSpace(executionSettings?.ChatSystemPrompt)) + { + chat.AddSystemMessage(executionSettings!.ChatSystemPrompt!); + textRole = AuthorRole.User; + } + + if (!string.IsNullOrWhiteSpace(text)) + { + chat.AddMessage(textRole, text!); + } + + return chat; + } + + private static List CreateChatCompletionMessages(OpenAIPromptExecutionSettings executionSettings, ChatHistory chatHistory) + { + List messages = []; + + if (!string.IsNullOrWhiteSpace(executionSettings.ChatSystemPrompt) && !chatHistory.Any(m => m.Role == AuthorRole.System)) + { + messages.Add(new SystemChatMessage(executionSettings.ChatSystemPrompt)); + } + + foreach (var message in chatHistory) + { + messages.AddRange(CreateRequestMessages(message, executionSettings.ToolCallBehavior)); + } + + return messages; + } + + private static List CreateRequestMessages(ChatMessageContent message, ToolCallBehavior? toolCallBehavior) + { + if (message.Role == AuthorRole.System) + { + return [new SystemChatMessage(message.Content) { ParticipantName = message.AuthorName }]; + } + + if (message.Role == AuthorRole.Tool) + { + // Handling function results represented by the TextContent type. + // Example: new ChatMessageContent(AuthorRole.Tool, content, metadata: new Dictionary(1) { { OpenAIChatMessageContent.ToolIdProperty, toolCall.Id } }) + if (message.Metadata?.TryGetValue(OpenAIChatMessageContent.ToolIdProperty, out object? toolId) is true && + toolId?.ToString() is string toolIdString) + { + return [new ToolChatMessage(toolIdString, message.Content)]; + } + + // Handling function results represented by the FunctionResultContent type. + // Example: new ChatMessageContent(AuthorRole.Tool, items: new ChatMessageContentItemCollection { new FunctionResultContent(functionCall, result) }) + List? 
toolMessages = null; + foreach (var item in message.Items) + { + if (item is not FunctionResultContent resultContent) + { + continue; + } + + toolMessages ??= []; + + if (resultContent.Result is Exception ex) + { + toolMessages.Add(new ToolChatMessage(resultContent.CallId, $"Error: Exception while invoking function. {ex.Message}")); + continue; + } + + var stringResult = ProcessFunctionResult(resultContent.Result ?? string.Empty, toolCallBehavior); + + toolMessages.Add(new ToolChatMessage(resultContent.CallId, stringResult ?? string.Empty)); + } + + if (toolMessages is not null) + { + return toolMessages; + } + + throw new NotSupportedException("No function result provided in the tool message."); + } + + if (message.Role == AuthorRole.User) + { + if (message.Items is { Count: 1 } && message.Items.FirstOrDefault() is TextContent textContent) + { + return [new UserChatMessage(textContent.Text) { ParticipantName = message.AuthorName }]; + } + + return [new UserChatMessage(message.Items.Select(static (KernelContent item) => (ChatMessageContentPart)(item switch + { + TextContent textContent => ChatMessageContentPart.CreateTextMessageContentPart(textContent.Text), + ImageContent imageContent => GetImageContentItem(imageContent), + _ => throw new NotSupportedException($"Unsupported chat message content type '{item.GetType()}'.") + }))) + { ParticipantName = message.AuthorName }]; + } + + if (message.Role == AuthorRole.Assistant) + { + var toolCalls = new List(); + + // Handling function calls supplied via either: + // ChatCompletionsToolCall.ToolCalls collection items or + // ChatMessageContent.Metadata collection item with 'ChatResponseMessage.FunctionToolCalls' key. + IEnumerable? tools = (message as OpenAIChatMessageContent)?.ToolCalls; + if (tools is null && message.Metadata?.TryGetValue(OpenAIChatMessageContent.FunctionToolCallsProperty, out object? 
toolCallsObject) is true) + { + tools = toolCallsObject as IEnumerable; + if (tools is null && toolCallsObject is JsonElement { ValueKind: JsonValueKind.Array } array) + { + int length = array.GetArrayLength(); + var ftcs = new List(length); + for (int i = 0; i < length; i++) + { + JsonElement e = array[i]; + if (e.TryGetProperty("Id", out JsonElement id) && + e.TryGetProperty("Name", out JsonElement name) && + e.TryGetProperty("Arguments", out JsonElement arguments) && + id.ValueKind == JsonValueKind.String && + name.ValueKind == JsonValueKind.String && + arguments.ValueKind == JsonValueKind.String) + { + ftcs.Add(ChatToolCall.CreateFunctionToolCall(id.GetString()!, name.GetString()!, arguments.GetString()!)); + } + } + tools = ftcs; + } + } + + if (tools is not null) + { + toolCalls.AddRange(tools); + } + + // Handling function calls supplied via ChatMessageContent.Items collection elements of the FunctionCallContent type. + HashSet? functionCallIds = null; + foreach (var item in message.Items) + { + if (item is not FunctionCallContent callRequest) + { + continue; + } + + functionCallIds ??= new HashSet(toolCalls.Select(t => t.Id)); + + if (callRequest.Id is null || functionCallIds.Contains(callRequest.Id)) + { + continue; + } + + var argument = JsonSerializer.Serialize(callRequest.Arguments); + + toolCalls.Add(ChatToolCall.CreateFunctionToolCall(callRequest.Id, FunctionName.ToFullyQualifiedName(callRequest.FunctionName, callRequest.PluginName, OpenAIFunction.NameSeparator), argument ?? string.Empty)); + } + + // This check is necessary to prevent an exception that will be thrown if the toolCalls collection is empty. 
+ // HTTP 400 (invalid_request_error:) [] should be non-empty - 'messages.3.tool_calls' + if (toolCalls.Count == 0) + { + return [new AssistantChatMessage(message.Content) { ParticipantName = message.AuthorName }]; + } + + return [new AssistantChatMessage(toolCalls, message.Content) { ParticipantName = message.AuthorName }]; + } + + throw new NotSupportedException($"Role {message.Role} is not supported."); + } + + private static ChatMessageContentPart GetImageContentItem(ImageContent imageContent) + { + if (imageContent.Data is { IsEmpty: false } data) + { + return ChatMessageContentPart.CreateImageMessageContentPart(BinaryData.FromBytes(data), imageContent.MimeType); + } + + if (imageContent.Uri is not null) + { + return ChatMessageContentPart.CreateImageMessageContentPart(imageContent.Uri); + } + + throw new ArgumentException($"{nameof(ImageContent)} must have either Data or a Uri."); + } + + private OpenAIChatMessageContent CreateChatMessageContent(OpenAIChatCompletion completion, string targetModel) + { + var message = new OpenAIChatMessageContent(completion, targetModel, this.GetChatCompletionMetadata(completion)); + + message.Items.AddRange(this.GetFunctionCallContents(completion.ToolCalls)); + + return message; + } + + private OpenAIChatMessageContent CreateChatMessageContent(ChatMessageRole chatRole, string content, ChatToolCall[] toolCalls, FunctionCallContent[]? functionCalls, IReadOnlyDictionary? metadata, string? authorName) + { + var message = new OpenAIChatMessageContent(chatRole, content, this.ModelId, toolCalls, metadata) + { + AuthorName = authorName, + }; + + if (functionCalls is not null) + { + message.Items.AddRange(functionCalls); + } + + return message; + } + + private List GetFunctionCallContents(IEnumerable toolCalls) + { + List result = []; + + foreach (var toolCall in toolCalls) + { + // Adding items of 'FunctionCallContent' type to the 'Items' collection even though the function calls are available via the 'ToolCalls' property. 
+ // This allows consumers to work with functions in an LLM-agnostic way. + if (toolCall.Kind == ChatToolCallKind.Function) + { + Exception? exception = null; + KernelArguments? arguments = null; + try + { + arguments = JsonSerializer.Deserialize(toolCall.FunctionArguments); + if (arguments is not null) + { + // Iterate over copy of the names to avoid mutating the dictionary while enumerating it + var names = arguments.Names.ToArray(); + foreach (var name in names) + { + arguments[name] = arguments[name]?.ToString(); + } + } + } + catch (JsonException ex) + { + exception = new KernelException("Error: Function call arguments were invalid JSON.", ex); + + if (this.Logger!.IsEnabled(LogLevel.Debug)) + { + this.Logger.LogDebug(ex, "Failed to deserialize function arguments ({FunctionName}/{FunctionId}).", toolCall.FunctionName, toolCall.Id); + } + } + + var functionName = FunctionName.Parse(toolCall.FunctionName, OpenAIFunction.NameSeparator); + + var functionCallContent = new FunctionCallContent( + functionName: functionName.Name, + pluginName: functionName.PluginName, + id: toolCall.Id, + arguments: arguments) + { + InnerContent = toolCall, + Exception = exception + }; + + result.Add(functionCallContent); + } + } + + return result; + } + + private static void AddResponseMessage(ChatHistory chat, string? result, string? errorMessage, ChatToolCall toolCall, ILogger logger) + { + // Log any error + if (errorMessage is not null && logger.IsEnabled(LogLevel.Debug)) + { + Debug.Assert(result is null); + logger.LogDebug("Failed to handle tool request ({ToolId}). {Error}", toolCall.Id, errorMessage); + } + + result ??= errorMessage ?? string.Empty; + + // Add the tool response message to the chat history. 
+ var message = new ChatMessageContent(role: AuthorRole.Tool, content: result, metadata: new Dictionary { { OpenAIChatMessageContent.ToolIdProperty, toolCall.Id } }); + + if (toolCall.Kind == ChatToolCallKind.Function) + { + // Add an item of type FunctionResultContent to the ChatMessageContent.Items collection in addition to the function result stored as a string in the ChatMessageContent.Content property. + // This will enable migration to the new function calling model and facilitate the deprecation of the current one in the future. + var functionName = FunctionName.Parse(toolCall.FunctionName, OpenAIFunction.NameSeparator); + message.Items.Add(new FunctionResultContent(functionName.Name, functionName.PluginName, toolCall.Id, result)); + } + + chat.Add(message); + } + + private static void ValidateMaxTokens(int? maxTokens) + { + if (maxTokens.HasValue && maxTokens < 1) + { + throw new ArgumentException($"MaxTokens {maxTokens} is not valid, the value must be greater than zero"); + } + } + + /// + /// Captures usage details, including token information. + /// + /// Instance of with token usage details. + private void LogUsage(ChatTokenUsage usage) + { + if (usage is null) + { + this.Logger!.LogDebug("Token usage information unavailable."); + return; + } + + if (this.Logger!.IsEnabled(LogLevel.Information)) + { + this.Logger.LogInformation( + "Prompt tokens: {InputTokens}. Completion tokens: {OutputTokens}. Total tokens: {TotalTokens}.", + usage.InputTokens, usage.OutputTokens, usage.TotalTokens); + } + + s_promptTokensCounter.Add(usage.InputTokens); + s_completionTokensCounter.Add(usage.OutputTokens); + s_totalTokensCounter.Add(usage.TotalTokens); + } + + /// + /// Processes the function result. + /// + /// The result of the function call. + /// The ToolCallBehavior object containing optional settings like JsonSerializerOptions.TypeInfoResolver. + /// A string representation of the function result. + private static string? 
ProcessFunctionResult(object functionResult, ToolCallBehavior? toolCallBehavior) + { + if (functionResult is string stringResult) + { + return stringResult; + } + + // This is an optimization to use ChatMessageContent content directly + // without unnecessary serialization of the whole message content class. + if (functionResult is ChatMessageContent chatMessageContent) + { + return chatMessageContent.ToString(); + } + + // For polymorphic serialization of unknown in advance child classes of the KernelContent class, + // a corresponding JsonTypeInfoResolver should be provided via the JsonSerializerOptions.TypeInfoResolver property. + // For more details about the polymorphic serialization, see the article at: + // https://learn.microsoft.com/en-us/dotnet/standard/serialization/system-text-json/polymorphism?pivots=dotnet-8-0 +#pragma warning disable CS0618 // Type or member is obsolete + return JsonSerializer.Serialize(functionResult, toolCallBehavior?.ToolCallResultSerializerOptions); +#pragma warning restore CS0618 // Type or member is obsolete + } + + /// + /// Executes auto function invocation filters and/or function itself. + /// This method can be moved to when auto function invocation logic will be extracted to common place. + /// + private static async Task OnAutoFunctionInvocationAsync( + Kernel kernel, + AutoFunctionInvocationContext context, + Func functionCallCallback) + { + await InvokeFilterOrFunctionAsync(kernel.AutoFunctionInvocationFilters, functionCallCallback, context).ConfigureAwait(false); + + return context; + } + + /// + /// This method will execute auto function invocation filters and function recursively. + /// If there are no registered filters, just function will be executed. + /// If there are registered filters, filter on position will be executed. + /// Second parameter of filter is callback. It can be either filter on + 1 position or function if there are no remaining filters to execute. 
+ /// Function will be always executed as last step after all filters. + /// + private static async Task InvokeFilterOrFunctionAsync( + IList? autoFunctionInvocationFilters, + Func functionCallCallback, + AutoFunctionInvocationContext context, + int index = 0) + { + if (autoFunctionInvocationFilters is { Count: > 0 } && index < autoFunctionInvocationFilters.Count) + { + await autoFunctionInvocationFilters[index].OnAutoFunctionInvocationAsync(context, + (context) => InvokeFilterOrFunctionAsync(autoFunctionInvocationFilters, functionCallCallback, context, index + 1)).ConfigureAwait(false); + } + else + { + await functionCallCallback(context).ConfigureAwait(false); + } + } + + private ToolCallingConfig GetToolCallingConfiguration(Kernel? kernel, OpenAIPromptExecutionSettings executionSettings, int requestIndex) + { + if (executionSettings.ToolCallBehavior is null) + { + return new ToolCallingConfig(Tools: null, Choice: null, AutoInvoke: false); + } + + if (requestIndex >= executionSettings.ToolCallBehavior.MaximumUseAttempts) + { + // Don't add any tools as we've reached the maximum attempts limit. + if (this.Logger!.IsEnabled(LogLevel.Debug)) + { + this.Logger.LogDebug("Maximum use ({MaximumUse}) reached; removing the tool.", executionSettings.ToolCallBehavior!.MaximumUseAttempts); + } + + return new ToolCallingConfig(Tools: [s_nonInvocableFunctionTool], Choice: ChatToolChoice.None, AutoInvoke: false); + } + + var (tools, choice) = executionSettings.ToolCallBehavior.ConfigureOptions(kernel); + + bool autoInvoke = kernel is not null && + executionSettings.ToolCallBehavior.MaximumAutoInvokeAttempts > 0 && + s_inflightAutoInvokes.Value < MaxInflightAutoInvokes; + + // Disable auto invocation if we've exceeded the allowed limit. 
+ if (requestIndex >= executionSettings.ToolCallBehavior.MaximumAutoInvokeAttempts) + { + autoInvoke = false; + if (this.Logger!.IsEnabled(LogLevel.Debug)) + { + this.Logger.LogDebug("Maximum auto-invoke ({MaximumAutoInvoke}) reached.", executionSettings.ToolCallBehavior!.MaximumAutoInvokeAttempts); + } + } + + return new ToolCallingConfig( + Tools: tools ?? [s_nonInvocableFunctionTool], + Choice: choice ?? ChatToolChoice.None, + AutoInvoke: autoInvoke); + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.Embeddings.cs b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.Embeddings.cs new file mode 100644 index 000000000000..1476d0b15158 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.Embeddings.cs @@ -0,0 +1,58 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.ClientModel; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using OpenAI.Embeddings; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Base class for AI clients that provides common functionality for interacting with OpenAI services. +/// +internal partial class ClientCore +{ + /// + /// Generates an embedding from the given . + /// + /// Target model to generate embeddings from + /// List of strings to generate embeddings for + /// The containing services, plugins, and other state for use throughout the operation. + /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. + /// The to monitor for cancellation requests. The default is . + /// List of embeddings + internal async Task>> GetEmbeddingsAsync( + string targetModel, + IList data, + Kernel? kernel, + int? 
dimensions, + CancellationToken cancellationToken) + { + var result = new List>(data.Count); + + if (data.Count > 0) + { + var embeddingsOptions = new EmbeddingGenerationOptions() + { + Dimensions = dimensions + }; + + ClientResult response = await RunRequestAsync(() => this.Client!.GetEmbeddingClient(targetModel).GenerateEmbeddingsAsync(data, embeddingsOptions, cancellationToken)).ConfigureAwait(false); + var embeddings = response.Value; + + if (embeddings.Count != data.Count) + { + throw new KernelException($"Expected {data.Count} text embedding(s), but received {embeddings.Count}"); + } + + for (var i = 0; i < embeddings.Count; i++) + { + result.Add(embeddings[i].Vector); + } + } + + return result; + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.TextToAudio.cs b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.TextToAudio.cs new file mode 100644 index 000000000000..1a34fe7a0230 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.TextToAudio.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.ClientModel; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using OpenAI.Audio; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Base class for AI clients that provides common functionality for interacting with OpenAI services. +/// +internal partial class ClientCore +{ + /// + /// Generates an image with the provided configuration. + /// + /// Model identifier + /// Prompt to generate the image + /// Text to Audio execution settings for the prompt + /// The to monitor for cancellation requests. The default is . + /// Url of the generated image + internal async Task> GetAudioContentsAsync( + string targetModel, + string prompt, + PromptExecutionSettings? 
executionSettings, + CancellationToken cancellationToken) + { + Verify.NotNullOrWhiteSpace(prompt); + + OpenAITextToAudioExecutionSettings audioExecutionSettings = OpenAITextToAudioExecutionSettings.FromExecutionSettings(executionSettings); + + var (responseFormat, mimeType) = GetGeneratedSpeechFormatAndMimeType(audioExecutionSettings.ResponseFormat); + + SpeechGenerationOptions options = new() + { + ResponseFormat = responseFormat, + Speed = audioExecutionSettings.Speed, + }; + + ClientResult response = await RunRequestAsync(() => this.Client!.GetAudioClient(targetModel).GenerateSpeechAsync(prompt, GetGeneratedSpeechVoice(audioExecutionSettings?.Voice), options, cancellationToken)).ConfigureAwait(false); + + return [new AudioContent(response.Value.ToArray(), mimeType)]; + } + + private static GeneratedSpeechVoice GetGeneratedSpeechVoice(string? voice) + => voice?.ToUpperInvariant() switch + { + "ALLOY" => GeneratedSpeechVoice.Alloy, + "ECHO" => GeneratedSpeechVoice.Echo, + "FABLE" => GeneratedSpeechVoice.Fable, + "ONYX" => GeneratedSpeechVoice.Onyx, + "NOVA" => GeneratedSpeechVoice.Nova, + "SHIMMER" => GeneratedSpeechVoice.Shimmer, + _ => throw new NotSupportedException($"The voice '{voice}' is not supported."), + }; + + private static (GeneratedSpeechFormat? Format, string? MimeType) GetGeneratedSpeechFormatAndMimeType(string? 
format) + => format?.ToUpperInvariant() switch + { + "WAV" => (GeneratedSpeechFormat.Wav, "audio/wav"), + "MP3" => (GeneratedSpeechFormat.Mp3, "audio/mpeg"), + "OPUS" => (GeneratedSpeechFormat.Opus, "audio/opus"), + "FLAC" => (GeneratedSpeechFormat.Flac, "audio/flac"), + "AAC" => (GeneratedSpeechFormat.Aac, "audio/aac"), + "PCM" => (GeneratedSpeechFormat.Pcm, "audio/l16"), + null => (null, null), + _ => throw new NotSupportedException($"The format '{format}' is not supported.") + }; +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.TextToImage.cs b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.TextToImage.cs new file mode 100644 index 000000000000..1cb9c5993eae --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.TextToImage.cs @@ -0,0 +1,50 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.ClientModel; +using System.Threading; +using System.Threading.Tasks; +using OpenAI.Images; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Base class for AI clients that provides common functionality for interacting with OpenAI services. +/// +internal partial class ClientCore +{ + /// + /// Generates an image with the provided configuration. + /// + /// Model identifier + /// Prompt to generate the image + /// Width of the image + /// Height of the image + /// The to monitor for cancellation requests. The default is . + /// Url of the generated image + internal async Task GenerateImageAsync( + string? targetModel, + string prompt, + int width, + int height, + CancellationToken cancellationToken) + { + Verify.NotNullOrWhiteSpace(prompt); + + var size = new GeneratedImageSize(width, height); + + var imageOptions = new ImageGenerationOptions() + { + Size = size, + ResponseFormat = GeneratedImageFormat.Uri + }; + + // The model is not required by the OpenAI API and defaults to the DALL-E 2 server-side - https://platform.openai.com/docs/api-reference/images/create#images-create-model. 
+ // However, considering that the model is required by the OpenAI SDK and the ModelId property is optional, it defaults to DALL-E 2 in the line below. + targetModel = string.IsNullOrEmpty(targetModel) ? "dall-e-2" : targetModel!; + + ClientResult response = await RunRequestAsync(() => this.Client!.GetImageClient(targetModel).GenerateImageAsync(prompt, imageOptions, cancellationToken)).ConfigureAwait(false); + var generatedImage = response.Value; + + return generatedImage.ImageUri?.ToString() ?? throw new KernelException("The generated image is not in url format"); + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.cs b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.cs new file mode 100644 index 000000000000..271aa2321ea2 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.cs @@ -0,0 +1,249 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.ClientModel; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Net.Http; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.Services; +using OpenAI; + +#pragma warning disable CA2208 // Instantiate argument exceptions correctly + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Base class for AI clients that provides common functionality for interacting with OpenAI services. +/// +internal partial class ClientCore +{ + /// + /// White space constant. + /// + private const string SingleSpace = " "; + + /// + /// Gets the attribute name used to store the organization in the dictionary. + /// + internal const string OrganizationKey = "Organization"; + + /// + /// Default OpenAI API endpoint. 
+ /// + private const string OpenAIEndpoint = "https://api.openai.com/"; + + /// + /// Identifier of the default model to use + /// + protected internal string ModelId { get; init; } = string.Empty; + + /// + /// Non-default endpoint for OpenAI API. + /// + protected internal Uri? Endpoint { get; init; } + + /// + /// Logger instance + /// + protected internal ILogger? Logger { get; init; } + + /// + /// OpenAI Client + /// + protected internal OpenAIClient? Client { get; set; } + + /// + /// Storage for AI service attributes. + /// + internal Dictionary Attributes { get; } = []; + + /// + /// Initializes a new instance of the class. + /// + /// Model name. + /// OpenAI API Key. + /// OpenAI Organization Id (usually optional). + /// OpenAI compatible API endpoint. + /// Custom for HTTP requests. + /// The to use for logging. If null, no logging will be performed. + internal ClientCore( + string? modelId = null, + string? apiKey = null, + string? organizationId = null, + Uri? endpoint = null, + HttpClient? httpClient = null, + ILogger? logger = null) + { + // Empty constructor will be used when inherited by a specialized Client. + if (modelId is null + && apiKey is null + && organizationId is null + && endpoint is null + && httpClient is null + && logger is null) + { + return; + } + + if (!string.IsNullOrWhiteSpace(modelId)) + { + this.ModelId = modelId!; + this.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + } + + this.Logger = logger ?? NullLogger.Instance; + + // Accepts the endpoint if provided, otherwise uses the default OpenAI endpoint. + this.Endpoint = endpoint ?? httpClient?.BaseAddress; + if (this.Endpoint is null) + { + Verify.NotNullOrWhiteSpace(apiKey); // For Public OpenAI Endpoint a key must be provided. + this.Endpoint = new Uri(OpenAIEndpoint); + } + else if (string.IsNullOrEmpty(apiKey)) + { + // Avoids an exception from OpenAI Client when a custom endpoint is provided without an API key. 
+ apiKey = SingleSpace; + } + + this.AddAttribute(AIServiceExtensions.EndpointKey, this.Endpoint.ToString()); + + var options = GetOpenAIClientOptions(httpClient, this.Endpoint); + if (!string.IsNullOrWhiteSpace(organizationId)) + { + options.AddPolicy(CreateRequestHeaderPolicy("OpenAI-Organization", organizationId!), PipelinePosition.PerCall); + + this.AddAttribute(ClientCore.OrganizationKey, organizationId); + } + + this.Client = new OpenAIClient(apiKey!, options); + } + + /// + /// Initializes a new instance of the class using the specified OpenAIClient. + /// Note: instances created this way might not have the default diagnostics settings, + /// it's up to the caller to configure the client. + /// + /// OpenAI model Id + /// Custom . + /// The to use for logging. If null, no logging will be performed. + internal ClientCore( + string? modelId, + OpenAIClient openAIClient, + ILogger? logger = null) + { + // Model Id may not be required when other services. i.e: File Service. + if (modelId is not null) + { + this.ModelId = modelId; + this.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + } + + Verify.NotNull(openAIClient); + + this.Logger = logger ?? NullLogger.Instance; + this.Client = openAIClient; + } + + /// + /// Logs OpenAI action details. + /// + /// Caller member name. Populated automatically by runtime. + internal void LogActionDetails([CallerMemberName] string? callerMemberName = default) + { + if (this.Logger!.IsEnabled(LogLevel.Information)) + { + this.Logger.LogInformation("Action: {Action}. OpenAI Model ID: {ModelId}.", callerMemberName, this.ModelId); + } + } + + /// + /// Allows adding attributes to the client. + /// + /// Attribute key. + /// Attribute value. + internal void AddAttribute(string key, string? value) + { + if (!string.IsNullOrEmpty(value)) + { + this.Attributes.Add(key, value); + } + } + + /// Gets options to use for an OpenAIClient + /// Custom for HTTP requests. + /// Endpoint for the OpenAI API. + /// An instance of . 
+ private static OpenAIClientOptions GetOpenAIClientOptions(HttpClient? httpClient, Uri? endpoint) + { + OpenAIClientOptions options = new() + { + ApplicationId = HttpHeaderConstant.Values.UserAgent, + Endpoint = endpoint + }; + + options.AddPolicy(CreateRequestHeaderPolicy(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(ClientCore))), PipelinePosition.PerCall); + + if (httpClient is not null) + { + options.Transport = new HttpClientPipelineTransport(httpClient); + options.RetryPolicy = new ClientRetryPolicy(maxRetries: 0); // Disable retry policy if and only if a custom HttpClient is provided. + options.NetworkTimeout = Timeout.InfiniteTimeSpan; // Disable default timeout + } + + return options; + } + + /// + /// Invokes the specified request and handles exceptions. + /// + /// Type of the response. + /// Request to invoke. + /// Returns the response. + protected static async Task RunRequestAsync(Func> request) + { + try + { + return await request.Invoke().ConfigureAwait(false); + } + catch (ClientResultException e) + { + throw e.ToHttpOperationException(); + } + } + + /// + /// Invokes the specified request and handles exceptions. + /// + /// Type of the response. + /// Request to invoke. + /// Returns the response. + protected static T RunRequest(Func request) + { + try + { + return request.Invoke(); + } + catch (ClientResultException e) + { + throw e.ToHttpOperationException(); + } + } + + protected static GenericActionPipelinePolicy CreateRequestHeaderPolicy(string headerName, string headerValue) + { + return new GenericActionPipelinePolicy((message) => + { + if (message?.Request?.Headers?.TryGetValue(headerName, out string? 
_) == false) + { + message.Request.Headers.Set(headerName, headerValue); + } + }); + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIChatMessageContent.cs b/dotnet/src/Connectors/Connectors.OpenAI/Core/OpenAIChatMessageContent.cs similarity index 65% rename from dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIChatMessageContent.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Core/OpenAIChatMessageContent.cs index d91f8e45fc40..3015fa09604f 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIChatMessageContent.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/OpenAIChatMessageContent.cs @@ -2,8 +2,9 @@ using System.Collections.Generic; using System.Linq; -using Azure.AI.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI.Chat; +using OpenAIChatCompletion = OpenAI.Chat.ChatCompletion; namespace Microsoft.SemanticKernel.Connectors.OpenAI; @@ -13,28 +14,28 @@ namespace Microsoft.SemanticKernel.Connectors.OpenAI; public sealed class OpenAIChatMessageContent : ChatMessageContent { /// - /// Gets the metadata key for the name property. + /// Gets the metadata key for the tool id. /// - public static string ToolIdProperty => $"{nameof(ChatCompletionsToolCall)}.{nameof(ChatCompletionsToolCall.Id)}"; + public static string ToolIdProperty => "ChatCompletionsToolCall.Id"; /// - /// Gets the metadata key for the list of . + /// Gets the metadata key for the list of . /// - internal static string FunctionToolCallsProperty => $"{nameof(ChatResponseMessage)}.FunctionToolCalls"; + internal static string FunctionToolCallsProperty => "ChatResponseMessage.FunctionToolCalls"; /// /// Initializes a new instance of the class. /// - internal OpenAIChatMessageContent(ChatResponseMessage chatMessage, string modelId, IReadOnlyDictionary? 
metadata = null) - : base(new AuthorRole(chatMessage.Role.ToString()), chatMessage.Content, modelId, chatMessage, System.Text.Encoding.UTF8, CreateMetadataDictionary(chatMessage.ToolCalls, metadata)) + internal OpenAIChatMessageContent(OpenAIChatCompletion completion, string modelId, IReadOnlyDictionary? metadata = null) + : base(new AuthorRole(completion.Role.ToString()), CreateContentItems(completion.Content), modelId, completion, System.Text.Encoding.UTF8, CreateMetadataDictionary(completion.ToolCalls, metadata)) { - this.ToolCalls = chatMessage.ToolCalls; + this.ToolCalls = completion.ToolCalls; } /// /// Initializes a new instance of the class. /// - internal OpenAIChatMessageContent(ChatRole role, string? content, string modelId, IReadOnlyList toolCalls, IReadOnlyDictionary? metadata = null) + internal OpenAIChatMessageContent(ChatMessageRole role, string? content, string modelId, IReadOnlyList toolCalls, IReadOnlyDictionary? metadata = null) : base(new AuthorRole(role.ToString()), content, modelId, content, System.Text.Encoding.UTF8, CreateMetadataDictionary(toolCalls, metadata)) { this.ToolCalls = toolCalls; @@ -43,16 +44,32 @@ internal OpenAIChatMessageContent(ChatRole role, string? content, string modelId /// /// Initializes a new instance of the class. /// - internal OpenAIChatMessageContent(AuthorRole role, string? content, string modelId, IReadOnlyList toolCalls, IReadOnlyDictionary? metadata = null) + internal OpenAIChatMessageContent(AuthorRole role, string? content, string modelId, IReadOnlyList toolCalls, IReadOnlyDictionary? metadata = null) : base(role, content, modelId, content, System.Text.Encoding.UTF8, CreateMetadataDictionary(toolCalls, metadata)) { this.ToolCalls = toolCalls; } + private static ChatMessageContentItemCollection CreateContentItems(IReadOnlyList contentUpdate) + { + ChatMessageContentItemCollection collection = []; + + foreach (var part in contentUpdate) + { + // We only support text content for now. 
+ if (part.Kind == ChatMessageContentPartKind.Text) + { + collection.Add(new TextContent(part.Text)); + } + } + + return collection; + } + /// /// A list of the tools called by the model. /// - public IReadOnlyList ToolCalls { get; } + public IReadOnlyList ToolCalls { get; } /// /// Retrieve the resulting function from the chat result. @@ -64,9 +81,9 @@ public IReadOnlyList GetOpenAIFunctionToolCalls() foreach (var toolCall in this.ToolCalls) { - if (toolCall is ChatCompletionsFunctionToolCall functionToolCall) + if (toolCall.Kind == ChatToolCallKind.Function) { - (functionToolCallList ??= []).Add(new OpenAIFunctionToolCall(functionToolCall)); + (functionToolCallList ??= []).Add(new OpenAIFunctionToolCall(toolCall)); } } @@ -79,7 +96,7 @@ public IReadOnlyList GetOpenAIFunctionToolCalls() } private static IReadOnlyDictionary? CreateMetadataDictionary( - IReadOnlyList toolCalls, + IReadOnlyList toolCalls, IReadOnlyDictionary? original) { // We only need to augment the metadata if there are any tool calls. @@ -107,7 +124,7 @@ public IReadOnlyList GetOpenAIFunctionToolCalls() } // Add the additional entry. 
- newDictionary.Add(FunctionToolCallsProperty, toolCalls.OfType().ToList()); + newDictionary.Add(FunctionToolCallsProperty, toolCalls.Where(ctc => ctc.Kind == ChatToolCallKind.Function).ToList()); return newDictionary; } diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIFunction.cs b/dotnet/src/Connectors/Connectors.OpenAI/Core/OpenAIFunction.cs similarity index 89% rename from dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIFunction.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Core/OpenAIFunction.cs index b51faa59c359..512277245fec 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIFunction.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/OpenAIFunction.cs @@ -2,14 +2,10 @@ using System; using System.Collections.Generic; -using Azure.AI.OpenAI; +using OpenAI.Chat; namespace Microsoft.SemanticKernel.Connectors.OpenAI; -// NOTE: Since this space is evolving rapidly, in order to reduce the risk of needing to take breaking -// changes as OpenAI's APIs evolve, these types are not externally constructible. In the future, once -// things stabilize, and if need demonstrates, we could choose to expose those constructors. - /// /// Represents a function parameter that can be passed to an OpenAI function tool call. /// @@ -127,11 +123,11 @@ internal OpenAIFunction( public OpenAIFunctionReturnParameter? ReturnParameter { get; } /// - /// Converts the representation to the Azure SDK's - /// representation. + /// Converts the representation to the OpenAI SDK's + /// representation. /// - /// A containing all the function information. - public FunctionDefinition ToFunctionDefinition() + /// A containing all the function information. 
+ public ChatTool ToFunctionDefinition() { BinaryData resultParameters = s_zeroFunctionParametersSchema; @@ -159,12 +155,12 @@ public FunctionDefinition ToFunctionDefinition() }); } - return new FunctionDefinition - { - Name = this.FullyQualifiedName, - Description = this.Description, - Parameters = resultParameters, - }; + return ChatTool.CreateFunctionTool + ( + functionName: this.FullyQualifiedName, + functionDescription: this.Description, + functionParameters: resultParameters + ); } /// Gets a for a typeless parameter with the specified description, defaulting to typeof(string) diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIFunctionToolCall.cs b/dotnet/src/Connectors/Connectors.OpenAI/Core/OpenAIFunctionToolCall.cs similarity index 77% rename from dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIFunctionToolCall.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Core/OpenAIFunctionToolCall.cs index af4688e06df1..822862b24d87 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIFunctionToolCall.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/OpenAIFunctionToolCall.cs @@ -5,7 +5,7 @@ using System.Diagnostics; using System.Text; using System.Text.Json; -using Azure.AI.OpenAI; +using OpenAI.Chat; namespace Microsoft.SemanticKernel.Connectors.OpenAI; @@ -16,15 +16,15 @@ public sealed class OpenAIFunctionToolCall { private string? _fullyQualifiedFunctionName; - /// Initialize the from a . - internal OpenAIFunctionToolCall(ChatCompletionsFunctionToolCall functionToolCall) + /// Initialize the from a . + internal OpenAIFunctionToolCall(ChatToolCall functionToolCall) { Verify.NotNull(functionToolCall); - Verify.NotNull(functionToolCall.Name); + Verify.NotNull(functionToolCall.FunctionName); - string fullyQualifiedFunctionName = functionToolCall.Name; + string fullyQualifiedFunctionName = functionToolCall.FunctionName; string functionName = fullyQualifiedFunctionName; - string? 
arguments = functionToolCall.Arguments; + string? arguments = functionToolCall.FunctionArguments; string? pluginName = null; int separatorPos = fullyQualifiedFunctionName.IndexOf(OpenAIFunction.NameSeparator, StringComparison.Ordinal); @@ -89,43 +89,43 @@ public override string ToString() /// /// Tracks tooling updates from streaming responses. /// - /// The tool call update to incorporate. + /// The tool call updates to incorporate. /// Lazily-initialized dictionary mapping indices to IDs. /// Lazily-initialized dictionary mapping indices to names. /// Lazily-initialized dictionary mapping indices to arguments. internal static void TrackStreamingToolingUpdate( - StreamingToolCallUpdate? update, + IReadOnlyList? updates, ref Dictionary? toolCallIdsByIndex, ref Dictionary? functionNamesByIndex, ref Dictionary? functionArgumentBuildersByIndex) { - if (update is null) + if (updates is null) { // Nothing to track. return; } - // If we have an ID, ensure the index is being tracked. Even if it's not a function update, - // we want to keep track of it so we can send back an error. - if (update.Id is string id) + foreach (var update in updates) { - (toolCallIdsByIndex ??= [])[update.ToolCallIndex] = id; - } + // If we have an ID, ensure the index is being tracked. Even if it's not a function update, + // we want to keep track of it so we can send back an error. + if (update.Id is string id) + { + (toolCallIdsByIndex ??= [])[update.Index] = id; + } - if (update is StreamingFunctionToolCallUpdate ftc) - { // Ensure we're tracking the function's name. - if (ftc.Name is string name) + if (update.FunctionName is string name) { - (functionNamesByIndex ??= [])[ftc.ToolCallIndex] = name; + (functionNamesByIndex ??= [])[update.Index] = name; } // Ensure we're tracking the function's arguments. 
- if (ftc.ArgumentsUpdate is string argumentsUpdate) + if (update.FunctionArgumentsUpdate is string argumentsUpdate) { - if (!(functionArgumentBuildersByIndex ??= []).TryGetValue(ftc.ToolCallIndex, out StringBuilder? arguments)) + if (!(functionArgumentBuildersByIndex ??= []).TryGetValue(update.Index, out StringBuilder? arguments)) { - functionArgumentBuildersByIndex[ftc.ToolCallIndex] = arguments = new(); + functionArgumentBuildersByIndex[update.Index] = arguments = new(); } arguments.Append(argumentsUpdate); @@ -134,20 +134,20 @@ internal static void TrackStreamingToolingUpdate( } /// - /// Converts the data built up by into an array of s. + /// Converts the data built up by into an array of s. /// /// Dictionary mapping indices to IDs. /// Dictionary mapping indices to names. /// Dictionary mapping indices to arguments. - internal static ChatCompletionsFunctionToolCall[] ConvertToolCallUpdatesToChatCompletionsFunctionToolCalls( + internal static ChatToolCall[] ConvertToolCallUpdatesToFunctionToolCalls( ref Dictionary? toolCallIdsByIndex, ref Dictionary? functionNamesByIndex, ref Dictionary? functionArgumentBuildersByIndex) { - ChatCompletionsFunctionToolCall[] toolCalls = []; + ChatToolCall[] toolCalls = []; if (toolCallIdsByIndex is { Count: > 0 }) { - toolCalls = new ChatCompletionsFunctionToolCall[toolCallIdsByIndex.Count]; + toolCalls = new ChatToolCall[toolCallIdsByIndex.Count]; int i = 0; foreach (KeyValuePair toolCallIndexAndId in toolCallIdsByIndex) @@ -158,7 +158,7 @@ internal static ChatCompletionsFunctionToolCall[] ConvertToolCallUpdatesToChatCo functionNamesByIndex?.TryGetValue(toolCallIndexAndId.Key, out functionName); functionArgumentBuildersByIndex?.TryGetValue(toolCallIndexAndId.Key, out functionArguments); - toolCalls[i] = new ChatCompletionsFunctionToolCall(toolCallIndexAndId.Value, functionName ?? string.Empty, functionArguments?.ToString() ?? 
string.Empty); + toolCalls[i] = ChatToolCall.CreateFunctionToolCall(toolCallIndexAndId.Value, functionName ?? string.Empty, functionArguments?.ToString() ?? string.Empty); i++; } diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIStreamingChatMessageContent.cs b/dotnet/src/Connectors/Connectors.OpenAI/Core/OpenAIStreamingChatMessageContent.cs similarity index 56% rename from dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIStreamingChatMessageContent.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Core/OpenAIStreamingChatMessageContent.cs index fa3845782d0a..e83c16cdc31e 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIStreamingChatMessageContent.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/OpenAIStreamingChatMessageContent.cs @@ -1,14 +1,15 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Collections.Generic; using System.Text; -using Azure.AI.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI.Chat; namespace Microsoft.SemanticKernel.Connectors.OpenAI; /// -/// Azure OpenAI and OpenAI Specialized streaming chat message content. +/// OpenAI specialized streaming chat message content. /// /// /// Represents a chat message content chunk that was streamed from the remote model. @@ -18,31 +19,53 @@ public sealed class OpenAIStreamingChatMessageContent : StreamingChatMessageCont /// /// The reason why the completion finished. /// - public CompletionsFinishReason? FinishReason { get; set; } + public ChatFinishReason? FinishReason { get; set; } /// /// Create a new instance of the class. 
/// - /// Internal Azure SDK Message update representation + /// Internal OpenAI SDK Message update representation /// Index of the choice /// The model ID used to generate the content /// Additional metadata internal OpenAIStreamingChatMessageContent( - StreamingChatCompletionsUpdate chatUpdate, + StreamingChatCompletionUpdate chatUpdate, int choiceIndex, string modelId, IReadOnlyDictionary? metadata = null) : base( - chatUpdate.Role.HasValue ? new AuthorRole(chatUpdate.Role.Value.ToString()) : null, - chatUpdate.ContentUpdate, + null, + null, chatUpdate, choiceIndex, modelId, Encoding.UTF8, metadata) { - this.ToolCallUpdate = chatUpdate.ToolCallUpdate; - this.FinishReason = chatUpdate?.FinishReason; + try + { + this.FinishReason = chatUpdate.FinishReason; + + if (chatUpdate.Role.HasValue) + { + this.Role = new AuthorRole(chatUpdate.Role.ToString()!); + } + + if (chatUpdate.ToolCallUpdates is not null) + { + this.ToolCallUpdates = chatUpdate.ToolCallUpdates; + } + + if (chatUpdate.ContentUpdate is not null) + { + this.Items = CreateContentItems(chatUpdate.ContentUpdate); + } + } + catch (NullReferenceException) + { + // Temporary bugfix for: https://github.com/openai/openai-dotnet/issues/198 + // TODO: Remove this try-catch block once the bug is fixed. + } } /// @@ -50,7 +73,7 @@ internal OpenAIStreamingChatMessageContent( /// /// Author role of the message /// Content of the message - /// Tool call update + /// Tool call updates /// Completion finish reason /// Index of the choice /// The model ID used to generate the content @@ -58,8 +81,8 @@ internal OpenAIStreamingChatMessageContent( internal OpenAIStreamingChatMessageContent( AuthorRole? authorRole, string? content, - StreamingToolCallUpdate? tootToolCallUpdate = null, - CompletionsFinishReason? completionsFinishReason = null, + IReadOnlyList? toolCallUpdates = null, + ChatFinishReason? completionsFinishReason = null, int choiceIndex = 0, string? modelId = null, IReadOnlyDictionary? 
metadata = null) @@ -72,16 +95,32 @@ internal OpenAIStreamingChatMessageContent( Encoding.UTF8, metadata) { - this.ToolCallUpdate = tootToolCallUpdate; + this.ToolCallUpdates = toolCallUpdates; this.FinishReason = completionsFinishReason; } /// Gets any update information in the message about a tool call. - public StreamingToolCallUpdate? ToolCallUpdate { get; } + public IReadOnlyList? ToolCallUpdates { get; } /// public override byte[] ToByteArray() => this.Encoding.GetBytes(this.ToString()); /// public override string ToString() => this.Content ?? string.Empty; + + private static StreamingKernelContentItemCollection CreateContentItems(IReadOnlyList contentUpdate) + { + StreamingKernelContentItemCollection collection = []; + + foreach (var content in contentUpdate) + { + // We only support text content for now. + if (content.Kind == ChatMessageContentPartKind.Text) + { + collection.Add(new StreamingTextContent(content.Text)); + } + } + + return collection; + } } diff --git a/dotnet/src/Connectors/Connectors.OpenAI/CustomClient/OpenAITextToImageClientCore.cs b/dotnet/src/Connectors/Connectors.OpenAI/CustomClient/OpenAITextToImageClientCore.cs deleted file mode 100644 index 320a7b213bb3..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/CustomClient/OpenAITextToImageClientCore.cs +++ /dev/null @@ -1,114 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Linq; -using System.Net.Http; -using System.Text; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Http; -using Microsoft.SemanticKernel.Text; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// Base type for OpenAI text to image clients. 
-internal sealed class OpenAITextToImageClientCore -{ - /// - /// Initializes a new instance of the class. - /// - /// The HttpClient used for making HTTP requests. - /// The to use for logging. If null, no logging will be performed. - internal OpenAITextToImageClientCore(HttpClient? httpClient, ILogger? logger = null) - { - this._httpClient = HttpClientProvider.GetHttpClient(httpClient); - this._logger = logger ?? NullLogger.Instance; - } - - /// - /// Storage for AI service attributes. - /// - internal Dictionary Attributes { get; } = []; - - /// - /// Run the HTTP request to generate a list of images - /// - /// URL for the text to image request API - /// Request payload - /// Function to invoke to extract the desired portion of the text to image response. - /// The to monitor for cancellation requests. The default is . - /// List of image URLs - [Experimental("SKEXP0010")] - internal async Task> ExecuteImageGenerationRequestAsync( - string url, - string requestBody, - Func extractResponseFunc, - CancellationToken cancellationToken = default) - { - var result = await this.ExecutePostRequestAsync(url, requestBody, cancellationToken).ConfigureAwait(false); - return result.Images.Select(extractResponseFunc).ToList(); - } - - /// - /// Add attribute to the internal attribute dictionary if the value is not null or empty. - /// - /// Attribute key - /// Attribute value - internal void AddAttribute(string key, string? value) - { - if (!string.IsNullOrEmpty(value)) - { - this.Attributes.Add(key, value); - } - } - - /// - /// Logger - /// - private readonly ILogger _logger; - - /// - /// The HttpClient used for making HTTP requests. 
- /// - private readonly HttpClient _httpClient; - - internal async Task ExecutePostRequestAsync(string url, string requestBody, CancellationToken cancellationToken = default) - { - using var content = new StringContent(requestBody, Encoding.UTF8, "application/json"); - using var response = await this.ExecuteRequestAsync(url, HttpMethod.Post, content, cancellationToken).ConfigureAwait(false); - string responseJson = await response.Content.ReadAsStringWithExceptionMappingAsync().ConfigureAwait(false); - T result = JsonSerializer.Deserialize(responseJson, JsonOptionsCache.ReadPermissive) ?? throw new KernelException("Response JSON parse error"); - return result; - } - - internal event EventHandler? RequestCreated; - - internal async Task ExecuteRequestAsync(string url, HttpMethod method, HttpContent? content, CancellationToken cancellationToken = default) - { - using var request = new HttpRequestMessage(method, url); - - if (content is not null) - { - request.Content = content; - } - - request.Headers.Add("User-Agent", HttpHeaderConstant.Values.UserAgent); - request.Headers.Add(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(OpenAITextToImageClientCore))); - - this.RequestCreated?.Invoke(this, request); - - var response = await this._httpClient.SendWithSuccessCheckAsync(request, cancellationToken).ConfigureAwait(false); - - if (this._logger.IsEnabled(LogLevel.Debug)) - { - this._logger.LogDebug("HTTP response: {0} {1}", (int)response.StatusCode, response.StatusCode.ToString("G")); - } - - return response; - } -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ChatHistoryExtensions.cs b/dotnet/src/Connectors/Connectors.OpenAI/Extensions/ChatHistoryExtensions.cs similarity index 90% rename from dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ChatHistoryExtensions.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Extensions/ChatHistoryExtensions.cs index b4466a30af90..47697609aebc 100644 --- 
a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ChatHistoryExtensions.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Extensions/ChatHistoryExtensions.cs @@ -12,7 +12,7 @@ namespace Microsoft.SemanticKernel; /// /// Chat history extensions. /// -public static class ChatHistoryExtensions +public static class OpenAIChatHistoryExtensions { /// /// Add a message to the chat history at the end of the streamed message @@ -43,7 +43,7 @@ public static async IAsyncEnumerable AddStreamingMe (contentBuilder ??= new()).Append(contentUpdate); } - OpenAIFunctionToolCall.TrackStreamingToolingUpdate(chatMessage.ToolCallUpdate, ref toolCallIdsByIndex, ref functionNamesByIndex, ref functionArgumentBuildersByIndex); + OpenAIFunctionToolCall.TrackStreamingToolingUpdate(chatMessage.ToolCallUpdates, ref toolCallIdsByIndex, ref functionNamesByIndex, ref functionArgumentBuildersByIndex); // Is always expected to have at least one chunk with the role provided from a streaming message streamedRole ??= chatMessage.Role; @@ -62,7 +62,7 @@ public static async IAsyncEnumerable AddStreamingMe role, contentBuilder?.ToString() ?? string.Empty, messageContents[0].ModelId!, - OpenAIFunctionToolCall.ConvertToolCallUpdatesToChatCompletionsFunctionToolCalls(ref toolCallIdsByIndex, ref functionNamesByIndex, ref functionArgumentBuildersByIndex), + OpenAIFunctionToolCall.ConvertToolCallUpdatesToFunctionToolCalls(ref toolCallIdsByIndex, ref functionNamesByIndex, ref functionArgumentBuildersByIndex), metadata) { AuthorName = streamedName }); } diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIKernelBuilderExtensions.cs new file mode 100644 index 000000000000..c322ead2b671 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIKernelBuilderExtensions.cs @@ -0,0 +1,371 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Diagnostics.CodeAnalysis; +using System.Net.Http; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.AudioToText; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Embeddings; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.TextGeneration; +using Microsoft.SemanticKernel.TextToAudio; +using Microsoft.SemanticKernel.TextToImage; +using OpenAI; + +#pragma warning disable IDE0039 // Use local function + +namespace Microsoft.SemanticKernel; + +/// +/// Sponsor extensions class for . +/// +public static class OpenAIKernelBuilderExtensions +{ + #region Text Embedding + /// + /// Adds to the . + /// + /// The instance to augment. + /// OpenAI model name, see https://platform.openai.com/docs/models + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The HttpClient to use with this service. + /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IKernelBuilder AddOpenAITextEmbeddingGeneration( + this IKernelBuilder builder, + string modelId, + string apiKey, + string? orgId = null, + string? serviceId = null, + HttpClient? httpClient = null, + int? 
dimensions = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(modelId); + Verify.NotNullOrWhiteSpace(apiKey); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OpenAITextEmbeddingGenerationService( + modelId, + apiKey, + orgId, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + serviceProvider.GetService(), + dimensions)); + + return builder; + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// OpenAI model name, see https://platform.openai.com/docs/models + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IKernelBuilder AddOpenAITextEmbeddingGeneration( + this IKernelBuilder builder, + string modelId, + OpenAIClient? openAIClient = null, + string? serviceId = null, + int? dimensions = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(modelId); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OpenAITextEmbeddingGenerationService( + modelId, + openAIClient ?? serviceProvider.GetRequiredService(), + serviceProvider.GetService(), + dimensions)); + + return builder; + } + #endregion + + #region Text to Image + /// + /// Add the OpenAI Dall-E text to image service to the list + /// + /// The instance to augment. + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// The model to use for image generation. + /// A local identifier for the given AI service + /// The HttpClient to use with this service. + /// The same instance as . 
+ [Experimental("SKEXP0010")] + public static IKernelBuilder AddOpenAITextToImage( + this IKernelBuilder builder, + string apiKey, + string? orgId = null, + string? modelId = null, + string? serviceId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(apiKey); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OpenAITextToImageService( + apiKey, + orgId, + modelId, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + serviceProvider.GetService())); + + return builder; + } + #endregion + + #region Text to Audio + + /// + /// Adds the OpenAI text-to-audio service to the list. + /// + /// The instance to augment. + /// OpenAI model name, see https://platform.openai.com/docs/models + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The HttpClient to use with this service. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IKernelBuilder AddOpenAITextToAudio( + this IKernelBuilder builder, + string modelId, + string apiKey, + string? orgId = null, + string? serviceId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(modelId); + Verify.NotNullOrWhiteSpace(apiKey); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OpenAITextToAudioService( + modelId, + apiKey, + orgId, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + serviceProvider.GetService())); + + return builder; + } + #endregion + + #region Audio-to-Text + + /// + /// Adds the OpenAI audio-to-text service to the list. + /// + /// The instance to augment. + /// OpenAI model name, see https://platform.openai.com/docs/models + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. 
This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The HttpClient to use with this service. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IKernelBuilder AddOpenAIAudioToText( + this IKernelBuilder builder, + string modelId, + string apiKey, + string? orgId = null, + string? serviceId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(modelId); + Verify.NotNullOrWhiteSpace(apiKey); + + Func factory = (serviceProvider, _) => + new(modelId, + apiKey, + orgId, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + serviceProvider.GetService()); + + builder.Services.AddKeyedSingleton(serviceId, factory); + + return builder; + } + + /// + /// Adds the OpenAI audio-to-text service to the list. + /// + /// The instance to augment. + /// OpenAI model id + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// The same instance as . + [Experimental("SKEXP0010")] + public static IKernelBuilder AddOpenAIAudioToText( + this IKernelBuilder builder, + string modelId, + OpenAIClient? openAIClient = null, + string? serviceId = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(modelId); + + Func factory = (serviceProvider, _) => + new(modelId, openAIClient ?? serviceProvider.GetRequiredService(), serviceProvider.GetService()); + + builder.Services.AddKeyedSingleton(serviceId, factory); + + return builder; + } + + #endregion + + #region Files + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The HttpClient to use with this service. 
+ /// The same instance as . + [Experimental("SKEXP0010")] + [Obsolete("Use OpenAI SDK or AzureOpenAI SDK clients for file operations.")] + [ExcludeFromCodeCoverage] + public static IKernelBuilder AddOpenAIFiles( + this IKernelBuilder builder, + string apiKey, + string? orgId = null, + string? serviceId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(apiKey); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OpenAIFileService( + apiKey, + orgId, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + serviceProvider.GetService())); + + return builder; + } + + #endregion + + #region Chat Completion + + /// + /// Adds the OpenAI chat completion service to the list. + /// + /// The instance to augment. + /// OpenAI model name, see https://platform.openai.com/docs/models + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The HttpClient to use with this service. + /// The same instance as . + public static IKernelBuilder AddOpenAIChatCompletion( + this IKernelBuilder builder, + string modelId, + string apiKey, + string? orgId = null, + string? serviceId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(modelId); + Verify.NotNullOrWhiteSpace(apiKey); + + OpenAIChatCompletionService Factory(IServiceProvider serviceProvider, object? _) => + new(modelId, + apiKey, + orgId, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + serviceProvider.GetService()); + + builder.Services.AddKeyedSingleton(serviceId, (Func)Factory); + builder.Services.AddKeyedSingleton(serviceId, (Func)Factory); + + return builder; + } + + /// + /// Adds the OpenAI chat completion service to the list. + /// + /// The instance to augment. 
+ /// OpenAI model id + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// The same instance as . + public static IKernelBuilder AddOpenAIChatCompletion( + this IKernelBuilder builder, + string modelId, + OpenAIClient? openAIClient = null, + string? serviceId = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(modelId); + + OpenAIChatCompletionService Factory(IServiceProvider serviceProvider, object? _) => + new(modelId, openAIClient ?? serviceProvider.GetRequiredService(), serviceProvider.GetService()); + + builder.Services.AddKeyedSingleton(serviceId, (Func)Factory); + builder.Services.AddKeyedSingleton(serviceId, (Func)Factory); + + return builder; + } + + /// + /// Adds the Custom Endpoint OpenAI chat completion service to the list. + /// + /// The instance to augment. + /// OpenAI model name, see https://platform.openai.com/docs/models + /// Custom OpenAI Compatible Message API endpoint + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The HttpClient to use with this service. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IKernelBuilder AddOpenAIChatCompletion( + this IKernelBuilder builder, + string modelId, + Uri endpoint, + string? apiKey, + string? orgId = null, + string? serviceId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(modelId); + + OpenAIChatCompletionService Factory(IServiceProvider serviceProvider, object? 
_) => + new(modelId: modelId, + apiKey: apiKey, + endpoint: endpoint, + organization: orgId, + httpClient: HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + loggerFactory: serviceProvider.GetService()); + + builder.Services.AddKeyedSingleton(serviceId, (Func)Factory); + builder.Services.AddKeyedSingleton(serviceId, (Func)Factory); + + return builder; + } + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIKernelFunctionMetadataExtensions.cs b/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIKernelFunctionMetadataExtensions.cs similarity index 98% rename from dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIKernelFunctionMetadataExtensions.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIKernelFunctionMetadataExtensions.cs index 6859e1225dd6..a0982942b222 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIKernelFunctionMetadataExtensions.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIKernelFunctionMetadataExtensions.cs @@ -10,7 +10,7 @@ namespace Microsoft.SemanticKernel.Connectors.OpenAI; public static class OpenAIKernelFunctionMetadataExtensions { /// - /// Convert a to an . + /// Convert a to an . /// /// The object to convert. /// An object. diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIMemoryBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIMemoryBuilderExtensions.cs new file mode 100644 index 000000000000..0ac425a15593 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIMemoryBuilderExtensions.cs @@ -0,0 +1,44 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using System.Net.Http; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.Memory; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Provides extension methods for the class to configure OpenAI connector. 
+/// +public static class OpenAIMemoryBuilderExtensions +{ + /// + /// Adds the OpenAI text embeddings service. + /// See https://platform.openai.com/docs for service details. + /// + /// The instance + /// OpenAI model name, see https://platform.openai.com/docs/models + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// Custom for HTTP requests. + /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. + /// Self instance + [Experimental("SKEXP0010")] + public static MemoryBuilder WithOpenAITextEmbeddingGeneration( + this MemoryBuilder builder, + string modelId, + string apiKey, + string? orgId = null, + HttpClient? httpClient = null, + int? dimensions = null) + { + return builder.WithTextEmbeddingGeneration((loggerFactory, builderHttpClient) => + new OpenAITextEmbeddingGenerationService( + modelId, + apiKey, + orgId, + HttpClientProvider.GetHttpClient(httpClient ?? builderHttpClient), + loggerFactory, + dimensions)); + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIPluginCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIPluginCollectionExtensions.cs similarity index 97% rename from dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIPluginCollectionExtensions.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIPluginCollectionExtensions.cs index 135b17b83df3..91da7138f9e4 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIPluginCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIPluginCollectionExtensions.cs @@ -1,7 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. 
using System.Diagnostics.CodeAnalysis; -using Azure.AI.OpenAI; +using OpenAI.Chat; namespace Microsoft.SemanticKernel.Connectors.OpenAI; @@ -20,7 +20,7 @@ public static class OpenAIPluginCollectionExtensions /// if the function was found; otherwise, . public static bool TryGetFunctionAndArguments( this IReadOnlyKernelPluginCollection plugins, - ChatCompletionsFunctionToolCall functionToolCall, + ChatToolCall functionToolCall, [NotNullWhen(true)] out KernelFunction? function, out KernelArguments? arguments) => plugins.TryGetFunctionAndArguments(new OpenAIFunctionToolCall(functionToolCall), out function, out arguments); diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIServiceCollectionExtensions.cs new file mode 100644 index 000000000000..ed191d3dda0f --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIServiceCollectionExtensions.cs @@ -0,0 +1,345 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.AudioToText; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Embeddings; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.TextGeneration; +using Microsoft.SemanticKernel.TextToAudio; +using Microsoft.SemanticKernel.TextToImage; +using OpenAI; + +namespace Microsoft.SemanticKernel; + +#pragma warning disable IDE0039 // Use local function + +/// +/// Sponsor extensions class for . +/// +public static class OpenAIServiceCollectionExtensions +{ + #region Text Embedding + /// + /// Adds the to the . + /// + /// The instance to augment. 
+ /// OpenAI model name, see https://platform.openai.com/docs/models + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IServiceCollection AddOpenAITextEmbeddingGeneration( + this IServiceCollection services, + string modelId, + string apiKey, + string? orgId = null, + string? serviceId = null, + int? dimensions = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(modelId); + Verify.NotNullOrWhiteSpace(apiKey); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OpenAITextEmbeddingGenerationService( + modelId, + apiKey, + orgId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService(), + dimensions)); + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// The OpenAI model id. + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IServiceCollection AddOpenAITextEmbeddingGeneration(this IServiceCollection services, + string modelId, + OpenAIClient? openAIClient = null, + string? serviceId = null, + int? dimensions = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(modelId); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OpenAITextEmbeddingGenerationService( + modelId, + openAIClient ?? 
serviceProvider.GetRequiredService(), + serviceProvider.GetService(), + dimensions)); + } + #endregion + + #region Text to Image + /// + /// Add the OpenAI Dall-E text to image service to the list + /// + /// The instance to augment. + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// The model to use for image generation. + /// A local identifier for the given AI service + /// The same instance as . + [Experimental("SKEXP0010")] + public static IServiceCollection AddOpenAITextToImage(this IServiceCollection services, + string apiKey, + string? orgId = null, + string? modelId = null, + string? serviceId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(apiKey); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OpenAITextToImageService( + apiKey, + orgId, + modelId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService())); + } + #endregion + + #region Text to Audio + + /// + /// Adds the OpenAI text-to-audio service to the list. + /// + /// The instance to augment. + /// OpenAI model name, see https://platform.openai.com/docs/models + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The same instance as . + [Experimental("SKEXP0010")] + public static IServiceCollection AddOpenAITextToAudio( + this IServiceCollection services, + string modelId, + string apiKey, + string? orgId = null, + string? 
serviceId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(modelId); + Verify.NotNullOrWhiteSpace(apiKey); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OpenAITextToAudioService( + modelId, + apiKey, + orgId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService())); + } + + #endregion + + #region Audio-to-Text + + /// + /// Adds the OpenAI audio-to-text service to the list. + /// + /// The instance to augment. + /// OpenAI model name, see https://platform.openai.com/docs/models + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The same instance as . + [Experimental("SKEXP0010")] + public static IServiceCollection AddOpenAIAudioToText( + this IServiceCollection services, + string modelId, + string apiKey, + string? orgId = null, + string? serviceId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(modelId); + Verify.NotNullOrWhiteSpace(apiKey); + + Func factory = (serviceProvider, _) => + new(modelId, + apiKey, + orgId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService()); + + services.AddKeyedSingleton(serviceId, factory); + + return services; + } + + /// + /// Adds the OpenAI audio-to-text service to the list. + /// + /// The instance to augment. + /// OpenAI model id + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// The same instance as . + [Experimental("SKEXP0010")] + public static IServiceCollection AddOpenAIAudioToText( + this IServiceCollection services, + string modelId, + OpenAIClient? openAIClient = null, + string? 
serviceId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(modelId); + + Func factory = (serviceProvider, _) => + new(modelId, openAIClient ?? serviceProvider.GetRequiredService(), serviceProvider.GetService()); + + services.AddKeyedSingleton(serviceId, factory); + + return services; + } + #endregion + + #region Files + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The same instance as . + [Experimental("SKEXP0010")] + [Obsolete("Use OpenAI SDK or AzureOpenAI SDK clients for file operations.")] + [ExcludeFromCodeCoverage] + public static IServiceCollection AddOpenAIFiles( + this IServiceCollection services, + string apiKey, + string? orgId = null, + string? serviceId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(apiKey); + + services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OpenAIFileService( + apiKey, + orgId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService())); + + return services; + } + + #endregion + + #region Chat Completion + + /// + /// Adds the OpenAI chat completion service to the list. + /// + /// The instance to augment. + /// OpenAI model name, see https://platform.openai.com/docs/models + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The same instance as . + public static IServiceCollection AddOpenAIChatCompletion( + this IServiceCollection services, + string modelId, + string apiKey, + string? orgId = null, + string? 
serviceId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(modelId); + Verify.NotNullOrWhiteSpace(apiKey); + + OpenAIChatCompletionService Factory(IServiceProvider serviceProvider, object? _) => + new(modelId, + apiKey, + orgId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService()); + + services.AddKeyedSingleton(serviceId, (Func)Factory); + services.AddKeyedSingleton(serviceId, (Func)Factory); + + return services; + } + + /// + /// Adds the OpenAI chat completion service to the list. + /// + /// The instance to augment. + /// OpenAI model id + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// The same instance as . + public static IServiceCollection AddOpenAIChatCompletion(this IServiceCollection services, + string modelId, + OpenAIClient? openAIClient = null, + string? serviceId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(modelId); + + OpenAIChatCompletionService Factory(IServiceProvider serviceProvider, object? _) => + new(modelId, openAIClient ?? serviceProvider.GetRequiredService(), serviceProvider.GetService()); + + services.AddKeyedSingleton(serviceId, (Func)Factory); + services.AddKeyedSingleton(serviceId, (Func)Factory); + + return services; + } + + /// + /// Adds the Custom OpenAI chat completion service to the list. + /// + /// The instance to augment. + /// OpenAI model name, see https://platform.openai.com/docs/models + /// A Custom Message API compatible endpoint. + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The same instance as . 
+ [Experimental("SKEXP0010")] + public static IServiceCollection AddOpenAIChatCompletion( + this IServiceCollection services, + string modelId, + Uri endpoint, + string? apiKey = null, + string? orgId = null, + string? serviceId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(modelId); + + OpenAIChatCompletionService Factory(IServiceProvider serviceProvider, object? _) => + new(modelId, + endpoint, + apiKey, + orgId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService()); + + services.AddKeyedSingleton(serviceId, (Func)Factory); + services.AddKeyedSingleton(serviceId, (Func)Factory); + + return services; + } + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFilePurpose.cs b/dotnet/src/Connectors/Connectors.OpenAI/Models/OpenAIFilePurpose.cs similarity index 95% rename from dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFilePurpose.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Models/OpenAIFilePurpose.cs index 8d87720fa89f..523b84dbe333 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFilePurpose.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Models/OpenAIFilePurpose.cs @@ -10,6 +10,8 @@ namespace Microsoft.SemanticKernel.Connectors.OpenAI; /// https://platform.openai.com/docs/api-reference/files/object#files/object-purpose /// [Experimental("SKEXP0010")] +[Obsolete("Use OpenAI SDK or AzureOpenAI SDK clients for file operations. 
This class is deprecated and will be removed in a future version.")] +[ExcludeFromCodeCoverage] public readonly struct OpenAIFilePurpose : IEquatable { /// diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFileReference.cs b/dotnet/src/Connectors/Connectors.OpenAI/Models/OpenAIFileReference.cs similarity index 84% rename from dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFileReference.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Models/OpenAIFileReference.cs index 371be0d93a33..e50a9185c20c 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFileReference.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Models/OpenAIFileReference.cs @@ -9,6 +9,8 @@ namespace Microsoft.SemanticKernel.Connectors.OpenAI; /// References an uploaded file by id. /// [Experimental("SKEXP0010")] +[Obsolete("Use OpenAI SDK or AzureOpenAI SDK clients for file operations. This class is deprecated and will be removed in a future version.")] +[ExcludeFromCodeCoverage] public sealed class OpenAIFileReference { /// diff --git a/dotnet/src/Connectors/Connectors.OpenAI/OpenAIMemoryBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.OpenAI/OpenAIMemoryBuilderExtensions.cs deleted file mode 100644 index 2a3d2ce7dd61..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/OpenAIMemoryBuilderExtensions.cs +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Diagnostics.CodeAnalysis; -using System.Net.Http; -using Azure.Core; -using Microsoft.SemanticKernel.Http; -using Microsoft.SemanticKernel.Memory; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// Provides extension methods for the class to configure OpenAI and AzureOpenAI connectors. -/// -public static class OpenAIMemoryBuilderExtensions -{ - /// - /// Adds an Azure OpenAI text embeddings service. - /// See https://learn.microsoft.com/azure/cognitive-services/openai for service details. 
- /// - /// The instance - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Model identifier - /// Custom for HTTP requests. - /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. - /// Self instance - [Experimental("SKEXP0010")] - public static MemoryBuilder WithAzureOpenAITextEmbeddingGeneration( - this MemoryBuilder builder, - string deploymentName, - string endpoint, - string apiKey, - string? modelId = null, - HttpClient? httpClient = null, - int? dimensions = null) - { - return builder.WithTextEmbeddingGeneration((loggerFactory, builderHttpClient) => - new AzureOpenAITextEmbeddingGenerationService( - deploymentName, - endpoint, - apiKey, - modelId, - HttpClientProvider.GetHttpClient(httpClient ?? builderHttpClient), - loggerFactory, - dimensions)); - } - - /// - /// Adds an Azure OpenAI text embeddings service. - /// See https://learn.microsoft.com/azure/cognitive-services/openai for service details. - /// - /// The instance - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// Model identifier - /// Custom for HTTP requests. - /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. 
- /// Self instance - [Experimental("SKEXP0010")] - public static MemoryBuilder WithAzureOpenAITextEmbeddingGeneration( - this MemoryBuilder builder, - string deploymentName, - string endpoint, - TokenCredential credential, - string? modelId = null, - HttpClient? httpClient = null, - int? dimensions = null) - { - return builder.WithTextEmbeddingGeneration((loggerFactory, builderHttpClient) => - new AzureOpenAITextEmbeddingGenerationService( - deploymentName, - endpoint, - credential, - modelId, - HttpClientProvider.GetHttpClient(httpClient ?? builderHttpClient), - loggerFactory, - dimensions)); - } - - /// - /// Adds the OpenAI text embeddings service. - /// See https://platform.openai.com/docs for service details. - /// - /// The instance - /// OpenAI model name, see https://platform.openai.com/docs/models - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// Custom for HTTP requests. - /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. - /// Self instance - [Experimental("SKEXP0010")] - public static MemoryBuilder WithOpenAITextEmbeddingGeneration( - this MemoryBuilder builder, - string modelId, - string apiKey, - string? orgId = null, - HttpClient? httpClient = null, - int? dimensions = null) - { - return builder.WithTextEmbeddingGeneration((loggerFactory, builderHttpClient) => - new OpenAITextEmbeddingGenerationService( - modelId, - apiKey, - orgId, - HttpClientProvider.GetHttpClient(httpClient ?? 
builderHttpClient), - loggerFactory, - dimensions)); - } -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/OpenAIServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.OpenAI/OpenAIServiceCollectionExtensions.cs deleted file mode 100644 index 80cc60944965..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/OpenAIServiceCollectionExtensions.cs +++ /dev/null @@ -1,2042 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Diagnostics.CodeAnalysis; -using System.Net.Http; -using Azure; -using Azure.AI.OpenAI; -using Azure.Core; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.AudioToText; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Microsoft.SemanticKernel.Embeddings; -using Microsoft.SemanticKernel.Http; -using Microsoft.SemanticKernel.TextGeneration; -using Microsoft.SemanticKernel.TextToAudio; -using Microsoft.SemanticKernel.TextToImage; - -#pragma warning disable CA2000 // Dispose objects before losing scope -#pragma warning disable IDE0039 // Use local function - -namespace Microsoft.SemanticKernel; - -/// -/// Provides extension methods for and related classes to configure OpenAI and Azure OpenAI connectors. -/// -public static class OpenAIServiceCollectionExtensions -{ - #region Text Completion - - /// - /// Adds an Azure OpenAI text generation service with the specified configuration. - /// - /// The instance to augment. 
- /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The HttpClient to use with this service. - /// The same instance as . - public static IKernelBuilder AddAzureOpenAITextGeneration( - this IKernelBuilder builder, - string deploymentName, - string endpoint, - string apiKey, - string? serviceId = null, - string? modelId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNullOrWhiteSpace(apiKey); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - { - var client = CreateAzureOpenAIClient(endpoint, new AzureKeyCredential(apiKey), httpClient ?? serviceProvider.GetService()); - return new AzureOpenAITextGenerationService(deploymentName, client, modelId, serviceProvider.GetService()); - }); - - return builder; - } - - /// - /// Adds an Azure OpenAI text generation service with the specified configuration. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The same instance as . 
- public static IServiceCollection AddAzureOpenAITextGeneration( - this IServiceCollection services, - string deploymentName, - string endpoint, - string apiKey, - string? serviceId = null, - string? modelId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNullOrWhiteSpace(apiKey); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - { - var client = CreateAzureOpenAIClient(endpoint, new AzureKeyCredential(apiKey), serviceProvider.GetService()); - return new AzureOpenAITextGenerationService(deploymentName, client, modelId, serviceProvider.GetService()); - }); - } - - /// - /// Adds an Azure OpenAI text generation service with the specified configuration. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The HttpClient to use with this service. - /// The same instance as . - public static IKernelBuilder AddAzureOpenAITextGeneration( - this IKernelBuilder builder, - string deploymentName, - string endpoint, - TokenCredential credentials, - string? serviceId = null, - string? modelId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNull(credentials); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - { - var client = CreateAzureOpenAIClient(endpoint, credentials, httpClient ?? 
serviceProvider.GetService()); - return new AzureOpenAITextGenerationService(deploymentName, client, modelId, serviceProvider.GetService()); - }); - - return builder; - } - - /// - /// Adds an Azure OpenAI text generation service with the specified configuration. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The same instance as . - public static IServiceCollection AddAzureOpenAITextGeneration( - this IServiceCollection services, - string deploymentName, - string endpoint, - TokenCredential credentials, - string? serviceId = null, - string? modelId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNull(credentials); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - { - var client = CreateAzureOpenAIClient(endpoint, credentials, serviceProvider.GetService()); - return new AzureOpenAITextGenerationService(deploymentName, client, modelId, serviceProvider.GetService()); - }); - } - - /// - /// Adds an Azure OpenAI text generation service with the specified configuration. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// to use for the service. If null, one must be available in the service provider when this service is resolved. 
- /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The same instance as . - public static IKernelBuilder AddAzureOpenAITextGeneration( - this IKernelBuilder builder, - string deploymentName, - OpenAIClient? openAIClient = null, - string? serviceId = null, - string? modelId = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(deploymentName); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextGenerationService( - deploymentName, - openAIClient ?? serviceProvider.GetRequiredService(), - modelId, - serviceProvider.GetService())); - - return builder; - } - - /// - /// Adds an Azure OpenAI text generation service with the specified configuration. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// to use for the service. If null, one must be available in the service provider when this service is resolved. - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The same instance as . - public static IServiceCollection AddAzureOpenAITextGeneration( - this IServiceCollection services, - string deploymentName, - OpenAIClient? openAIClient = null, - string? serviceId = null, - string? modelId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(deploymentName); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextGenerationService( - deploymentName, - openAIClient ?? serviceProvider.GetRequiredService(), - modelId, - serviceProvider.GetService())); - } - - /// - /// Adds an OpenAI text generation service with the specified configuration. - /// - /// The instance to augment. 
- /// OpenAI model name, see https://platform.openai.com/docs/models - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// The HttpClient to use with this service. - /// The same instance as . - public static IKernelBuilder AddOpenAITextGeneration( - this IKernelBuilder builder, - string modelId, - string apiKey, - string? orgId = null, - string? serviceId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(modelId); - Verify.NotNullOrWhiteSpace(apiKey); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAITextGenerationService( - modelId, - apiKey, - orgId, - HttpClientProvider.GetHttpClient(httpClient, serviceProvider), - serviceProvider.GetService())); - - return builder; - } - - /// - /// Adds an OpenAI text generation service with the specified configuration. - /// - /// The instance to augment. - /// OpenAI model name, see https://platform.openai.com/docs/models - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// The same instance as . - public static IServiceCollection AddOpenAITextGeneration( - this IServiceCollection services, - string modelId, - string apiKey, - string? orgId = null, - string? 
serviceId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(modelId); - Verify.NotNullOrWhiteSpace(apiKey); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAITextGenerationService( - modelId, - apiKey, - orgId, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService())); - } - - /// - /// Adds an OpenAI text generation service with the specified configuration. - /// - /// The instance to augment. - /// OpenAI model name, see https://platform.openai.com/docs/models - /// to use for the service. If null, one must be available in the service provider when this service is resolved. - /// A local identifier for the given AI service - /// The same instance as . - public static IKernelBuilder AddOpenAITextGeneration( - this IKernelBuilder builder, - string modelId, - OpenAIClient? openAIClient = null, - string? serviceId = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(modelId); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAITextGenerationService( - modelId, - openAIClient ?? serviceProvider.GetRequiredService(), - serviceProvider.GetService())); - - return builder; - } - - /// - /// Adds an OpenAI text generation service with the specified configuration. - /// - /// The instance to augment. - /// OpenAI model name, see https://platform.openai.com/docs/models - /// to use for the service. If null, one must be available in the service provider when this service is resolved. - /// A local identifier for the given AI service - /// The same instance as . - public static IServiceCollection AddOpenAITextGeneration(this IServiceCollection services, - string modelId, - OpenAIClient? openAIClient = null, - string? serviceId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(modelId); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAITextGenerationService( - modelId, - openAIClient ?? 
serviceProvider.GetRequiredService(), - serviceProvider.GetService())); - } - - #endregion - - #region Text Embedding - - /// - /// Adds an Azure OpenAI text embeddings service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The HttpClient to use with this service. - /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. - /// The same instance as . - [Experimental("SKEXP0010")] - public static IKernelBuilder AddAzureOpenAITextEmbeddingGeneration( - this IKernelBuilder builder, - string deploymentName, - string endpoint, - string apiKey, - string? serviceId = null, - string? modelId = null, - HttpClient? httpClient = null, - int? dimensions = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNullOrWhiteSpace(apiKey); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextEmbeddingGenerationService( - deploymentName, - endpoint, - apiKey, - modelId, - HttpClientProvider.GetHttpClient(httpClient, serviceProvider), - serviceProvider.GetService(), - dimensions)); - - return builder; - } - - /// - /// Adds an Azure OpenAI text embeddings service to the list. - /// - /// The instance to augment. 
- /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. - /// The same instance as . - [Experimental("SKEXP0010")] - public static IServiceCollection AddAzureOpenAITextEmbeddingGeneration( - this IServiceCollection services, - string deploymentName, - string endpoint, - string apiKey, - string? serviceId = null, - string? modelId = null, - int? dimensions = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNullOrWhiteSpace(apiKey); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextEmbeddingGenerationService( - deploymentName, - endpoint, - apiKey, - modelId, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService(), - dimensions)); - } - - /// - /// Adds an Azure OpenAI text embeddings service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. 
- /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The HttpClient to use with this service. - /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. - /// The same instance as . - [Experimental("SKEXP0010")] - public static IKernelBuilder AddAzureOpenAITextEmbeddingGeneration( - this IKernelBuilder builder, - string deploymentName, - string endpoint, - TokenCredential credential, - string? serviceId = null, - string? modelId = null, - HttpClient? httpClient = null, - int? dimensions = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNull(credential); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextEmbeddingGenerationService( - deploymentName, - endpoint, - credential, - modelId, - HttpClientProvider.GetHttpClient(httpClient, serviceProvider), - serviceProvider.GetService(), - dimensions)); - - return builder; - } - - /// - /// Adds an Azure OpenAI text embeddings service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. - /// The same instance as . 
- [Experimental("SKEXP0010")] - public static IServiceCollection AddAzureOpenAITextEmbeddingGeneration( - this IServiceCollection services, - string deploymentName, - string endpoint, - TokenCredential credential, - string? serviceId = null, - string? modelId = null, - int? dimensions = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNull(credential); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextEmbeddingGenerationService( - deploymentName, - endpoint, - credential, - modelId, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService(), - dimensions)); - } - - /// - /// Adds an Azure OpenAI text embeddings service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// to use for the service. If null, one must be available in the service provider when this service is resolved. - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. - /// The same instance as . - [Experimental("SKEXP0010")] - public static IKernelBuilder AddAzureOpenAITextEmbeddingGeneration( - this IKernelBuilder builder, - string deploymentName, - OpenAIClient? openAIClient = null, - string? serviceId = null, - string? modelId = null, - int? dimensions = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(deploymentName); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextEmbeddingGenerationService( - deploymentName, - openAIClient ?? 
serviceProvider.GetRequiredService(), - modelId, - serviceProvider.GetService(), - dimensions)); - - return builder; - } - - /// - /// Adds an Azure OpenAI text embeddings service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// to use for the service. If null, one must be available in the service provider when this service is resolved. - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. - /// The same instance as . - [Experimental("SKEXP0010")] - public static IServiceCollection AddAzureOpenAITextEmbeddingGeneration( - this IServiceCollection services, - string deploymentName, - OpenAIClient? openAIClient = null, - string? serviceId = null, - string? modelId = null, - int? dimensions = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(deploymentName); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextEmbeddingGenerationService( - deploymentName, - openAIClient ?? serviceProvider.GetRequiredService(), - modelId, - serviceProvider.GetService(), - dimensions)); - } - - /// - /// Adds the OpenAI text embeddings service to the list. - /// - /// The instance to augment. - /// OpenAI model name, see https://platform.openai.com/docs/models - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// The HttpClient to use with this service. - /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. 
- /// The same instance as . - [Experimental("SKEXP0010")] - public static IKernelBuilder AddOpenAITextEmbeddingGeneration( - this IKernelBuilder builder, - string modelId, - string apiKey, - string? orgId = null, - string? serviceId = null, - HttpClient? httpClient = null, - int? dimensions = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(modelId); - Verify.NotNullOrWhiteSpace(apiKey); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAITextEmbeddingGenerationService( - modelId, - apiKey, - orgId, - HttpClientProvider.GetHttpClient(httpClient, serviceProvider), - serviceProvider.GetService(), - dimensions)); - - return builder; - } - - /// - /// Adds the OpenAI text embeddings service to the list. - /// - /// The instance to augment. - /// OpenAI model name, see https://platform.openai.com/docs/models - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. - /// The same instance as . - [Experimental("SKEXP0010")] - public static IServiceCollection AddOpenAITextEmbeddingGeneration( - this IServiceCollection services, - string modelId, - string apiKey, - string? orgId = null, - string? serviceId = null, - int? dimensions = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(modelId); - Verify.NotNullOrWhiteSpace(apiKey); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAITextEmbeddingGenerationService( - modelId, - apiKey, - orgId, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService(), - dimensions)); - } - - /// - /// Adds the OpenAI text embeddings service to the list. - /// - /// The instance to augment. 
- /// OpenAI model name, see https://platform.openai.com/docs/models - /// to use for the service. If null, one must be available in the service provider when this service is resolved. - /// A local identifier for the given AI service - /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. - /// The same instance as . - [Experimental("SKEXP0010")] - public static IKernelBuilder AddOpenAITextEmbeddingGeneration( - this IKernelBuilder builder, - string modelId, - OpenAIClient? openAIClient = null, - string? serviceId = null, - int? dimensions = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(modelId); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAITextEmbeddingGenerationService( - modelId, - openAIClient ?? serviceProvider.GetRequiredService(), - serviceProvider.GetService(), - dimensions)); - - return builder; - } - - /// - /// Adds the OpenAI text embeddings service to the list. - /// - /// The instance to augment. - /// The OpenAI model id. - /// to use for the service. If null, one must be available in the service provider when this service is resolved. - /// A local identifier for the given AI service - /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. - /// The same instance as . - [Experimental("SKEXP0010")] - public static IServiceCollection AddOpenAITextEmbeddingGeneration(this IServiceCollection services, - string modelId, - OpenAIClient? openAIClient = null, - string? serviceId = null, - int? dimensions = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(modelId); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAITextEmbeddingGenerationService( - modelId, - openAIClient ?? 
serviceProvider.GetRequiredService(), - serviceProvider.GetService(), - dimensions)); - } - - #endregion - - #region Chat Completion - - /// - /// Adds the Azure OpenAI chat completion service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The HttpClient to use with this service. - /// The same instance as . - public static IKernelBuilder AddAzureOpenAIChatCompletion( - this IKernelBuilder builder, - string deploymentName, - string endpoint, - string apiKey, - string? serviceId = null, - string? modelId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNullOrWhiteSpace(apiKey); - - Func factory = (serviceProvider, _) => - { - OpenAIClient client = CreateAzureOpenAIClient( - endpoint, - new AzureKeyCredential(apiKey), - HttpClientProvider.GetHttpClient(httpClient, serviceProvider)); - - return new(deploymentName, client, modelId, serviceProvider.GetService()); - }; - - builder.Services.AddKeyedSingleton(serviceId, factory); - builder.Services.AddKeyedSingleton(serviceId, factory); - - return builder; - } - - /// - /// Adds the Azure OpenAI chat completion service to the list. - /// - /// The instance to augment. 
- /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The same instance as . - public static IServiceCollection AddAzureOpenAIChatCompletion( - this IServiceCollection services, - string deploymentName, - string endpoint, - string apiKey, - string? serviceId = null, - string? modelId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNullOrWhiteSpace(apiKey); - - Func factory = (serviceProvider, _) => - { - OpenAIClient client = CreateAzureOpenAIClient( - endpoint, - new AzureKeyCredential(apiKey), - HttpClientProvider.GetHttpClient(serviceProvider)); - - return new(deploymentName, client, modelId, serviceProvider.GetService()); - }; - - services.AddKeyedSingleton(serviceId, factory); - services.AddKeyedSingleton(serviceId, factory); - - return services; - } - - /// - /// Adds the Azure OpenAI chat completion service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The HttpClient to use with this service. - /// The same instance as . 
- public static IKernelBuilder AddAzureOpenAIChatCompletion( - this IKernelBuilder builder, - string deploymentName, - string endpoint, - TokenCredential credentials, - string? serviceId = null, - string? modelId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNull(credentials); - - Func factory = (serviceProvider, _) => - { - OpenAIClient client = CreateAzureOpenAIClient( - endpoint, - credentials, - HttpClientProvider.GetHttpClient(httpClient, serviceProvider)); - - return new(deploymentName, client, modelId, serviceProvider.GetService()); - }; - - builder.Services.AddKeyedSingleton(serviceId, factory); - builder.Services.AddKeyedSingleton(serviceId, factory); - - return builder; - } - - /// - /// Adds the Azure OpenAI chat completion service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The same instance as . - public static IServiceCollection AddAzureOpenAIChatCompletion( - this IServiceCollection services, - string deploymentName, - string endpoint, - TokenCredential credentials, - string? serviceId = null, - string? 
modelId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNull(credentials); - - Func factory = (serviceProvider, _) => - { - OpenAIClient client = CreateAzureOpenAIClient( - endpoint, - credentials, - HttpClientProvider.GetHttpClient(serviceProvider)); - - return new(deploymentName, client, modelId, serviceProvider.GetService()); - }; - - services.AddKeyedSingleton(serviceId, factory); - services.AddKeyedSingleton(serviceId, factory); - - return services; - } - - /// - /// Adds the Azure OpenAI chat completion service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// to use for the service. If null, one must be available in the service provider when this service is resolved. - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The same instance as . - public static IKernelBuilder AddAzureOpenAIChatCompletion( - this IKernelBuilder builder, - string deploymentName, - OpenAIClient? openAIClient = null, - string? serviceId = null, - string? modelId = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(deploymentName); - - Func factory = (serviceProvider, _) => - new(deploymentName, openAIClient ?? serviceProvider.GetRequiredService(), modelId, serviceProvider.GetService()); - - builder.Services.AddKeyedSingleton(serviceId, factory); - builder.Services.AddKeyedSingleton(serviceId, factory); - - return builder; - } - - /// - /// Adds the Azure OpenAI chat completion service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// to use for the service. 
If null, one must be available in the service provider when this service is resolved. - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The same instance as . - public static IServiceCollection AddAzureOpenAIChatCompletion( - this IServiceCollection services, - string deploymentName, - OpenAIClient? openAIClient = null, - string? serviceId = null, - string? modelId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(deploymentName); - - Func factory = (serviceProvider, _) => - new(deploymentName, openAIClient ?? serviceProvider.GetRequiredService(), modelId, serviceProvider.GetService()); - - services.AddKeyedSingleton(serviceId, factory); - services.AddKeyedSingleton(serviceId, factory); - - return services; - } - - /// - /// Adds the Azure OpenAI chat completion with data service to the list. - /// - /// The instance. - /// Required configuration for Azure OpenAI chat completion with data. - /// A local identifier for the given AI service. - /// The same instance as . - /// - /// More information: - /// - [Experimental("SKEXP0010")] - [Obsolete("This method is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] - public static IKernelBuilder AddAzureOpenAIChatCompletion( - this IKernelBuilder builder, - AzureOpenAIChatCompletionWithDataConfig config, - string? serviceId = null) - { - Verify.NotNull(builder); - Verify.NotNull(config); - - Func factory = (serviceProvider, _) => - new(config, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService()); - - builder.Services.AddKeyedSingleton(serviceId, factory); - builder.Services.AddKeyedSingleton(serviceId, factory); - - return builder; - } - - /// - /// Adds the Azure OpenAI chat completion with data service to the list. - /// - /// The instance. - /// Required configuration for Azure OpenAI chat completion with data. 
- /// A local identifier for the given AI service. - /// The same instance as . - /// - /// More information: - /// - [Experimental("SKEXP0010")] - [Obsolete("This method is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] - public static IServiceCollection AddAzureOpenAIChatCompletion( - this IServiceCollection services, - AzureOpenAIChatCompletionWithDataConfig config, - string? serviceId = null) - { - Verify.NotNull(services); - Verify.NotNull(config); - - Func factory = (serviceProvider, _) => - new(config, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService()); - - services.AddKeyedSingleton(serviceId, factory); - services.AddKeyedSingleton(serviceId, factory); - - return services; - } - - /// - /// Adds the OpenAI chat completion service to the list. - /// - /// The instance to augment. - /// OpenAI model name, see https://platform.openai.com/docs/models - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// The HttpClient to use with this service. - /// The same instance as . - public static IKernelBuilder AddOpenAIChatCompletion( - this IKernelBuilder builder, - string modelId, - string apiKey, - string? orgId = null, - string? serviceId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(modelId); - Verify.NotNullOrWhiteSpace(apiKey); - - Func factory = (serviceProvider, _) => - new(modelId, - apiKey, - orgId, - HttpClientProvider.GetHttpClient(httpClient, serviceProvider), - serviceProvider.GetService()); - - builder.Services.AddKeyedSingleton(serviceId, factory); - builder.Services.AddKeyedSingleton(serviceId, factory); - - return builder; - } - - /// - /// Adds the OpenAI chat completion service to the list. - /// - /// The instance to augment. 
- /// OpenAI model name, see https://platform.openai.com/docs/models - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// The same instance as . - public static IServiceCollection AddOpenAIChatCompletion( - this IServiceCollection services, - string modelId, - string apiKey, - string? orgId = null, - string? serviceId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(modelId); - Verify.NotNullOrWhiteSpace(apiKey); - - Func factory = (serviceProvider, _) => - new(modelId, - apiKey, - orgId, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService()); - - services.AddKeyedSingleton(serviceId, factory); - services.AddKeyedSingleton(serviceId, factory); - - return services; - } - - /// - /// Adds the OpenAI chat completion service to the list. - /// - /// The instance to augment. - /// OpenAI model id - /// to use for the service. If null, one must be available in the service provider when this service is resolved. - /// A local identifier for the given AI service - /// The same instance as . - public static IKernelBuilder AddOpenAIChatCompletion( - this IKernelBuilder builder, - string modelId, - OpenAIClient? openAIClient = null, - string? serviceId = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(modelId); - - Func factory = (serviceProvider, _) => - new(modelId, openAIClient ?? serviceProvider.GetRequiredService(), serviceProvider.GetService()); - - builder.Services.AddKeyedSingleton(serviceId, factory); - builder.Services.AddKeyedSingleton(serviceId, factory); - - return builder; - } - - /// - /// Adds the OpenAI chat completion service to the list. - /// - /// The instance to augment. - /// OpenAI model id - /// to use for the service. If null, one must be available in the service provider when this service is resolved. 
- /// A local identifier for the given AI service - /// The same instance as . - public static IServiceCollection AddOpenAIChatCompletion(this IServiceCollection services, - string modelId, - OpenAIClient? openAIClient = null, - string? serviceId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(modelId); - - Func factory = (serviceProvider, _) => - new(modelId, openAIClient ?? serviceProvider.GetRequiredService(), serviceProvider.GetService()); - - services.AddKeyedSingleton(serviceId, factory); - services.AddKeyedSingleton(serviceId, factory); - - return services; - } - - /// - /// Adds the Custom OpenAI chat completion service to the list. - /// - /// The instance to augment. - /// OpenAI model name, see https://platform.openai.com/docs/models - /// A Custom Message API compatible endpoint. - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// The same instance as . - [Experimental("SKEXP0010")] - public static IServiceCollection AddOpenAIChatCompletion( - this IServiceCollection services, - string modelId, - Uri endpoint, - string? apiKey = null, - string? orgId = null, - string? serviceId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(modelId); - - Func factory = (serviceProvider, _) => - new(modelId, - endpoint, - apiKey, - orgId, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService()); - - services.AddKeyedSingleton(serviceId, factory); - services.AddKeyedSingleton(serviceId, factory); - - return services; - } - - /// - /// Adds the Custom Endpoint OpenAI chat completion service to the list. - /// - /// The instance to augment. 
- /// OpenAI model name, see https://platform.openai.com/docs/models - /// Custom OpenAI Compatible Message API endpoint - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// The HttpClient to use with this service. - /// The same instance as . - [Experimental("SKEXP0010")] - public static IKernelBuilder AddOpenAIChatCompletion( - this IKernelBuilder builder, - string modelId, - Uri endpoint, - string? apiKey, - string? orgId = null, - string? serviceId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(modelId); - - Func factory = (serviceProvider, _) => - new(modelId: modelId, - apiKey: apiKey, - endpoint: endpoint, - organization: orgId, - httpClient: HttpClientProvider.GetHttpClient(httpClient, serviceProvider), - loggerFactory: serviceProvider.GetService()); - - builder.Services.AddKeyedSingleton(serviceId, factory); - builder.Services.AddKeyedSingleton(serviceId, factory); - - return builder; - } - - #endregion - - #region Images - - /// - /// Add the Azure OpenAI Dall-E text to image service to the list - /// - /// The instance to augment. - /// Azure OpenAI deployment name - /// Azure OpenAI deployment URL - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// Model identifier - /// A local identifier for the given AI service - /// Azure OpenAI API version - /// The same instance as . - [Experimental("SKEXP0010")] - public static IServiceCollection AddAzureOpenAITextToImage( - this IServiceCollection services, - string deploymentName, - string endpoint, - TokenCredential credentials, - string? modelId = null, - string? serviceId = null, - string? 
apiVersion = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNull(credentials); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextToImageService( - deploymentName, - endpoint, - credentials, - modelId, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService(), - apiVersion)); - } - - /// - /// Add the Azure OpenAI Dall-E text to image service to the list - /// - /// The instance to augment. - /// Azure OpenAI deployment name - /// Azure OpenAI deployment URL - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// Model identifier - /// A local identifier for the given AI service - /// Azure OpenAI API version - /// The same instance as . - [Experimental("SKEXP0010")] - public static IKernelBuilder AddAzureOpenAITextToImage( - this IKernelBuilder builder, - string deploymentName, - string endpoint, - TokenCredential credentials, - string? modelId = null, - string? serviceId = null, - string? apiVersion = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNull(credentials); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextToImageService( - deploymentName, - endpoint, - credentials, - modelId, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService(), - apiVersion)); - - return builder; - } - - /// - /// Add the Azure OpenAI Dall-E text to image service to the list - /// - /// The instance to augment. - /// Azure OpenAI deployment name - /// Azure OpenAI deployment URL - /// Azure OpenAI API key - /// Model identifier - /// A local identifier for the given AI service - /// Azure OpenAI API version - /// The HttpClient to use with this service. - /// The same instance as . 
- [Experimental("SKEXP0010")] - public static IKernelBuilder AddAzureOpenAITextToImage( - this IKernelBuilder builder, - string deploymentName, - string endpoint, - string apiKey, - string? modelId = null, - string? serviceId = null, - string? apiVersion = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNullOrWhiteSpace(apiKey); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextToImageService( - deploymentName, - endpoint, - apiKey, - modelId, - HttpClientProvider.GetHttpClient(httpClient, serviceProvider), - serviceProvider.GetService(), - apiVersion)); - - return builder; - } - - /// - /// Add the Azure OpenAI Dall-E text to image service to the list - /// - /// The instance to augment. - /// Azure OpenAI deployment name - /// Azure OpenAI deployment URL - /// Azure OpenAI API key - /// A local identifier for the given AI service - /// Model identifier - /// Maximum number of attempts to retrieve the text to image operation result. - /// The same instance as . - [Experimental("SKEXP0010")] - public static IServiceCollection AddAzureOpenAITextToImage( - this IServiceCollection services, - string deploymentName, - string endpoint, - string apiKey, - string? serviceId = null, - string? modelId = null, - int maxRetryCount = 5) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNullOrWhiteSpace(apiKey); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextToImageService( - deploymentName, - endpoint, - apiKey, - modelId, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService())); - } - - /// - /// Add the OpenAI Dall-E text to image service to the list - /// - /// The instance to augment. - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. 
This is usually optional unless your account belongs to multiple organizations. - /// The model to use for image generation. - /// A local identifier for the given AI service - /// The HttpClient to use with this service. - /// The same instance as . - [Experimental("SKEXP0010")] - public static IKernelBuilder AddOpenAITextToImage( - this IKernelBuilder builder, - string apiKey, - string? orgId = null, - string? modelId = null, - string? serviceId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(apiKey); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAITextToImageService( - apiKey, - orgId, - modelId, - HttpClientProvider.GetHttpClient(httpClient, serviceProvider), - serviceProvider.GetService())); - - return builder; - } - - /// - /// Add the OpenAI Dall-E text to image service to the list - /// - /// The instance to augment. - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// The model to use for image generation. - /// A local identifier for the given AI service - /// The same instance as . - [Experimental("SKEXP0010")] - public static IServiceCollection AddOpenAITextToImage(this IServiceCollection services, - string apiKey, - string? orgId = null, - string? modelId = null, - string? serviceId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(apiKey); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAITextToImageService( - apiKey, - orgId, - modelId, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService())); - } - - /// - /// Add the OpenAI Dall-E text to image service to the list - /// - /// The instance to augment. - /// Azure OpenAI deployment name - /// to use for the service. 
If null, one must be available in the service provider when this service is resolved. - /// Model identifier - /// A local identifier for the given AI service - /// The same instance as . - [Experimental("SKEXP0010")] - public static IServiceCollection AddAzureOpenAITextToImage( - this IServiceCollection services, - string deploymentName, - OpenAIClient? openAIClient = null, - string? modelId = null, - string? serviceId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(deploymentName); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextToImageService( - deploymentName, - openAIClient ?? serviceProvider.GetRequiredService(), - modelId, - serviceProvider.GetService())); - } - - /// - /// Add the OpenAI Dall-E text to image service to the list - /// - /// The instance to augment. - /// Azure OpenAI deployment name - /// to use for the service. If null, one must be available in the service provider when this service is resolved. - /// Model identifier - /// A local identifier for the given AI service - /// The same instance as . - [Experimental("SKEXP0010")] - public static IKernelBuilder AddAzureOpenAITextToImage( - this IKernelBuilder builder, - string deploymentName, - OpenAIClient? openAIClient = null, - string? modelId = null, - string? serviceId = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(deploymentName); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextToImageService( - deploymentName, - openAIClient ?? serviceProvider.GetRequiredService(), - modelId, - serviceProvider.GetService())); - - return builder; - } - - #endregion - - #region Files - - /// - /// Add the OpenAI file service to the list - /// - /// The instance to augment. - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. 
- /// A local identifier for the given AI service - /// The HttpClient to use with this service. - /// The same instance as . - [Experimental("SKEXP0010")] - public static IKernelBuilder AddOpenAIFiles( - this IKernelBuilder builder, - string apiKey, - string? orgId = null, - string? serviceId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(apiKey); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAIFileService( - apiKey, - orgId, - HttpClientProvider.GetHttpClient(httpClient, serviceProvider), - serviceProvider.GetService())); - - return builder; - } - - /// - /// Add the OpenAI file service to the list - /// - /// The instance to augment. - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// The same instance as . - [Experimental("SKEXP0010")] - public static IServiceCollection AddOpenAIFiles( - this IServiceCollection services, - string apiKey, - string? orgId = null, - string? serviceId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(apiKey); - - services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAIFileService( - apiKey, - orgId, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService())); - - return services; - } - - /// - /// Add the OpenAI file service to the list - /// - /// The instance to augment. - /// Azure OpenAI deployment URL - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// The API version to target. - /// A local identifier for the given AI service - /// The HttpClient to use with this service. - /// The same instance as . 
- [Experimental("SKEXP0010")] - public static IKernelBuilder AddAzureOpenAIFiles( - this IKernelBuilder builder, - string endpoint, - string apiKey, - string? orgId = null, - string? version = null, - string? serviceId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(apiKey); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAIFileService( - new Uri(endpoint), - apiKey, - orgId, - version, - HttpClientProvider.GetHttpClient(httpClient, serviceProvider), - serviceProvider.GetService())); - - return builder; - } - - /// - /// Add the OpenAI file service to the list - /// - /// The instance to augment. - /// Azure OpenAI deployment URL - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// The API version to target. - /// A local identifier for the given AI service - /// The same instance as . - [Experimental("SKEXP0010")] - public static IServiceCollection AddAzureOpenAIFiles( - this IServiceCollection services, - string endpoint, - string apiKey, - string? orgId = null, - string? version = null, - string? serviceId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(apiKey); - - services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAIFileService( - new Uri(endpoint), - apiKey, - orgId, - version, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService())); - - return services; - } - - #endregion - - #region Text-to-Audio - - /// - /// Adds the Azure OpenAI text-to-audio service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name - /// Azure OpenAI deployment URL - /// Azure OpenAI API key - /// A local identifier for the given AI service - /// Model identifier - /// The HttpClient to use with this service. - /// The same instance as . 
- [Experimental("SKEXP0001")] - public static IKernelBuilder AddAzureOpenAITextToAudio( - this IKernelBuilder builder, - string deploymentName, - string endpoint, - string apiKey, - string? serviceId = null, - string? modelId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNullOrWhiteSpace(apiKey); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextToAudioService( - deploymentName, - endpoint, - apiKey, - modelId, - HttpClientProvider.GetHttpClient(httpClient, serviceProvider), - serviceProvider.GetService())); - - return builder; - } - - /// - /// Adds the Azure OpenAI text-to-audio service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name - /// Azure OpenAI deployment URL - /// Azure OpenAI API key - /// A local identifier for the given AI service - /// Model identifier - /// The HttpClient to use with this service. - /// The same instance as . - [Experimental("SKEXP0001")] - public static IServiceCollection AddAzureOpenAITextToAudio( - this IServiceCollection services, - string deploymentName, - string endpoint, - string apiKey, - string? serviceId = null, - string? modelId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNullOrWhiteSpace(apiKey); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextToAudioService( - deploymentName, - endpoint, - apiKey, - modelId, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService())); - } - - /// - /// Adds the OpenAI text-to-audio service to the list. - /// - /// The instance to augment. 
- /// OpenAI model name, see https://platform.openai.com/docs/models - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// The HttpClient to use with this service. - /// The same instance as . - [Experimental("SKEXP0001")] - public static IKernelBuilder AddOpenAITextToAudio( - this IKernelBuilder builder, - string modelId, - string apiKey, - string? orgId = null, - string? serviceId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(modelId); - Verify.NotNullOrWhiteSpace(apiKey); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAITextToAudioService( - modelId, - apiKey, - orgId, - HttpClientProvider.GetHttpClient(httpClient, serviceProvider), - serviceProvider.GetService())); - - return builder; - } - - /// - /// Adds the OpenAI text-to-audio service to the list. - /// - /// The instance to augment. - /// OpenAI model name, see https://platform.openai.com/docs/models - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// The same instance as . - [Experimental("SKEXP0001")] - public static IServiceCollection AddOpenAITextToAudio( - this IServiceCollection services, - string modelId, - string apiKey, - string? orgId = null, - string? 
serviceId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(modelId); - Verify.NotNullOrWhiteSpace(apiKey); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAITextToAudioService( - modelId, - apiKey, - orgId, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService())); - } - - #endregion - - #region Audio-to-Text - - /// - /// Adds the Azure OpenAI audio-to-text service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The HttpClient to use with this service. - /// The same instance as . - [Experimental("SKEXP0001")] - public static IKernelBuilder AddAzureOpenAIAudioToText( - this IKernelBuilder builder, - string deploymentName, - string endpoint, - string apiKey, - string? serviceId = null, - string? modelId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNullOrWhiteSpace(apiKey); - - Func factory = (serviceProvider, _) => - { - OpenAIClient client = CreateAzureOpenAIClient( - endpoint, - new AzureKeyCredential(apiKey), - HttpClientProvider.GetHttpClient(httpClient, serviceProvider)); - return new(deploymentName, client, modelId, serviceProvider.GetService()); - }; - - builder.Services.AddKeyedSingleton(serviceId, factory); - - return builder; - } - - /// - /// Adds the Azure OpenAI audio-to-text service to the list. - /// - /// The instance to augment. 
- /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The same instance as . - [Experimental("SKEXP0001")] - public static IServiceCollection AddAzureOpenAIAudioToText( - this IServiceCollection services, - string deploymentName, - string endpoint, - string apiKey, - string? serviceId = null, - string? modelId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNullOrWhiteSpace(apiKey); - - Func factory = (serviceProvider, _) => - { - OpenAIClient client = CreateAzureOpenAIClient( - endpoint, - new AzureKeyCredential(apiKey), - HttpClientProvider.GetHttpClient(serviceProvider)); - return new(deploymentName, client, modelId, serviceProvider.GetService()); - }; - - services.AddKeyedSingleton(serviceId, factory); - - return services; - } - - /// - /// Adds the Azure OpenAI audio-to-text service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The HttpClient to use with this service. - /// The same instance as . 
- [Experimental("SKEXP0001")] - public static IKernelBuilder AddAzureOpenAIAudioToText( - this IKernelBuilder builder, - string deploymentName, - string endpoint, - TokenCredential credentials, - string? serviceId = null, - string? modelId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNull(credentials); - - Func factory = (serviceProvider, _) => - { - OpenAIClient client = CreateAzureOpenAIClient( - endpoint, - credentials, - HttpClientProvider.GetHttpClient(httpClient, serviceProvider)); - return new(deploymentName, client, modelId, serviceProvider.GetService()); - }; - - builder.Services.AddKeyedSingleton(serviceId, factory); - - return builder; - } - - /// - /// Adds the Azure OpenAI audio-to-text service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The same instance as . - [Experimental("SKEXP0001")] - public static IServiceCollection AddAzureOpenAIAudioToText( - this IServiceCollection services, - string deploymentName, - string endpoint, - TokenCredential credentials, - string? serviceId = null, - string? 
modelId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNull(credentials); - - Func factory = (serviceProvider, _) => - { - OpenAIClient client = CreateAzureOpenAIClient( - endpoint, - credentials, - HttpClientProvider.GetHttpClient(serviceProvider)); - return new(deploymentName, client, modelId, serviceProvider.GetService()); - }; - - services.AddKeyedSingleton(serviceId, factory); - - return services; - } - - /// - /// Adds the Azure OpenAI audio-to-text service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// to use for the service. If null, one must be available in the service provider when this service is resolved. - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The same instance as . - [Experimental("SKEXP0001")] - public static IKernelBuilder AddAzureOpenAIAudioToText( - this IKernelBuilder builder, - string deploymentName, - OpenAIClient? openAIClient = null, - string? serviceId = null, - string? modelId = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(deploymentName); - - Func factory = (serviceProvider, _) => - new(deploymentName, openAIClient ?? serviceProvider.GetRequiredService(), modelId, serviceProvider.GetService()); - - builder.Services.AddKeyedSingleton(serviceId, factory); - - return builder; - } - - /// - /// Adds the Azure OpenAI audio-to-text service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// to use for the service. If null, one must be available in the service provider when this service is resolved. 
- /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The same instance as . - [Experimental("SKEXP0001")] - public static IServiceCollection AddAzureOpenAIAudioToText( - this IServiceCollection services, - string deploymentName, - OpenAIClient? openAIClient = null, - string? serviceId = null, - string? modelId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(deploymentName); - - Func factory = (serviceProvider, _) => - new(deploymentName, openAIClient ?? serviceProvider.GetRequiredService(), modelId, serviceProvider.GetService()); - - services.AddKeyedSingleton(serviceId, factory); - - return services; - } - - /// - /// Adds the OpenAI audio-to-text service to the list. - /// - /// The instance to augment. - /// OpenAI model name, see https://platform.openai.com/docs/models - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// The HttpClient to use with this service. - /// The same instance as . - [Experimental("SKEXP0001")] - public static IKernelBuilder AddOpenAIAudioToText( - this IKernelBuilder builder, - string modelId, - string apiKey, - string? orgId = null, - string? serviceId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(modelId); - Verify.NotNullOrWhiteSpace(apiKey); - - Func factory = (serviceProvider, _) => - new(modelId, - apiKey, - orgId, - HttpClientProvider.GetHttpClient(httpClient, serviceProvider), - serviceProvider.GetService()); - - builder.Services.AddKeyedSingleton(serviceId, factory); - - return builder; - } - - /// - /// Adds the OpenAI audio-to-text service to the list. - /// - /// The instance to augment. 
- /// OpenAI model name, see https://platform.openai.com/docs/models - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// The same instance as . - [Experimental("SKEXP0001")] - public static IServiceCollection AddOpenAIAudioToText( - this IServiceCollection services, - string modelId, - string apiKey, - string? orgId = null, - string? serviceId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(modelId); - Verify.NotNullOrWhiteSpace(apiKey); - - Func factory = (serviceProvider, _) => - new(modelId, - apiKey, - orgId, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService()); - - services.AddKeyedSingleton(serviceId, factory); - - return services; - } - - /// - /// Adds the OpenAI audio-to-text service to the list. - /// - /// The instance to augment. - /// OpenAI model id - /// to use for the service. If null, one must be available in the service provider when this service is resolved. - /// A local identifier for the given AI service - /// The same instance as . - [Experimental("SKEXP0001")] - public static IKernelBuilder AddOpenAIAudioToText( - this IKernelBuilder builder, - string modelId, - OpenAIClient? openAIClient = null, - string? serviceId = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(modelId); - - Func factory = (serviceProvider, _) => - new(modelId, openAIClient ?? serviceProvider.GetRequiredService(), serviceProvider.GetService()); - - builder.Services.AddKeyedSingleton(serviceId, factory); - - return builder; - } - - /// - /// Adds the OpenAI audio-to-text service to the list. - /// - /// The instance to augment. - /// OpenAI model id - /// to use for the service. If null, one must be available in the service provider when this service is resolved. 
- /// A local identifier for the given AI service - /// The same instance as . - [Experimental("SKEXP0001")] - public static IServiceCollection AddOpenAIAudioToText( - this IServiceCollection services, - string modelId, - OpenAIClient? openAIClient = null, - string? serviceId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(modelId); - - Func factory = (serviceProvider, _) => - new(modelId, openAIClient ?? serviceProvider.GetRequiredService(), serviceProvider.GetService()); - - services.AddKeyedSingleton(serviceId, factory); - - return services; - } - - #endregion - - private static OpenAIClient CreateAzureOpenAIClient(string endpoint, AzureKeyCredential credentials, HttpClient? httpClient) => - new(new Uri(endpoint), credentials, ClientCore.GetOpenAIClientOptions(httpClient)); - - private static OpenAIClient CreateAzureOpenAIClient(string endpoint, TokenCredential credentials, HttpClient? httpClient) => - new(new Uri(endpoint), credentials, ClientCore.GetOpenAIClientOptions(httpClient)); -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AudioToText/OpenAIAudioToTextService.cs b/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAIAudioToTextService.cs similarity index 62% rename from dotnet/src/Connectors/Connectors.OpenAI/AudioToText/OpenAIAudioToTextService.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAIAudioToTextService.cs index 3bebb4867af8..331da48cc08c 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/AudioToText/OpenAIAudioToTextService.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAIAudioToTextService.cs @@ -5,27 +5,28 @@ using System.Net.Http; using System.Threading; using System.Threading.Tasks; -using Azure.AI.OpenAI; using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel.AudioToText; -using Microsoft.SemanticKernel.Services; +using OpenAI; namespace Microsoft.SemanticKernel.Connectors.OpenAI; /// -/// OpenAI audio-to-text service. +/// OpenAI text-to-audio service. 
/// -[Experimental("SKEXP0001")] +[Experimental("SKEXP0010")] public sealed class OpenAIAudioToTextService : IAudioToTextService { - /// Core implementation shared by OpenAI services. - private readonly OpenAIClientCore _core; + /// + /// OpenAI text-to-audio client for HTTP operations. + /// + private readonly ClientCore _client; /// - public IReadOnlyDictionary Attributes => this._core.Attributes; + public IReadOnlyDictionary Attributes => this._client.Attributes; /// - /// Creates an instance of the with API key auth. + /// Initializes a new instance of the class. /// /// Model name /// OpenAI API Key @@ -39,19 +40,12 @@ public OpenAIAudioToTextService( HttpClient? httpClient = null, ILoggerFactory? loggerFactory = null) { - this._core = new( - modelId: modelId, - apiKey: apiKey, - organization: organization, - httpClient: httpClient, - logger: loggerFactory?.CreateLogger(typeof(OpenAIAudioToTextService))); - - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); - this._core.AddAttribute(OpenAIClientCore.OrganizationKey, organization); + Verify.NotNullOrWhiteSpace(modelId, nameof(modelId)); + this._client = new(modelId, apiKey, organization, null, httpClient, loggerFactory?.CreateLogger(typeof(OpenAIAudioToTextService))); } /// - /// Creates an instance of the using the specified . + /// Initializes a new instance of the class. /// /// Model name /// Custom for HTTP requests. @@ -61,9 +55,8 @@ public OpenAIAudioToTextService( OpenAIClient openAIClient, ILoggerFactory? loggerFactory = null) { - this._core = new(modelId, openAIClient, loggerFactory?.CreateLogger(typeof(OpenAIAudioToTextService))); - - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + Verify.NotNullOrWhiteSpace(modelId, nameof(modelId)); + this._client = new(modelId, openAIClient, loggerFactory?.CreateLogger(typeof(OpenAITextToAudioService))); } /// @@ -72,5 +65,5 @@ public Task> GetTextContentsAsync( PromptExecutionSettings? executionSettings = null, Kernel? 
kernel = null, CancellationToken cancellationToken = default) - => this._core.GetTextContentFromAudioAsync(content, executionSettings, cancellationToken); + => this._client.GetTextFromAudioContentsAsync(this._client.ModelId, content, executionSettings, cancellationToken); } diff --git a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletion/OpenAIChatCompletionService.cs b/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAIChatCompletionService.cs similarity index 61% rename from dotnet/src/Connectors/Connectors.OpenAI/ChatCompletion/OpenAIChatCompletionService.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAIChatCompletionService.cs index a9f617efed73..f544d8a5c61c 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletion/OpenAIChatCompletionService.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAIChatCompletionService.cs @@ -6,11 +6,13 @@ using System.Net.Http; using System.Threading; using System.Threading.Tasks; -using Azure.AI.OpenAI; using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Services; using Microsoft.SemanticKernel.TextGeneration; +using OpenAI; + +#pragma warning disable CA1862 // Use the 'StringComparison' method overloads to perform case-insensitive string comparisons +#pragma warning disable RCS1155 // Use StringComparison when comparing strings namespace Microsoft.SemanticKernel.Connectors.OpenAI; @@ -19,7 +21,8 @@ namespace Microsoft.SemanticKernel.Connectors.OpenAI; /// public sealed class OpenAIChatCompletionService : IChatCompletionService, ITextGenerationService { - private readonly OpenAIClientCore _core; + /// Core implementation shared by OpenAI clients. + private readonly ClientCore _client; /// /// Create an instance of the OpenAI chat completion connector @@ -37,16 +40,13 @@ public OpenAIChatCompletionService( ILoggerFactory? 
loggerFactory = null ) { - this._core = new( + this._client = new( modelId, apiKey, - endpoint: null, organization, + endpoint: null, httpClient, loggerFactory?.CreateLogger(typeof(OpenAIChatCompletionService))); - - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); - this._core.AddAttribute(OpenAIClientCore.OrganizationKey, organization); } /// @@ -71,26 +71,27 @@ public OpenAIChatCompletionService( var providedEndpoint = endpoint ?? httpClient?.BaseAddress; if (providedEndpoint is not null) { - // If the provided endpoint does not have a path specified, updates it to the default Message API Chat Completions endpoint - internalClientEndpoint = providedEndpoint.PathAndQuery == "/" ? - new Uri(providedEndpoint, "v1/chat/completions") - : providedEndpoint; + // As OpenAI Client automatically adds the chat completions endpoint, we remove it to avoid duplication. + const string PathAndQueryPattern = "v1/chat/completions"; + var providedEndpointText = providedEndpoint.ToString(); + int index = providedEndpointText.IndexOf(PathAndQueryPattern, StringComparison.OrdinalIgnoreCase); + if (index >= 0) + { + internalClientEndpoint = new Uri($"{providedEndpointText.Substring(0, index)}{providedEndpointText.Substring(index + PathAndQueryPattern.Length)}"); + } + else + { + internalClientEndpoint = providedEndpoint; + } } - this._core = new( + this._client = new( modelId, apiKey, - internalClientEndpoint, organization, + internalClientEndpoint, httpClient, loggerFactory?.CreateLogger(typeof(OpenAIChatCompletionService))); - - if (providedEndpoint is not null) - { - this._core.AddAttribute(AIServiceExtensions.EndpointKey, providedEndpoint.ToString()); - } - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); - this._core.AddAttribute(OpenAIClientCore.OrganizationKey, organization); } /// @@ -104,30 +105,44 @@ public OpenAIChatCompletionService( OpenAIClient openAIClient, ILoggerFactory? 
loggerFactory = null) { - this._core = new( + this._client = new( modelId, openAIClient, loggerFactory?.CreateLogger(typeof(OpenAIChatCompletionService))); - - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); } /// - public IReadOnlyDictionary Attributes => this._core.Attributes; + public IReadOnlyDictionary Attributes => this._client.Attributes; /// - public Task> GetChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) - => this._core.GetChatMessageContentsAsync(chatHistory, executionSettings, kernel, cancellationToken); + public Task> GetChatMessageContentsAsync( + ChatHistory chatHistory, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + CancellationToken cancellationToken = default) + => this._client.GetChatMessageContentsAsync(this._client.ModelId, chatHistory, executionSettings, kernel, cancellationToken); /// - public IAsyncEnumerable GetStreamingChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) - => this._core.GetStreamingChatMessageContentsAsync(chatHistory, executionSettings, kernel, cancellationToken); + public IAsyncEnumerable GetStreamingChatMessageContentsAsync( + ChatHistory chatHistory, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + CancellationToken cancellationToken = default) + => this._client.GetStreamingChatMessageContentsAsync(this._client.ModelId, chatHistory, executionSettings, kernel, cancellationToken); /// - public Task> GetTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? 
kernel = null, CancellationToken cancellationToken = default) - => this._core.GetChatAsTextContentsAsync(prompt, executionSettings, kernel, cancellationToken); + public Task> GetTextContentsAsync( + string prompt, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + CancellationToken cancellationToken = default) + => this._client.GetChatAsTextContentsAsync(this._client.ModelId, prompt, executionSettings, kernel, cancellationToken); /// - public IAsyncEnumerable GetStreamingTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) - => this._core.GetChatAsTextStreamingContentsAsync(prompt, executionSettings, kernel, cancellationToken); + public IAsyncEnumerable GetStreamingTextContentsAsync( + string prompt, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + CancellationToken cancellationToken = default) + => this._client.GetChatAsTextStreamingContentsAsync(this._client.ModelId, prompt, executionSettings, kernel, cancellationToken); } diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFileService.cs b/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAIFileService.cs similarity index 97% rename from dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFileService.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAIFileService.cs index 690954448eea..2b7f1bde31d8 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFileService.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAIFileService.cs @@ -20,8 +20,11 @@ namespace Microsoft.SemanticKernel.Connectors.OpenAI; /// File service access for OpenAI: https://api.openai.com/v1/files /// [Experimental("SKEXP0010")] +[Obsolete("Use OpenAI SDK or AzureOpenAI SDK clients for file operations. 
This class is deprecated and will be removed in a future version.")] +[ExcludeFromCodeCoverage] public sealed class OpenAIFileService { + private const string OrganizationKey = "Organization"; private const string HeaderNameAuthorization = "Authorization"; private const string HeaderNameAzureApiKey = "api-key"; private const string HeaderNameOpenAIAssistant = "OpenAI-Beta"; @@ -284,7 +287,7 @@ private void AddRequestHeaders(HttpRequestMessage request) if (!string.IsNullOrEmpty(this._organization)) { - this._httpClient.DefaultRequestHeaders.Add(OpenAIClientCore.OrganizationKey, this._organization); + this._httpClient.DefaultRequestHeaders.Add(OrganizationKey, this._organization); } } diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextEmbedding/OpenAITextEmbeddingGenerationService.cs b/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAITextEmbbedingGenerationService.cs similarity index 76% rename from dotnet/src/Connectors/Connectors.OpenAI/TextEmbedding/OpenAITextEmbeddingGenerationService.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAITextEmbbedingGenerationService.cs index c940a7caf291..aa70819020d0 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/TextEmbedding/OpenAITextEmbeddingGenerationService.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAITextEmbbedingGenerationService.cs @@ -6,24 +6,23 @@ using System.Net.Http; using System.Threading; using System.Threading.Tasks; -using Azure.AI.OpenAI; using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel.Embeddings; -using Microsoft.SemanticKernel.Services; +using OpenAI; namespace Microsoft.SemanticKernel.Connectors.OpenAI; /// -/// OpenAI text embedding service. +/// OpenAI implementation of /// [Experimental("SKEXP0010")] public sealed class OpenAITextEmbeddingGenerationService : ITextEmbeddingGenerationService { - private readonly OpenAIClientCore _core; + private readonly ClientCore _client; private readonly int? 
_dimensions; /// - /// Create an instance of the OpenAI text embedding connector + /// Initializes a new instance of the class. /// /// Model name /// OpenAI API Key @@ -39,20 +38,20 @@ public OpenAITextEmbeddingGenerationService( ILoggerFactory? loggerFactory = null, int? dimensions = null) { - this._core = new( + Verify.NotNullOrWhiteSpace(modelId); + this._client = new( modelId: modelId, apiKey: apiKey, - organization: organization, + endpoint: null, + organizationId: organization, httpClient: httpClient, logger: loggerFactory?.CreateLogger(typeof(OpenAITextEmbeddingGenerationService))); - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); - this._dimensions = dimensions; } /// - /// Create an instance of the OpenAI text embedding connector + /// Initializes a new instance of the class. /// /// Model name /// Custom for HTTP requests. @@ -64,14 +63,13 @@ public OpenAITextEmbeddingGenerationService( ILoggerFactory? loggerFactory = null, int? dimensions = null) { - this._core = new(modelId, openAIClient, loggerFactory?.CreateLogger(typeof(OpenAITextEmbeddingGenerationService))); - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); - + Verify.NotNullOrWhiteSpace(modelId); + this._client = new(modelId, openAIClient, loggerFactory?.CreateLogger(typeof(OpenAITextEmbeddingGenerationService))); this._dimensions = dimensions; } /// - public IReadOnlyDictionary Attributes => this._core.Attributes; + public IReadOnlyDictionary Attributes => this._client.Attributes; /// public Task>> GenerateEmbeddingsAsync( @@ -79,7 +77,7 @@ public Task>> GenerateEmbeddingsAsync( Kernel? 
kernel = null, CancellationToken cancellationToken = default) { - this._core.LogActionDetails(); - return this._core.GetEmbeddingsAsync(data, kernel, this._dimensions, cancellationToken); + this._client.LogActionDetails(); + return this._client.GetEmbeddingsAsync(this._client.ModelId, data, kernel, this._dimensions, cancellationToken); } } diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextToAudio/OpenAITextToAudioService.cs b/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAITextToAudioService.cs similarity index 77% rename from dotnet/src/Connectors/Connectors.OpenAI/TextToAudio/OpenAITextToAudioService.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAITextToAudioService.cs index 177acf539a41..93b5ede244fb 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/TextToAudio/OpenAITextToAudioService.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAITextToAudioService.cs @@ -14,13 +14,13 @@ namespace Microsoft.SemanticKernel.Connectors.OpenAI; /// /// OpenAI text-to-audio service. /// -[Experimental("SKEXP0001")] +[Experimental("SKEXP0010")] public sealed class OpenAITextToAudioService : ITextToAudioService { /// /// OpenAI text-to-audio client for HTTP operations. /// - private readonly OpenAITextToAudioClient _client; + private readonly ClientCore _client; /// /// Gets the attribute name used to store the organization in the dictionary. @@ -31,7 +31,7 @@ public sealed class OpenAITextToAudioService : ITextToAudioService public IReadOnlyDictionary Attributes => this._client.Attributes; /// - /// Creates an instance of the with API key auth. + /// Initializes a new instance of the class. /// /// Model name /// OpenAI API Key @@ -45,10 +45,8 @@ public OpenAITextToAudioService( HttpClient? httpClient = null, ILoggerFactory? 
loggerFactory = null) { - this._client = new(modelId, apiKey, organization, httpClient, loggerFactory?.CreateLogger(typeof(OpenAITextToAudioService))); - - this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); - this._client.AddAttribute(OrganizationKey, organization); + Verify.NotNullOrWhiteSpace(modelId, nameof(modelId)); + this._client = new(modelId, apiKey, organization, null, httpClient, loggerFactory?.CreateLogger(typeof(OpenAITextToAudioService))); } /// @@ -57,5 +55,5 @@ public Task> GetAudioContentsAsync( PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) - => this._client.GetAudioContentsAsync(text, executionSettings, cancellationToken); + => this._client.GetAudioContentsAsync(this._client.ModelId, text, executionSettings, cancellationToken); } diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAITextToImageService.cs b/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAITextToImageService.cs new file mode 100644 index 000000000000..f51e7d7c0141 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAITextToImageService.cs @@ -0,0 +1,45 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.TextToImage; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// OpenAI text to image service. +/// +[Experimental("SKEXP0010")] +public class OpenAITextToImageService : ITextToImageService +{ + private readonly ClientCore _client; + + /// + public IReadOnlyDictionary Attributes => this._client.Attributes; + + /// + /// Initializes a new instance of the class. + /// + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. 
This is usually optional unless your account belongs to multiple organizations. + /// The model to use for image generation. + /// Custom for HTTP requests. + /// The to use for logging. If null, no logging will be performed. + public OpenAITextToImageService( + string apiKey, + string? organization = null, + string? modelId = null, + HttpClient? httpClient = null, + ILoggerFactory? loggerFactory = null) + { + this._client = new(modelId, apiKey, organization, null, httpClient, loggerFactory?.CreateLogger(this.GetType())); + } + + /// + public Task GenerateImageAsync(string description, int width, int height, Kernel? kernel = null, CancellationToken cancellationToken = default) + => this._client.GenerateImageAsync(this._client.ModelId, description, width, height, cancellationToken); +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AudioToText/OpenAIAudioToTextExecutionSettings.cs b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIAudioToTextExecutionSettings.cs similarity index 89% rename from dotnet/src/Connectors/Connectors.OpenAI/AudioToText/OpenAIAudioToTextExecutionSettings.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIAudioToTextExecutionSettings.cs index ef7f5e54f7df..441d29c80607 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/AudioToText/OpenAIAudioToTextExecutionSettings.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIAudioToTextExecutionSettings.cs @@ -1,6 +1,5 @@ // Copyright (c) Microsoft. All rights reserved. -using System; using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; using System.Text.Json; @@ -12,7 +11,7 @@ namespace Microsoft.SemanticKernel.Connectors.OpenAI; /// /// Execution settings for OpenAI audio-to-text request. 
/// -[Experimental("SKEXP0001")] +[Experimental("SKEXP0010")] public sealed class OpenAIAudioToTextExecutionSettings : PromptExecutionSettings { /// @@ -35,6 +34,7 @@ public string Filename /// An optional language of the audio data as two-letter ISO-639-1 language code (e.g. 'en' or 'es'). /// [JsonPropertyName("language")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public string? Language { get => this._language; @@ -50,6 +50,7 @@ public string? Language /// An optional text to guide the model's style or continue a previous audio segment. The prompt should match the audio language. /// [JsonPropertyName("prompt")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public string? Prompt { get => this._prompt; @@ -62,10 +63,11 @@ public string? Prompt } /// - /// The format of the transcript output, in one of these options: json, text, srt, verbose_json, or vtt. Default is 'json'. + /// The format of the transcript output, in one of these options: json, srt, verbose_json, or vtt. Default is 'json'. /// [JsonPropertyName("response_format")] - public string ResponseFormat + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? ResponseFormat { get => this._responseFormat; @@ -83,7 +85,8 @@ public string ResponseFormat /// Default is 0. /// [JsonPropertyName("temperature")] - public float Temperature + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public float? 
Temperature { get => this._temperature; @@ -146,20 +149,15 @@ public override PromptExecutionSettings Clone() var openAIExecutionSettings = JsonSerializer.Deserialize(json, JsonOptionsCache.ReadPermissive); - if (openAIExecutionSettings is not null) - { - return openAIExecutionSettings; - } - - throw new ArgumentException($"Invalid execution settings, cannot convert to {nameof(OpenAIAudioToTextExecutionSettings)}", nameof(executionSettings)); + return openAIExecutionSettings!; } #region private ================================================================================ private const string DefaultFilename = "file.mp3"; - private float _temperature = 0; - private string _responseFormat = "json"; + private float? _temperature = 0; + private string? _responseFormat; private string _filename; private string? _language; private string? _prompt; diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFileUploadExecutionSettings.cs b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIFileUploadExecutionSettings.cs similarity index 85% rename from dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFileUploadExecutionSettings.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIFileUploadExecutionSettings.cs index 42011da487f0..9412ea745fa3 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFileUploadExecutionSettings.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIFileUploadExecutionSettings.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Diagnostics.CodeAnalysis; namespace Microsoft.SemanticKernel.Connectors.OpenAI; @@ -8,6 +9,8 @@ namespace Microsoft.SemanticKernel.Connectors.OpenAI; /// Execution serttings associated with Open AI file upload . /// [Experimental("SKEXP0010")] +[Obsolete("Use OpenAI SDK or AzureOpenAI SDK clients for file operations. 
This class is deprecated and will be removed in a future version.")] +[ExcludeFromCodeCoverage] public sealed class OpenAIFileUploadExecutionSettings { /// diff --git a/dotnet/src/Connectors/Connectors.OpenAI/OpenAIPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs similarity index 76% rename from dotnet/src/Connectors/Connectors.OpenAI/OpenAIPromptExecutionSettings.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs index 36796c62f7b9..f0c92e5af98f 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/OpenAIPromptExecutionSettings.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs @@ -1,14 +1,13 @@ // Copyright (c) Microsoft. All rights reserved. -using System; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Diagnostics.CodeAnalysis; using System.Text.Json; using System.Text.Json.Serialization; -using Azure.AI.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; using Microsoft.SemanticKernel.Text; +using OpenAI.Chat; namespace Microsoft.SemanticKernel.Connectors.OpenAI; @@ -16,7 +15,7 @@ namespace Microsoft.SemanticKernel.Connectors.OpenAI; /// Execution settings for an OpenAI completion request. /// [JsonNumberHandling(JsonNumberHandling.AllowReadingFromString)] -public sealed class OpenAIPromptExecutionSettings : PromptExecutionSettings +public class OpenAIPromptExecutionSettings : PromptExecutionSettings { /// /// Temperature controls the randomness of the completion. @@ -24,7 +23,8 @@ public sealed class OpenAIPromptExecutionSettings : PromptExecutionSettings /// Default is 1.0. /// [JsonPropertyName("temperature")] - public double Temperature + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public double? Temperature { get => this._temperature; @@ -41,7 +41,8 @@ public double Temperature /// Default is 1.0. 
/// [JsonPropertyName("top_p")] - public double TopP + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public double? TopP { get => this._topP; @@ -58,7 +59,8 @@ public double TopP /// model's likelihood to talk about new topics. /// [JsonPropertyName("presence_penalty")] - public double PresencePenalty + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public double? PresencePenalty { get => this._presencePenalty; @@ -75,7 +77,8 @@ public double PresencePenalty /// the model's likelihood to repeat the same line verbatim. /// [JsonPropertyName("frequency_penalty")] - public double FrequencyPenalty + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public double? FrequencyPenalty { get => this._frequencyPenalty; @@ -90,6 +93,7 @@ public double FrequencyPenalty /// The maximum number of tokens to generate in the completion. /// [JsonPropertyName("max_tokens")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public int? MaxTokens { get => this._maxTokens; @@ -105,6 +109,7 @@ public int? MaxTokens /// Sequences where the completion will stop generating further tokens. /// [JsonPropertyName("stop_sequences")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public IList? StopSequences { get => this._stopSequences; @@ -116,28 +121,12 @@ public IList? StopSequences } } - /// - /// How many completions to generate for each prompt. Default is 1. - /// Note: Because this parameter generates many completions, it can quickly consume your token quota. - /// Use carefully and ensure that you have reasonable settings for max_tokens and stop. 
- /// - [JsonPropertyName("results_per_prompt")] - public int ResultsPerPrompt - { - get => this._resultsPerPrompt; - - set - { - this.ThrowIfFrozen(); - this._resultsPerPrompt = value; - } - } - /// /// If specified, the system will make a best effort to sample deterministically such that repeated requests with the /// same seed and parameters should return the same result. Determinism is not guaranteed. /// [JsonPropertyName("seed")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public long? Seed { get => this._seed; @@ -153,10 +142,11 @@ public long? Seed /// Gets or sets the response format to use for the completion. /// /// - /// Possible values are: "json_object", "text", object. + /// Possible values are: "json_object", "text", object. /// [Experimental("SKEXP0010")] [JsonPropertyName("response_format")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public object? ResponseFormat { get => this._responseFormat; @@ -173,6 +163,7 @@ public object? ResponseFormat /// Defaults to "Assistant is a large language model." /// [JsonPropertyName("chat_system_prompt")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public string? ChatSystemPrompt { get => this._chatSystemPrompt; @@ -188,6 +179,7 @@ public string? ChatSystemPrompt /// Modify the likelihood of specified tokens appearing in the completion. /// [JsonPropertyName("token_selection_biases")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public IDictionary? TokenSelectionBiases { get => this._tokenSelectionBiases; @@ -260,6 +252,7 @@ public string? User /// [Experimental("SKEXP0010")] [JsonPropertyName("logprobs")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public bool? Logprobs { get => this._logprobs; @@ -276,6 +269,7 @@ public bool? Logprobs /// [Experimental("SKEXP0010")] [JsonPropertyName("top_logprobs")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public int? 
TopLogprobs { get => this._topLogprobs; @@ -287,23 +281,6 @@ public int? TopLogprobs } } - /// - /// An abstraction of additional settings for chat completion, see https://learn.microsoft.com/en-us/dotnet/api/azure.ai.openai.azurechatextensionsoptions. - /// This property is compatible only with Azure OpenAI. - /// - [Experimental("SKEXP0010")] - [JsonIgnore] - public AzureChatExtensionsOptions? AzureChatExtensionsOptions - { - get => this._azureChatExtensionsOptions; - - set - { - this.ThrowIfFrozen(); - this._azureChatExtensionsOptions = value; - } - } - /// public override void Freeze() { @@ -328,34 +305,9 @@ public override void Freeze() /// public override PromptExecutionSettings Clone() { - return new OpenAIPromptExecutionSettings() - { - ModelId = this.ModelId, - ExtensionData = this.ExtensionData is not null ? new Dictionary(this.ExtensionData) : null, - Temperature = this.Temperature, - TopP = this.TopP, - PresencePenalty = this.PresencePenalty, - FrequencyPenalty = this.FrequencyPenalty, - MaxTokens = this.MaxTokens, - StopSequences = this.StopSequences is not null ? new List(this.StopSequences) : null, - ResultsPerPrompt = this.ResultsPerPrompt, - Seed = this.Seed, - ResponseFormat = this.ResponseFormat, - TokenSelectionBiases = this.TokenSelectionBiases is not null ? new Dictionary(this.TokenSelectionBiases) : null, - ToolCallBehavior = this.ToolCallBehavior, - User = this.User, - ChatSystemPrompt = this.ChatSystemPrompt, - Logprobs = this.Logprobs, - TopLogprobs = this.TopLogprobs, - AzureChatExtensionsOptions = this.AzureChatExtensionsOptions, - }; + return this.Clone(); } - /// - /// Default max tokens for a text generation - /// - internal static int DefaultTextMaxTokens { get; } = 256; - /// /// Create a new settings object with the values from another settings object. 
/// @@ -380,44 +332,46 @@ public static OpenAIPromptExecutionSettings FromExecutionSettings(PromptExecutio var json = JsonSerializer.Serialize(executionSettings); var openAIExecutionSettings = JsonSerializer.Deserialize(json, JsonOptionsCache.ReadPermissive); - if (openAIExecutionSettings is not null) - { - return openAIExecutionSettings; - } - throw new ArgumentException($"Invalid execution settings, cannot convert to {nameof(OpenAIPromptExecutionSettings)}", nameof(executionSettings)); + return openAIExecutionSettings!; } /// - /// Create a new settings object with the values from another settings object. + /// Clone the settings object. /// - /// Template configuration - /// Default max tokens - /// An instance of OpenAIPromptExecutionSettings - [Obsolete("This method is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] - public static OpenAIPromptExecutionSettings FromExecutionSettingsWithData(PromptExecutionSettings? executionSettings, int? defaultMaxTokens = null) + /// The type of the settings object to clone. + /// A new instance of the settings object. + protected internal T Clone() where T : OpenAIPromptExecutionSettings, new() { - var settings = FromExecutionSettings(executionSettings, defaultMaxTokens); - - if (settings.StopSequences?.Count == 0) + return new T() { - // Azure OpenAI WithData API does not allow to send empty array of stop sequences - // Gives back "Validation error at #/stop/str: Input should be a valid string\nValidation error at #/stop/list[str]: List should have at least 1 item after validation, not 0" - settings.StopSequences = null; - } - - return settings; + ModelId = this.ModelId, + ExtensionData = this.ExtensionData is not null ? new Dictionary(this.ExtensionData) : null, + Temperature = this.Temperature, + TopP = this.TopP, + PresencePenalty = this.PresencePenalty, + FrequencyPenalty = this.FrequencyPenalty, + MaxTokens = this.MaxTokens, + StopSequences = this.StopSequences is not null ? 
new List(this.StopSequences) : null, + Seed = this.Seed, + ResponseFormat = this.ResponseFormat, + TokenSelectionBiases = this.TokenSelectionBiases is not null ? new Dictionary(this.TokenSelectionBiases) : null, + ToolCallBehavior = this.ToolCallBehavior, + User = this.User, + ChatSystemPrompt = this.ChatSystemPrompt, + Logprobs = this.Logprobs, + TopLogprobs = this.TopLogprobs + }; } #region private ================================================================================ - private double _temperature = 1; - private double _topP = 1; - private double _presencePenalty; - private double _frequencyPenalty; + private double? _temperature; + private double? _topP; + private double? _presencePenalty; + private double? _frequencyPenalty; private int? _maxTokens; private IList? _stopSequences; - private int _resultsPerPrompt = 1; private long? _seed; private object? _responseFormat; private IDictionary? _tokenSelectionBiases; @@ -426,7 +380,6 @@ public static OpenAIPromptExecutionSettings FromExecutionSettingsWithData(Prompt private string? _chatSystemPrompt; private bool? _logprobs; private int? _topLogprobs; - private AzureChatExtensionsOptions? _azureChatExtensionsOptions; #endregion } diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextToAudio/OpenAITextToAudioExecutionSettings.cs b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAITextToAudioExecutionSettings.cs similarity index 84% rename from dotnet/src/Connectors/Connectors.OpenAI/TextToAudio/OpenAITextToAudioExecutionSettings.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAITextToAudioExecutionSettings.cs index ddb97ff93c35..cfb9cfa39dd0 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/TextToAudio/OpenAITextToAudioExecutionSettings.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAITextToAudioExecutionSettings.cs @@ -1,6 +1,5 @@ // Copyright (c) Microsoft. All rights reserved. 
-using System; using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; using System.Text.Json; @@ -34,7 +33,8 @@ public string Voice /// The format to audio in. Supported formats are mp3, opus, aac, and flac. /// [JsonPropertyName("response_format")] - public string ResponseFormat + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? ResponseFormat { get => this._responseFormat; @@ -49,7 +49,8 @@ public string ResponseFormat /// The speed of the generated audio. Select a value from 0.25 to 4.0. 1.0 is the default. /// [JsonPropertyName("speed")] - public float Speed + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public float? Speed { get => this._speed; @@ -72,9 +73,9 @@ public OpenAITextToAudioExecutionSettings() /// Creates an instance of class. /// /// The voice to use when generating the audio. Supported voices are alloy, echo, fable, onyx, nova, and shimmer. - public OpenAITextToAudioExecutionSettings(string voice) + public OpenAITextToAudioExecutionSettings(string? voice) { - this._voice = voice; + this._voice = voice ?? DefaultVoice; } /// @@ -94,7 +95,7 @@ public override PromptExecutionSettings Clone() /// /// Instance of . /// Instance of . - public static OpenAITextToAudioExecutionSettings? FromExecutionSettings(PromptExecutionSettings? executionSettings) + public static OpenAITextToAudioExecutionSettings FromExecutionSettings(PromptExecutionSettings? 
executionSettings) { if (executionSettings is null) { @@ -110,20 +111,15 @@ public override PromptExecutionSettings Clone() var openAIExecutionSettings = JsonSerializer.Deserialize(json, JsonOptionsCache.ReadPermissive); - if (openAIExecutionSettings is not null) - { - return openAIExecutionSettings; - } - - throw new ArgumentException($"Invalid execution settings, cannot convert to {nameof(OpenAITextToAudioExecutionSettings)}", nameof(executionSettings)); + return openAIExecutionSettings!; } #region private ================================================================================ private const string DefaultVoice = "alloy"; - private float _speed = 1.0f; - private string _responseFormat = "mp3"; + private float? _speed; + private string? _responseFormat; private string _voice; #endregion diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextGeneration/AzureOpenAITextGenerationService.cs b/dotnet/src/Connectors/Connectors.OpenAI/TextGeneration/AzureOpenAITextGenerationService.cs deleted file mode 100644 index 20111ca99f88..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/TextGeneration/AzureOpenAITextGenerationService.cs +++ /dev/null @@ -1,97 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Azure.AI.OpenAI; -using Azure.Core; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Services; -using Microsoft.SemanticKernel.TextGeneration; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// Azure OpenAI text generation client. 
-/// -public sealed class AzureOpenAITextGenerationService : ITextGenerationService -{ - private readonly AzureOpenAIClientCore _core; - - /// - public IReadOnlyDictionary Attributes => this._core.Attributes; - - /// - /// Creates a new client instance using API Key auth - /// - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. - public AzureOpenAITextGenerationService( - string deploymentName, - string endpoint, - string apiKey, - string? modelId = null, - HttpClient? httpClient = null, - ILoggerFactory? loggerFactory = null) - { - this._core = new(deploymentName, endpoint, apiKey, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAITextGenerationService))); - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); - } - - /// - /// Creates a new client instance supporting AAD auth - /// - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. 
- public AzureOpenAITextGenerationService( - string deploymentName, - string endpoint, - TokenCredential credential, - string? modelId = null, - HttpClient? httpClient = null, - ILoggerFactory? loggerFactory = null) - { - this._core = new(deploymentName, endpoint, credential, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAITextGenerationService))); - - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); - } - - /// - /// Creates a new client instance using the specified OpenAIClient - /// - /// Azure OpenAI model ID or deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Custom . - /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// The to use for logging. If null, no logging will be performed. - public AzureOpenAITextGenerationService( - string deploymentName, - OpenAIClient openAIClient, - string? modelId = null, - ILoggerFactory? loggerFactory = null) - { - this._core = new(deploymentName, openAIClient, loggerFactory?.CreateLogger(typeof(AzureOpenAITextGenerationService))); - - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); - } - - /// - public Task> GetTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) - { - return this._core.GetTextResultsAsync(prompt, executionSettings, kernel, cancellationToken); - } - - /// - public IAsyncEnumerable GetStreamingTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? 
kernel = null, CancellationToken cancellationToken = default) - { - return this._core.GetStreamingTextContentsAsync(prompt, executionSettings, kernel, cancellationToken); - } -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextGeneration/OpenAITextGenerationService.cs b/dotnet/src/Connectors/Connectors.OpenAI/TextGeneration/OpenAITextGenerationService.cs deleted file mode 100644 index 1133865171fd..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/TextGeneration/OpenAITextGenerationService.cs +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Azure.AI.OpenAI; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Services; -using Microsoft.SemanticKernel.TextGeneration; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// OpenAI text generation service. -/// -public sealed class OpenAITextGenerationService : ITextGenerationService -{ - private readonly OpenAIClientCore _core; - - /// - public IReadOnlyDictionary Attributes => this._core.Attributes; - - /// - /// Create an instance of the OpenAI text generation connector - /// - /// Model name - /// OpenAI API Key - /// OpenAI Organization Id (usually optional) - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. - public OpenAITextGenerationService( - string modelId, - string apiKey, - string? organization = null, - HttpClient? httpClient = null, - ILoggerFactory? 
loggerFactory = null) - { - this._core = new( - modelId: modelId, - apiKey: apiKey, - organization: organization, - httpClient: httpClient, - logger: loggerFactory?.CreateLogger(typeof(OpenAITextGenerationService))); - - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); - this._core.AddAttribute(OpenAIClientCore.OrganizationKey, organization); - } - - /// - /// Create an instance of the OpenAI text generation connector - /// - /// Model name - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. - public OpenAITextGenerationService( - string modelId, - OpenAIClient openAIClient, - ILoggerFactory? loggerFactory = null) - { - this._core = new(modelId, openAIClient, loggerFactory?.CreateLogger(typeof(OpenAITextGenerationService))); - - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); - } - - /// - public Task> GetTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) - { - return this._core.GetTextResultsAsync(prompt, executionSettings, kernel, cancellationToken); - } - - /// - public IAsyncEnumerable GetStreamingTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) - { - return this._core.GetStreamingTextContentsAsync(prompt, executionSettings, kernel, cancellationToken); - } -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextToAudio/TextToAudioRequest.cs b/dotnet/src/Connectors/Connectors.OpenAI/TextToAudio/TextToAudioRequest.cs deleted file mode 100644 index bc7aeede3b57..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/TextToAudio/TextToAudioRequest.cs +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// OpenAI text-to-audio request model, see . -/// -internal sealed class TextToAudioRequest(string model, string input, string voice) -{ - [JsonPropertyName("model")] - public string Model { get; set; } = model; - - [JsonPropertyName("input")] - public string Input { get; set; } = input; - - [JsonPropertyName("voice")] - public string Voice { get; set; } = voice; - - [JsonPropertyName("response_format")] - public string ResponseFormat { get; set; } = "mp3"; - - [JsonPropertyName("speed")] - public float Speed { get; set; } = 1.0f; -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/AzureOpenAITextToImageService.cs b/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/AzureOpenAITextToImageService.cs deleted file mode 100644 index efa3ffcc87c0..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/AzureOpenAITextToImageService.cs +++ /dev/null @@ -1,212 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Azure; -using Azure.AI.OpenAI; -using Azure.Core; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Services; -using Microsoft.SemanticKernel.TextToImage; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// Azure OpenAI Image generation -/// -/// -[Experimental("SKEXP0010")] -public sealed class AzureOpenAITextToImageService : ITextToImageService -{ - private readonly OpenAIClient _client; - private readonly ILogger _logger; - private readonly string _deploymentName; - private readonly Dictionary _attributes = []; - - /// - public IReadOnlyDictionary Attributes => this._attributes; - - /// - /// Gets the key used to store the deployment name in the dictionary. 
- /// - public static string DeploymentNameKey => "DeploymentName"; - - /// - /// Create a new instance of Azure OpenAI image generation service - /// - /// Deployment name identifier - /// Azure OpenAI deployment URL - /// Azure OpenAI API key - /// Model identifier - /// Custom for HTTP requests. - /// The ILoggerFactory used to create a logger for logging. If null, no logging will be performed. - /// Azure OpenAI Endpoint ApiVersion - public AzureOpenAITextToImageService( - string deploymentName, - string endpoint, - string apiKey, - string? modelId, - HttpClient? httpClient = null, - ILoggerFactory? loggerFactory = null, - string? apiVersion = null) - { - Verify.NotNullOrWhiteSpace(apiKey); - Verify.NotNullOrWhiteSpace(deploymentName); - - this._deploymentName = deploymentName; - - if (modelId is not null) - { - this.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); - } - this.AddAttribute(DeploymentNameKey, deploymentName); - - this._logger = loggerFactory?.CreateLogger(typeof(AzureOpenAITextToImageService)) ?? NullLogger.Instance; - - var connectorEndpoint = (!string.IsNullOrWhiteSpace(endpoint) ? endpoint! : httpClient?.BaseAddress?.AbsoluteUri) ?? - throw new ArgumentException($"The {nameof(httpClient)}.{nameof(HttpClient.BaseAddress)} and {nameof(endpoint)} are both null or empty. Please ensure at least one is provided."); - - this._client = new(new Uri(connectorEndpoint), - new AzureKeyCredential(apiKey), - GetClientOptions(httpClient, apiVersion)); - } - - /// - /// Create a new instance of Azure OpenAI image generation service - /// - /// Deployment name identifier - /// Azure OpenAI deployment URL - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// Model identifier - /// Custom for HTTP requests. - /// The ILoggerFactory used to create a logger for logging. If null, no logging will be performed. 
- /// Azure OpenAI Endpoint ApiVersion - public AzureOpenAITextToImageService( - string deploymentName, - string endpoint, - TokenCredential credential, - string? modelId, - HttpClient? httpClient = null, - ILoggerFactory? loggerFactory = null, - string? apiVersion = null) - { - Verify.NotNull(credential); - Verify.NotNullOrWhiteSpace(deploymentName); - - this._deploymentName = deploymentName; - - if (modelId is not null) - { - this.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); - } - this.AddAttribute(DeploymentNameKey, deploymentName); - - this._logger = loggerFactory?.CreateLogger(typeof(AzureOpenAITextToImageService)) ?? NullLogger.Instance; - - var connectorEndpoint = !string.IsNullOrWhiteSpace(endpoint) ? endpoint! : httpClient?.BaseAddress?.AbsoluteUri; - if (connectorEndpoint is null) - { - throw new ArgumentException($"The {nameof(httpClient)}.{nameof(HttpClient.BaseAddress)} and {nameof(endpoint)} are both null or empty. Please ensure at least one is provided."); - } - - this._client = new(new Uri(connectorEndpoint), - credential, - GetClientOptions(httpClient, apiVersion)); - } - - /// - /// Create a new instance of Azure OpenAI image generation service - /// - /// Deployment name identifier - /// to use for the service. - /// Model identifier - /// The ILoggerFactory used to create a logger for logging. If null, no logging will be performed. - public AzureOpenAITextToImageService( - string deploymentName, - OpenAIClient openAIClient, - string? modelId, - ILoggerFactory? loggerFactory = null) - { - Verify.NotNull(openAIClient); - Verify.NotNullOrWhiteSpace(deploymentName); - - this._deploymentName = deploymentName; - - if (modelId is not null) - { - this.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); - } - this.AddAttribute(DeploymentNameKey, deploymentName); - - this._logger = loggerFactory?.CreateLogger(typeof(AzureOpenAITextToImageService)) ?? 
NullLogger.Instance; - - this._client = openAIClient; - } - - /// - public async Task GenerateImageAsync( - string description, - int width, - int height, - Kernel? kernel = null, - CancellationToken cancellationToken = default) - { - Verify.NotNull(description); - - var size = (width, height) switch - { - (1024, 1024) => ImageSize.Size1024x1024, - (1792, 1024) => ImageSize.Size1792x1024, - (1024, 1792) => ImageSize.Size1024x1792, - _ => throw new NotSupportedException("Dall-E 3 can only generate images of the following sizes 1024x1024, 1792x1024, or 1024x1792") - }; - - Response imageGenerations; - try - { - imageGenerations = await this._client.GetImageGenerationsAsync( - new ImageGenerationOptions - { - DeploymentName = this._deploymentName, - Prompt = description, - Size = size, - }, cancellationToken).ConfigureAwait(false); - } - catch (RequestFailedException e) - { - throw e.ToHttpOperationException(); - } - - if (!imageGenerations.HasValue) - { - throw new KernelException("The response does not contain an image result"); - } - - if (imageGenerations.Value.Data.Count == 0) - { - throw new KernelException("The response does not contain any image"); - } - - return imageGenerations.Value.Data[0].Url.AbsoluteUri; - } - - private static OpenAIClientOptions GetClientOptions(HttpClient? httpClient, string? apiVersion) => - ClientCore.GetOpenAIClientOptions(httpClient, apiVersion switch - { - // DALL-E 3 is supported in the latest API releases - _ => OpenAIClientOptions.ServiceVersion.V2024_02_15_Preview - }); - - internal void AddAttribute(string key, string? 
value) - { - if (!string.IsNullOrEmpty(value)) - { - this._attributes.Add(key, value); - } - } -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/OpenAITextToImageService.cs b/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/OpenAITextToImageService.cs deleted file mode 100644 index 335fe8cad5ee..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/OpenAITextToImageService.cs +++ /dev/null @@ -1,117 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Net.Http; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Services; -using Microsoft.SemanticKernel.TextToImage; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// OpenAI text to image service. -/// -[Experimental("SKEXP0010")] -public sealed class OpenAITextToImageService : ITextToImageService -{ - private readonly OpenAITextToImageClientCore _core; - - /// - /// OpenAI REST API endpoint - /// - private const string OpenAIEndpoint = "https://api.openai.com/v1/images/generations"; - - /// - /// Optional value for the OpenAI-Organization header. - /// - private readonly string? _organizationHeaderValue; - - /// - /// Value for the authorization header. - /// - private readonly string _authorizationHeaderValue; - - /// - /// The model to use for image generation. - /// - private readonly string? _modelId; - - /// - /// Initializes a new instance of the class. - /// - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// The model to use for image generation. - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. - public OpenAITextToImageService( - string apiKey, - string? 
organization = null, - string? modelId = null, - HttpClient? httpClient = null, - ILoggerFactory? loggerFactory = null) - { - Verify.NotNullOrWhiteSpace(apiKey); - this._authorizationHeaderValue = $"Bearer {apiKey}"; - this._organizationHeaderValue = organization; - this._modelId = modelId; - - this._core = new(httpClient, loggerFactory?.CreateLogger(this.GetType())); - this._core.AddAttribute(OpenAIClientCore.OrganizationKey, organization); - if (modelId is not null) - { - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); - } - - this._core.RequestCreated += (_, request) => - { - request.Headers.Add("Authorization", this._authorizationHeaderValue); - if (!string.IsNullOrEmpty(this._organizationHeaderValue)) - { - request.Headers.Add("OpenAI-Organization", this._organizationHeaderValue); - } - }; - } - - /// - public IReadOnlyDictionary Attributes => this._core.Attributes; - - /// - public Task GenerateImageAsync(string description, int width, int height, Kernel? kernel = null, CancellationToken cancellationToken = default) - { - Verify.NotNull(description); - if (width != height || (width != 256 && width != 512 && width != 1024)) - { - throw new ArgumentOutOfRangeException(nameof(width), width, "OpenAI can generate only square images of size 256x256, 512x512, or 1024x1024."); - } - - return this.GenerateImageAsync(this._modelId, description, width, height, "url", x => x.Url, cancellationToken); - } - - private async Task GenerateImageAsync( - string? 
model, - string description, - int width, int height, - string format, Func extractResponse, - CancellationToken cancellationToken) - { - Verify.NotNull(extractResponse); - - var requestBody = JsonSerializer.Serialize(new TextToImageRequest - { - Model = model, - Prompt = description, - Size = $"{width}x{height}", - Count = 1, - Format = format, - }); - - var list = await this._core.ExecuteImageGenerationRequestAsync(OpenAIEndpoint, requestBody, extractResponse!, cancellationToken).ConfigureAwait(false); - return list[0]; - } -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/TextToImageRequest.cs b/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/TextToImageRequest.cs deleted file mode 100644 index 70b5ac5418ee..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/TextToImageRequest.cs +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// Text to image request -/// -internal sealed class TextToImageRequest -{ - /// - /// Model to use for image generation - /// - [JsonPropertyName("model")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? 
Model { get; set; } - - /// - /// Image prompt - /// - [JsonPropertyName("prompt")] - public string Prompt { get; set; } = string.Empty; - - /// - /// Image size - /// - [JsonPropertyName("size")] - public string Size { get; set; } = "256x256"; - - /// - /// How many images to generate - /// - [JsonPropertyName("n")] - public int Count { get; set; } = 1; - - /// - /// Image format, "url" or "b64_json" - /// - [JsonPropertyName("response_format")] - public string Format { get; set; } = "url"; -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/TextToImageResponse.cs b/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/TextToImageResponse.cs deleted file mode 100644 index cba10ba14331..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/TextToImageResponse.cs +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// Text to image response -/// -internal sealed class TextToImageResponse -{ - /// - /// OpenAI Image response - /// - public sealed class Image - { - /// - /// URL to the image created - /// - [JsonPropertyName("url")] - [SuppressMessage("Design", "CA1056:URI return values should not be strings", Justification = "Using the original value")] - public string Url { get; set; } = string.Empty; - - /// - /// Image content in base64 format - /// - [JsonPropertyName("b64_json")] - public string AsBase64 { get; set; } = string.Empty; - } - - /// - /// List of possible images - /// - [JsonPropertyName("data")] - public IList Images { get; set; } = []; - - /// - /// Creation time - /// - [JsonPropertyName("created")] - public int CreatedTime { get; set; } -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/ToolCallBehavior.cs b/dotnet/src/Connectors/Connectors.OpenAI/ToolCallBehavior.cs index 7a5490c736ea..57bc8f391573 
100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/ToolCallBehavior.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/ToolCallBehavior.cs @@ -4,9 +4,10 @@ using System.Collections.Generic; using System.ComponentModel; using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Text.Json; -using Azure.AI.OpenAI; +using OpenAI.Chat; namespace Microsoft.SemanticKernel.Connectors.OpenAI; @@ -94,6 +95,7 @@ private ToolCallBehavior(bool autoInvoke) /// Options to control tool call result serialization behavior. /// [Obsolete("This property is deprecated in favor of Kernel.SerializerOptions that will be introduced in one of the following releases.")] + [ExcludeFromCodeCoverage] [EditorBrowsable(EditorBrowsableState.Never)] public virtual JsonSerializerOptions? ToolCallResultSerializerOptions { get; set; } @@ -118,10 +120,9 @@ private ToolCallBehavior(bool autoInvoke) /// true if it's ok to invoke any kernel function requested by the model if it's found; false if a request needs to be validated against an allow list. internal virtual bool AllowAnyRequestedKernelFunction => false; - /// Configures the with any tools this provides. - /// The used for the operation. This can be queried to determine what tools to provide into the . - /// The destination to configure. - internal abstract void ConfigureOptions(Kernel? kernel, ChatCompletionsOptions options); + /// Returns list of available tools and the way model should use them. + /// The used for the operation. This can be queried to determine what tools to return. + internal abstract (IList? Tools, ChatToolChoice? Choice) ConfigureOptions(Kernel? kernel); /// /// Represents a that will provide to the model all available functions from a @@ -133,8 +134,11 @@ internal KernelFunctions(bool autoInvoke) : base(autoInvoke) { } public override string ToString() => $"{nameof(KernelFunctions)}(autoInvoke:{this.MaximumAutoInvokeAttempts != 0})"; - internal override void ConfigureOptions(Kernel? 
kernel, ChatCompletionsOptions options) + internal override (IList? Tools, ChatToolChoice? Choice) ConfigureOptions(Kernel? kernel) { + ChatToolChoice? choice = null; + List? tools = null; + // If no kernel is provided, we don't have any tools to provide. if (kernel is not null) { @@ -142,13 +146,16 @@ internal override void ConfigureOptions(Kernel? kernel, ChatCompletionsOptions o IList functions = kernel.Plugins.GetFunctionsMetadata(); if (functions.Count > 0) { - options.ToolChoice = ChatCompletionsToolChoice.Auto; + choice = ChatToolChoice.Auto; + tools = []; for (int i = 0; i < functions.Count; i++) { - options.Tools.Add(new ChatCompletionsFunctionToolDefinition(functions[i].ToOpenAIFunction().ToFunctionDefinition())); + tools.Add(functions[i].ToOpenAIFunction().ToFunctionDefinition()); } } } + + return (tools, choice); } internal override bool AllowAnyRequestedKernelFunction => true; @@ -160,26 +167,29 @@ internal override void ConfigureOptions(Kernel? kernel, ChatCompletionsOptions o internal sealed class EnabledFunctions : ToolCallBehavior { private readonly OpenAIFunction[] _openAIFunctions; - private readonly ChatCompletionsFunctionToolDefinition[] _functions; + private readonly ChatTool[] _functions; public EnabledFunctions(IEnumerable functions, bool autoInvoke) : base(autoInvoke) { this._openAIFunctions = functions.ToArray(); - var defs = new ChatCompletionsFunctionToolDefinition[this._openAIFunctions.Length]; + var defs = new ChatTool[this._openAIFunctions.Length]; for (int i = 0; i < defs.Length; i++) { - defs[i] = new ChatCompletionsFunctionToolDefinition(this._openAIFunctions[i].ToFunctionDefinition()); + defs[i] = this._openAIFunctions[i].ToFunctionDefinition(); } this._functions = defs; } - public override string ToString() => $"{nameof(EnabledFunctions)}(autoInvoke:{this.MaximumAutoInvokeAttempts != 0}): {string.Join(", ", this._functions.Select(f => f.Name))}"; + public override string ToString() => 
$"{nameof(EnabledFunctions)}(autoInvoke:{this.MaximumAutoInvokeAttempts != 0}): {string.Join(", ", this._functions.Select(f => f.FunctionName))}"; - internal override void ConfigureOptions(Kernel? kernel, ChatCompletionsOptions options) + internal override (IList? Tools, ChatToolChoice? Choice) ConfigureOptions(Kernel? kernel) { + ChatToolChoice? choice = null; + List? tools = null; + OpenAIFunction[] openAIFunctions = this._openAIFunctions; - ChatCompletionsFunctionToolDefinition[] functions = this._functions; + ChatTool[] functions = this._functions; Debug.Assert(openAIFunctions.Length == functions.Length); if (openAIFunctions.Length > 0) @@ -196,7 +206,8 @@ internal override void ConfigureOptions(Kernel? kernel, ChatCompletionsOptions o throw new KernelException($"Auto-invocation with {nameof(EnabledFunctions)} is not supported when no kernel is provided."); } - options.ToolChoice = ChatCompletionsToolChoice.Auto; + choice = ChatToolChoice.Auto; + tools = []; for (int i = 0; i < openAIFunctions.Length; i++) { // Make sure that if auto-invocation is specified, every enabled function can be found in the kernel. @@ -211,9 +222,11 @@ internal override void ConfigureOptions(Kernel? kernel, ChatCompletionsOptions o } // Add the function. - options.Tools.Add(functions[i]); + tools.Add(functions[i]); } } + + return (tools, choice); } } @@ -221,19 +234,19 @@ internal override void ConfigureOptions(Kernel? 
kernel, ChatCompletionsOptions o internal sealed class RequiredFunction : ToolCallBehavior { private readonly OpenAIFunction _function; - private readonly ChatCompletionsFunctionToolDefinition _tool; - private readonly ChatCompletionsToolChoice _choice; + private readonly ChatTool _tool; + private readonly ChatToolChoice _choice; public RequiredFunction(OpenAIFunction function, bool autoInvoke) : base(autoInvoke) { this._function = function; - this._tool = new ChatCompletionsFunctionToolDefinition(function.ToFunctionDefinition()); - this._choice = new ChatCompletionsToolChoice(this._tool); + this._tool = function.ToFunctionDefinition(); + this._choice = new ChatToolChoice(this._tool); } - public override string ToString() => $"{nameof(RequiredFunction)}(autoInvoke:{this.MaximumAutoInvokeAttempts != 0}): {this._tool.Name}"; + public override string ToString() => $"{nameof(RequiredFunction)}(autoInvoke:{this.MaximumAutoInvokeAttempts != 0}): {this._tool.FunctionName}"; - internal override void ConfigureOptions(Kernel? kernel, ChatCompletionsOptions options) + internal override (IList? Tools, ChatToolChoice? Choice) ConfigureOptions(Kernel? kernel) { bool autoInvoke = base.MaximumAutoInvokeAttempts > 0; @@ -253,8 +266,7 @@ internal override void ConfigureOptions(Kernel? kernel, ChatCompletionsOptions o throw new KernelException($"The specified {nameof(RequiredFunction)} function {this._function.FullyQualifiedName} is not available in the kernel."); } - options.ToolChoice = this._choice; - options.Tools.Add(this._tool); + return ([this._tool], this._choice); } /// Gets how many requests are part of a single interaction should include this tool in the request. 
diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Connectors.UnitTests.csproj b/dotnet/src/Connectors/Connectors.UnitTests/Connectors.UnitTests.csproj index a4b7bd6ace44..17ac2e2510a9 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/Connectors.UnitTests.csproj +++ b/dotnet/src/Connectors/Connectors.UnitTests/Connectors.UnitTests.csproj @@ -12,9 +12,9 @@ - - - + + + runtime; build; native; contentfiles; analyzers; buildtransitive all @@ -29,31 +29,23 @@ - - + + + - - - - - - - - - - - - - - - - - Always - + + + + + + + + + + + diff --git a/dotnet/src/Connectors/Connectors.UnitTests/MultipleHttpMessageHandlerStub.cs b/dotnet/src/Connectors/Connectors.UnitTests/MultipleHttpMessageHandlerStub.cs deleted file mode 100644 index d7e81f129c9c..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/MultipleHttpMessageHandlerStub.cs +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Net.Http; -using System.Net.Http.Headers; -using System.Threading; -using System.Threading.Tasks; - -namespace SemanticKernel.Connectors.UnitTests; - -internal sealed class MultipleHttpMessageHandlerStub : DelegatingHandler -{ - private int _callIteration = 0; - - public List RequestHeaders { get; private set; } - - public List ContentHeaders { get; private set; } - - public List RequestContents { get; private set; } - - public List RequestUris { get; private set; } - - public List Methods { get; private set; } - - public List ResponsesToReturn { get; set; } - - public MultipleHttpMessageHandlerStub() - { - this.RequestHeaders = []; - this.ContentHeaders = []; - this.RequestContents = []; - this.RequestUris = []; - this.Methods = []; - this.ResponsesToReturn = []; - } - - protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - this._callIteration++; - - this.Methods.Add(request.Method); - 
this.RequestUris.Add(request.RequestUri); - this.RequestHeaders.Add(request.Headers); - this.ContentHeaders.Add(request.Content?.Headers); - - var content = request.Content is null ? null : await request.Content.ReadAsByteArrayAsync(cancellationToken); - - this.RequestContents.Add(content); - - return await Task.FromResult(this.ResponsesToReturn[this._callIteration - 1]); - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AIServicesOpenAIExtensionsTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AIServicesOpenAIExtensionsTests.cs deleted file mode 100644 index 39bc2803fe19..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AIServicesOpenAIExtensionsTests.cs +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.Extensions.DependencyInjection; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Microsoft.SemanticKernel.Embeddings; -using Microsoft.SemanticKernel.TextGeneration; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI; - -/// -/// Unit tests of . 
-/// -public class AIServicesOpenAIExtensionsTests -{ - [Fact] - public void ItSucceedsWhenAddingDifferentServiceTypeWithSameId() - { - Kernel targetKernel = Kernel.CreateBuilder() - .AddAzureOpenAITextGeneration("depl", "https://url", "key", "azure") - .AddAzureOpenAITextEmbeddingGeneration("depl2", "https://url", "key", "azure") - .Build(); - - Assert.NotNull(targetKernel.GetRequiredService("azure")); - Assert.NotNull(targetKernel.GetRequiredService("azure")); - } - - [Fact] - public void ItTellsIfAServiceIsAvailable() - { - Kernel targetKernel = Kernel.CreateBuilder() - .AddAzureOpenAITextGeneration("depl", "https://url", "key", serviceId: "azure") - .AddOpenAITextGeneration("model", "apikey", serviceId: "oai") - .AddAzureOpenAITextEmbeddingGeneration("depl2", "https://url2", "key", serviceId: "azure") - .AddOpenAITextEmbeddingGeneration("model2", "apikey2", serviceId: "oai2") - .Build(); - - // Assert - Assert.NotNull(targetKernel.GetRequiredService("azure")); - Assert.NotNull(targetKernel.GetRequiredService("oai")); - Assert.NotNull(targetKernel.GetRequiredService("azure")); - Assert.NotNull(targetKernel.GetRequiredService("oai")); - } - - [Fact] - public void ItCanOverwriteServices() - { - // Arrange - // Act - Assert no exception occurs - var builder = Kernel.CreateBuilder(); - - builder.Services.AddAzureOpenAITextGeneration("depl", "https://localhost", "key", serviceId: "one"); - builder.Services.AddAzureOpenAITextGeneration("depl", "https://localhost", "key", serviceId: "one"); - - builder.Services.AddOpenAITextGeneration("model", "key", serviceId: "one"); - builder.Services.AddOpenAITextGeneration("model", "key", serviceId: "one"); - - builder.Services.AddAzureOpenAITextEmbeddingGeneration("dep", "https://localhost", "key", serviceId: "one"); - builder.Services.AddAzureOpenAITextEmbeddingGeneration("dep", "https://localhost", "key", serviceId: "one"); - - builder.Services.AddOpenAITextEmbeddingGeneration("model", "key", serviceId: "one"); - 
builder.Services.AddOpenAITextEmbeddingGeneration("model", "key", serviceId: "one"); - - builder.Services.AddAzureOpenAIChatCompletion("dep", "https://localhost", "key", serviceId: "one"); - builder.Services.AddAzureOpenAIChatCompletion("dep", "https://localhost", "key", serviceId: "one"); - - builder.Services.AddOpenAIChatCompletion("model", "key", serviceId: "one"); - builder.Services.AddOpenAIChatCompletion("model", "key", serviceId: "one"); - - builder.Services.AddOpenAITextToImage("model", "key", serviceId: "one"); - builder.Services.AddOpenAITextToImage("model", "key", serviceId: "one"); - - builder.Services.AddSingleton(new OpenAITextGenerationService("model", "key")); - builder.Services.AddSingleton(new OpenAITextGenerationService("model", "key")); - - builder.Services.AddSingleton((_) => new OpenAITextGenerationService("model", "key")); - builder.Services.AddSingleton((_) => new OpenAITextGenerationService("model", "key")); - - builder.Services.AddKeyedSingleton("one", new OpenAITextGenerationService("model", "key")); - builder.Services.AddKeyedSingleton("one", new OpenAITextGenerationService("model", "key")); - - builder.Services.AddKeyedSingleton("one", (_, _) => new OpenAITextGenerationService("model", "key")); - builder.Services.AddKeyedSingleton("one", (_, _) => new OpenAITextGenerationService("model", "key")); - - builder.Build(); - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/AzureOpenAIWithDataChatMessageContentTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/AzureOpenAIWithDataChatMessageContentTests.cs deleted file mode 100644 index f3dd1850d56e..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/AzureOpenAIWithDataChatMessageContentTests.cs +++ /dev/null @@ -1,120 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Collections; -using System.Collections.Generic; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI.AzureSdk; - -#pragma warning disable CS0618 // AzureOpenAIChatCompletionWithData is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions - -/// -/// Unit tests for class. -/// -public sealed class AzureOpenAIWithDataChatMessageContentTests -{ - [Fact] - public void ConstructorThrowsExceptionWhenAssistantMessageIsNotProvided() - { - // Arrange - var choice = new ChatWithDataChoice(); - - // Act & Assert - var exception = Assert.Throws(() => new AzureOpenAIWithDataChatMessageContent(choice, "model-id")); - - Assert.Contains("Chat is not valid", exception.Message, StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public void ConstructorReturnsInstanceWithNullToolContent() - { - // Arrange - var choice = new ChatWithDataChoice { Messages = [new() { Content = "Assistant content", Role = "assistant" }] }; - - // Act - var content = new AzureOpenAIWithDataChatMessageContent(choice, "model-id"); - - // Assert - Assert.Equal("Assistant content", content.Content); - Assert.Equal(AuthorRole.Assistant, content.Role); - - Assert.Null(content.ToolContent); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void ConstructorReturnsInstanceWithNonNullToolContent(bool includeMetadata) - { - // Arrange - var choice = new ChatWithDataChoice - { - Messages = [ - new() { Content = "Assistant content", Role = "assistant" }, - new() { Content = "Tool content", Role = "tool" }] - }; - - // Act - var content = includeMetadata ? 
- new AzureOpenAIWithDataChatMessageContent(choice, "model-id", new Dictionary()) : - new AzureOpenAIWithDataChatMessageContent(choice, "model-id"); - - // Assert - Assert.Equal("Assistant content", content.Content); - Assert.Equal("Tool content", content.ToolContent); - Assert.Equal(AuthorRole.Assistant, content.Role); - - Assert.NotNull(content.Metadata); - Assert.Equal("Tool content", content.Metadata["ToolContent"]); - } - - [Fact] - public void ConstructorCloneReadOnlyMetadataDictionary() - { - // Arrange - var choice = new ChatWithDataChoice - { - Messages = [new() { Content = "Assistant content", Role = "assistant" }] - }; - - var metadata = new ReadOnlyInternalDictionary(new Dictionary() { ["Extra"] = "Data" }); - - // Act - var content = new AzureOpenAIWithDataChatMessageContent(choice, "model-id", metadata); - - // Assert - Assert.Equal("Assistant content", content.Content); - Assert.Equal(AuthorRole.Assistant, content.Role); - - Assert.NotNull(content.Metadata); - Assert.Equal("Data", content.Metadata["Extra"]); - } - - private sealed class ReadOnlyInternalDictionary : IReadOnlyDictionary - { - public ReadOnlyInternalDictionary(IDictionary initializingData) - { - this._internalDictionary = new Dictionary(initializingData); - } - private readonly Dictionary _internalDictionary; - - public object? this[string key] => this._internalDictionary[key]; - - public IEnumerable Keys => this._internalDictionary.Keys; - - public IEnumerable Values => this._internalDictionary.Values; - - public int Count => this._internalDictionary.Count; - - public bool ContainsKey(string key) => this._internalDictionary.ContainsKey(key); - - public IEnumerator> GetEnumerator() => this._internalDictionary.GetEnumerator(); - - public bool TryGetValue(string key, out object? 
value) => this._internalDictionary.TryGetValue(key, out value); - - IEnumerator IEnumerable.GetEnumerator() => this._internalDictionary.GetEnumerator(); - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/AzureOpenAIWithDataStreamingChatMessageContentTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/AzureOpenAIWithDataStreamingChatMessageContentTests.cs deleted file mode 100644 index 45597c616270..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/AzureOpenAIWithDataStreamingChatMessageContentTests.cs +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI.AzureSdk; - -#pragma warning disable CS0618 // AzureOpenAIChatCompletionWithData is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions - -/// -/// Unit tests for class. 
-/// -public sealed class AzureOpenAIWithDataStreamingChatMessageContentTests -{ - [Theory] - [MemberData(nameof(ValidChoices))] - public void ConstructorWithValidChoiceSetsNonEmptyContent(object choice, string expectedContent) - { - // Arrange - var streamingChoice = choice as ChatWithDataStreamingChoice; - - // Act - var content = new AzureOpenAIWithDataStreamingChatMessageContent(streamingChoice!, 0, "model-id"); - - // Assert - Assert.Equal(expectedContent, content.Content); - } - - [Theory] - [MemberData(nameof(InvalidChoices))] - public void ConstructorWithInvalidChoiceSetsNullContent(object choice) - { - // Arrange - var streamingChoice = choice as ChatWithDataStreamingChoice; - - // Act - var content = new AzureOpenAIWithDataStreamingChatMessageContent(streamingChoice!, 0, "model-id"); - - // Assert - Assert.Null(content.Content); - } - - public static IEnumerable ValidChoices - { - get - { - yield return new object[] { new ChatWithDataStreamingChoice { Messages = [new() { Delta = new() { Content = "Content 1" } }] }, "Content 1" }; - yield return new object[] { new ChatWithDataStreamingChoice { Messages = [new() { Delta = new() { Content = "Content 2", Role = "Assistant" } }] }, "Content 2" }; - } - } - - public static IEnumerable InvalidChoices - { - get - { - yield return new object[] { new ChatWithDataStreamingChoice { Messages = [new() { EndTurn = true }] } }; - yield return new object[] { new ChatWithDataStreamingChoice { Messages = [new() { Delta = new() { Content = "Content", Role = "tool" } }] } }; - } - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIStreamingTextContentTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIStreamingTextContentTests.cs deleted file mode 100644 index fd0a830cc2d9..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIStreamingTextContentTests.cs +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System.Text; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI.AzureSdk; - -/// -/// Unit tests for class. -/// -public sealed class OpenAIStreamingTextContentTests -{ - [Fact] - public void ToByteArrayWorksCorrectly() - { - // Arrange - var expectedBytes = Encoding.UTF8.GetBytes("content"); - var content = new OpenAIStreamingTextContent("content", 0, "model-id"); - - // Act - var actualBytes = content.ToByteArray(); - - // Assert - Assert.Equal(expectedBytes, actualBytes); - } - - [Theory] - [InlineData(null, "")] - [InlineData("content", "content")] - public void ToStringWorksCorrectly(string? content, string expectedString) - { - // Arrange - var textContent = new OpenAIStreamingTextContent(content!, 0, "model-id"); - - // Act - var actualString = textContent.ToString(); - - // Assert - Assert.Equal(expectedString, actualString); - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/RequestFailedExceptionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/RequestFailedExceptionExtensionsTests.cs deleted file mode 100644 index 54a183eca330..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/RequestFailedExceptionExtensionsTests.cs +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.IO; -using System.Net; -using Azure; -using Azure.Core; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI.AzureSdk; - -/// -/// Unit tests for class. -/// -public sealed class RequestFailedExceptionExtensionsTests -{ - [Theory] - [InlineData(0, null)] - [InlineData(500, HttpStatusCode.InternalServerError)] - public void ToHttpOperationExceptionWithStatusReturnsValidException(int responseStatus, HttpStatusCode? 
httpStatusCode) - { - // Arrange - var exception = new RequestFailedException(responseStatus, "Error Message"); - - // Act - var actualException = exception.ToHttpOperationException(); - - // Assert - Assert.IsType(actualException); - Assert.Equal(httpStatusCode, actualException.StatusCode); - Assert.Equal("Error Message", actualException.Message); - Assert.Same(exception, actualException.InnerException); - } - - [Fact] - public void ToHttpOperationExceptionWithContentReturnsValidException() - { - // Arrange - using var response = new FakeResponse("Response Content", 500); - var exception = new RequestFailedException(response); - - // Act - var actualException = exception.ToHttpOperationException(); - - // Assert - Assert.IsType(actualException); - Assert.Equal(HttpStatusCode.InternalServerError, actualException.StatusCode); - Assert.Equal("Response Content", actualException.ResponseContent); - Assert.Same(exception, actualException.InnerException); - } - - #region private - - private sealed class FakeResponse(string responseContent, int status) : Response - { - private readonly string _responseContent = responseContent; - private readonly IEnumerable _headers = []; - - public override BinaryData Content => BinaryData.FromString(this._responseContent); - public override int Status { get; } = status; - public override string ReasonPhrase => "Reason Phrase"; - public override Stream? ContentStream { get => null; set => throw new NotImplementedException(); } - public override string ClientRequestId { get => "Client Request Id"; set => throw new NotImplementedException(); } - - public override void Dispose() { } - protected override bool ContainsHeader(string name) => throw new NotImplementedException(); - protected override IEnumerable EnumerateHeaders() => this._headers; -#pragma warning disable CS8765 // Nullability of type of parameter doesn't match overridden member (possibly because of nullability attributes). 
- protected override bool TryGetHeader(string name, out string? value) => throw new NotImplementedException(); - protected override bool TryGetHeaderValues(string name, out IEnumerable? values) => throw new NotImplementedException(); -#pragma warning restore CS8765 // Nullability of type of parameter doesn't match overridden member (possibly because of nullability attributes). - } - - #endregion -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletionWithData/AzureOpenAIChatCompletionWithDataTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletionWithData/AzureOpenAIChatCompletionWithDataTests.cs deleted file mode 100644 index 782267039c59..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletionWithData/AzureOpenAIChatCompletionWithDataTests.cs +++ /dev/null @@ -1,201 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.IO; -using System.Net; -using System.Net.Http; -using System.Text; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Moq; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI.ChatCompletionWithData; - -#pragma warning disable CS0618 // AzureOpenAIChatCompletionWithData is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions - -/// -/// Unit tests for -/// -public sealed class AzureOpenAIChatCompletionWithDataTests : IDisposable -{ - private readonly AzureOpenAIChatCompletionWithDataConfig _config; - - private readonly HttpMessageHandlerStub _messageHandlerStub; - private readonly HttpClient _httpClient; - private readonly Mock _mockLoggerFactory; - - public AzureOpenAIChatCompletionWithDataTests() - { - this._config = this.GetConfig(); - - this._messageHandlerStub = new HttpMessageHandlerStub(); - this._httpClient = new HttpClient(this._messageHandlerStub, false); - this._mockLoggerFactory = new Mock(); - } - - [Theory] - 
[InlineData(true)] - [InlineData(false)] - public void ConstructorWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var service = includeLoggerFactory ? - new AzureOpenAIChatCompletionWithDataService(this._config, this._httpClient, this._mockLoggerFactory.Object) : - new AzureOpenAIChatCompletionWithDataService(this._config, this._httpClient); - - // Assert - Assert.NotNull(service); - Assert.Equal("fake-completion-model-id", service.Attributes["ModelId"]); - } - - [Fact] - public async Task SpecifiedConfigurationShouldBeUsedAsync() - { - // Arrange - const string ExpectedUri = "https://fake-completion-endpoint/openai/deployments/fake-completion-model-id/extensions/chat/completions?api-version=fake-api-version"; - var service = new AzureOpenAIChatCompletionWithDataService(this._config, this._httpClient); - - // Act - await service.GetChatMessageContentsAsync([]); - - // Assert - var actualUri = this._messageHandlerStub.RequestUri?.AbsoluteUri; - var actualRequestHeaderValues = this._messageHandlerStub.RequestHeaders!.GetValues("Api-Key"); - var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); - - Assert.Equal(ExpectedUri, actualUri); - - Assert.Contains("fake-completion-api-key", actualRequestHeaderValues); - Assert.Contains("https://fake-data-source-endpoint", actualRequestContent, StringComparison.OrdinalIgnoreCase); - Assert.Contains("fake-data-source-api-key", actualRequestContent, StringComparison.OrdinalIgnoreCase); - Assert.Contains("fake-data-source-index", actualRequestContent, StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public async Task DefaultApiVersionShouldBeUsedAsync() - { - // Arrange - var config = this.GetConfig(); - config.CompletionApiVersion = string.Empty; - - var service = new AzureOpenAIChatCompletionWithDataService(config, this._httpClient); - - // Act - await service.GetChatMessageContentsAsync([]); - - // Assert - var actualUri = 
this._messageHandlerStub.RequestUri?.AbsoluteUri; - - Assert.Contains("2024-02-01", actualUri, StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public async Task GetChatMessageContentsWorksCorrectlyAsync() - { - // Arrange - var service = new AzureOpenAIChatCompletionWithDataService(this._config, this._httpClient); - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_with_data_test_response.json")) - }; - - // Act - var result = await service.GetChatMessageContentsAsync([]); - - // Assert - Assert.True(result.Count > 0); - Assert.Equal("Test chat with data response", result[0].Content); - - var usage = result[0].Metadata?["Usage"] as ChatWithDataUsage; - - Assert.NotNull(usage); - Assert.Equal(55, usage.PromptTokens); - Assert.Equal(100, usage.CompletionTokens); - Assert.Equal(155, usage.TotalTokens); - } - - [Fact] - public async Task GetStreamingChatMessageContentsWorksCorrectlyAsync() - { - // Arrange - var service = new AzureOpenAIChatCompletionWithDataService(this._config, this._httpClient); - using var stream = new MemoryStream(Encoding.UTF8.GetBytes(OpenAITestHelper.GetTestResponse("chat_completion_with_data_streaming_test_response.txt"))); - - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StreamContent(stream) - }; - - // Act & Assert - await foreach (var chunk in service.GetStreamingChatMessageContentsAsync([])) - { - Assert.Equal("Test chat with data streaming response", chunk.Content); - } - } - - [Fact] - public async Task GetTextContentsWorksCorrectlyAsync() - { - // Arrange - var service = new AzureOpenAIChatCompletionWithDataService(this._config, this._httpClient); - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_with_data_test_response.json")) - 
}; - - // Act - var result = await service.GetTextContentsAsync("Prompt"); - - // Assert - Assert.True(result.Count > 0); - Assert.Equal("Test chat with data response", result[0].Text); - - var usage = result[0].Metadata?["Usage"] as ChatWithDataUsage; - - Assert.NotNull(usage); - Assert.Equal(55, usage.PromptTokens); - Assert.Equal(100, usage.CompletionTokens); - Assert.Equal(155, usage.TotalTokens); - } - - [Fact] - public async Task GetStreamingTextContentsWorksCorrectlyAsync() - { - // Arrange - var service = new AzureOpenAIChatCompletionWithDataService(this._config, this._httpClient); - using var stream = new MemoryStream(Encoding.UTF8.GetBytes(OpenAITestHelper.GetTestResponse("chat_completion_with_data_streaming_test_response.txt"))); - - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StreamContent(stream) - }; - - // Act & Assert - await foreach (var chunk in service.GetStreamingTextContentsAsync("Prompt")) - { - Assert.Equal("Test chat with data streaming response", chunk.Text); - } - } - - public void Dispose() - { - this._httpClient.Dispose(); - this._messageHandlerStub.Dispose(); - } - - private AzureOpenAIChatCompletionWithDataConfig GetConfig() - { - return new AzureOpenAIChatCompletionWithDataConfig - { - CompletionModelId = "fake-completion-model-id", - CompletionEndpoint = "https://fake-completion-endpoint", - CompletionApiKey = "fake-completion-api-key", - CompletionApiVersion = "fake-api-version", - DataSourceEndpoint = "https://fake-data-source-endpoint", - DataSourceApiKey = "fake-data-source-api-key", - DataSourceIndex = "fake-data-source-index" - }; - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIMemoryBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIMemoryBuilderExtensionsTests.cs deleted file mode 100644 index 08bde153aa4a..000000000000 --- 
a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIMemoryBuilderExtensionsTests.cs +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Azure.Core; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Microsoft.SemanticKernel.Memory; -using Moq; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI; - -/// -/// Unit tests for class. -/// -public sealed class OpenAIMemoryBuilderExtensionsTests -{ - private readonly Mock _mockMemoryStore = new(); - - [Fact] - public void AzureOpenAITextEmbeddingGenerationWithApiKeyWorksCorrectly() - { - // Arrange - var builder = new MemoryBuilder(); - - // Act - var memory = builder - .WithAzureOpenAITextEmbeddingGeneration("deployment-name", "https://endpoint", "api-key", "model-id") - .WithMemoryStore(this._mockMemoryStore.Object) - .Build(); - - // Assert - Assert.NotNull(memory); - } - - [Fact] - public void AzureOpenAITextEmbeddingGenerationWithTokenCredentialWorksCorrectly() - { - // Arrange - var builder = new MemoryBuilder(); - var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); - - // Act - var memory = builder - .WithAzureOpenAITextEmbeddingGeneration("deployment-name", "https://endpoint", credentials, "model-id") - .WithMemoryStore(this._mockMemoryStore.Object) - .Build(); - - // Assert - Assert.NotNull(memory); - } - - [Fact] - public void OpenAITextEmbeddingGenerationWithApiKeyWorksCorrectly() - { - // Arrange - var builder = new MemoryBuilder(); - - // Act - var memory = builder - .WithOpenAITextEmbeddingGeneration("model-id", "api-key", "organization-id") - .WithMemoryStore(this._mockMemoryStore.Object) - .Build(); - - // Assert - Assert.NotNull(memory); - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIServiceCollectionExtensionsTests.cs deleted file mode 100644 index 5cc41c3c881e..000000000000 --- 
a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIServiceCollectionExtensionsTests.cs +++ /dev/null @@ -1,746 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Net.Http; -using Azure.AI.OpenAI; -using Azure.Core; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.AudioToText; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Microsoft.SemanticKernel.Embeddings; -using Microsoft.SemanticKernel.TextGeneration; -using Microsoft.SemanticKernel.TextToAudio; -using Microsoft.SemanticKernel.TextToImage; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI; - -#pragma warning disable CS0618 // AzureOpenAIChatCompletionWithData is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions - -/// -/// Unit tests for class. -/// -public sealed class OpenAIServiceCollectionExtensionsTests : IDisposable -{ - private readonly HttpClient _httpClient; - - public OpenAIServiceCollectionExtensionsTests() - { - this._httpClient = new HttpClient(); - } - - #region Text generation - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.TokenCredential)] - [InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - public void KernelBuilderAddAzureOpenAITextGenerationAddsValidService(InitializationType type) - { - // Arrange - var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); - var client = new OpenAIClient("key"); - var builder = Kernel.CreateBuilder(); - - builder.Services.AddSingleton(client); - - // Act - builder = type switch - { - InitializationType.ApiKey => builder.AddAzureOpenAITextGeneration("deployment-name", "https://endpoint", "api-key"), - InitializationType.TokenCredential => builder.AddAzureOpenAITextGeneration("deployment-name", "https://endpoint", 
credentials), - InitializationType.OpenAIClientInline => builder.AddAzureOpenAITextGeneration("deployment-name", client), - InitializationType.OpenAIClientInServiceProvider => builder.AddAzureOpenAITextGeneration("deployment-name"), - _ => builder - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is AzureOpenAITextGenerationService); - } - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.TokenCredential)] - [InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - public void ServiceCollectionAddAzureOpenAITextGenerationAddsValidService(InitializationType type) - { - // Arrange - var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); - var client = new OpenAIClient("key"); - var builder = Kernel.CreateBuilder(); - - builder.Services.AddSingleton(client); - - // Act - IServiceCollection collection = type switch - { - InitializationType.ApiKey => builder.Services.AddAzureOpenAITextGeneration("deployment-name", "https://endpoint", "api-key"), - InitializationType.TokenCredential => builder.Services.AddAzureOpenAITextGeneration("deployment-name", "https://endpoint", credentials), - InitializationType.OpenAIClientInline => builder.Services.AddAzureOpenAITextGeneration("deployment-name", client), - InitializationType.OpenAIClientInServiceProvider => builder.Services.AddAzureOpenAITextGeneration("deployment-name"), - _ => builder.Services - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is AzureOpenAITextGenerationService); - } - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - public void KernelBuilderAddOpenAITextGenerationAddsValidService(InitializationType type) - { - // Arrange 
- var client = new OpenAIClient("key"); - var builder = Kernel.CreateBuilder(); - - builder.Services.AddSingleton(client); - - // Act - builder = type switch - { - InitializationType.ApiKey => builder.AddOpenAITextGeneration("model-id", "api-key"), - InitializationType.OpenAIClientInline => builder.AddOpenAITextGeneration("model-id", client), - InitializationType.OpenAIClientInServiceProvider => builder.AddOpenAITextGeneration("model-id"), - _ => builder - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is OpenAITextGenerationService); - } - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - public void ServiceCollectionAddOpenAITextGenerationAddsValidService(InitializationType type) - { - // Arrange - var client = new OpenAIClient("key"); - var builder = Kernel.CreateBuilder(); - - builder.Services.AddSingleton(client); - - // Act - IServiceCollection collection = type switch - { - InitializationType.ApiKey => builder.Services.AddOpenAITextGeneration("model-id", "api-key"), - InitializationType.OpenAIClientInline => builder.Services.AddOpenAITextGeneration("model-id", client), - InitializationType.OpenAIClientInServiceProvider => builder.Services.AddOpenAITextGeneration("model-id"), - _ => builder.Services - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is OpenAITextGenerationService); - } - - #endregion - - #region Text embeddings - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.TokenCredential)] - [InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - public void KernelBuilderAddAzureOpenAITextEmbeddingGenerationAddsValidService(InitializationType type) - { - // Arrange - var credentials = 
DelegatedTokenCredential.Create((_, _) => new AccessToken()); - var client = new OpenAIClient("key"); - var builder = Kernel.CreateBuilder(); - - builder.Services.AddSingleton(client); - - // Act - builder = type switch - { - InitializationType.ApiKey => builder.AddAzureOpenAITextEmbeddingGeneration("deployment-name", "https://endpoint", "api-key"), - InitializationType.TokenCredential => builder.AddAzureOpenAITextEmbeddingGeneration("deployment-name", "https://endpoint", credentials), - InitializationType.OpenAIClientInline => builder.AddAzureOpenAITextEmbeddingGeneration("deployment-name", client), - InitializationType.OpenAIClientInServiceProvider => builder.AddAzureOpenAITextEmbeddingGeneration("deployment-name"), - _ => builder - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is AzureOpenAITextEmbeddingGenerationService); - } - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.TokenCredential)] - [InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - public void ServiceCollectionAddAzureOpenAITextEmbeddingGenerationAddsValidService(InitializationType type) - { - // Arrange - var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); - var client = new OpenAIClient("key"); - var builder = Kernel.CreateBuilder(); - - builder.Services.AddSingleton(client); - - // Act - IServiceCollection collection = type switch - { - InitializationType.ApiKey => builder.Services.AddAzureOpenAITextEmbeddingGeneration("deployment-name", "https://endpoint", "api-key"), - InitializationType.TokenCredential => builder.Services.AddAzureOpenAITextEmbeddingGeneration("deployment-name", "https://endpoint", credentials), - InitializationType.OpenAIClientInline => builder.Services.AddAzureOpenAITextEmbeddingGeneration("deployment-name", client), - InitializationType.OpenAIClientInServiceProvider 
=> builder.Services.AddAzureOpenAITextEmbeddingGeneration("deployment-name"), - _ => builder.Services - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is AzureOpenAITextEmbeddingGenerationService); - } - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - public void KernelBuilderAddOpenAITextEmbeddingGenerationAddsValidService(InitializationType type) - { - // Arrange - var client = new OpenAIClient("key"); - var builder = Kernel.CreateBuilder(); - - builder.Services.AddSingleton(client); - - // Act - builder = type switch - { - InitializationType.ApiKey => builder.AddOpenAITextEmbeddingGeneration("model-id", "api-key"), - InitializationType.OpenAIClientInline => builder.AddOpenAITextEmbeddingGeneration("model-id", client), - InitializationType.OpenAIClientInServiceProvider => builder.AddOpenAITextEmbeddingGeneration("model-id"), - _ => builder - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is OpenAITextEmbeddingGenerationService); - } - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - public void ServiceCollectionAddOpenAITextEmbeddingGenerationAddsValidService(InitializationType type) - { - // Arrange - var client = new OpenAIClient("key"); - var builder = Kernel.CreateBuilder(); - - builder.Services.AddSingleton(client); - - // Act - IServiceCollection collection = type switch - { - InitializationType.ApiKey => builder.Services.AddOpenAITextEmbeddingGeneration("model-id", "api-key"), - InitializationType.OpenAIClientInline => builder.Services.AddOpenAITextEmbeddingGeneration("model-id", client), - InitializationType.OpenAIClientInServiceProvider => 
builder.Services.AddOpenAITextEmbeddingGeneration("model-id"), - _ => builder.Services - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is OpenAITextEmbeddingGenerationService); - } - - #endregion - - #region Chat completion - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.TokenCredential)] - [InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - [InlineData(InitializationType.ChatCompletionWithData)] - public void KernelBuilderAddAzureOpenAIChatCompletionAddsValidService(InitializationType type) - { - // Arrange - var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); - var client = new OpenAIClient("key"); - var config = this.GetCompletionWithDataConfig(); - var builder = Kernel.CreateBuilder(); - - builder.Services.AddSingleton(client); - - // Act - builder = type switch - { - InitializationType.ApiKey => builder.AddAzureOpenAIChatCompletion("deployment-name", "https://endpoint", "api-key"), - InitializationType.TokenCredential => builder.AddAzureOpenAIChatCompletion("deployment-name", "https://endpoint", credentials), - InitializationType.OpenAIClientInline => builder.AddAzureOpenAIChatCompletion("deployment-name", client), - InitializationType.OpenAIClientInServiceProvider => builder.AddAzureOpenAIChatCompletion("deployment-name"), - InitializationType.ChatCompletionWithData => builder.AddAzureOpenAIChatCompletion(config), - _ => builder - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - - if (type == InitializationType.ChatCompletionWithData) - { - Assert.True(service is AzureOpenAIChatCompletionWithDataService); - } - else - { - Assert.True(service is AzureOpenAIChatCompletionService); - } - } - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.TokenCredential)] - 
[InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - [InlineData(InitializationType.ChatCompletionWithData)] - public void ServiceCollectionAddAzureOpenAIChatCompletionAddsValidService(InitializationType type) - { - // Arrange - var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); - var client = new OpenAIClient("key"); - var config = this.GetCompletionWithDataConfig(); - var builder = Kernel.CreateBuilder(); - - builder.Services.AddSingleton(client); - - // Act - IServiceCollection collection = type switch - { - InitializationType.ApiKey => builder.Services.AddAzureOpenAIChatCompletion("deployment-name", "https://endpoint", "api-key"), - InitializationType.TokenCredential => builder.Services.AddAzureOpenAIChatCompletion("deployment-name", "https://endpoint", credentials), - InitializationType.OpenAIClientInline => builder.Services.AddAzureOpenAIChatCompletion("deployment-name", client), - InitializationType.OpenAIClientInServiceProvider => builder.Services.AddAzureOpenAIChatCompletion("deployment-name"), - InitializationType.ChatCompletionWithData => builder.Services.AddAzureOpenAIChatCompletion(config), - _ => builder.Services - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - - if (type == InitializationType.ChatCompletionWithData) - { - Assert.True(service is AzureOpenAIChatCompletionWithDataService); - } - else - { - Assert.True(service is AzureOpenAIChatCompletionService); - } - } - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientEndpoint)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - public void KernelBuilderAddOpenAIChatCompletionAddsValidService(InitializationType type) - { - // Arrange - var client = new OpenAIClient("key"); - var builder = Kernel.CreateBuilder(); - - 
builder.Services.AddSingleton(client); - - // Act - builder = type switch - { - InitializationType.ApiKey => builder.AddOpenAIChatCompletion("model-id", "api-key"), - InitializationType.OpenAIClientInline => builder.AddOpenAIChatCompletion("model-id", client), - InitializationType.OpenAIClientInServiceProvider => builder.AddOpenAIChatCompletion("model-id"), - InitializationType.OpenAIClientEndpoint => builder.AddOpenAIChatCompletion("model-id", new Uri("http://localhost:12345"), "apikey"), - _ => builder - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is OpenAIChatCompletionService); - } - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientEndpoint)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - public void ServiceCollectionAddOpenAIChatCompletionAddsValidService(InitializationType type) - { - // Arrange - var client = new OpenAIClient("key"); - var builder = Kernel.CreateBuilder(); - - builder.Services.AddSingleton(client); - - // Act - IServiceCollection collection = type switch - { - InitializationType.ApiKey => builder.Services.AddOpenAIChatCompletion("model-id", "api-key"), - InitializationType.OpenAIClientInline => builder.Services.AddOpenAIChatCompletion("model-id", client), - InitializationType.OpenAIClientEndpoint => builder.Services.AddOpenAIChatCompletion("model-id", new Uri("http://localhost:12345"), "apikey"), - InitializationType.OpenAIClientInServiceProvider => builder.Services.AddOpenAIChatCompletion("model-id"), - _ => builder.Services - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is OpenAIChatCompletionService); - } - - #endregion - - #region Text to image - - [Fact] - public void KernelBuilderAddAzureOpenAITextToImageAddsValidServiceWithTokenCredentials() - { - // Arrange - 
var builder = Kernel.CreateBuilder(); - var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); - - // Act - builder = builder.AddAzureOpenAITextToImage("deployment-name", "https://endpoint", credentials); - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is AzureOpenAITextToImageService); - } - - [Fact] - public void ServiceCollectionAddAzureOpenAITextToImageAddsValidServiceTokenCredentials() - { - // Arrange - var builder = Kernel.CreateBuilder(); - var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); - - // Act - builder.Services.AddAzureOpenAITextToImage("deployment-name", "https://endpoint", credentials); - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is AzureOpenAITextToImageService); - } - - [Fact] - public void KernelBuilderAddAzureOpenAITextToImageAddsValidServiceWithApiKey() - { - // Arrange - var builder = Kernel.CreateBuilder(); - - // Act - builder = builder.AddAzureOpenAITextToImage("deployment-name", "https://endpoint", "api-key"); - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is AzureOpenAITextToImageService); - } - - [Fact] - public void ServiceCollectionAddAzureOpenAITextToImageAddsValidServiceWithApiKey() - { - // Arrange - var builder = Kernel.CreateBuilder(); - - // Act - builder.Services.AddAzureOpenAITextToImage("deployment-name", "https://endpoint", "api-key"); - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is AzureOpenAITextToImageService); - } - - [Fact] - public void KernelBuilderAddOpenAITextToImageAddsValidServiceWithApiKey() - { - // Arrange - var builder = Kernel.CreateBuilder(); - - // Act - builder = builder.AddOpenAITextToImage("model-id", "api-key"); - - // Assert - var service = 
builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is OpenAITextToImageService); - } - - [Fact] - public void ServiceCollectionAddOpenAITextToImageAddsValidServiceWithApiKey() - { - // Arrange - var builder = Kernel.CreateBuilder(); - - // Act - builder.Services.AddOpenAITextToImage("model-id", "api-key"); - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is OpenAITextToImageService); - } - - #endregion - - #region Text to audio - - [Fact] - public void KernelBuilderAddAzureOpenAITextToAudioAddsValidService() - { - // Arrange - var builder = Kernel.CreateBuilder(); - - // Act - builder = builder.AddAzureOpenAITextToAudio("deployment-name", "https://endpoint", "api-key"); - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is AzureOpenAITextToAudioService); - } - - [Fact] - public void ServiceCollectionAddAzureOpenAITextToAudioAddsValidService() - { - // Arrange - var builder = Kernel.CreateBuilder(); - - // Act - builder.Services.AddAzureOpenAITextToAudio("deployment-name", "https://endpoint", "api-key"); - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is AzureOpenAITextToAudioService); - } - - [Fact] - public void KernelBuilderAddOpenAITextToAudioAddsValidService() - { - // Arrange - var builder = Kernel.CreateBuilder(); - - // Act - builder = builder.AddOpenAITextToAudio("model-id", "api-key"); - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is OpenAITextToAudioService); - } - - [Fact] - public void ServiceCollectionAddOpenAITextToAudioAddsValidService() - { - // Arrange - var builder = Kernel.CreateBuilder(); - - // Act - builder.Services.AddOpenAITextToAudio("model-id", "api-key"); - - // Assert - var service = builder.Build().GetRequiredService(); - 
- Assert.NotNull(service); - Assert.True(service is OpenAITextToAudioService); - } - - #endregion - - #region Audio to text - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.TokenCredential)] - [InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - public void KernelBuilderAddAzureOpenAIAudioToTextAddsValidService(InitializationType type) - { - // Arrange - var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); - var client = new OpenAIClient("key"); - var builder = Kernel.CreateBuilder(); - - builder.Services.AddSingleton(client); - - // Act - builder = type switch - { - InitializationType.ApiKey => builder.AddAzureOpenAIAudioToText("deployment-name", "https://endpoint", "api-key"), - InitializationType.TokenCredential => builder.AddAzureOpenAIAudioToText("deployment-name", "https://endpoint", credentials), - InitializationType.OpenAIClientInline => builder.AddAzureOpenAIAudioToText("deployment-name", client), - InitializationType.OpenAIClientInServiceProvider => builder.AddAzureOpenAIAudioToText("deployment-name"), - _ => builder - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is AzureOpenAIAudioToTextService); - } - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.TokenCredential)] - [InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - public void ServiceCollectionAddAzureOpenAIAudioToTextAddsValidService(InitializationType type) - { - // Arrange - var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); - var client = new OpenAIClient("key"); - var builder = Kernel.CreateBuilder(); - - builder.Services.AddSingleton(client); - - // Act - IServiceCollection collection = type switch - { - InitializationType.ApiKey => 
builder.Services.AddAzureOpenAIAudioToText("deployment-name", "https://endpoint", "api-key"), - InitializationType.TokenCredential => builder.Services.AddAzureOpenAIAudioToText("deployment-name", "https://endpoint", credentials), - InitializationType.OpenAIClientInline => builder.Services.AddAzureOpenAIAudioToText("deployment-name", client), - InitializationType.OpenAIClientInServiceProvider => builder.Services.AddAzureOpenAIAudioToText("deployment-name"), - _ => builder.Services - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is AzureOpenAIAudioToTextService); - } - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - public void KernelBuilderAddOpenAIAudioToTextAddsValidService(InitializationType type) - { - // Arrange - var client = new OpenAIClient("key"); - var builder = Kernel.CreateBuilder(); - - builder.Services.AddSingleton(client); - - // Act - builder = type switch - { - InitializationType.ApiKey => builder.AddOpenAIAudioToText("model-id", "api-key"), - InitializationType.OpenAIClientInline => builder.AddOpenAIAudioToText("model-id", client), - InitializationType.OpenAIClientInServiceProvider => builder.AddOpenAIAudioToText("model-id"), - _ => builder - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is OpenAIAudioToTextService); - } - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - public void ServiceCollectionAddOpenAIAudioToTextAddsValidService(InitializationType type) - { - // Arrange - var client = new OpenAIClient("key"); - var builder = Kernel.CreateBuilder(); - - builder.Services.AddSingleton(client); - - // Act - IServiceCollection collection = type switch 
- { - InitializationType.ApiKey => builder.Services.AddOpenAIAudioToText("model-id", "api-key"), - InitializationType.OpenAIClientInline => builder.Services.AddOpenAIAudioToText("model-id", client), - InitializationType.OpenAIClientInServiceProvider => builder.Services.AddOpenAIAudioToText("model-id"), - _ => builder.Services - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is OpenAIAudioToTextService); - } - - #endregion - - public void Dispose() - { - this._httpClient.Dispose(); - } - - public enum InitializationType - { - ApiKey, - TokenCredential, - OpenAIClientInline, - OpenAIClientInServiceProvider, - OpenAIClientEndpoint, - ChatCompletionWithData - } - - private AzureOpenAIChatCompletionWithDataConfig GetCompletionWithDataConfig() - { - return new() - { - CompletionApiKey = "completion-api-key", - CompletionApiVersion = "completion-v1", - CompletionEndpoint = "https://completion-endpoint", - CompletionModelId = "completion-model-id", - DataSourceApiKey = "data-source-api-key", - DataSourceEndpoint = "https://data-source-endpoint", - DataSourceIndex = "data-source-index" - }; - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAITestHelper.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAITestHelper.cs deleted file mode 100644 index f6ee6bb93a11..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAITestHelper.cs +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.IO; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI; - -/// -/// Helper for OpenAI test purposes. -/// -internal static class OpenAITestHelper -{ - /// - /// Reads test response from file for mocking purposes. - /// - /// Name of the file with test response. 
- internal static string GetTestResponse(string fileName) - { - return File.ReadAllText($"./OpenAI/TestData/{fileName}"); - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/filters_streaming_multiple_function_calls_test_response.txt b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/filters_streaming_multiple_function_calls_test_response.txt deleted file mode 100644 index 0e26da41d32b..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/filters_streaming_multiple_function_calls_test_response.txt +++ /dev/null @@ -1,5 +0,0 @@ -data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":0,"id":"tool-call-id-1","type":"function","function":{"name":"MyPlugin-Function1","arguments":"{\n\"parameter\": \"function1-value\"\n}"}}]},"finish_reason":"tool_calls"}]} - -data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":1,"id":"tool-call-id-2","type":"function","function":{"name":"MyPlugin-Function2","arguments":"{\n\"parameter\": \"function2-value\"\n}"}}]},"finish_reason":"tool_calls"}]} - -data: [DONE] diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextEmbedding/AzureOpenAITextEmbeddingGenerationServiceTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextEmbedding/AzureOpenAITextEmbeddingGenerationServiceTests.cs deleted file mode 100644 index 640280830ba2..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextEmbedding/AzureOpenAITextEmbeddingGenerationServiceTests.cs +++ /dev/null @@ -1,188 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Net.Http; -using System.Text; -using System.Text.Json; -using System.Threading.Tasks; -using Azure.AI.OpenAI; -using Azure.Core; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Moq; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI.TextEmbedding; - -/// -/// Unit tests for class. -/// -public sealed class AzureOpenAITextEmbeddingGenerationServiceTests : IDisposable -{ - private readonly HttpMessageHandlerStub _messageHandlerStub; - private readonly HttpClient _httpClient; - private readonly Mock _mockLoggerFactory; - - public AzureOpenAITextEmbeddingGenerationServiceTests() - { - this._messageHandlerStub = new HttpMessageHandlerStub(); - this._httpClient = new HttpClient(this._messageHandlerStub, false); - this._mockLoggerFactory = new Mock(); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void ConstructorWithApiKeyWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var service = includeLoggerFactory ? - new AzureOpenAITextEmbeddingGenerationService("deployment-name", "https://endpoint", "api-key", "model-id", loggerFactory: this._mockLoggerFactory.Object) : - new AzureOpenAITextEmbeddingGenerationService("deployment-name", "https://endpoint", "api-key", "model-id"); - - // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void ConstructorWithTokenCredentialWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); - var service = includeLoggerFactory ? 
- new AzureOpenAITextEmbeddingGenerationService("deployment", "https://endpoint", credentials, "model-id", loggerFactory: this._mockLoggerFactory.Object) : - new AzureOpenAITextEmbeddingGenerationService("deployment", "https://endpoint", credentials, "model-id"); - - // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void ConstructorWithOpenAIClientWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var client = new OpenAIClient("key"); - var service = includeLoggerFactory ? - new AzureOpenAITextEmbeddingGenerationService("deployment", client, "model-id", loggerFactory: this._mockLoggerFactory.Object) : - new AzureOpenAITextEmbeddingGenerationService("deployment", client, "model-id"); - - // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); - } - - [Fact] - public async Task GenerateEmbeddingsForEmptyDataReturnsEmptyResultAsync() - { - // Arrange - var service = new AzureOpenAITextEmbeddingGenerationService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); - - // Act - var result = await service.GenerateEmbeddingsAsync([]); - - // Assert - Assert.Empty(result); - } - - [Fact] - public async Task GenerateEmbeddingsWithEmptyResponseThrowsExceptionAsync() - { - // Arrange - var service = new AzureOpenAITextEmbeddingGenerationService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) - { - Content = new StringContent(""" - { - "object": "list", - "data": [], - "model": "model-id" - } - """, Encoding.UTF8, "application/json") - }; - - // Act & Assert - var exception = await Assert.ThrowsAsync(() => service.GenerateEmbeddingsAsync(["test"])); - Assert.Equal("Expected 1 text embedding(s), but received 0", exception.Message); - } - - [Fact] - 
public async Task GenerateEmbeddingsByDefaultWorksCorrectlyAsync() - { - // Arrange - var service = new AzureOpenAITextEmbeddingGenerationService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); - this._messageHandlerStub.ResponseToReturn = this.SuccessfulResponse; - - // Act - var result = await service.GenerateEmbeddingsAsync(["test"]); - - // Assert - Assert.Single(result); - - var memory = result[0]; - - Assert.Equal(0.018990106880664825, memory.Span[0]); - Assert.Equal(-0.0073809814639389515, memory.Span[1]); - } - - [Fact] - public async Task GenerateEmbeddingsWithDimensionsWorksCorrectlyAsync() - { - // Arrange - var service = new AzureOpenAITextEmbeddingGenerationService( - "deployment-name", - "https://endpoint", - "api-key", - "model-id", - this._httpClient, - dimensions: 256); - - this._messageHandlerStub.ResponseToReturn = this.SuccessfulResponse; - - // Act - await service.GenerateEmbeddingsAsync(["test"]); - - var requestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); - var optionsJson = JsonSerializer.Deserialize(requestContent); - - // Assert - Assert.Equal(256, optionsJson.GetProperty("dimensions").GetInt32()); - } - - public void Dispose() - { - this._httpClient.Dispose(); - this._messageHandlerStub.Dispose(); - } - - #region private - - private HttpResponseMessage SuccessfulResponse - => new(System.Net.HttpStatusCode.OK) - { - Content = new StringContent(""" - { - "object": "list", - "data": [ - { - "object": "embedding", - "embedding": [ - 0.018990106880664825, - -0.0073809814639389515 - ], - "index": 0 - } - ], - "model": "model-id" - } - """, Encoding.UTF8, "application/json") - }; - - #endregion -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextEmbedding/OpenAITextEmbeddingGenerationServiceTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextEmbedding/OpenAITextEmbeddingGenerationServiceTests.cs deleted file mode 100644 index 
76638ae9cc9f..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextEmbedding/OpenAITextEmbeddingGenerationServiceTests.cs +++ /dev/null @@ -1,164 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Net.Http; -using System.Text; -using System.Text.Json; -using System.Threading.Tasks; -using Azure.AI.OpenAI; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Moq; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI.TextEmbedding; - -/// -/// Unit tests for class. -/// -public sealed class OpenAITextEmbeddingGenerationServiceTests : IDisposable -{ - private readonly HttpMessageHandlerStub _messageHandlerStub; - private readonly HttpClient _httpClient; - private readonly Mock _mockLoggerFactory; - - public OpenAITextEmbeddingGenerationServiceTests() - { - this._messageHandlerStub = new HttpMessageHandlerStub(); - this._httpClient = new HttpClient(this._messageHandlerStub, false); - this._mockLoggerFactory = new Mock(); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void ConstructorWithApiKeyWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var service = includeLoggerFactory ? - new OpenAITextEmbeddingGenerationService("model-id", "api-key", "organization", loggerFactory: this._mockLoggerFactory.Object) : - new OpenAITextEmbeddingGenerationService("model-id", "api-key", "organization"); - - // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void ConstructorWithOpenAIClientWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var client = new OpenAIClient("key"); - var service = includeLoggerFactory ? 
- new OpenAITextEmbeddingGenerationService("model-id", client, loggerFactory: this._mockLoggerFactory.Object) : - new OpenAITextEmbeddingGenerationService("model-id", client); - - // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); - } - - [Fact] - public async Task GenerateEmbeddingsForEmptyDataReturnsEmptyResultAsync() - { - // Arrange - var service = new OpenAITextEmbeddingGenerationService("model-id", "api-key", "organization", this._httpClient); - - // Act - var result = await service.GenerateEmbeddingsAsync([]); - - // Assert - Assert.Empty(result); - } - - [Fact] - public async Task GenerateEmbeddingsWithEmptyResponseThrowsExceptionAsync() - { - // Arrange - var service = new OpenAITextEmbeddingGenerationService("model-id", "api-key", "organization", this._httpClient); - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) - { - Content = new StringContent(""" - { - "object": "list", - "data": [], - "model": "model-id" - } - """, Encoding.UTF8, "application/json") - }; - - // Act & Assert - var exception = await Assert.ThrowsAsync(() => service.GenerateEmbeddingsAsync(["test"])); - Assert.Equal("Expected 1 text embedding(s), but received 0", exception.Message); - } - - [Fact] - public async Task GenerateEmbeddingsByDefaultWorksCorrectlyAsync() - { - // Arrange - var service = new OpenAITextEmbeddingGenerationService("model-id", "api-key", "organization", this._httpClient); - this._messageHandlerStub.ResponseToReturn = this.SuccessfulResponse; - - // Act - var result = await service.GenerateEmbeddingsAsync(["test"]); - - // Assert - Assert.Single(result); - - var memory = result[0]; - - Assert.Equal(0.018990106880664825, memory.Span[0]); - Assert.Equal(-0.0073809814639389515, memory.Span[1]); - } - - [Fact] - public async Task GenerateEmbeddingsWithDimensionsWorksCorrectlyAsync() - { - // Arrange - var service = new OpenAITextEmbeddingGenerationService("model-id", 
"api-key", "organization", this._httpClient, dimensions: 256); - this._messageHandlerStub.ResponseToReturn = this.SuccessfulResponse; - - // Act - await service.GenerateEmbeddingsAsync(["test"]); - - var requestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); - var optionsJson = JsonSerializer.Deserialize(requestContent); - - // Assert - Assert.Equal(256, optionsJson.GetProperty("dimensions").GetInt32()); - } - - public void Dispose() - { - this._httpClient.Dispose(); - this._messageHandlerStub.Dispose(); - } - - #region private - - private HttpResponseMessage SuccessfulResponse - => new(System.Net.HttpStatusCode.OK) - { - Content = new StringContent(""" - { - "object": "list", - "data": [ - { - "object": "embedding", - "embedding": [ - 0.018990106880664825, - -0.0073809814639389515 - ], - "index": 0 - } - ], - "model": "model-id" - } - """, Encoding.UTF8, "application/json") - }; - - #endregion -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextGeneration/AzureOpenAITextGenerationServiceTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextGeneration/AzureOpenAITextGenerationServiceTests.cs deleted file mode 100644 index d20bb502e23d..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextGeneration/AzureOpenAITextGenerationServiceTests.cs +++ /dev/null @@ -1,210 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.IO; -using System.Net; -using System.Net.Http; -using System.Text; -using System.Text.Json; -using System.Threading.Tasks; -using Azure.AI.OpenAI; -using Azure.Core; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Moq; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI.TextGeneration; - -/// -/// Unit tests for class. 
-/// -public sealed class AzureOpenAITextGenerationServiceTests : IDisposable -{ - private readonly HttpMessageHandlerStub _messageHandlerStub; - private readonly HttpClient _httpClient; - private readonly Mock _mockLoggerFactory; - - public AzureOpenAITextGenerationServiceTests() - { - this._messageHandlerStub = new HttpMessageHandlerStub(); - this._httpClient = new HttpClient(this._messageHandlerStub, false); - this._mockLoggerFactory = new Mock(); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void ConstructorWithApiKeyWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var service = includeLoggerFactory ? - new AzureOpenAITextGenerationService("deployment-name", "https://endpoint", "api-key", "model-id", loggerFactory: this._mockLoggerFactory.Object) : - new AzureOpenAITextGenerationService("deployment-name", "https://endpoint", "api-key", "model-id"); - - // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void ConstructorWithTokenCredentialWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); - var service = includeLoggerFactory ? - new AzureOpenAITextGenerationService("deployment", "https://endpoint", credentials, "model-id", loggerFactory: this._mockLoggerFactory.Object) : - new AzureOpenAITextGenerationService("deployment", "https://endpoint", credentials, "model-id"); - - // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void ConstructorWithOpenAIClientWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var client = new OpenAIClient("key"); - var service = includeLoggerFactory ? 
- new AzureOpenAITextGenerationService("deployment", client, "model-id", loggerFactory: this._mockLoggerFactory.Object) : - new AzureOpenAITextGenerationService("deployment", client, "model-id"); - - // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); - } - - [Fact] - public async Task GetTextContentsWithEmptyChoicesThrowsExceptionAsync() - { - // Arrange - var service = new AzureOpenAITextGenerationService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StringContent("{\"id\":\"response-id\",\"object\":\"text_completion\",\"created\":1646932609,\"model\":\"ada\",\"choices\":[]}") - }; - - // Act & Assert - var exception = await Assert.ThrowsAsync(() => service.GetTextContentsAsync("Prompt")); - - Assert.Equal("Text completions not found", exception.Message); - } - - [Theory] - [InlineData(0)] - [InlineData(129)] - public async Task GetTextContentsWithInvalidResultsPerPromptValueThrowsExceptionAsync(int resultsPerPrompt) - { - // Arrange - var service = new AzureOpenAITextGenerationService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); - var settings = new OpenAIPromptExecutionSettings { ResultsPerPrompt = resultsPerPrompt }; - - // Act & Assert - var exception = await Assert.ThrowsAsync(() => service.GetTextContentsAsync("Prompt", settings)); - - Assert.Contains("The value must be in range between", exception.Message, StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public async Task GetTextContentsHandlesSettingsCorrectlyAsync() - { - // Arrange - var service = new AzureOpenAITextGenerationService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); - var settings = new OpenAIPromptExecutionSettings - { - MaxTokens = 123, - Temperature = 0.6, - TopP = 0.5, - FrequencyPenalty = 1.6, - PresencePenalty = 1.2, - 
ResultsPerPrompt = 5, - TokenSelectionBiases = new Dictionary { { 2, 3 } }, - StopSequences = ["stop_sequence"], - TopLogprobs = 5 - }; - - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StringContent(OpenAITestHelper.GetTestResponse("text_completion_test_response.json")) - }; - - // Act - var result = await service.GetTextContentsAsync("Prompt", settings); - - // Assert - var requestContent = this._messageHandlerStub.RequestContent; - - Assert.NotNull(requestContent); - - var content = JsonSerializer.Deserialize(Encoding.UTF8.GetString(requestContent)); - - Assert.Equal("Prompt", content.GetProperty("prompt")[0].GetString()); - Assert.Equal(123, content.GetProperty("max_tokens").GetInt32()); - Assert.Equal(0.6, content.GetProperty("temperature").GetDouble()); - Assert.Equal(0.5, content.GetProperty("top_p").GetDouble()); - Assert.Equal(1.6, content.GetProperty("frequency_penalty").GetDouble()); - Assert.Equal(1.2, content.GetProperty("presence_penalty").GetDouble()); - Assert.Equal(5, content.GetProperty("n").GetInt32()); - Assert.Equal(5, content.GetProperty("best_of").GetInt32()); - Assert.Equal(3, content.GetProperty("logit_bias").GetProperty("2").GetInt32()); - Assert.Equal("stop_sequence", content.GetProperty("stop")[0].GetString()); - Assert.Equal(5, content.GetProperty("logprobs").GetInt32()); - } - - [Fact] - public async Task GetTextContentsWorksCorrectlyAsync() - { - // Arrange - var service = new AzureOpenAITextGenerationService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StringContent(OpenAITestHelper.GetTestResponse("text_completion_test_response.json")) - }; - - // Act - var result = await service.GetTextContentsAsync("Prompt"); - - // Assert - Assert.True(result.Count > 0); - Assert.Equal("Test chat response", result[0].Text); - - var usage = 
result[0].Metadata?["Usage"] as CompletionsUsage; - - Assert.NotNull(usage); - Assert.Equal(55, usage.PromptTokens); - Assert.Equal(100, usage.CompletionTokens); - Assert.Equal(155, usage.TotalTokens); - } - - [Fact] - public async Task GetStreamingTextContentsWorksCorrectlyAsync() - { - // Arrange - var service = new AzureOpenAITextGenerationService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); - using var stream = new MemoryStream(Encoding.UTF8.GetBytes(OpenAITestHelper.GetTestResponse("text_completion_streaming_test_response.txt"))); - - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StreamContent(stream) - }; - - // Act & Assert - await foreach (var chunk in service.GetStreamingTextContentsAsync("Prompt")) - { - Assert.Equal("Test chat streaming response", chunk.Text); - } - } - - public void Dispose() - { - this._httpClient.Dispose(); - this._messageHandlerStub.Dispose(); - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextGeneration/OpenAITextGenerationServiceTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextGeneration/OpenAITextGenerationServiceTests.cs deleted file mode 100644 index b8d804c21b5d..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextGeneration/OpenAITextGenerationServiceTests.cs +++ /dev/null @@ -1,113 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.IO; -using System.Net; -using System.Net.Http; -using System.Text; -using System.Threading.Tasks; -using Azure.AI.OpenAI; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Moq; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI.TextGeneration; - -/// -/// Unit tests for class. 
-/// -public sealed class OpenAITextGenerationServiceTests : IDisposable -{ - private readonly HttpMessageHandlerStub _messageHandlerStub; - private readonly HttpClient _httpClient; - private readonly Mock _mockLoggerFactory; - - public OpenAITextGenerationServiceTests() - { - this._messageHandlerStub = new HttpMessageHandlerStub(); - this._httpClient = new HttpClient(this._messageHandlerStub, false); - this._mockLoggerFactory = new Mock(); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void ConstructorWithApiKeyWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var service = includeLoggerFactory ? - new OpenAITextGenerationService("model-id", "api-key", "organization", loggerFactory: this._mockLoggerFactory.Object) : - new OpenAITextGenerationService("model-id", "api-key", "organization"); - - // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void ConstructorWithOpenAIClientWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var client = new OpenAIClient("key"); - var service = includeLoggerFactory ? 
- new OpenAITextGenerationService("model-id", client, loggerFactory: this._mockLoggerFactory.Object) : - new OpenAITextGenerationService("model-id", client); - - // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); - } - - [Fact] - public async Task GetTextContentsWorksCorrectlyAsync() - { - // Arrange - var service = new OpenAITextGenerationService("model-id", "api-key", "organization", this._httpClient); - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StringContent(OpenAITestHelper.GetTestResponse("text_completion_test_response.json")) - }; - - // Act - var result = await service.GetTextContentsAsync("Prompt"); - - // Assert - Assert.True(result.Count > 0); - Assert.Equal("Test chat response", result[0].Text); - - var usage = result[0].Metadata?["Usage"] as CompletionsUsage; - - Assert.NotNull(usage); - Assert.Equal(55, usage.PromptTokens); - Assert.Equal(100, usage.CompletionTokens); - Assert.Equal(155, usage.TotalTokens); - } - - [Fact] - public async Task GetStreamingTextContentsWorksCorrectlyAsync() - { - // Arrange - var service = new OpenAITextGenerationService("model-id", "api-key", "organization", this._httpClient); - using var stream = new MemoryStream(Encoding.UTF8.GetBytes(OpenAITestHelper.GetTestResponse("text_completion_streaming_test_response.txt"))); - - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StreamContent(stream) - }; - - // Act & Assert - await foreach (var chunk in service.GetStreamingTextContentsAsync("Prompt")) - { - Assert.Equal("Test chat streaming response", chunk.Text); - } - } - - public void Dispose() - { - this._httpClient.Dispose(); - this._messageHandlerStub.Dispose(); - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToAudio/AzureOpenAITextToAudioServiceTests.cs 
b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToAudio/AzureOpenAITextToAudioServiceTests.cs deleted file mode 100644 index baa11a265f0a..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToAudio/AzureOpenAITextToAudioServiceTests.cs +++ /dev/null @@ -1,130 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.IO; -using System.Linq; -using System.Net; -using System.Net.Http; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Moq; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI.TextToAudio; - -/// -/// Unit tests for class. -/// -public sealed class AzureOpenAITextToAudioServiceTests : IDisposable -{ - private readonly HttpMessageHandlerStub _messageHandlerStub; - private readonly HttpClient _httpClient; - private readonly Mock _mockLoggerFactory; - - public AzureOpenAITextToAudioServiceTests() - { - this._messageHandlerStub = new HttpMessageHandlerStub(); - this._httpClient = new HttpClient(this._messageHandlerStub, false); - this._mockLoggerFactory = new Mock(); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void ConstructorWithApiKeyWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var service = includeLoggerFactory ? - new AzureOpenAITextToAudioService("deployment-name", "https://endpoint", "api-key", "model-id", loggerFactory: this._mockLoggerFactory.Object) : - new AzureOpenAITextToAudioService("deployment-name", "https://endpoint", "api-key", "model-id"); - - // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); - Assert.Equal("deployment-name", service.Attributes["DeploymentName"]); - } - - [Theory] - [MemberData(nameof(ExecutionSettings))] - public async Task GetAudioContentWithInvalidSettingsThrowsExceptionAsync(OpenAITextToAudioExecutionSettings? 
settings, Type expectedExceptionType) - { - // Arrange - var service = new AzureOpenAITextToAudioService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); - await using var stream = new MemoryStream(new byte[] { 0x00, 0x00, 0xFF, 0x7F }); - - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StreamContent(stream) - }; - - // Act - var exception = await Record.ExceptionAsync(() => service.GetAudioContentsAsync("Some text", settings)); - - // Assert - Assert.NotNull(exception); - Assert.IsType(expectedExceptionType, exception); - } - - [Fact] - public async Task GetAudioContentByDefaultWorksCorrectlyAsync() - { - // Arrange - var expectedByteArray = new byte[] { 0x00, 0x00, 0xFF, 0x7F }; - - var service = new AzureOpenAITextToAudioService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); - await using var stream = new MemoryStream(expectedByteArray); - - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StreamContent(stream) - }; - - // Act - var result = await service.GetAudioContentsAsync("Some text", new OpenAITextToAudioExecutionSettings("voice")); - - // Assert - var audioData = result[0].Data!.Value; - Assert.False(audioData.IsEmpty); - Assert.True(audioData.Span.SequenceEqual(expectedByteArray)); - } - - [Theory] - [InlineData(true, "http://local-endpoint")] - [InlineData(false, "https://endpoint")] - public async Task GetAudioContentUsesValidBaseUrlAsync(bool useHttpClientBaseAddress, string expectedBaseAddress) - { - // Arrange - var expectedByteArray = new byte[] { 0x00, 0x00, 0xFF, 0x7F }; - - if (useHttpClientBaseAddress) - { - this._httpClient.BaseAddress = new Uri("http://local-endpoint"); - } - - var service = new AzureOpenAITextToAudioService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); - await using var stream = new 
MemoryStream(expectedByteArray); - - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StreamContent(stream) - }; - - // Act - var result = await service.GetAudioContentsAsync("Some text", new OpenAITextToAudioExecutionSettings("voice")); - - // Assert - Assert.StartsWith(expectedBaseAddress, this._messageHandlerStub.RequestUri!.AbsoluteUri, StringComparison.InvariantCulture); - } - - public void Dispose() - { - this._httpClient.Dispose(); - this._messageHandlerStub.Dispose(); - } - - public static TheoryData ExecutionSettings => new() - { - { new OpenAITextToAudioExecutionSettings(""), typeof(ArgumentException) }, - }; -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToImage/AzureOpenAITextToImageTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToImage/AzureOpenAITextToImageTests.cs deleted file mode 100644 index 084fa923b2ce..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToImage/AzureOpenAITextToImageTests.cs +++ /dev/null @@ -1,174 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Net.Http; -using System.Text; -using System.Threading.Tasks; -using Azure.AI.OpenAI; -using Azure.Core; -using Azure.Core.Pipeline; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Microsoft.SemanticKernel.Services; -using Moq; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI.TextToImage; - -/// -/// Unit tests for class. 
-/// -public sealed class AzureOpenAITextToImageServiceTests : IDisposable -{ - private readonly MultipleHttpMessageHandlerStub _messageHandlerStub; - private readonly HttpClient _httpClient; - private readonly Mock _mockLoggerFactory; - - public AzureOpenAITextToImageServiceTests() - { - this._messageHandlerStub = new MultipleHttpMessageHandlerStub(); - this._httpClient = new HttpClient(this._messageHandlerStub, false); - this._mockLoggerFactory = new Mock(); - - var mockLogger = new Mock(); - - mockLogger.Setup(l => l.IsEnabled(It.IsAny())).Returns(true); - - this._mockLoggerFactory.Setup(l => l.CreateLogger(It.IsAny())).Returns(mockLogger.Object); - } - - [Fact] - public async Task ItSupportsOpenAIClientInjectionAsync() - { - // Arrange - using var messageHandlerStub = new HttpMessageHandlerStub(); - using var httpClient = new HttpClient(messageHandlerStub, false); - messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) - { - Content = new StringContent(""" - { - "created": 1702575371, - "data": [ - { - "revised_prompt": "A photo capturing the diversity of the Earth's landscapes.", - "url": "https://dalleprodsec.blob.core.windows.net/private/images/0f20c621-7eb0-449d-87fd-8dd2a3a15fbe/generated_00.png?se=2023-12-15T17%3A36%3A25Z&sig=jd2%2Fa8jOM9NmclrUbOLdRgAxcFDFPezOpG%2BSF82d7zM%3D&ske=2023-12-20T10%3A10%3A28Z&skoid=e52d5ed7-0657-4f62-bc12-7e5dbb260a96&sks=b&skt=2023-12-13T10%3A10%3A28Z&sktid=33e01921-4d64-4f8c-a055-5bdaffd5e33d&skv=2020-10-02&sp=r&spr=https&sr=b&sv=2020-10-02" - } - ] - } - """, Encoding.UTF8, "application/json") - }; - var clientOptions = new OpenAIClientOptions - { - Transport = new HttpClientTransport(httpClient), - }; - var openAIClient = new OpenAIClient(new Uri("https://az.com"), new Azure.AzureKeyCredential("NOKEY"), clientOptions); - - var textToImageCompletion = new AzureOpenAITextToImageService(deploymentName: "gpt-35-turbo", openAIClient, modelId: "gpt-3.5-turbo"); - - // Act - var result = 
await textToImageCompletion.GenerateImageAsync("anything", 1024, 1024); - - // Assert - Assert.NotNull(result); - } - - [Theory] - [InlineData(1024, 1024, null)] - [InlineData(1792, 1024, null)] - [InlineData(1024, 1792, null)] - [InlineData(512, 512, typeof(NotSupportedException))] - [InlineData(256, 256, typeof(NotSupportedException))] - [InlineData(123, 456, typeof(NotSupportedException))] - public async Task ItValidatesTheModelIdAsync(int width, int height, Type? expectedExceptionType) - { - // Arrange - using var messageHandlerStub = new HttpMessageHandlerStub(); - using var httpClient = new HttpClient(messageHandlerStub, false); - messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) - { - Content = new StringContent(""" - { - "created": 1702575371, - "data": [ - { - "revised_prompt": "A photo capturing the diversity of the Earth's landscapes.", - "url": "https://dalleprodsec.blob.core.windows.net/private/images/0f20c621-7eb0-449d-87fd-8dd2a3a15fbe/generated_00.png?se=2023-12-15T17%3A36%3A25Z&sig=jd2%2Fa8jOM9NmclrUbOLdRgAxcFDFPezOpG%2BSF82d7zM%3D&ske=2023-12-20T10%3A10%3A28Z&skoid=e52d5ed7-0657-4f62-bc12-7e5dbb260a96&sks=b&skt=2023-12-13T10%3A10%3A28Z&sktid=33e01921-4d64-4f8c-a055-5bdaffd5e33d&skv=2020-10-02&sp=r&spr=https&sr=b&sv=2020-10-02" - } - ] - } - """, Encoding.UTF8, "application/json") - }; - - var textToImageCompletion = new AzureOpenAITextToImageService(deploymentName: "gpt-35-turbo", modelId: "gpt-3.5-turbo", endpoint: "https://az.com", apiKey: "NOKEY", httpClient: httpClient); - - if (expectedExceptionType is not null) - { - await Assert.ThrowsAsync(expectedExceptionType, () => textToImageCompletion.GenerateImageAsync("anything", width, height)); - } - else - { - // Act - var result = await textToImageCompletion.GenerateImageAsync("anything", width, height); - - // Assert - Assert.NotNull(result); - } - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void 
ConstructorWithApiKeyWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); - var service = includeLoggerFactory ? - new AzureOpenAITextToImageService("deployment", "https://endpoint", credentials, "model-id", loggerFactory: this._mockLoggerFactory.Object) : - new AzureOpenAITextToImageService("deployment", "https://endpoint", credentials, "model-id"); - - // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void ConstructorWithTokenCredentialWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); - var service = includeLoggerFactory ? - new AzureOpenAITextToImageService("deployment", "https://endpoint", credentials, "model-id", loggerFactory: this._mockLoggerFactory.Object) : - new AzureOpenAITextToImageService("deployment", "https://endpoint", credentials, "model-id"); - - // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); - } - - [Theory] - [InlineData("gpt-35-turbo", "gpt-3.5-turbo")] - [InlineData("gpt-35-turbo", null)] - [InlineData("gpt-4-turbo", "gpt-4")] - public void ItHasPropertiesAsDefined(string deploymentName, string? 
modelId) - { - var service = new AzureOpenAITextToImageService(deploymentName, "https://az.com", "NOKEY", modelId); - Assert.Contains(AzureOpenAITextToImageService.DeploymentNameKey, service.Attributes); - Assert.Equal(deploymentName, service.Attributes[AzureOpenAITextToImageService.DeploymentNameKey]); - - if (modelId is null) - { - return; - } - - Assert.Contains(AIServiceExtensions.ModelIdKey, service.Attributes); - Assert.Equal(modelId, service.Attributes[AIServiceExtensions.ModelIdKey]); - } - - public void Dispose() - { - this._httpClient.Dispose(); - this._messageHandlerStub.Dispose(); - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToImage/OpenAITextToImageServiceTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToImage/OpenAITextToImageServiceTests.cs deleted file mode 100644 index 1f31ec076edd..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToImage/OpenAITextToImageServiceTests.cs +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Net.Http; -using System.Text; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Moq; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI.TextToImage; - -/// -/// Unit tests for class. -/// -public sealed class OpenAITextToImageServiceTests : IDisposable -{ - private readonly HttpMessageHandlerStub _messageHandlerStub; - private readonly HttpClient _httpClient; - private readonly Mock _mockLoggerFactory; - - public OpenAITextToImageServiceTests() - { - this._messageHandlerStub = new HttpMessageHandlerStub(); - this._httpClient = new HttpClient(this._messageHandlerStub, false); - this._mockLoggerFactory = new Mock(); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void ConstructorWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var service = includeLoggerFactory ? 
- new OpenAITextToImageService("api-key", "organization", loggerFactory: this._mockLoggerFactory.Object) : - new OpenAITextToImageService("api-key", "organization"); - - // Assert - Assert.NotNull(service); - Assert.Equal("organization", service.Attributes["Organization"]); - Assert.False(service.Attributes.ContainsKey("ModelId")); - } - - [Theory] - [InlineData(123, 456, true)] - [InlineData(256, 512, true)] - [InlineData(256, 256, false)] - [InlineData(512, 512, false)] - [InlineData(1024, 1024, false)] - public async Task GenerateImageWorksCorrectlyAsync(int width, int height, bool expectedException) - { - // Arrange - var service = new OpenAITextToImageService("api-key", "organization", "dall-e-3", this._httpClient); - Assert.Equal("dall-e-3", service.Attributes["ModelId"]); - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) - { - Content = new StringContent(""" - { - "created": 1702575371, - "data": [ - { - "url": "https://image-url" - } - ] - } - """, Encoding.UTF8, "application/json") - }; - - // Act & Assert - if (expectedException) - { - await Assert.ThrowsAsync(() => service.GenerateImageAsync("description", width, height)); - } - else - { - var result = await service.GenerateImageAsync("description", width, height); - - Assert.Equal("https://image-url", result); - } - } - - public void Dispose() - { - this._httpClient.Dispose(); - this._messageHandlerStub.Dispose(); - } -} diff --git a/dotnet/src/Experimental/Agents/Experimental.Agents.csproj b/dotnet/src/Experimental/Agents/Experimental.Agents.csproj index b5038dbabde9..648d6b7fd02f 100644 --- a/dotnet/src/Experimental/Agents/Experimental.Agents.csproj +++ b/dotnet/src/Experimental/Agents/Experimental.Agents.csproj @@ -20,7 +20,7 @@ - + diff --git a/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.cs b/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.cs index aa4f324490d8..c099f7d609e4 100644 --- 
a/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.cs +++ b/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.cs @@ -4,7 +4,6 @@ using System.Text.Json; using System.Threading; using System.Threading.Tasks; -using Microsoft.SemanticKernel.Connectors.OpenAI; using Microsoft.SemanticKernel.Experimental.Agents.Exceptions; using Microsoft.SemanticKernel.Experimental.Agents.Internal; using Microsoft.SemanticKernel.Http; @@ -92,7 +91,7 @@ private static void AddHeaders(this HttpRequestMessage request, OpenAIRestContex { request.Headers.Add(HeaderNameOpenAIAssistant, HeaderOpenAIValueAssistant); request.Headers.Add(HeaderNameUserAgent, HttpHeaderConstant.Values.UserAgent); - request.Headers.Add(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(OpenAIFileService))); + request.Headers.Add(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(IAgent))); if (context.HasVersion) { diff --git a/dotnet/src/Experimental/Agents/Internal/ChatRun.cs b/dotnet/src/Experimental/Agents/Internal/ChatRun.cs index 1928f219c903..218ef3e3ddfc 100644 --- a/dotnet/src/Experimental/Agents/Internal/ChatRun.cs +++ b/dotnet/src/Experimental/Agents/Internal/ChatRun.cs @@ -163,13 +163,12 @@ private IEnumerable> ExecuteStep(ThreadRunStepModel step, private async Task ProcessFunctionStepAsync(string callId, ThreadRunStepModel.FunctionDetailsModel functionDetails, CancellationToken cancellationToken) { var result = await InvokeFunctionCallAsync().ConfigureAwait(false); - var toolResult = result as string ?? JsonSerializer.Serialize(result); return new ToolResultModel { CallId = callId, - Output = toolResult!, + Output = ParseFunctionResult(result), }; async Task InvokeFunctionCallAsync() @@ -191,4 +190,19 @@ async Task InvokeFunctionCallAsync() return result.GetValue() ?? 
string.Empty; } } + + private static string ParseFunctionResult(object result) + { + if (result is string stringResult) + { + return stringResult; + } + + if (result is ChatMessageContent messageResult) + { + return messageResult.Content ?? JsonSerializer.Serialize(messageResult); + } + + return JsonSerializer.Serialize(result); + } } diff --git a/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/Experimental.Orchestration.Flow.IntegrationTests.csproj b/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/Experimental.Orchestration.Flow.IntegrationTests.csproj index a3f5a93a7013..6fdfb01ffa75 100644 --- a/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/Experimental.Orchestration.Flow.IntegrationTests.csproj +++ b/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/Experimental.Orchestration.Flow.IntegrationTests.csproj @@ -28,7 +28,7 @@ - + diff --git a/dotnet/src/Extensions/PromptTemplates.Handlebars/PromptTemplates.Handlebars.csproj b/dotnet/src/Extensions/PromptTemplates.Handlebars/PromptTemplates.Handlebars.csproj index aa6f9eb848c8..d5e3b2fc9e4b 100644 --- a/dotnet/src/Extensions/PromptTemplates.Handlebars/PromptTemplates.Handlebars.csproj +++ b/dotnet/src/Extensions/PromptTemplates.Handlebars/PromptTemplates.Handlebars.csproj @@ -9,6 +9,10 @@ true + + rc + + diff --git a/dotnet/src/Functions/Functions.Prompty.UnitTests/Functions.Prompty.UnitTests.csproj b/dotnet/src/Functions/Functions.Prompty.UnitTests/Functions.Prompty.UnitTests.csproj index b730d1c27025..74e77f9544fa 100644 --- a/dotnet/src/Functions/Functions.Prompty.UnitTests/Functions.Prompty.UnitTests.csproj +++ b/dotnet/src/Functions/Functions.Prompty.UnitTests/Functions.Prompty.UnitTests.csproj @@ -27,8 +27,8 @@ - + diff --git a/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTest.cs b/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTest.cs index 148e5a1d28b2..a8fb27bb3a52 100644 --- a/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTest.cs 
+++ b/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTest.cs @@ -60,8 +60,8 @@ public void ChatPromptyShouldSupportCreatingOpenAIExecutionSettings() // Assert Assert.NotNull(executionSettings); Assert.Equal("gpt-35-turbo", executionSettings.ModelId); - Assert.Equal(1.0, executionSettings.Temperature); - Assert.Equal(1.0, executionSettings.TopP); + Assert.Null(executionSettings.Temperature); + Assert.Null(executionSettings.TopP); Assert.Null(executionSettings.StopSequences); Assert.Null(executionSettings.ResponseFormat); Assert.Null(executionSettings.TokenSelectionBiases); diff --git a/dotnet/src/Functions/Functions.Prompty.UnitTests/TestData/chatJsonObject.prompty b/dotnet/src/Functions/Functions.Prompty.UnitTests/TestData/chatJsonObject.prompty index a6be798dbf1a..ba095afeebfc 100644 --- a/dotnet/src/Functions/Functions.Prompty.UnitTests/TestData/chatJsonObject.prompty +++ b/dotnet/src/Functions/Functions.Prompty.UnitTests/TestData/chatJsonObject.prompty @@ -11,6 +11,7 @@ model: parameters: temperature: 0.0 max_tokens: 3000 + top_p: 1.0 response_format: type: json_object diff --git a/dotnet/src/Functions/Functions.UnitTests/Functions.UnitTests.csproj b/dotnet/src/Functions/Functions.UnitTests/Functions.UnitTests.csproj index 178dd4860a24..95f3ddc30b1d 100644 --- a/dotnet/src/Functions/Functions.UnitTests/Functions.UnitTests.csproj +++ b/dotnet/src/Functions/Functions.UnitTests/Functions.UnitTests.csproj @@ -54,7 +54,7 @@ - + diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationTests.cs index 467d951a902f..0a9099a34d8e 100644 --- a/dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationTests.cs +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationTests.cs @@ -8,6 +8,7 @@ using Microsoft.Extensions.DependencyInjection; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.ChatCompletion; +using 
Microsoft.SemanticKernel.Connectors.AzureOpenAI; using Microsoft.SemanticKernel.Connectors.OpenAI; using Microsoft.SemanticKernel.Plugins.OpenApi; using Microsoft.SemanticKernel.TextGeneration; @@ -568,7 +569,7 @@ public void ItBuildsServicesIntoKernel() { var builder = Kernel.CreateBuilder() .AddOpenAIChatCompletion(modelId: "abcd", apiKey: "efg", serviceId: "openai") - .AddAzureOpenAITextGeneration(deploymentName: "hijk", modelId: "qrs", endpoint: "https://lmnop", apiKey: "tuv", serviceId: "azureopenai"); + .AddAzureOpenAIChatCompletion(deploymentName: "hijk", modelId: "qrs", endpoint: "https://lmnop", apiKey: "tuv", serviceId: "azureopenai"); builder.Services.AddSingleton(CultureInfo.InvariantCulture); builder.Services.AddSingleton(CultureInfo.CurrentCulture); @@ -577,10 +578,10 @@ public void ItBuildsServicesIntoKernel() Kernel kernel = builder.Build(); Assert.IsType(kernel.GetRequiredService("openai")); - Assert.IsType(kernel.GetRequiredService("azureopenai")); + Assert.IsType(kernel.GetRequiredService("azureopenai")); Assert.Equal(2, kernel.GetAllServices().Count()); - Assert.Single(kernel.GetAllServices()); + Assert.Equal(2, kernel.GetAllServices().Count()); Assert.Equal(3, kernel.GetAllServices().Count()); } diff --git a/dotnet/src/Functions/Functions.Yaml/Functions.Yaml.csproj b/dotnet/src/Functions/Functions.Yaml/Functions.Yaml.csproj index dafc4377b0e0..4b4a5176cb36 100644 --- a/dotnet/src/Functions/Functions.Yaml/Functions.Yaml.csproj +++ b/dotnet/src/Functions/Functions.Yaml/Functions.Yaml.csproj @@ -8,6 +8,10 @@ true + + rc + + diff --git a/dotnet/src/IntegrationTests/Agents/ChatCompletionAgentTests.cs b/dotnet/src/IntegrationTests/Agents/ChatCompletionAgentTests.cs index 4fd99b717b5e..33605eed8d93 100644 --- a/dotnet/src/IntegrationTests/Agents/ChatCompletionAgentTests.cs +++ b/dotnet/src/IntegrationTests/Agents/ChatCompletionAgentTests.cs @@ -5,20 +5,18 @@ using System.Threading.Tasks; using Microsoft.Extensions.Configuration; using 
Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.ChatCompletion; using Microsoft.SemanticKernel.Connectors.OpenAI; using SemanticKernel.IntegrationTests.TestSettings; using Xunit; -using Xunit.Abstractions; -namespace SemanticKernel.IntegrationTests.Agents.OpenAI; +namespace SemanticKernel.IntegrationTests.Agents; #pragma warning disable xUnit1004 // Contains test methods used in manual verification. Disable warning for this file only. -public sealed class ChatCompletionAgentTests(ITestOutputHelper output) : IDisposable +public sealed class ChatCompletionAgentTests() { private readonly IKernelBuilder _kernelBuilder = Kernel.CreateBuilder(); private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() @@ -42,8 +40,6 @@ public async Task AzureChatCompletionAgentAsync(string input, string expectedAns KernelPlugin plugin = KernelPluginFactory.CreateFromType(); - this._kernelBuilder.Services.AddSingleton(this._logger); - this._kernelBuilder.AddAzureOpenAIChatCompletion( configuration.ChatDeploymentName!, configuration.Endpoint, @@ -94,15 +90,6 @@ public async Task AzureChatCompletionAgentAsync(string input, string expectedAns Assert.Contains(expectedAnswerContains, messages.Single().Content, StringComparison.OrdinalIgnoreCase); } - private readonly XunitLogger _logger = new(output); - private readonly RedirectOutput _testOutputHelper = new(output); - - public void Dispose() - { - this._logger.Dispose(); - this._testOutputHelper.Dispose(); - } - public sealed class MenuPlugin { [KernelFunction, Description("Provides a list of specials from the menu.")] diff --git a/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs b/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs index 20d6dcad9146..0dc1ae952c20 100644 --- a/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs +++ 
b/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs @@ -4,23 +4,19 @@ using System.Text; using System.Threading.Tasks; using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; using SemanticKernel.IntegrationTests.TestSettings; using Xunit; -using Xunit.Abstractions; -namespace SemanticKernel.IntegrationTests.Agents.OpenAI; +namespace SemanticKernel.IntegrationTests.Agents; #pragma warning disable xUnit1004 // Contains test methods used in manual verification. Disable warning for this file only. -public sealed class OpenAIAssistantAgentTests(ITestOutputHelper output) : IDisposable +public sealed class OpenAIAssistantAgentTests { - private readonly IKernelBuilder _kernelBuilder = Kernel.CreateBuilder(); private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) @@ -36,12 +32,12 @@ public sealed class OpenAIAssistantAgentTests(ITestOutputHelper output) : IDispo [InlineData("What is the special soup?", "Clam Chowder")] public async Task OpenAIAssistantAgentTestAsync(string input, string expectedAnswerContains) { - var openAIConfiguration = this._configuration.GetSection("OpenAI").Get(); - Assert.NotNull(openAIConfiguration); + OpenAIConfiguration openAISettings = this._configuration.GetSection("OpenAI").Get()!; + Assert.NotNull(openAISettings); await this.ExecuteAgentAsync( - new(openAIConfiguration.ApiKey), - openAIConfiguration.ModelId, + OpenAIClientProvider.ForOpenAI(openAISettings.ApiKey), + openAISettings.ModelId, input, expectedAnswerContains); } @@ -50,7 +46,7 @@ await this.ExecuteAgentAsync( /// Integration test for using 
function calling /// and targeting Azure OpenAI services. /// - [Theory(Skip = "No supported endpoint configured.")] + [Theory/*(Skip = "No supported endpoint configured.")*/] [InlineData("What is the special soup?", "Clam Chowder")] public async Task AzureOpenAIAssistantAgentAsync(string input, string expectedAnswerContains) { @@ -58,22 +54,20 @@ public async Task AzureOpenAIAssistantAgentAsync(string input, string expectedAn Assert.NotNull(azureOpenAIConfiguration); await this.ExecuteAgentAsync( - new(azureOpenAIConfiguration.ApiKey, azureOpenAIConfiguration.Endpoint), + OpenAIClientProvider.ForAzureOpenAI(azureOpenAIConfiguration.ApiKey, new Uri(azureOpenAIConfiguration.Endpoint)), azureOpenAIConfiguration.ChatDeploymentName!, input, expectedAnswerContains); } private async Task ExecuteAgentAsync( - OpenAIAssistantConfiguration config, + OpenAIClientProvider config, string modelName, string input, string expected) { // Arrange - this._kernelBuilder.Services.AddSingleton(this._logger); - - Kernel kernel = this._kernelBuilder.Build(); + Kernel kernel = new(); KernelPlugin plugin = KernelPluginFactory.CreateFromType(); kernel.Plugins.Add(plugin); @@ -82,10 +76,9 @@ private async Task ExecuteAgentAsync( await OpenAIAssistantAgent.CreateAsync( kernel, config, - new() + new(modelName) { Instructions = "Answer questions about the menu.", - ModelId = modelName, }); AgentGroupChat chat = new(); @@ -102,15 +95,6 @@ await OpenAIAssistantAgent.CreateAsync( Assert.Contains(expected, builder.ToString(), StringComparison.OrdinalIgnoreCase); } - private readonly XunitLogger _logger = new(output); - private readonly RedirectOutput _testOutputHelper = new(output); - - public void Dispose() - { - this._logger.Dispose(); - this._testOutputHelper.Dispose(); - } - public sealed class MenuPlugin { [KernelFunction, Description("Provides a list of specials from the menu.")] diff --git a/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIAudioToTextTests.cs 
b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIAudioToTextTests.cs new file mode 100644 index 000000000000..e155f6159c9a --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIAudioToTextTests.cs @@ -0,0 +1,52 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.IO; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.AudioToText; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using SemanticKernel.IntegrationTests.TestSettings; +using xRetry; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.AzureOpenAI; + +public sealed class AzureOpenAIAudioToTextTests() +{ + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + [RetryFact] + public async Task AzureOpenAIAudioToTextTestAsync() + { + // Arrange + const string Filename = "test_audio.wav"; + + AzureOpenAIConfiguration? 
azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAIAudioToText").Get(); + Assert.NotNull(azureOpenAIConfiguration); + + var kernel = Kernel.CreateBuilder() + .AddAzureOpenAIAudioToText( + azureOpenAIConfiguration.DeploymentName, + azureOpenAIConfiguration.Endpoint, + azureOpenAIConfiguration.ApiKey) + .Build(); + + var service = kernel.GetRequiredService(); + + await using Stream audio = File.OpenRead($"./TestData/{Filename}"); + var audioData = await BinaryData.FromStreamAsync(audio); + + // Act + var result = await service.GetTextContentAsync(new AudioContent(audioData, mimeType: "audio/wav"), new OpenAIAudioToTextExecutionSettings(Filename)); + + // Assert + Assert.Contains("The sun rises in the east and sets in the west.", result.Text, StringComparison.OrdinalIgnoreCase); + } +} diff --git a/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletionTests.cs b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletionTests.cs new file mode 100644 index 000000000000..7d47ee0f45e0 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletionTests.cs @@ -0,0 +1,273 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Net; +using System.Net.Http; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Http.Resilience; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using OpenAI.Chat; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.AzureOpenAI; + +#pragma warning disable xUnit1004 // Contains test methods used in manual verification. Disable warning for this file only. 
+ +public sealed class AzureOpenAIChatCompletionTests : BaseIntegrationTest +{ + [Fact] + //[Fact(Skip = "Skipping while we investigate issue with GitHub actions.")] + public async Task ItCanUseAzureOpenAiChatForTextGenerationAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var func = kernel.CreateFunctionFromPrompt( + "List the two planets after '{{$input}}', excluding moons, using bullet points.", + new AzureOpenAIPromptExecutionSettings()); + + // Act + var result = await func.InvokeAsync(kernel, new() { [InputParameterName] = "Jupiter" }); + + // Assert + Assert.NotNull(result); + Assert.Contains("Saturn", result.GetValue(), StringComparison.InvariantCultureIgnoreCase); + Assert.Contains("Uranus", result.GetValue(), StringComparison.InvariantCultureIgnoreCase); + } + + [Fact] + public async Task AzureOpenAIStreamingTestAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var plugins = TestHelpers.ImportSamplePlugins(kernel, "ChatPlugin"); + + StringBuilder fullResult = new(); + + var prompt = "Where is the most famous fish market in Seattle, Washington, USA?"; + + // Act + await foreach (var content in kernel.InvokeStreamingAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt })) + { + fullResult.Append(content); + } + + // Assert + Assert.Contains("Pike Place", fullResult.ToString(), StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task AzureOpenAIHttpRetryPolicyTestAsync() + { + // Arrange + List statusCodes = []; + + var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); + + var kernelBuilder = Kernel.CreateBuilder(); + + kernelBuilder.AddAzureOpenAIChatCompletion( + deploymentName: azureOpenAIConfiguration!.ChatDeploymentName!, + modelId: azureOpenAIConfiguration.ChatModelId, + endpoint: azureOpenAIConfiguration.Endpoint, + apiKey: "INVALID_KEY"); + + kernelBuilder.Services.ConfigureHttpClientDefaults(c => + { + // Use a standard 
resiliency policy, augmented to retry on 401 Unauthorized for this example + c.AddStandardResilienceHandler().Configure(o => + { + o.Retry.ShouldHandle = args => ValueTask.FromResult(args.Outcome.Result?.StatusCode is HttpStatusCode.Unauthorized); + o.Retry.OnRetry = args => + { + statusCodes.Add(args.Outcome.Result?.StatusCode); + return ValueTask.CompletedTask; + }; + }); + }); + + var target = kernelBuilder.Build(); + + var plugins = TestHelpers.ImportSamplePlugins(target, "SummarizePlugin"); + + var prompt = "Where is the most famous fish market in Seattle, Washington, USA?"; + + // Act + var exception = await Assert.ThrowsAsync(() => target.InvokeAsync(plugins["SummarizePlugin"]["Summarize"], new() { [InputParameterName] = prompt })); + + // Assert + Assert.All(statusCodes, s => Assert.Equal(HttpStatusCode.Unauthorized, s)); + Assert.Equal(HttpStatusCode.Unauthorized, ((HttpOperationException)exception).StatusCode); + } + + [Fact] + public async Task AzureOpenAIShouldReturnMetadataAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var plugins = TestHelpers.ImportSamplePlugins(kernel, "FunPlugin"); + + // Act + var result = await kernel.InvokeAsync(plugins["FunPlugin"]["Limerick"]); + + // Assert + Assert.NotNull(result.Metadata); + + // Usage + Assert.True(result.Metadata.TryGetValue("Usage", out object? 
usageObject)); + Assert.NotNull(usageObject); + + var jsonObject = JsonSerializer.SerializeToElement(usageObject); + Assert.True(jsonObject.TryGetProperty("InputTokens", out JsonElement promptTokensJson)); + Assert.True(promptTokensJson.TryGetInt32(out int promptTokens)); + Assert.NotEqual(0, promptTokens); + + Assert.True(jsonObject.TryGetProperty("OutputTokens", out JsonElement completionTokensJson)); + Assert.True(completionTokensJson.TryGetInt32(out int completionTokens)); + Assert.NotEqual(0, completionTokens); + } + + [Theory(Skip = "This test is for manual verification.")] + [InlineData("\n")] + [InlineData("\r\n")] + public async Task CompletionWithDifferentLineEndingsAsync(string lineEnding) + { + // Arrange + var prompt = + "Given a json input and a request. Apply the request on the json input and return the result. " + + $"Put the result in between tags{lineEnding}" + + $$"""Input:{{lineEnding}}{"name": "John", "age": 30}{{lineEnding}}{{lineEnding}}Request:{{lineEnding}}name"""; + + var kernel = this.CreateAndInitializeKernel(); + + var plugins = TestHelpers.ImportSamplePlugins(kernel, "ChatPlugin"); + + // Act + FunctionResult actual = await kernel.InvokeAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt }); + + // Assert + Assert.Contains("John", actual.GetValue(), StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task ChatSystemPromptIsNotIgnoredAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var settings = new AzureOpenAIPromptExecutionSettings { ChatSystemPrompt = "Reply \"I don't know\" to every question." 
}; + + // Act + var result = await kernel.InvokePromptAsync("Where is the most famous fish market in Seattle, Washington, USA?", new(settings)); + + // Assert + Assert.Contains("I don't know", result.ToString(), StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task SemanticKernelVersionHeaderIsSentAsync() + { + // Arrange + using var defaultHandler = new HttpClientHandler(); + using var httpHeaderHandler = new HttpHeaderHandler(defaultHandler); + using var httpClient = new HttpClient(httpHeaderHandler); + + var kernel = this.CreateAndInitializeKernel(httpClient); + + // Act + var result = await kernel.InvokePromptAsync("Where is the most famous fish market in Seattle, Washington, USA?"); + + // Assert + Assert.NotNull(httpHeaderHandler.RequestHeaders); + Assert.True(httpHeaderHandler.RequestHeaders.TryGetValues("Semantic-Kernel-Version", out var values)); + } + + //[Theory(Skip = "This test is for manual verification.")] + [Theory] + [InlineData(null, null)] + [InlineData(false, null)] + [InlineData(true, 2)] + [InlineData(true, 5)] + public async Task LogProbsDataIsReturnedWhenRequestedAsync(bool? logprobs, int? topLogprobs) + { + // Arrange + var settings = new AzureOpenAIPromptExecutionSettings { Logprobs = logprobs, TopLogprobs = topLogprobs }; + + var kernel = this.CreateAndInitializeKernel(); + + // Act + var result = await kernel.InvokePromptAsync("Hi, can you help me today?", new(settings)); + + var logProbabilityInfo = result.Metadata?["ContentTokenLogProbabilities"] as IReadOnlyList; + + // Assert + Assert.NotNull(logProbabilityInfo); + + if (logprobs is true) + { + Assert.NotNull(logProbabilityInfo); + Assert.Equal(topLogprobs, logProbabilityInfo[0].TopLogProbabilities.Count); + } + else + { + Assert.Empty(logProbabilityInfo); + } + } + + #region internals + + private Kernel CreateAndInitializeKernel(HttpClient? 
httpClient = null) + { + var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); + Assert.NotNull(azureOpenAIConfiguration); + Assert.NotNull(azureOpenAIConfiguration.ChatDeploymentName); + Assert.NotNull(azureOpenAIConfiguration.ApiKey); + Assert.NotNull(azureOpenAIConfiguration.Endpoint); + Assert.NotNull(azureOpenAIConfiguration.ServiceId); + + var kernelBuilder = base.CreateKernelBuilder(); + + kernelBuilder.AddAzureOpenAIChatCompletion( + deploymentName: azureOpenAIConfiguration.ChatDeploymentName, + modelId: azureOpenAIConfiguration.ChatModelId, + endpoint: azureOpenAIConfiguration.Endpoint, + apiKey: azureOpenAIConfiguration.ApiKey, + serviceId: azureOpenAIConfiguration.ServiceId, + httpClient: httpClient); + + return kernelBuilder.Build(); + } + + private const string InputParameterName = "input"; + + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + private sealed class HttpHeaderHandler(HttpMessageHandler innerHandler) : DelegatingHandler(innerHandler) + { + public System.Net.Http.Headers.HttpRequestHeaders? 
RequestHeaders { get; private set; } + + protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + this.RequestHeaders = request.Headers; + return await base.SendAsync(request, cancellationToken); + } + } + + #endregion +} diff --git a/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletion_FunctionCallingTests.cs b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletion_FunctionCallingTests.cs new file mode 100644 index 000000000000..aec7320867d2 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletion_FunctionCallingTests.cs @@ -0,0 +1,942 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; +using System.Text.Json; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using OpenAI.Chat; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.AzureOpenAI; + +public sealed class AzureOpenAIChatCompletionFunctionCallingTests : BaseIntegrationTest +{ + [Fact] + public async Task CanAutoInvokeKernelFunctionsAsync() + { + // Arrange + var invokedFunctions = new List(); + + var filter = new FakeFunctionFilter(async (context, next) => + { + invokedFunctions.Add($"{context.Function.Name}({string.Join(", ", context.Arguments)})"); + await next(context); + }); + + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + kernel.FunctionInvocationFilters.Add(filter); + + AzureOpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + // Act + var result = await 
kernel.InvokePromptAsync("Given the current time of day and weather, what is the likely color of the sky in Boston?", new(settings)); + + // Assert + Assert.Contains("GetCurrentUtcTime()", invokedFunctions); + Assert.Contains("Get_Weather_For_City([cityName, Boston])", invokedFunctions); + } + + [Fact] + public async Task CanAutoInvokeKernelFunctionsStreamingAsync() + { + // Arrange + var invokedFunctions = new List(); + + var filter = new FakeFunctionFilter(async (context, next) => + { + invokedFunctions.Add($"{context.Function.Name}({string.Join(", ", context.Arguments)})"); + await next(context); + }); + + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + kernel.FunctionInvocationFilters.Add(filter); + + AzureOpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + var stringBuilder = new StringBuilder(); + + // Act + await foreach (var update in kernel.InvokePromptStreamingAsync("Given the current time of day and weather, what is the likely color of the sky in Boston?", new(settings))) + { + stringBuilder.Append(update); + } + + // Assert + Assert.Contains("rain", stringBuilder.ToString(), StringComparison.InvariantCulture); + Assert.Contains("GetCurrentUtcTime()", invokedFunctions); + Assert.Contains("Get_Weather_For_City([cityName, Boston])", invokedFunctions); + } + + [Fact] + public async Task CanAutoInvokeKernelFunctionsWithComplexTypeParametersAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + + AzureOpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + // Act + var result = await kernel.InvokePromptAsync("What is the current temperature in Dublin, Ireland, in Fahrenheit?", new(settings)); + + // Assert + Assert.NotNull(result); + Assert.Contains("42.8", result.GetValue(), StringComparison.InvariantCulture); // The WeatherPlugin always returns 42.8 for Dublin, 
Ireland. + } + + [Fact] + public async Task CanAutoInvokeKernelFunctionsWithPrimitiveTypeParametersAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + + AzureOpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + // Act + var result = await kernel.InvokePromptAsync("Convert 50 degrees Fahrenheit to Celsius.", new(settings)); + + // Assert + Assert.NotNull(result); + Assert.Contains("10", result.GetValue(), StringComparison.InvariantCulture); + } + + [Fact] + public async Task CanAutoInvokeKernelFunctionsWithEnumTypeParametersAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + + AzureOpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + // Act + var result = await kernel.InvokePromptAsync("Given the current time of day and weather, what is the likely color of the sky in Boston?", new(settings)); + + // Assert + Assert.NotNull(result); + Assert.Contains("rain", result.GetValue(), StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task CanAutoInvokeKernelFunctionFromPromptAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var promptFunction = KernelFunctionFactory.CreateFromPrompt( + "Your role is always to return this text - 'A Game-Changer for the Transportation Industry'. 
Don't ask for more details or context.", + functionName: "FindLatestNews", + description: "Searches for the latest news."); + + kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions( + "NewsProvider", + "Delivers up-to-date news content.", + [promptFunction])); + + AzureOpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + // Act + var result = await kernel.InvokePromptAsync("Show me the latest news as they are.", new(settings)); + + // Assert + Assert.NotNull(result); + Assert.Contains("Transportation", result.GetValue(), StringComparison.InvariantCultureIgnoreCase); + } + + [Fact] + public async Task CanAutoInvokeKernelFunctionFromPromptStreamingAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var promptFunction = KernelFunctionFactory.CreateFromPrompt( + "Your role is always to return this text - 'A Game-Changer for the Transportation Industry'. Don't ask for more details or context.", + functionName: "FindLatestNews", + description: "Searches for the latest news."); + + kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions( + "NewsProvider", + "Delivers up-to-date news content.", + [promptFunction])); + + AzureOpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + // Act + var streamingResult = kernel.InvokePromptStreamingAsync("Show me the latest news as they are.", new(settings)); + + var builder = new StringBuilder(); + + await foreach (var update in streamingResult) + { + builder.Append(update.ToString()); + } + + var result = builder.ToString(); + + // Assert + Assert.NotNull(result); + Assert.Contains("Transportation", result, StringComparison.InvariantCultureIgnoreCase); + } + + [Fact] + public async Task ConnectorSpecificChatMessageContentClassesCanBeUsedForManualFunctionCallingAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + + var 
chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); + + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; + + var sut = kernel.GetRequiredService(); + + // Act + var result = await sut.GetChatMessageContentAsync(chatHistory, settings, kernel); + + // Current way of handling function calls manually using connector specific chat message content class. + var toolCalls = ((OpenAIChatMessageContent)result).ToolCalls.OfType().ToList(); + + while (toolCalls.Count > 0) + { + // Adding LLM function call request to chat history + chatHistory.Add(result); + + // Iterating over the requested function calls and invoking them + foreach (var toolCall in toolCalls) + { + string content = kernel.Plugins.TryGetFunctionAndArguments(toolCall, out KernelFunction? function, out KernelArguments? arguments) ? + JsonSerializer.Serialize((await function.InvokeAsync(kernel, arguments)).GetValue()) : + "Unable to find function. 
Please try again!"; + + // Adding the result of the function call to the chat history + chatHistory.Add(new ChatMessageContent( + AuthorRole.Tool, + content, + metadata: new Dictionary(1) { { OpenAIChatMessageContent.ToolIdProperty, toolCall.Id } })); + } + + // Sending the functions invocation results back to the LLM to get the final response + result = await sut.GetChatMessageContentAsync(chatHistory, settings, kernel); + toolCalls = ((OpenAIChatMessageContent)result).ToolCalls.OfType().ToList(); + } + + // Assert + Assert.Contains("rain", result.Content, StringComparison.InvariantCultureIgnoreCase); + } + + [Fact] + public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForManualFunctionCallingAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); + + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; + + var sut = kernel.GetRequiredService(); + + // Act + var messageContent = await sut.GetChatMessageContentAsync(chatHistory, settings, kernel); + + var functionCalls = FunctionCallContent.GetFunctionCalls(messageContent).ToArray(); + + while (functionCalls.Length != 0) + { + // Adding function call from LLM to chat history + chatHistory.Add(messageContent); + + // Iterating over the requested function calls and invoking them + foreach (var functionCall in functionCalls) + { + var result = await functionCall.InvokeAsync(kernel); + + chatHistory.Add(result.ToChatMessage()); + } + + // Sending the functions invocation results to the LLM to get the final response + messageContent = await sut.GetChatMessageContentAsync(chatHistory, settings, kernel); + functionCalls = FunctionCallContent.GetFunctionCalls(messageContent).ToArray(); + } + + // Assert + Assert.Contains("rain", 
messageContent.Content, StringComparison.InvariantCultureIgnoreCase); + } + + [Fact] + public async Task ConnectorAgnosticFunctionCallingModelClassesCanPassFunctionExceptionToConnectorAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + + var chatHistory = new ChatHistory(); + chatHistory.AddSystemMessage("Add the \"Error\" keyword to the response, if you are unable to answer a question or an error has happen."); + chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); + + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; + + var completionService = kernel.GetRequiredService(); + + // Act + var messageContent = await completionService.GetChatMessageContentAsync(chatHistory, settings, kernel); + + var functionCalls = FunctionCallContent.GetFunctionCalls(messageContent).ToArray(); + + while (functionCalls.Length != 0) + { + // Adding function call from LLM to chat history + chatHistory.Add(messageContent); + + // Iterating over the requested function calls and invoking them + foreach (var functionCall in functionCalls) + { + // Simulating an exception + var exception = new OperationCanceledException("The operation was canceled due to timeout."); + + chatHistory.Add(new FunctionResultContent(functionCall, exception).ToChatMessage()); + } + + // Sending the functions execution results back to the LLM to get the final response + messageContent = await completionService.GetChatMessageContentAsync(chatHistory, settings, kernel); + functionCalls = FunctionCallContent.GetFunctionCalls(messageContent).ToArray(); + } + + // Assert + Assert.NotNull(messageContent.Content); + TestHelpers.AssertChatErrorExcuseMessage(messageContent.Content); + } + + [Fact] + public async Task ConnectorAgnosticFunctionCallingModelClassesSupportSimulatedFunctionCallsAsync() + { + // Arrange + var kernel = 
this.CreateAndInitializeKernel(importHelperPlugin: true); + + var chatHistory = new ChatHistory(); + chatHistory.AddSystemMessage("if there's a tornado warning, please add the 'tornado' keyword to the response."); + chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); + + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; + + var completionService = kernel.GetRequiredService(); + + // Act + var messageContent = await completionService.GetChatMessageContentAsync(chatHistory, settings, kernel); + + var functionCalls = FunctionCallContent.GetFunctionCalls(messageContent).ToArray(); + + while (functionCalls.Length > 0) + { + // Adding function call from LLM to chat history + chatHistory.Add(messageContent); + + // Iterating over the requested function calls and invoking them + foreach (var functionCall in functionCalls) + { + var result = await functionCall.InvokeAsync(kernel); + + chatHistory.AddMessage(AuthorRole.Tool, [result]); + } + + // Adding a simulated function call to the connector response message + var simulatedFunctionCall = new FunctionCallContent("weather-alert", id: "call_123"); + messageContent.Items.Add(simulatedFunctionCall); + + // Adding a simulated function result to chat history + var simulatedFunctionResult = "A Tornado Watch has been issued, with potential for severe thunderstorms causing unusual sky colors like green, yellow, or dark gray. 
Stay informed and follow safety instructions from authorities."; + chatHistory.Add(new FunctionResultContent(simulatedFunctionCall, simulatedFunctionResult).ToChatMessage()); + + // Sending the functions invocation results back to the LLM to get the final response + messageContent = await completionService.GetChatMessageContentAsync(chatHistory, settings, kernel); + functionCalls = FunctionCallContent.GetFunctionCalls(messageContent).ToArray(); + } + + // Assert + Assert.Contains("tornado", messageContent.Content, StringComparison.InvariantCultureIgnoreCase); + } + + [Fact] + public async Task ItFailsIfNoFunctionResultProvidedAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); + + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; + + var completionService = kernel.GetRequiredService(); + + // Act + var result = await completionService.GetChatMessageContentAsync(chatHistory, settings, kernel); + + chatHistory.Add(result); + + var exception = await Assert.ThrowsAsync(() => completionService.GetChatMessageContentAsync(chatHistory, settings, kernel)); + + // Assert + Assert.Contains("'tool_calls' must be followed by tool", exception.Message, StringComparison.InvariantCulture); + } + + [Fact] + public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForAutoFunctionCallingAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); + + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + var sut = 
kernel.GetRequiredService(); + + // Act + await sut.GetChatMessageContentAsync(chatHistory, settings, kernel); + + // Assert + var userMessage = chatHistory[0]; + Assert.Equal(AuthorRole.User, userMessage.Role); + + // LLM requested the functions to call. + var getParallelFunctionCallRequestMessage = chatHistory[1]; + Assert.Equal(AuthorRole.Assistant, getParallelFunctionCallRequestMessage.Role); + + // Parallel Function Calls in the same request + var functionCalls = getParallelFunctionCallRequestMessage.Items.OfType().ToArray(); + + ChatMessageContent getCurrentTimeFunctionCallResultMessage; + ChatMessageContent getWeatherForCityFunctionCallRequestMessage; + FunctionCallContent getWeatherForCityFunctionCallRequest; + FunctionCallContent getCurrentTimeFunctionCallRequest; + ChatMessageContent getWeatherForCityFunctionCallResultMessage; + + // Assert + // Non Parallel Tool Calling + if (functionCalls.Length == 1) + { + // LLM requested the current time. + getCurrentTimeFunctionCallRequest = functionCalls[0]; + + // Connector invoked the GetCurrentUtcTime function and added result to chat history. + getCurrentTimeFunctionCallResultMessage = chatHistory[2]; + + // LLM requested the weather for Boston. + getWeatherForCityFunctionCallRequestMessage = chatHistory[3]; + getWeatherForCityFunctionCallRequest = getWeatherForCityFunctionCallRequestMessage.Items.OfType().Single(); + + // Connector invoked the Get_Weather_For_City function and added result to chat history. + getWeatherForCityFunctionCallResultMessage = chatHistory[4]; + } + else // Parallel Tool Calling + { + // LLM requested the current time. + getCurrentTimeFunctionCallRequest = functionCalls[0]; + + // LLM requested the weather for Boston. + getWeatherForCityFunctionCallRequest = functionCalls[1]; + + // Connector invoked the GetCurrentUtcTime function and added result to chat history. 
+ getCurrentTimeFunctionCallResultMessage = chatHistory[2]; + + // Connector invoked the Get_Weather_For_City function and added result to chat history. + getWeatherForCityFunctionCallResultMessage = chatHistory[3]; + } + + Assert.Equal("GetCurrentUtcTime", getCurrentTimeFunctionCallRequest.FunctionName); + Assert.Equal("HelperFunctions", getCurrentTimeFunctionCallRequest.PluginName); + Assert.NotNull(getCurrentTimeFunctionCallRequest.Id); + + Assert.Equal("Get_Weather_For_City", getWeatherForCityFunctionCallRequest.FunctionName); + Assert.Equal("HelperFunctions", getWeatherForCityFunctionCallRequest.PluginName); + Assert.NotNull(getWeatherForCityFunctionCallRequest.Id); + + Assert.Equal(AuthorRole.Tool, getCurrentTimeFunctionCallResultMessage.Role); + Assert.Single(getCurrentTimeFunctionCallResultMessage.Items.OfType()); // Current function calling model adds TextContent item representing the result of the function call. + + var getCurrentTimeFunctionCallResult = getCurrentTimeFunctionCallResultMessage.Items.OfType().Single(); + // Connector invoked the GetCurrentUtcTime function and added result to chat history. + Assert.Equal("GetCurrentUtcTime", getCurrentTimeFunctionCallResult.FunctionName); + Assert.Equal("HelperFunctions", getCurrentTimeFunctionCallResult.PluginName); + Assert.Equal(getCurrentTimeFunctionCallRequest.Id, getCurrentTimeFunctionCallResult.CallId); + Assert.NotNull(getCurrentTimeFunctionCallResult.Result); + + Assert.Equal(AuthorRole.Tool, getWeatherForCityFunctionCallResultMessage.Role); + Assert.Single(getWeatherForCityFunctionCallResultMessage.Items.OfType()); // Current function calling model adds TextContent item representing the result of the function call. 
+ + var getWeatherForCityFunctionCallResult = getWeatherForCityFunctionCallResultMessage.Items.OfType().Single(); + Assert.Equal("Get_Weather_For_City", getWeatherForCityFunctionCallResult.FunctionName); + Assert.Equal("HelperFunctions", getWeatherForCityFunctionCallResult.PluginName); + Assert.Equal(getWeatherForCityFunctionCallRequest.Id, getWeatherForCityFunctionCallResult.CallId); + Assert.NotNull(getWeatherForCityFunctionCallResult.Result); + } + + [Fact] + public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForManualFunctionCallingForStreamingAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; + + var sut = kernel.GetRequiredService(); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); + + string? result = null; + + // Act + while (true) + { + AuthorRole? authorRole = null; + var fccBuilder = new FunctionCallContentBuilder(); + var textContent = new StringBuilder(); + + await foreach (var streamingContent in sut.GetStreamingChatMessageContentsAsync(chatHistory, settings, kernel)) + { + textContent.Append(streamingContent.Content); + authorRole ??= streamingContent.Role; + fccBuilder.Append(streamingContent); + } + + var functionCalls = fccBuilder.Build(); + if (functionCalls.Any()) + { + var fcContent = new ChatMessageContent(role: authorRole ?? 
default, content: null); + chatHistory.Add(fcContent); + + // Iterating over the requested function calls and invoking them + foreach (var functionCall in functionCalls) + { + fcContent.Items.Add(functionCall); + + var functionResult = await functionCall.InvokeAsync(kernel); + + chatHistory.Add(functionResult.ToChatMessage()); + } + + continue; + } + + result = textContent.ToString(); + break; + } + + // Assert + Assert.Contains("rain", result, StringComparison.InvariantCultureIgnoreCase); + } + + [Fact] + public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForAutoFunctionCallingForStreamingAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); + + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + var sut = kernel.GetRequiredService(); + + var result = new StringBuilder(); + + // Act + await foreach (var contentUpdate in sut.GetStreamingChatMessageContentsAsync(chatHistory, settings, kernel)) + { + result.Append(contentUpdate.Content); + } + + // Assert + var userMessage = chatHistory[0]; + Assert.Equal(AuthorRole.User, userMessage.Role); + + // LLM requested the functions to call. 
+ var getParallelFunctionCallRequestMessage = chatHistory[1]; + Assert.Equal(AuthorRole.Assistant, getParallelFunctionCallRequestMessage.Role); + + // Parallel Function Calls in the same request + var functionCalls = getParallelFunctionCallRequestMessage.Items.OfType().ToArray(); + + ChatMessageContent getCurrentTimeFunctionCallResultMessage; + ChatMessageContent getWeatherForCityFunctionCallRequestMessage; + FunctionCallContent getWeatherForCityFunctionCallRequest; + FunctionCallContent getCurrentTimeFunctionCallRequest; + ChatMessageContent getWeatherForCityFunctionCallResultMessage; + + // Assert + // Non Parallel Tool Calling + if (functionCalls.Length == 1) + { + // LLM requested the current time. + getCurrentTimeFunctionCallRequest = functionCalls[0]; + + // Connector invoked the GetCurrentUtcTime function and added result to chat history. + getCurrentTimeFunctionCallResultMessage = chatHistory[2]; + + // LLM requested the weather for Boston. + getWeatherForCityFunctionCallRequestMessage = chatHistory[3]; + getWeatherForCityFunctionCallRequest = getWeatherForCityFunctionCallRequestMessage.Items.OfType().Single(); + + // Connector invoked the Get_Weather_For_City function and added result to chat history. + getWeatherForCityFunctionCallResultMessage = chatHistory[4]; + } + else // Parallel Tool Calling + { + // LLM requested the current time. + getCurrentTimeFunctionCallRequest = functionCalls[0]; + + // LLM requested the weather for Boston. + getWeatherForCityFunctionCallRequest = functionCalls[1]; + + // Connector invoked the GetCurrentUtcTime function and added result to chat history. + getCurrentTimeFunctionCallResultMessage = chatHistory[2]; + + // Connector invoked the Get_Weather_For_City function and added result to chat history. 
+ getWeatherForCityFunctionCallResultMessage = chatHistory[3]; + } + + Assert.Equal("GetCurrentUtcTime", getCurrentTimeFunctionCallRequest.FunctionName); + Assert.Equal("HelperFunctions", getCurrentTimeFunctionCallRequest.PluginName); + Assert.NotNull(getCurrentTimeFunctionCallRequest.Id); + + Assert.Equal("Get_Weather_For_City", getWeatherForCityFunctionCallRequest.FunctionName); + Assert.Equal("HelperFunctions", getWeatherForCityFunctionCallRequest.PluginName); + Assert.NotNull(getWeatherForCityFunctionCallRequest.Id); + + Assert.Equal(AuthorRole.Tool, getCurrentTimeFunctionCallResultMessage.Role); + Assert.Single(getCurrentTimeFunctionCallResultMessage.Items.OfType()); // Current function calling model adds TextContent item representing the result of the function call. + + var getCurrentTimeFunctionCallResult = getCurrentTimeFunctionCallResultMessage.Items.OfType().Single(); + // Connector invoked the GetCurrentUtcTime function and added result to chat history. + Assert.Equal("GetCurrentUtcTime", getCurrentTimeFunctionCallResult.FunctionName); + Assert.Equal("HelperFunctions", getCurrentTimeFunctionCallResult.PluginName); + Assert.Equal(getCurrentTimeFunctionCallRequest.Id, getCurrentTimeFunctionCallResult.CallId); + Assert.NotNull(getCurrentTimeFunctionCallResult.Result); + + Assert.Equal(AuthorRole.Tool, getWeatherForCityFunctionCallResultMessage.Role); + Assert.Single(getWeatherForCityFunctionCallResultMessage.Items.OfType()); // Current function calling model adds TextContent item representing the result of the function call. 
+ + var getWeatherForCityFunctionCallResult = getWeatherForCityFunctionCallResultMessage.Items.OfType().Single(); + Assert.Equal("Get_Weather_For_City", getWeatherForCityFunctionCallResult.FunctionName); + Assert.Equal("HelperFunctions", getWeatherForCityFunctionCallResult.PluginName); + Assert.Equal(getWeatherForCityFunctionCallRequest.Id, getWeatherForCityFunctionCallResult.CallId); + Assert.NotNull(getWeatherForCityFunctionCallResult.Result); + } + + [Fact] + public async Task ConnectorAgnosticFunctionCallingModelClassesCanPassFunctionExceptionToConnectorForStreamingAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; + + var sut = kernel.GetRequiredService(); + + var chatHistory = new ChatHistory(); + chatHistory.AddSystemMessage("Add the \"Error\" keyword to the response, if you are unable to answer a question or an error has happen."); + chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); + + string? result = null; + + // Act + while (true) + { + AuthorRole? authorRole = null; + var fccBuilder = new FunctionCallContentBuilder(); + var textContent = new StringBuilder(); + + await foreach (var streamingContent in sut.GetStreamingChatMessageContentsAsync(chatHistory, settings, kernel)) + { + textContent.Append(streamingContent.Content); + authorRole ??= streamingContent.Role; + fccBuilder.Append(streamingContent); + } + + var functionCalls = fccBuilder.Build(); + if (functionCalls.Any()) + { + var fcContent = new ChatMessageContent(role: authorRole ?? 
default, content: null); + chatHistory.Add(fcContent); + + // Iterating over the requested function calls and invoking them + foreach (var functionCall in functionCalls) + { + fcContent.Items.Add(functionCall); + + // Simulating an exception + var exception = new OperationCanceledException("The operation was canceled due to timeout."); + + chatHistory.Add(new FunctionResultContent(functionCall, exception).ToChatMessage()); + } + + continue; + } + + result = textContent.ToString(); + break; + } + + // Assert + TestHelpers.AssertChatErrorExcuseMessage(result); + } + + [Fact] + public async Task ConnectorAgnosticFunctionCallingModelClassesSupportSimulatedFunctionCallsForStreamingAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; + + var sut = kernel.GetRequiredService(); + + var chatHistory = new ChatHistory(); + chatHistory.AddSystemMessage("if there's a tornado warning, please add the 'tornado' keyword to the response."); + chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); + + string? result = null; + + // Act + while (true) + { + AuthorRole? authorRole = null; + var fccBuilder = new FunctionCallContentBuilder(); + var textContent = new StringBuilder(); + + await foreach (var streamingContent in sut.GetStreamingChatMessageContentsAsync(chatHistory, settings, kernel)) + { + textContent.Append(streamingContent.Content); + authorRole ??= streamingContent.Role; + fccBuilder.Append(streamingContent); + } + + var functionCalls = fccBuilder.Build(); + if (functionCalls.Any()) + { + var fcContent = new ChatMessageContent(role: authorRole ?? 
default, content: null); + chatHistory.Add(fcContent); + + // Iterating over the requested function calls and invoking them + foreach (var functionCall in functionCalls) + { + fcContent.Items.Add(functionCall); + + var functionResult = await functionCall.InvokeAsync(kernel); + + chatHistory.Add(functionResult.ToChatMessage()); + } + + // Adding a simulated function call to the connector response message + var simulatedFunctionCall = new FunctionCallContent("weather-alert", id: "call_123"); + fcContent.Items.Add(simulatedFunctionCall); + + // Adding a simulated function result to chat history + var simulatedFunctionResult = "A Tornado Watch has been issued, with potential for severe thunderstorms causing unusual sky colors like green, yellow, or dark gray. Stay informed and follow safety instructions from authorities."; + chatHistory.Add(new FunctionResultContent(simulatedFunctionCall, simulatedFunctionResult).ToChatMessage()); + + continue; + } + + result = textContent.ToString(); + break; + } + + // Assert + Assert.Contains("tornado", result, StringComparison.InvariantCultureIgnoreCase); + } + + [Fact] + public async Task ItShouldSupportOldFunctionCallingModelSerializedIntoChatHistoryByPreviousVersionOfSKAsync() + { + // Arrange + var chatHistory = JsonSerializer.Deserialize(File.ReadAllText("./TestData/serializedChatHistoryV1_15_1.json")); + + // Remove connector-agnostic function-calling items to check if the old function-calling model, which relies on function information in metadata, is handled correctly. + foreach (var chatMessage in chatHistory!) + { + var index = 0; + while (index < chatMessage.Items.Count) + { + var item = chatMessage.Items[index]; + if (item is FunctionCallContent || item is FunctionResultContent) + { + chatMessage.Items.Remove(item); + continue; + } + index++; + } + } + + string? 
emailBody = null, emailRecipient = null; + + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + kernel.ImportPluginFromFunctions("EmailPlugin", [KernelFunctionFactory.CreateFromMethod((string body, string recipient) => { emailBody = body; emailRecipient = recipient; }, "SendEmail")]); + + // The deserialized chat history contains a list of function calls and the final answer to the question regarding the color of the sky in Boston. + chatHistory.AddUserMessage("Send the exact answer to my email: abc@domain.com"); + + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + // Act + var result = await kernel.GetRequiredService().GetChatMessageContentAsync(chatHistory, settings, kernel); + + // Assert + Assert.Equal("abc@domain.com", emailRecipient); + Assert.Contains("61\u00B0F", emailBody); + } + + [Fact] + public async Task ItShouldSupportNewFunctionCallingModelSerializedIntoChatHistoryByPreviousVersionOfSKAsync() + { + // Arrange + var chatHistory = JsonSerializer.Deserialize(File.ReadAllText("./TestData/serializedChatHistoryV1_15_1.json")); + + // Remove metadata related to the old function-calling model to check if the new model, which relies on function call content/result classes, is handled correctly. + foreach (var chatMessage in chatHistory!) + { + if (chatMessage.Metadata is not null) + { + var metadata = new Dictionary(chatMessage.Metadata); + metadata.Remove(OpenAIChatMessageContent.ToolIdProperty); + metadata.Remove("ChatResponseMessage.FunctionToolCalls"); + chatMessage.Metadata = metadata; + } + } + + string? 
emailBody = null, emailRecipient = null; + + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + kernel.ImportPluginFromFunctions("EmailPlugin", [KernelFunctionFactory.CreateFromMethod((string body, string recipient) => { emailBody = body; emailRecipient = recipient; }, "SendEmail")]); + + // The deserialized chat history contains a list of function calls and the final answer to the question regarding the color of the sky in Boston. + chatHistory.AddUserMessage("Send the exact answer to my email: abc@domain.com"); + + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + // Act + var result = await kernel.GetRequiredService().GetChatMessageContentAsync(chatHistory, settings, kernel); + + // Assert + Assert.Equal("abc@domain.com", emailRecipient); + Assert.Contains("61\u00B0F", emailBody); + } + + /// + /// This test verifies that the connector can handle the scenario where the assistant response message is added to the chat history. + /// The assistant response message with no function calls added to chat history caused the error: HTTP 400 (invalid_request_error:) [] should be non-empty - 'messages.3.tool_calls' + /// + [Fact] + public async Task AssistantResponseAddedToChatHistoryShouldBeHandledCorrectlyAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); + + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + var sut = kernel.GetRequiredService(); + + // Act + var assistanceResponse = await sut.GetChatMessageContentAsync(chatHistory, settings, kernel); + + chatHistory.Add(assistanceResponse); // Adding assistance response to chat history. 
+ chatHistory.AddUserMessage("Return only the color name."); + + await sut.GetChatMessageContentAsync(chatHistory, settings, kernel); + } + + private Kernel CreateAndInitializeKernel(bool importHelperPlugin = false) + { + var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); + Assert.NotNull(azureOpenAIConfiguration); + Assert.NotNull(azureOpenAIConfiguration.ChatDeploymentName); + Assert.NotNull(azureOpenAIConfiguration.ApiKey); + Assert.NotNull(azureOpenAIConfiguration.Endpoint); + + var kernelBuilder = base.CreateKernelBuilder(); + + kernelBuilder.AddAzureOpenAIChatCompletion( + deploymentName: azureOpenAIConfiguration.ChatDeploymentName, + modelId: azureOpenAIConfiguration.ChatModelId, + endpoint: azureOpenAIConfiguration.Endpoint, + apiKey: azureOpenAIConfiguration.ApiKey); + + var kernel = kernelBuilder.Build(); + + if (importHelperPlugin) + { + kernel.ImportPluginFromFunctions("HelperFunctions", + [ + kernel.CreateFunctionFromMethod(() => DateTime.UtcNow.ToString("R"), "GetCurrentUtcTime", "Retrieves the current time in UTC."), + kernel.CreateFunctionFromMethod((string cityName) => + { + return cityName switch + { + "Boston" => "61 and rainy", + _ => "31 and snowing", + }; + }, "Get_Weather_For_City", "Gets the current weather for the specified city"), + kernel.CreateFunctionFromMethod((WeatherParameters parameters) => + { + if (parameters.City.Name == "Dublin" && (parameters.City.Country == "Ireland" || parameters.City.Country == "IE")) + { + return Task.FromResult(42.8); // 42.8 Fahrenheit. 
+ } + + throw new NotSupportedException($"Weather in {parameters.City.Name} ({parameters.City.Country}) is not supported."); + }, "Get_Current_Temperature", "Get current temperature."), + kernel.CreateFunctionFromMethod((double temperatureInFahrenheit) => + { + double temperatureInCelsius = (temperatureInFahrenheit - 32) * 5 / 9; + return Task.FromResult(temperatureInCelsius); + }, "Convert_Temperature_From_Fahrenheit_To_Celsius", "Convert temperature from Fahrenheit to Celsius.") + ]); + } + + return kernel; + } + + public record WeatherParameters(City City); + + public class City + { + public string Name { get; set; } = string.Empty; + public string Country { get; set; } = string.Empty; + } + + private sealed class FakeFunctionFilter : IFunctionInvocationFilter + { + private readonly Func, Task>? _onFunctionInvocation; + + public FakeFunctionFilter( + Func, Task>? onFunctionInvocation = null) + { + this._onFunctionInvocation = onFunctionInvocation; + } + + public Task OnFunctionInvocationAsync(FunctionInvocationContext context, Func next) => + this._onFunctionInvocation?.Invoke(context, next) ?? Task.CompletedTask; + } + + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); +} diff --git a/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletion_NonStreamingTests.cs b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletion_NonStreamingTests.cs new file mode 100644 index 000000000000..a463410765f5 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletion_NonStreamingTests.cs @@ -0,0 +1,172 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Collections.Generic; +using System.Text.Json; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.TextGeneration; +using OpenAI.Chat; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.AzureOpenAI; + +#pragma warning disable xUnit1004 // Contains test methods used in manual verification. Disable warning for this file only. + +public sealed class AzureOpenAIChatCompletionNonStreamingTests : BaseIntegrationTest +{ + [Fact] + public async Task ChatCompletionShouldUseChatSystemPromptAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var chatCompletion = kernel.Services.GetRequiredService(); + + var settings = new AzureOpenAIPromptExecutionSettings { ChatSystemPrompt = "Reply \"I don't know\" to every question." }; + + // Act + var result = await chatCompletion.GetChatMessageContentAsync("What is the capital of France?", settings, kernel); + + // Assert + Assert.Contains("I don't know", result.Content); + } + + [Fact] + public async Task ChatCompletionShouldUseChatHistoryAndReturnMetadataAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var chatCompletion = kernel.Services.GetRequiredService(); + + var chatHistory = new ChatHistory("Reply \"I don't know\" to every question."); + chatHistory.AddUserMessage("What is the capital of France?"); + + // Act + var result = await chatCompletion.GetChatMessageContentAsync(chatHistory, null, kernel); + + // Assert + Assert.Contains("I don't know", result.Content); + Assert.NotNull(result.Metadata); + + Assert.True(result.Metadata.TryGetValue("Id", out object? id)); + Assert.NotNull(id); + + Assert.True(result.Metadata.TryGetValue("CreatedAt", out object? 
createdAt)); + Assert.NotNull(createdAt); + + Assert.True(result.Metadata.ContainsKey("SystemFingerprint")); + + Assert.True(result.Metadata.TryGetValue("Usage", out object? usageObject)); + Assert.NotNull(usageObject); + + var jsonObject = JsonSerializer.SerializeToElement(usageObject); + Assert.True(jsonObject.TryGetProperty("InputTokens", out JsonElement promptTokensJson)); + Assert.True(promptTokensJson.TryGetInt32(out int promptTokens)); + Assert.NotEqual(0, promptTokens); + + Assert.True(jsonObject.TryGetProperty("OutputTokens", out JsonElement completionTokensJson)); + Assert.True(completionTokensJson.TryGetInt32(out int completionTokens)); + Assert.NotEqual(0, completionTokens); + + Assert.True(result.Metadata.TryGetValue("FinishReason", out object? finishReason)); + Assert.Equal("Stop", finishReason); + + Assert.True(result.Metadata.TryGetValue("ContentTokenLogProbabilities", out object? logProbabilityInfo)); + Assert.Empty((logProbabilityInfo as IReadOnlyList)!); + } + + [Fact] + public async Task TextGenerationShouldUseChatSystemPromptAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var textGeneration = kernel.Services.GetRequiredService(); + + var settings = new AzureOpenAIPromptExecutionSettings { ChatSystemPrompt = "Reply \"I don't know\" to every question." }; + + // Act + var result = await textGeneration.GetTextContentAsync("What is the capital of France?", settings, kernel); + + // Assert + Assert.Contains("I don't know", result.Text); + } + + [Fact] + public async Task TextGenerationShouldReturnMetadataAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var textGeneration = kernel.Services.GetRequiredService(); + + // Act + var result = await textGeneration.GetTextContentAsync("Reply \"I don't know\" to every question. 
What is the capital of France?", null, kernel); + + // Assert + Assert.Contains("I don't know", result.Text); + Assert.NotNull(result.Metadata); + + Assert.True(result.Metadata.TryGetValue("Id", out object? id)); + Assert.NotNull(id); + + Assert.True(result.Metadata.TryGetValue("CreatedAt", out object? createdAt)); + Assert.NotNull(createdAt); + + Assert.True(result.Metadata.ContainsKey("SystemFingerprint")); + + Assert.True(result.Metadata.TryGetValue("Usage", out object? usageObject)); + Assert.NotNull(usageObject); + + var jsonObject = JsonSerializer.SerializeToElement(usageObject); + Assert.True(jsonObject.TryGetProperty("InputTokens", out JsonElement promptTokensJson)); + Assert.True(promptTokensJson.TryGetInt32(out int promptTokens)); + Assert.NotEqual(0, promptTokens); + + Assert.True(jsonObject.TryGetProperty("OutputTokens", out JsonElement completionTokensJson)); + Assert.True(completionTokensJson.TryGetInt32(out int completionTokens)); + Assert.NotEqual(0, completionTokens); + + Assert.True(result.Metadata.TryGetValue("FinishReason", out object? finishReason)); + Assert.Equal("Stop", finishReason); + + Assert.True(result.Metadata.TryGetValue("ContentTokenLogProbabilities", out object? 
logProbabilityInfo)); + Assert.Empty((logProbabilityInfo as IReadOnlyList)!); + } + + #region internals + + private Kernel CreateAndInitializeKernel() + { + var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); + Assert.NotNull(azureOpenAIConfiguration); + Assert.NotNull(azureOpenAIConfiguration.ChatDeploymentName); + Assert.NotNull(azureOpenAIConfiguration.ApiKey); + Assert.NotNull(azureOpenAIConfiguration.Endpoint); + + var kernelBuilder = base.CreateKernelBuilder(); + + kernelBuilder.AddAzureOpenAIChatCompletion( + deploymentName: azureOpenAIConfiguration.ChatDeploymentName, + modelId: azureOpenAIConfiguration.ChatModelId, + endpoint: azureOpenAIConfiguration.Endpoint, + apiKey: azureOpenAIConfiguration.ApiKey); + + return kernelBuilder.Build(); + } + + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + #endregion +} diff --git a/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletion_StreamingTests.cs b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletion_StreamingTests.cs new file mode 100644 index 000000000000..5fc0e7e0cad7 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletion_StreamingTests.cs @@ -0,0 +1,173 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Collections.Generic; +using System.Text; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.TextGeneration; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.AzureOpenAI; + +#pragma warning disable xUnit1004 // Contains test methods used in manual verification. Disable warning for this file only. + +public sealed class AzureOpenAIChatCompletionStreamingTests : BaseIntegrationTest +{ + [Fact] + public async Task ChatCompletionShouldUseChatSystemPromptAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var chatCompletion = kernel.Services.GetRequiredService(); + + var settings = new AzureOpenAIPromptExecutionSettings { ChatSystemPrompt = "Reply \"I don't know\" to every question." 
}; + + var stringBuilder = new StringBuilder(); + + // Act + await foreach (var update in chatCompletion.GetStreamingChatMessageContentsAsync("What is the capital of France?", settings, kernel)) + { + stringBuilder.Append(update.Content); + } + + // Assert + Assert.Contains("I don't know", stringBuilder.ToString()); + } + + [Fact] + public async Task ChatCompletionShouldUseChatHistoryAndReturnMetadataAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var chatCompletion = kernel.Services.GetRequiredService(); + + var chatHistory = new ChatHistory("Reply \"I don't know\" to every question."); + chatHistory.AddUserMessage("What is the capital of France?"); + + var stringBuilder = new StringBuilder(); + var metadata = new Dictionary(); + + // Act + await foreach (var update in chatCompletion.GetStreamingChatMessageContentsAsync(chatHistory, null, kernel)) + { + stringBuilder.Append(update.Content); + + foreach (var key in update.Metadata!.Keys) + { + metadata[key] = update.Metadata[key]; + } + } + + // Assert + Assert.Contains("I don't know", stringBuilder.ToString()); + Assert.NotNull(metadata); + + Assert.True(metadata.TryGetValue("Id", out object? id)); + Assert.NotNull(id); + + Assert.True(metadata.TryGetValue("CreatedAt", out object? createdAt)); + Assert.NotNull(createdAt); + + Assert.True(metadata.ContainsKey("SystemFingerprint")); + + Assert.True(metadata.TryGetValue("FinishReason", out object? finishReason)); + Assert.Equal("Stop", finishReason); + } + + [Fact] + public async Task TextGenerationShouldUseChatSystemPromptAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var textGeneration = kernel.Services.GetRequiredService(); + + var settings = new AzureOpenAIPromptExecutionSettings { ChatSystemPrompt = "Reply \"I don't know\" to every question." 
}; + + var stringBuilder = new StringBuilder(); + + // Act + await foreach (var update in textGeneration.GetStreamingTextContentsAsync("What is the capital of France?", settings, kernel)) + { + stringBuilder.Append(update); + } + + // Assert + Assert.Contains("I don't know", stringBuilder.ToString()); + } + + [Fact] + public async Task TextGenerationShouldReturnMetadataAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var textGeneration = kernel.Services.GetRequiredService(); + + // Act + var stringBuilder = new StringBuilder(); + var metadata = new Dictionary(); + + // Act + await foreach (var update in textGeneration.GetStreamingTextContentsAsync("What is the capital of France?", null, kernel)) + { + stringBuilder.Append(update); + + foreach (var key in update.Metadata!.Keys) + { + metadata[key] = update.Metadata[key]; + } + } + + // Assert + Assert.NotNull(metadata); + + Assert.True(metadata.TryGetValue("Id", out object? id)); + Assert.NotNull(id); + + Assert.True(metadata.TryGetValue("CreatedAt", out object? createdAt)); + Assert.NotNull(createdAt); + + Assert.True(metadata.ContainsKey("SystemFingerprint")); + + Assert.True(metadata.TryGetValue("FinishReason", out object? 
finishReason)); + Assert.Equal("Stop", finishReason); + } + + #region internals + + private Kernel CreateAndInitializeKernel() + { + var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); + Assert.NotNull(azureOpenAIConfiguration); + Assert.NotNull(azureOpenAIConfiguration.ChatDeploymentName); + Assert.NotNull(azureOpenAIConfiguration.ApiKey); + Assert.NotNull(azureOpenAIConfiguration.Endpoint); + + var kernelBuilder = base.CreateKernelBuilder(); + + kernelBuilder.AddAzureOpenAIChatCompletion( + deploymentName: azureOpenAIConfiguration.ChatDeploymentName, + modelId: azureOpenAIConfiguration.ChatModelId, + endpoint: azureOpenAIConfiguration.Endpoint, + apiKey: azureOpenAIConfiguration.ApiKey); + + return kernelBuilder.Build(); + } + + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + #endregion +} diff --git a/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAITextEmbeddingTests.cs b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAITextEmbeddingTests.cs new file mode 100644 index 000000000000..20f9851a5ad7 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAITextEmbeddingTests.cs @@ -0,0 +1,71 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.Embeddings; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.AzureOpenAI; + +public sealed class AzureOpenAITextEmbeddingTests +{ + public AzureOpenAITextEmbeddingTests() + { + var config = this._configuration.GetSection("AzureOpenAIEmbeddings").Get(); + Assert.NotNull(config); + this._azureOpenAIConfiguration = config; + } + + [Theory] + [InlineData("test sentence")] + public async Task AzureOpenAITestAsync(string testInputString) + { + // Arrange + var embeddingGenerator = new AzureOpenAITextEmbeddingGenerationService( + this._azureOpenAIConfiguration.DeploymentName, + this._azureOpenAIConfiguration.Endpoint, + this._azureOpenAIConfiguration.ApiKey); + + // Act + var singleResult = await embeddingGenerator.GenerateEmbeddingAsync(testInputString); + var batchResult = await embeddingGenerator.GenerateEmbeddingsAsync([testInputString]); + + // Assert + Assert.Equal(AdaVectorLength, singleResult.Length); + Assert.Single(batchResult); + } + + [Theory] + [InlineData(null, 3072)] + [InlineData(1024, 1024)] + public async Task AzureOpenAIWithDimensionsAsync(int? 
dimensions, int expectedVectorLength) + { + // Arrange + const string TestInputString = "test sentence"; + + var embeddingGenerator = new AzureOpenAITextEmbeddingGenerationService( + "text-embedding-3-large", + this._azureOpenAIConfiguration.Endpoint, + this._azureOpenAIConfiguration.ApiKey, + dimensions: dimensions); + + // Act + var result = await embeddingGenerator.GenerateEmbeddingAsync(TestInputString); + + // Assert + Assert.Equal(expectedVectorLength, result.Length); + } + + private readonly AzureOpenAIConfiguration _azureOpenAIConfiguration; + + private const int AdaVectorLength = 1536; + + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); +} diff --git a/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAITextToAudioTests.cs b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAITextToAudioTests.cs new file mode 100644 index 000000000000..c50ce2478001 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAITextToAudioTests.cs @@ -0,0 +1,44 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.TextToAudio; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.AzureOpenAI; + +public sealed class AzureOpenAITextToAudioTests +{ + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + [Fact] + public async Task AzureOpenAITextToAudioTestAsync() + { + // Arrange + AzureOpenAIConfiguration? azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAITextToAudio").Get(); + Assert.NotNull(azureOpenAIConfiguration); + + var kernel = Kernel.CreateBuilder() + .AddAzureOpenAITextToAudio( + azureOpenAIConfiguration.DeploymentName, + azureOpenAIConfiguration.Endpoint, + azureOpenAIConfiguration.ApiKey) + .Build(); + + var service = kernel.GetRequiredService(); + + // Act + var result = await service.GetAudioContentAsync("The sun rises in the east and sets in the west."); + + // Assert + var audioData = result.Data!.Value; + Assert.False(audioData.IsEmpty); + } +} diff --git a/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAITextToImageTests.cs b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAITextToImageTests.cs new file mode 100644 index 000000000000..1374ed860f2f --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAITextToImageTests.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.TextToImage; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.AzureOpenAI; + +public sealed class AzureOpenAITextToImageTests +{ + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + [Fact] + public async Task ItCanReturnImageUrlAsync() + { + // Arrange + AzureOpenAIConfiguration? configuration = this._configuration.GetSection("AzureOpenAITextToImage").Get(); + Assert.NotNull(configuration); + + var kernel = Kernel.CreateBuilder() + .AddAzureOpenAITextToImage(configuration.DeploymentName, configuration.Endpoint, configuration.ApiKey) + .Build(); + + var service = kernel.GetRequiredService(); + + // Act + var result = await service.GenerateImageAsync("The sun rises in the east and sets in the west.", 1024, 1024); + + // Assert + Assert.NotNull(result); + Assert.StartsWith("https://", result); + } +} diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/AIServiceType.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/AIServiceType.cs deleted file mode 100644 index b09a7a5ef635..000000000000 --- a/dotnet/src/IntegrationTests/Connectors/OpenAI/AIServiceType.cs +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -namespace SemanticKernel.IntegrationTests.Connectors.OpenAI; - -/// -/// Enumeration to run integration tests for different AI services -/// -public enum AIServiceType -{ - /// - /// Open AI service - /// - OpenAI = 0, - - /// - /// Azure Open AI service - /// - AzureOpenAI = 1 -} diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/ChatHistoryTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/ChatHistoryTests.cs deleted file mode 100644 index bf102a517e52..000000000000 --- a/dotnet/src/IntegrationTests/Connectors/OpenAI/ChatHistoryTests.cs +++ /dev/null @@ -1,149 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.ComponentModel; -using System.Linq; -using System.Text.Json; -using System.Threading.Tasks; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using SemanticKernel.IntegrationTests.TestSettings; -using Xunit; -using Xunit.Abstractions; - -namespace SemanticKernel.IntegrationTests.Connectors.OpenAI; - -public sealed class ChatHistoryTests(ITestOutputHelper output) : IDisposable -{ - private readonly IKernelBuilder _kernelBuilder = Kernel.CreateBuilder(); - private readonly XunitLogger _logger = new(output); - private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() - .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) - .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) - .AddEnvironmentVariables() - .AddUserSecrets() - .Build(); - private static readonly JsonSerializerOptions s_jsonOptionsCache = new() { WriteIndented = true }; - - [Fact] - public async Task ItSerializesAndDeserializesChatHistoryAsync() - { - // Arrange - this._kernelBuilder.Services.AddSingleton(this._logger); - var builder = 
this._kernelBuilder; - this.ConfigureAzureOpenAIChatAsText(builder); - builder.Plugins.AddFromType(); - var kernel = builder.Build(); - - OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; - ChatHistory history = []; - - // Act - history.AddUserMessage("Make me a special poem"); - var historyBeforeJson = JsonSerializer.Serialize(history.ToList(), s_jsonOptionsCache); - var service = kernel.GetRequiredService(); - ChatMessageContent result = await service.GetChatMessageContentAsync(history, settings, kernel); - history.AddUserMessage("Ok thank you"); - - ChatMessageContent resultOriginalWorking = await service.GetChatMessageContentAsync(history, settings, kernel); - var historyJson = JsonSerializer.Serialize(history, s_jsonOptionsCache); - var historyAfterSerialization = JsonSerializer.Deserialize(historyJson); - var exception = await Record.ExceptionAsync(() => service.GetChatMessageContentAsync(historyAfterSerialization!, settings, kernel)); - - // Assert - Assert.Null(exception); - } - - [Fact] - public async Task ItUsesChatSystemPromptFromSettingsAsync() - { - // Arrange - this._kernelBuilder.Services.AddSingleton(this._logger); - var builder = this._kernelBuilder; - this.ConfigureAzureOpenAIChatAsText(builder); - builder.Plugins.AddFromType(); - var kernel = builder.Build(); - - string systemPrompt = "You are batman. 
If asked who you are, say 'I am Batman!'"; - - OpenAIPromptExecutionSettings settings = new() { ChatSystemPrompt = systemPrompt }; - ChatHistory history = []; - - // Act - history.AddUserMessage("Who are you?"); - var service = kernel.GetRequiredService(); - ChatMessageContent result = await service.GetChatMessageContentAsync(history, settings, kernel); - - // Assert - Assert.Contains("Batman", result.ToString(), StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public async Task ItUsesChatSystemPromptFromChatHistoryAsync() - { - // Arrange - this._kernelBuilder.Services.AddSingleton(this._logger); - var builder = this._kernelBuilder; - this.ConfigureAzureOpenAIChatAsText(builder); - builder.Plugins.AddFromType(); - var kernel = builder.Build(); - - string systemPrompt = "You are batman. If asked who you are, say 'I am Batman!'"; - - OpenAIPromptExecutionSettings settings = new(); - ChatHistory history = new(systemPrompt); - - // Act - history.AddUserMessage("Who are you?"); - var service = kernel.GetRequiredService(); - ChatMessageContent result = await service.GetChatMessageContentAsync(history, settings, kernel); - - // Assert - Assert.Contains("Batman", result.ToString(), StringComparison.OrdinalIgnoreCase); - } - - private void ConfigureAzureOpenAIChatAsText(IKernelBuilder kernelBuilder) - { - var azureOpenAIConfiguration = this._configuration.GetSection("Planners:AzureOpenAI").Get(); - - Assert.NotNull(azureOpenAIConfiguration); - Assert.NotNull(azureOpenAIConfiguration.ChatDeploymentName); - Assert.NotNull(azureOpenAIConfiguration.ApiKey); - Assert.NotNull(azureOpenAIConfiguration.Endpoint); - Assert.NotNull(azureOpenAIConfiguration.ServiceId); - - kernelBuilder.AddAzureOpenAIChatCompletion( - deploymentName: azureOpenAIConfiguration.ChatDeploymentName, - modelId: azureOpenAIConfiguration.ChatModelId, - endpoint: azureOpenAIConfiguration.Endpoint, - apiKey: azureOpenAIConfiguration.ApiKey, - serviceId: azureOpenAIConfiguration.ServiceId); - } - - public 
class FakePlugin - { - [KernelFunction, Description("creates a special poem")] - public string CreateSpecialPoem() - { - return "ABCDE"; - } - } - - public void Dispose() - { - this.Dispose(true); - GC.SuppressFinalize(this); - } - - private void Dispose(bool disposing) - { - if (disposing) - { - this._logger.Dispose(); - } - } -} diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIAudioToTextTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIAudioToTextTests.cs index dd4a55f6cc2c..90375307c533 100644 --- a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIAudioToTextTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIAudioToTextTests.cs @@ -8,6 +8,7 @@ using Microsoft.SemanticKernel.AudioToText; using Microsoft.SemanticKernel.Connectors.OpenAI; using SemanticKernel.IntegrationTests.TestSettings; +using xRetry; using Xunit; namespace SemanticKernel.IntegrationTests.Connectors.OpenAI; @@ -15,13 +16,13 @@ namespace SemanticKernel.IntegrationTests.Connectors.OpenAI; public sealed class OpenAIAudioToTextTests() { private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() - .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) .AddEnvironmentVariables() .AddUserSecrets() .Build(); - [Fact(Skip = "OpenAI will often throttle requests. This test is for manual verification.")] + [RetryFact]//(Skip = "OpenAI will often throttle requests. 
This test is for manual verification.")] public async Task OpenAIAudioToTextTestAsync() { // Arrange @@ -45,32 +46,4 @@ public async Task OpenAIAudioToTextTestAsync() // Assert Assert.Contains("The sun rises in the east and sets in the west.", result.Text, StringComparison.OrdinalIgnoreCase); } - - [Fact(Skip = "Re-enable when Azure OpenAPI service is available.")] - public async Task AzureOpenAIAudioToTextTestAsync() - { - // Arrange - const string Filename = "test_audio.wav"; - - AzureOpenAIConfiguration? azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAIAudioToText").Get(); - Assert.NotNull(azureOpenAIConfiguration); - - var kernel = Kernel.CreateBuilder() - .AddAzureOpenAIAudioToText( - azureOpenAIConfiguration.DeploymentName, - azureOpenAIConfiguration.Endpoint, - azureOpenAIConfiguration.ApiKey) - .Build(); - - var service = kernel.GetRequiredService(); - - await using Stream audio = File.OpenRead($"./TestData/{Filename}"); - var audioData = await BinaryData.FromStreamAsync(audio); - - // Act - var result = await service.GetTextContentAsync(new AudioContent(audioData, mimeType: "audio/wav"), new OpenAIAudioToTextExecutionSettings(Filename)); - - // Assert - Assert.Contains("The sun rises in the east and sets in the west.", result.Text, StringComparison.OrdinalIgnoreCase); - } } diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletionTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletionTests.cs new file mode 100644 index 000000000000..d3941f7d3515 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletionTests.cs @@ -0,0 +1,270 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.Net; +using System.Net.Http; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Http.Resilience; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using OpenAI.Chat; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.OpenAI; + +#pragma warning disable xUnit1004 // Contains test methods used in manual verification. Disable warning for this file only. + +public sealed class OpenAIChatCompletionTests : BaseIntegrationTest +{ + [Fact] + //[Fact(Skip = "Skipping while we investigate issue with GitHub actions.")] + public async Task ItCanUseOpenAiChatForTextGenerationAsync() + { + // + var kernel = this.CreateAndInitializeKernel(); + + var func = kernel.CreateFunctionFromPrompt( + "List the two planets after '{{$input}}', excluding moons, using bullet points.", + new OpenAIPromptExecutionSettings()); + + // Act + var result = await func.InvokeAsync(kernel, new() { [InputParameterName] = "Jupiter" }); + + // Assert + Assert.NotNull(result); + Assert.Contains("Saturn", result.GetValue(), StringComparison.InvariantCultureIgnoreCase); + Assert.Contains("Uranus", result.GetValue(), StringComparison.InvariantCultureIgnoreCase); + } + + [Fact] + public async Task OpenAIStreamingTestAsync() + { + // + var kernel = this.CreateAndInitializeKernel(); + + var plugins = TestHelpers.ImportSamplePlugins(kernel, "ChatPlugin"); + + StringBuilder fullResult = new(); + + var prompt = "Where is the most famous fish market in Seattle, Washington, USA?"; + + // Act + await foreach (var content in kernel.InvokeStreamingAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt })) + { + 
fullResult.Append(content); + } + + // Assert + Assert.Contains("Pike Place", fullResult.ToString(), StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task OpenAIHttpRetryPolicyTestAsync() + { + // + List statusCodes = []; + + var openAIConfiguration = this._configuration.GetSection("OpenAI").Get(); + Assert.NotNull(openAIConfiguration); + Assert.NotNull(openAIConfiguration.ChatModelId); + + var kernelBuilder = Kernel.CreateBuilder(); + + kernelBuilder.AddOpenAIChatCompletion( + modelId: openAIConfiguration.ChatModelId, + apiKey: "INVALID_KEY"); + + kernelBuilder.Services.ConfigureHttpClientDefaults(c => + { + // Use a standard resiliency policy, augmented to retry on 401 Unauthorized for this example + c.AddStandardResilienceHandler().Configure(o => + { + o.Retry.ShouldHandle = args => ValueTask.FromResult(args.Outcome.Result?.StatusCode is HttpStatusCode.Unauthorized); + o.Retry.OnRetry = args => + { + statusCodes.Add(args.Outcome.Result?.StatusCode); + return ValueTask.CompletedTask; + }; + }); + }); + + var target = kernelBuilder.Build(); + + var plugins = TestHelpers.ImportSamplePlugins(target, "SummarizePlugin"); + + var prompt = "Where is the most famous fish market in Seattle, Washington, USA?"; + + // Act + var exception = await Assert.ThrowsAsync(() => target.InvokeAsync(plugins["SummarizePlugin"]["Summarize"], new() { [InputParameterName] = prompt })); + + // Assert + Assert.All(statusCodes, s => Assert.Equal(HttpStatusCode.Unauthorized, s)); + Assert.Equal(HttpStatusCode.Unauthorized, ((HttpOperationException)exception).StatusCode); + } + + [Fact] + public async Task OpenAIShouldReturnMetadataAsync() + { + // + var kernel = this.CreateAndInitializeKernel(); + + var plugins = TestHelpers.ImportSamplePlugins(kernel, "FunPlugin"); + + // Act + var result = await kernel.InvokeAsync(plugins["FunPlugin"]["Limerick"]); + + // Assert + Assert.NotNull(result.Metadata); + + // Usage + Assert.True(result.Metadata.TryGetValue("Usage", out object? 
usageObject)); + Assert.NotNull(usageObject); + + var jsonObject = JsonSerializer.SerializeToElement(usageObject); + Assert.True(jsonObject.TryGetProperty("InputTokens", out JsonElement promptTokensJson)); + Assert.True(promptTokensJson.TryGetInt32(out int promptTokens)); + Assert.NotEqual(0, promptTokens); + + Assert.True(jsonObject.TryGetProperty("OutputTokens", out JsonElement completionTokensJson)); + Assert.True(completionTokensJson.TryGetInt32(out int completionTokens)); + Assert.NotEqual(0, completionTokens); + } + + [Theory(Skip = "This test is for manual verification.")] + [InlineData("\n")] + [InlineData("\r\n")] + public async Task CompletionWithDifferentLineEndingsAsync(string lineEnding) + { + // + var prompt = + "Given a json input and a request. Apply the request on the json input and return the result. " + + $"Put the result in between tags{lineEnding}" + + $$"""Input:{{lineEnding}}{"name": "John", "age": 30}{{lineEnding}}{{lineEnding}}Request:{{lineEnding}}name"""; + + var kernel = this.CreateAndInitializeKernel(); + + var plugins = TestHelpers.ImportSamplePlugins(kernel, "ChatPlugin"); + + // Act + FunctionResult actual = await kernel.InvokeAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt }); + + // Assert + Assert.Contains("John", actual.GetValue(), StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task ChatSystemPromptIsNotIgnoredAsync() + { + // + var kernel = this.CreateAndInitializeKernel(); + + var settings = new OpenAIPromptExecutionSettings { ChatSystemPrompt = "Reply \"I don't know\" to every question." 
}; + + // Act + var result = await kernel.InvokePromptAsync("Where is the most famous fish market in Seattle, Washington, USA?", new(settings)); + + // Assert + Assert.Contains("I don't know", result.ToString(), StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task SemanticKernelVersionHeaderIsSentAsync() + { + // + using var defaultHandler = new HttpClientHandler(); + using var httpHeaderHandler = new HttpHeaderHandler(defaultHandler); + using var httpClient = new HttpClient(httpHeaderHandler); + + var kernel = this.CreateAndInitializeKernel(httpClient); + + // Act + var result = await kernel.InvokePromptAsync("Where is the most famous fish market in Seattle, Washington, USA?"); + + // Assert + Assert.NotNull(httpHeaderHandler.RequestHeaders); + Assert.True(httpHeaderHandler.RequestHeaders.TryGetValues("Semantic-Kernel-Version", out var values)); + } + + //[Theory(Skip = "This test is for manual verification.")] + [Theory] + [InlineData(null, null)] + [InlineData(false, null)] + [InlineData(true, 2)] + [InlineData(true, 5)] + public async Task LogProbsDataIsReturnedWhenRequestedAsync(bool? logprobs, int? topLogprobs) + { + // + var settings = new OpenAIPromptExecutionSettings { Logprobs = logprobs, TopLogprobs = topLogprobs }; + + var kernel = this.CreateAndInitializeKernel(); + + // Act + var result = await kernel.InvokePromptAsync("Hi, can you help me today?", new(settings)); + + var logProbabilityInfo = result.Metadata?["ContentTokenLogProbabilities"] as IReadOnlyList; + + // Assert + Assert.NotNull(logProbabilityInfo); + + if (logprobs is true) + { + Assert.NotNull(logProbabilityInfo); + Assert.Equal(topLogprobs, logProbabilityInfo[0].TopLogProbabilities.Count); + } + else + { + Assert.Empty(logProbabilityInfo); + } + } + + #region internals + + private Kernel CreateAndInitializeKernel(HttpClient? 
httpClient = null) + { + var OpenAIConfiguration = this._configuration.GetSection("OpenAI").Get(); + Assert.NotNull(OpenAIConfiguration); + Assert.NotNull(OpenAIConfiguration.ChatModelId); + Assert.NotNull(OpenAIConfiguration.ApiKey); + Assert.NotNull(OpenAIConfiguration.ServiceId); + + var kernelBuilder = base.CreateKernelBuilder(); + + kernelBuilder.AddOpenAIChatCompletion( + modelId: OpenAIConfiguration.ChatModelId, + apiKey: OpenAIConfiguration.ApiKey, + serviceId: OpenAIConfiguration.ServiceId, + httpClient: httpClient); + + return kernelBuilder.Build(); + } + + private const string InputParameterName = "input"; + + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + private sealed class HttpHeaderHandler(HttpMessageHandler innerHandler) : DelegatingHandler(innerHandler) + { + public System.Net.Http.Headers.HttpRequestHeaders? 
RequestHeaders { get; private set; } + + protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + this.RequestHeaders = request.Headers; + return await base.SendAsync(request, cancellationToken); + } + } + + #endregion +} diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIToolsTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_FunctionCallingTests.cs similarity index 67% rename from dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIToolsTests.cs rename to dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_FunctionCallingTests.cs index 049287fbbc14..5f22dd019ca8 100644 --- a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIToolsTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_FunctionCallingTests.cs @@ -2,58 +2,53 @@ using System; using System.Collections.Generic; -using System.ComponentModel; +using System.IO; using System.Linq; using System.Text; using System.Text.Json; using System.Threading.Tasks; -using Azure.AI.OpenAI; using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.Time.Testing; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.ChatCompletion; using Microsoft.SemanticKernel.Connectors.OpenAI; -using SemanticKernel.IntegrationTests.Planners.Stepwise; +using OpenAI.Chat; using SemanticKernel.IntegrationTests.TestSettings; using Xunit; namespace SemanticKernel.IntegrationTests.Connectors.OpenAI; -public sealed class OpenAIToolsTests : BaseIntegrationTest +public sealed class OpenAIChatCompletionFunctionCallingTests : BaseIntegrationTest { - [Fact(Skip = "OpenAI is throttling requests. 
Switch this test to use Azure OpenAI.")] + [Fact] public async Task CanAutoInvokeKernelFunctionsAsync() { // Arrange - Kernel kernel = this.InitializeKernel(); - kernel.ImportPluginFromType(); - var invokedFunctions = new List(); var filter = new FakeFunctionFilter(async (context, next) => { - invokedFunctions.Add(context.Function.Name); + invokedFunctions.Add($"{context.Function.Name}({string.Join(", ", context.Arguments)})"); await next(context); }); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); kernel.FunctionInvocationFilters.Add(filter); - // Act OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; - var result = await kernel.InvokePromptAsync("How many days until Christmas? Explain your thinking.", new(settings)); + + // Act + var result = await kernel.InvokePromptAsync("Given the current time of day and weather, what is the likely color of the sky in Boston?", new(settings)); // Assert - Assert.NotNull(result); - Assert.Contains("GetCurrentUtcTime", invokedFunctions); + Assert.Contains("rain", result.GetValue(), StringComparison.InvariantCulture); + Assert.Contains("GetCurrentUtcTime()", invokedFunctions); + Assert.Contains("Get_Weather_For_City([cityName, Boston])", invokedFunctions); } - [Fact(Skip = "OpenAI is throttling requests. 
Switch this test to use Azure OpenAI.")] + [Fact] public async Task CanAutoInvokeKernelFunctionsStreamingAsync() { // Arrange - Kernel kernel = this.InitializeKernel(); - kernel.ImportPluginFromType(); - var invokedFunctions = new List(); var filter = new FakeFunctionFilter(async (context, next) => @@ -62,34 +57,34 @@ public async Task CanAutoInvokeKernelFunctionsStreamingAsync() await next(context); }); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); kernel.FunctionInvocationFilters.Add(filter); - // Act OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; - string result = ""; - await foreach (string c in kernel.InvokePromptStreamingAsync( - $"How much older is John than Jim? Compute that value and pass it to the {nameof(TimeInformation)}.{nameof(TimeInformation.InterpretValue)} function, then respond only with its result.", - new(settings))) + + var stringBuilder = new StringBuilder(); + + // Act + await foreach (var update in kernel.InvokePromptStreamingAsync("Given the current time of day and weather, what is the likely color of the sky in Boston?", new(settings))) { - result += c; + stringBuilder.Append(update); } // Assert - Assert.Contains("6", result, StringComparison.InvariantCulture); - Assert.Contains("GetAge([personName, John])", invokedFunctions); - Assert.Contains("GetAge([personName, Jim])", invokedFunctions); - Assert.Contains("InterpretValue([value, 3])", invokedFunctions); + Assert.Contains("rain", stringBuilder.ToString(), StringComparison.InvariantCulture); + Assert.Contains("GetCurrentUtcTime()", invokedFunctions); + Assert.Contains("Get_Weather_For_City([cityName, Boston])", invokedFunctions); } - [Fact(Skip = "OpenAI is throttling requests. 
Switch this test to use Azure OpenAI.")] + [Fact] public async Task CanAutoInvokeKernelFunctionsWithComplexTypeParametersAsync() { // Arrange - Kernel kernel = this.InitializeKernel(); - kernel.ImportPluginFromType(); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); - // Act OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + // Act var result = await kernel.InvokePromptAsync("What is the current temperature in Dublin, Ireland, in Fahrenheit?", new(settings)); // Assert @@ -97,15 +92,15 @@ public async Task CanAutoInvokeKernelFunctionsWithComplexTypeParametersAsync() Assert.Contains("42.8", result.GetValue(), StringComparison.InvariantCulture); // The WeatherPlugin always returns 42.8 for Dublin, Ireland. } - [Fact(Skip = "OpenAI is throttling requests. Switch this test to use Azure OpenAI.")] + [Fact] public async Task CanAutoInvokeKernelFunctionsWithPrimitiveTypeParametersAsync() { // Arrange - Kernel kernel = this.InitializeKernel(); - kernel.ImportPluginFromType(); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); - // Act OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + // Act var result = await kernel.InvokePromptAsync("Convert 50 degrees Fahrenheit to Celsius.", new(settings)); // Assert @@ -113,32 +108,27 @@ public async Task CanAutoInvokeKernelFunctionsWithPrimitiveTypeParametersAsync() Assert.Contains("10", result.GetValue(), StringComparison.InvariantCulture); } - [Fact(Skip = "OpenAI is throttling requests. 
Switch this test to use Azure OpenAI.")] + [Fact] public async Task CanAutoInvokeKernelFunctionsWithEnumTypeParametersAsync() { // Arrange - Kernel kernel = this.InitializeKernel(); - var timeProvider = new FakeTimeProvider(); - timeProvider.SetUtcNow(new DateTimeOffset(new DateTime(2024, 4, 24))); // Wednesday - var timePlugin = new TimePlugin(timeProvider); - kernel.ImportPluginFromObject(timePlugin, nameof(TimePlugin)); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); - // Act OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; - var result = await kernel.InvokePromptAsync( - "When was last friday? Show the date in format DD.MM.YYYY for example: 15.07.2019", - new(settings)); + + // Act + var result = await kernel.InvokePromptAsync("Given the current time of day and weather, what is the likely color of the sky in Boston?", new(settings)); // Assert Assert.NotNull(result); - Assert.Contains("19.04.2024", result.GetValue(), StringComparison.OrdinalIgnoreCase); + Assert.Contains("rain", result.GetValue(), StringComparison.OrdinalIgnoreCase); } [Fact] public async Task CanAutoInvokeKernelFunctionFromPromptAsync() { // Arrange - Kernel kernel = this.InitializeKernel(); + var kernel = this.CreateAndInitializeKernel(); var promptFunction = KernelFunctionFactory.CreateFromPrompt( "Your role is always to return this text - 'A Game-Changer for the Transportation Industry'. 
Don't ask for more details or context.", @@ -150,8 +140,9 @@ public async Task CanAutoInvokeKernelFunctionFromPromptAsync() "Delivers up-to-date news content.", [promptFunction])); - // Act OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + // Act var result = await kernel.InvokePromptAsync("Show me the latest news as they are.", new(settings)); // Assert @@ -163,7 +154,7 @@ public async Task CanAutoInvokeKernelFunctionFromPromptAsync() public async Task CanAutoInvokeKernelFunctionFromPromptStreamingAsync() { // Arrange - Kernel kernel = this.InitializeKernel(); + var kernel = this.CreateAndInitializeKernel(); var promptFunction = KernelFunctionFactory.CreateFromPrompt( "Your role is always to return this text - 'A Game-Changer for the Transportation Industry'. Don't ask for more details or context.", @@ -175,8 +166,9 @@ public async Task CanAutoInvokeKernelFunctionFromPromptStreamingAsync() "Delivers up-to-date news content.", [promptFunction])); - // Act OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + // Act var streamingResult = kernel.InvokePromptStreamingAsync("Show me the latest news as they are.", new(settings)); var builder = new StringBuilder(); @@ -197,7 +189,7 @@ public async Task CanAutoInvokeKernelFunctionFromPromptStreamingAsync() public async Task ConnectorSpecificChatMessageContentClassesCanBeUsedForManualFunctionCallingAsync() { // Arrange - var kernel = this.InitializeKernel(importHelperPlugin: true); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); var chatHistory = new ChatHistory(); chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); @@ -210,7 +202,7 @@ public async Task ConnectorSpecificChatMessageContentClassesCanBeUsedForManualFu var result = await sut.GetChatMessageContentAsync(chatHistory, settings, kernel); // 
Current way of handling function calls manually using connector specific chat message content class. - var toolCalls = ((OpenAIChatMessageContent)result).ToolCalls.OfType().ToList(); + var toolCalls = ((OpenAIChatMessageContent)result).ToolCalls.OfType().ToList(); while (toolCalls.Count > 0) { @@ -233,7 +225,7 @@ public async Task ConnectorSpecificChatMessageContentClassesCanBeUsedForManualFu // Sending the functions invocation results back to the LLM to get the final response result = await sut.GetChatMessageContentAsync(chatHistory, settings, kernel); - toolCalls = ((OpenAIChatMessageContent)result).ToolCalls.OfType().ToList(); + toolCalls = ((OpenAIChatMessageContent)result).ToolCalls.OfType().ToList(); } // Assert @@ -244,7 +236,7 @@ public async Task ConnectorSpecificChatMessageContentClassesCanBeUsedForManualFu public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForManualFunctionCallingAsync() { // Arrange - var kernel = this.InitializeKernel(importHelperPlugin: true); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); var chatHistory = new ChatHistory(); chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); @@ -280,14 +272,14 @@ public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForManual Assert.Contains("rain", messageContent.Content, StringComparison.InvariantCultureIgnoreCase); } - [Fact(Skip = "The test is temporarily disabled until a more stable solution is found.")] + [Fact] public async Task ConnectorAgnosticFunctionCallingModelClassesCanPassFunctionExceptionToConnectorAsync() { // Arrange - var kernel = this.InitializeKernel(importHelperPlugin: true); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); var chatHistory = new ChatHistory(); - chatHistory.AddSystemMessage("If you are unable to answer the question for whatever reason, please add the 'error' keyword to the response."); + 
chatHistory.AddSystemMessage("Add the \"Error\" keyword to the response, if you are unable to answer a question or an error has happen."); chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; @@ -321,14 +313,14 @@ public async Task ConnectorAgnosticFunctionCallingModelClassesCanPassFunctionExc // Assert Assert.NotNull(messageContent.Content); - Assert.Contains("error", messageContent.Content, StringComparison.InvariantCultureIgnoreCase); + TestHelpers.AssertChatErrorExcuseMessage(messageContent.Content); } [Fact] public async Task ConnectorAgnosticFunctionCallingModelClassesSupportSimulatedFunctionCallsAsync() { // Arrange - var kernel = this.InitializeKernel(importHelperPlugin: true); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); var chatHistory = new ChatHistory(); chatHistory.AddSystemMessage("if there's a tornado warning, please add the 'tornado' keyword to the response."); @@ -377,7 +369,7 @@ public async Task ConnectorAgnosticFunctionCallingModelClassesSupportSimulatedFu public async Task ItFailsIfNoFunctionResultProvidedAsync() { // Arrange - var kernel = this.InitializeKernel(importHelperPlugin: true); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); var chatHistory = new ChatHistory(); chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); @@ -401,7 +393,7 @@ public async Task ItFailsIfNoFunctionResultProvidedAsync() public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForAutoFunctionCallingAsync() { // Arrange - var kernel = this.InitializeKernel(importHelperPlugin: true); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); var chatHistory = new ChatHistory(); chatHistory.AddUserMessage("Given the current time of day and 
weather, what is the likely color of the sky in Boston?"); @@ -414,42 +406,72 @@ public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForAutoFu await sut.GetChatMessageContentAsync(chatHistory, settings, kernel); // Assert - Assert.Equal(5, chatHistory.Count); - var userMessage = chatHistory[0]; Assert.Equal(AuthorRole.User, userMessage.Role); - // LLM requested the current time. - var getCurrentTimeFunctionCallRequestMessage = chatHistory[1]; - Assert.Equal(AuthorRole.Assistant, getCurrentTimeFunctionCallRequestMessage.Role); + // LLM requested the functions to call. + var getParallelFunctionCallRequestMessage = chatHistory[1]; + Assert.Equal(AuthorRole.Assistant, getParallelFunctionCallRequestMessage.Role); + + // Parallel Function Calls in the same request + var functionCalls = getParallelFunctionCallRequestMessage.Items.OfType().ToArray(); + + ChatMessageContent getCurrentTimeFunctionCallResultMessage; + ChatMessageContent getWeatherForCityFunctionCallRequestMessage; + FunctionCallContent getWeatherForCityFunctionCallRequest; + FunctionCallContent getCurrentTimeFunctionCallRequest; + ChatMessageContent getWeatherForCityFunctionCallResultMessage; + + // Assert + // Non Parallel Tool Calling + if (functionCalls.Length == 1) + { + // LLM requested the current time. + getCurrentTimeFunctionCallRequest = functionCalls[0]; + + // Connector invoked the GetCurrentUtcTime function and added result to chat history. + getCurrentTimeFunctionCallResultMessage = chatHistory[2]; + + // LLM requested the weather for Boston. + getWeatherForCityFunctionCallRequestMessage = chatHistory[3]; + getWeatherForCityFunctionCallRequest = getWeatherForCityFunctionCallRequestMessage.Items.OfType().Single(); + + // Connector invoked the Get_Weather_For_City function and added result to chat history. + getWeatherForCityFunctionCallResultMessage = chatHistory[4]; + } + else // Parallel Tool Calling + { + // LLM requested the current time. 
+ getCurrentTimeFunctionCallRequest = functionCalls[0]; + + // LLM requested the weather for Boston. + getWeatherForCityFunctionCallRequest = functionCalls[1]; + + // Connector invoked the GetCurrentUtcTime function and added result to chat history. + getCurrentTimeFunctionCallResultMessage = chatHistory[2]; + + // Connector invoked the Get_Weather_For_City function and added result to chat history. + getWeatherForCityFunctionCallResultMessage = chatHistory[3]; + } - var getCurrentTimeFunctionCallRequest = getCurrentTimeFunctionCallRequestMessage.Items.OfType().Single(); Assert.Equal("GetCurrentUtcTime", getCurrentTimeFunctionCallRequest.FunctionName); Assert.Equal("HelperFunctions", getCurrentTimeFunctionCallRequest.PluginName); Assert.NotNull(getCurrentTimeFunctionCallRequest.Id); - // Connector invoked the GetCurrentUtcTime function and added result to chat history. - var getCurrentTimeFunctionCallResultMessage = chatHistory[2]; + Assert.Equal("Get_Weather_For_City", getWeatherForCityFunctionCallRequest.FunctionName); + Assert.Equal("HelperFunctions", getWeatherForCityFunctionCallRequest.PluginName); + Assert.NotNull(getWeatherForCityFunctionCallRequest.Id); + Assert.Equal(AuthorRole.Tool, getCurrentTimeFunctionCallResultMessage.Role); Assert.Single(getCurrentTimeFunctionCallResultMessage.Items.OfType()); // Current function calling model adds TextContent item representing the result of the function call. var getCurrentTimeFunctionCallResult = getCurrentTimeFunctionCallResultMessage.Items.OfType().Single(); + // Connector invoked the GetCurrentUtcTime function and added result to chat history. Assert.Equal("GetCurrentUtcTime", getCurrentTimeFunctionCallResult.FunctionName); Assert.Equal("HelperFunctions", getCurrentTimeFunctionCallResult.PluginName); Assert.Equal(getCurrentTimeFunctionCallRequest.Id, getCurrentTimeFunctionCallResult.CallId); Assert.NotNull(getCurrentTimeFunctionCallResult.Result); - // LLM requested the weather for Boston. 
- var getWeatherForCityFunctionCallRequestMessage = chatHistory[3]; - Assert.Equal(AuthorRole.Assistant, getWeatherForCityFunctionCallRequestMessage.Role); - - var getWeatherForCityFunctionCallRequest = getWeatherForCityFunctionCallRequestMessage.Items.OfType().Single(); - Assert.Equal("Get_Weather_For_City", getWeatherForCityFunctionCallRequest.FunctionName); - Assert.Equal("HelperFunctions", getWeatherForCityFunctionCallRequest.PluginName); - Assert.NotNull(getWeatherForCityFunctionCallRequest.Id); - - // Connector invoked the Get_Weather_For_City function and added result to chat history. - var getWeatherForCityFunctionCallResultMessage = chatHistory[4]; Assert.Equal(AuthorRole.Tool, getWeatherForCityFunctionCallResultMessage.Role); Assert.Single(getWeatherForCityFunctionCallResultMessage.Items.OfType()); // Current function calling model adds TextContent item representing the result of the function call. @@ -464,7 +486,7 @@ public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForAutoFu public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForManualFunctionCallingForStreamingAsync() { // Arrange - var kernel = this.InitializeKernel(importHelperPlugin: true); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; @@ -520,7 +542,7 @@ public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForManual public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForAutoFunctionCallingForStreamingAsync() { // Arrange - var kernel = this.InitializeKernel(importHelperPlugin: true); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); var chatHistory = new ChatHistory(); chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); @@ -538,42 +560,72 @@ public async Task 
ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForAutoFu } // Assert - Assert.Equal(5, chatHistory.Count); - var userMessage = chatHistory[0]; Assert.Equal(AuthorRole.User, userMessage.Role); - // LLM requested the current time. - var getCurrentTimeFunctionCallRequestMessage = chatHistory[1]; - Assert.Equal(AuthorRole.Assistant, getCurrentTimeFunctionCallRequestMessage.Role); + // LLM requested the functions to call. + var getParallelFunctionCallRequestMessage = chatHistory[1]; + Assert.Equal(AuthorRole.Assistant, getParallelFunctionCallRequestMessage.Role); + + // Parallel Function Calls in the same request + var functionCalls = getParallelFunctionCallRequestMessage.Items.OfType().ToArray(); + + ChatMessageContent getCurrentTimeFunctionCallResultMessage; + ChatMessageContent getWeatherForCityFunctionCallRequestMessage; + FunctionCallContent getWeatherForCityFunctionCallRequest; + FunctionCallContent getCurrentTimeFunctionCallRequest; + ChatMessageContent getWeatherForCityFunctionCallResultMessage; + + // Assert + // Non Parallel Tool Calling + if (functionCalls.Length == 1) + { + // LLM requested the current time. + getCurrentTimeFunctionCallRequest = functionCalls[0]; + + // Connector invoked the GetCurrentUtcTime function and added result to chat history. + getCurrentTimeFunctionCallResultMessage = chatHistory[2]; + + // LLM requested the weather for Boston. + getWeatherForCityFunctionCallRequestMessage = chatHistory[3]; + getWeatherForCityFunctionCallRequest = getWeatherForCityFunctionCallRequestMessage.Items.OfType().Single(); + + // Connector invoked the Get_Weather_For_City function and added result to chat history. + getWeatherForCityFunctionCallResultMessage = chatHistory[4]; + } + else // Parallel Tool Calling + { + // LLM requested the current time. + getCurrentTimeFunctionCallRequest = functionCalls[0]; + + // LLM requested the weather for Boston. 
+ getWeatherForCityFunctionCallRequest = functionCalls[1]; + + // Connector invoked the GetCurrentUtcTime function and added result to chat history. + getCurrentTimeFunctionCallResultMessage = chatHistory[2]; + + // Connector invoked the Get_Weather_For_City function and added result to chat history. + getWeatherForCityFunctionCallResultMessage = chatHistory[3]; + } - var getCurrentTimeFunctionCallRequest = getCurrentTimeFunctionCallRequestMessage.Items.OfType().Single(); Assert.Equal("GetCurrentUtcTime", getCurrentTimeFunctionCallRequest.FunctionName); Assert.Equal("HelperFunctions", getCurrentTimeFunctionCallRequest.PluginName); Assert.NotNull(getCurrentTimeFunctionCallRequest.Id); - // Connector invoked the GetCurrentUtcTime function and added result to chat history. - var getCurrentTimeFunctionCallResultMessage = chatHistory[2]; + Assert.Equal("Get_Weather_For_City", getWeatherForCityFunctionCallRequest.FunctionName); + Assert.Equal("HelperFunctions", getWeatherForCityFunctionCallRequest.PluginName); + Assert.NotNull(getWeatherForCityFunctionCallRequest.Id); + Assert.Equal(AuthorRole.Tool, getCurrentTimeFunctionCallResultMessage.Role); Assert.Single(getCurrentTimeFunctionCallResultMessage.Items.OfType()); // Current function calling model adds TextContent item representing the result of the function call. var getCurrentTimeFunctionCallResult = getCurrentTimeFunctionCallResultMessage.Items.OfType().Single(); + // Connector invoked the GetCurrentUtcTime function and added result to chat history. Assert.Equal("GetCurrentUtcTime", getCurrentTimeFunctionCallResult.FunctionName); Assert.Equal("HelperFunctions", getCurrentTimeFunctionCallResult.PluginName); Assert.Equal(getCurrentTimeFunctionCallRequest.Id, getCurrentTimeFunctionCallResult.CallId); Assert.NotNull(getCurrentTimeFunctionCallResult.Result); - // LLM requested the weather for Boston. 
- var getWeatherForCityFunctionCallRequestMessage = chatHistory[3]; - Assert.Equal(AuthorRole.Assistant, getWeatherForCityFunctionCallRequestMessage.Role); - - var getWeatherForCityFunctionCallRequest = getWeatherForCityFunctionCallRequestMessage.Items.OfType().Single(); - Assert.Equal("Get_Weather_For_City", getWeatherForCityFunctionCallRequest.FunctionName); - Assert.Equal("HelperFunctions", getWeatherForCityFunctionCallRequest.PluginName); - Assert.NotNull(getWeatherForCityFunctionCallRequest.Id); - - // Connector invoked the Get_Weather_For_City function and added result to chat history. - var getWeatherForCityFunctionCallResultMessage = chatHistory[4]; Assert.Equal(AuthorRole.Tool, getWeatherForCityFunctionCallResultMessage.Role); Assert.Single(getWeatherForCityFunctionCallResultMessage.Items.OfType()); // Current function calling model adds TextContent item representing the result of the function call. @@ -584,18 +636,18 @@ public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForAutoFu Assert.NotNull(getWeatherForCityFunctionCallResult.Result); } - [Fact(Skip = "The test is temporarily disabled until a more stable solution is found.")] + [Fact] public async Task ConnectorAgnosticFunctionCallingModelClassesCanPassFunctionExceptionToConnectorForStreamingAsync() { // Arrange - var kernel = this.InitializeKernel(importHelperPlugin: true); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; var sut = kernel.GetRequiredService(); var chatHistory = new ChatHistory(); - chatHistory.AddSystemMessage("If you are unable to answer the question for whatever reason, please add the 'error' keyword to the response."); + chatHistory.AddSystemMessage("Add the \"Error\" keyword to the response, if you are unable to answer a question or an error has happen."); chatHistory.AddUserMessage("Given the current time of day and 
weather, what is the likely color of the sky in Boston?"); string? result = null; @@ -639,14 +691,14 @@ public async Task ConnectorAgnosticFunctionCallingModelClassesCanPassFunctionExc } // Assert - Assert.Contains("error", result, StringComparison.InvariantCultureIgnoreCase); + TestHelpers.AssertChatErrorExcuseMessage(result); } [Fact] public async Task ConnectorAgnosticFunctionCallingModelClassesSupportSimulatedFunctionCallsForStreamingAsync() { // Arrange - var kernel = this.InitializeKernel(importHelperPlugin: true); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; @@ -707,91 +759,154 @@ public async Task ConnectorAgnosticFunctionCallingModelClassesSupportSimulatedFu Assert.Contains("tornado", result, StringComparison.InvariantCultureIgnoreCase); } - private Kernel InitializeKernel(bool importHelperPlugin = false) + [Fact] + public async Task ItShouldSupportOldFunctionCallingModelSerializedIntoChatHistoryByPreviousVersionOfSKAsync() { - OpenAIConfiguration? openAIConfiguration = this._configuration.GetSection("Planners:OpenAI").Get(); - Assert.NotNull(openAIConfiguration); + // Arrange + var chatHistory = JsonSerializer.Deserialize(File.ReadAllText("./TestData/serializedChatHistoryV1_15_1.json")); - IKernelBuilder builder = this.CreateKernelBuilder() - .AddOpenAIChatCompletion( - modelId: openAIConfiguration.ModelId, - apiKey: openAIConfiguration.ApiKey); + // Remove connector-agnostic function-calling items to check if the old function-calling model, which relies on function information in metadata, is handled correctly. + foreach (var chatMessage in chatHistory!) 
+ { + var index = 0; + while (index < chatMessage.Items.Count) + { + var item = chatMessage.Items[index]; + if (item is FunctionCallContent || item is FunctionResultContent) + { + chatMessage.Items.Remove(item); + continue; + } + index++; + } + } - var kernel = builder.Build(); + string? emailBody = null, emailRecipient = null; - if (importHelperPlugin) + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + kernel.ImportPluginFromFunctions("EmailPlugin", [KernelFunctionFactory.CreateFromMethod((string body, string recipient) => { emailBody = body; emailRecipient = recipient; }, "SendEmail")]); + + // The deserialized chat history contains a list of function calls and the final answer to the question regarding the color of the sky in Boston. + chatHistory.AddUserMessage("Send the exact answer to my email: abc@domain.com"); + + var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + // Act + var result = await kernel.GetRequiredService().GetChatMessageContentAsync(chatHistory, settings, kernel); + + // Assert + Assert.Equal("abc@domain.com", emailRecipient); + Assert.Contains("61\u00B0F", emailBody); + } + + [Fact] + public async Task ItShouldSupportNewFunctionCallingModelSerializedIntoChatHistoryByPreviousVersionOfSKAsync() + { + // Arrange + var chatHistory = JsonSerializer.Deserialize(File.ReadAllText("./TestData/serializedChatHistoryV1_15_1.json")); + + // Remove metadata related to the old function-calling model to check if the new model, which relies on function call content/result classes, is handled correctly. + foreach (var chatMessage in chatHistory!) 
{ - kernel.ImportPluginFromFunctions("HelperFunctions", - [ - kernel.CreateFunctionFromMethod(() => DateTime.UtcNow.ToString("R"), "GetCurrentUtcTime", "Retrieves the current time in UTC."), - kernel.CreateFunctionFromMethod((string cityName) => - cityName switch - { - "Boston" => "61 and rainy", - _ => "31 and snowing", - }, "Get_Weather_For_City", "Gets the current weather for the specified city"), - ]); + if (chatMessage.Metadata is not null) + { + var metadata = new Dictionary(chatMessage.Metadata); + metadata.Remove(OpenAIChatMessageContent.ToolIdProperty); + metadata.Remove("ChatResponseMessage.FunctionToolCalls"); + chatMessage.Metadata = metadata; + } } - return kernel; - } + string? emailBody = null, emailRecipient = null; - private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() - .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) - .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) - .AddEnvironmentVariables() - .AddUserSecrets() - .Build(); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + kernel.ImportPluginFromFunctions("EmailPlugin", [KernelFunctionFactory.CreateFromMethod((string body, string recipient) => { emailBody = body; emailRecipient = recipient; }, "SendEmail")]); + + // The deserialized chat history contains a list of function calls and the final answer to the question regarding the color of the sky in Boston. + chatHistory.AddUserMessage("Send the exact answer to my email: abc@domain.com"); + + var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + // Act + var result = await kernel.GetRequiredService().GetChatMessageContentAsync(chatHistory, settings, kernel); + + // Assert + Assert.Equal("abc@domain.com", emailRecipient); + Assert.Contains("61\u00B0F", emailBody); + } /// - /// A plugin that returns the current time. 
+ /// This test verifies that the connector can handle the scenario where the assistant response message is added to the chat history. + /// The assistant response message with no function calls added to chat history caused the error: HTTP 400 (invalid_request_error:) [] should be non-empty - 'messages.3.tool_calls' /// - public class TimeInformation + [Fact] + public async Task AssistantResponseAddedToChatHistoryShouldBeHandledCorrectlyAsync() { - [KernelFunction] - [Description("Retrieves the current time in UTC.")] - public string GetCurrentUtcTime() => DateTime.UtcNow.ToString("R"); + // Arrange + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); - [KernelFunction] - [Description("Gets the age of the specified person.")] - public int GetAge(string personName) - { - if ("John".Equals(personName, StringComparison.OrdinalIgnoreCase)) - { - return 33; - } + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); - if ("Jim".Equals(personName, StringComparison.OrdinalIgnoreCase)) - { - return 30; - } + var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; - return -1; - } + var sut = kernel.GetRequiredService(); + + // Act + var assistanceResponse = await sut.GetChatMessageContentAsync(chatHistory, settings, kernel); - [KernelFunction] - public int InterpretValue(int value) => value * 2; + chatHistory.Add(assistanceResponse); // Adding assistance response to chat history. 
+ chatHistory.AddUserMessage("Return only the color name."); + + await sut.GetChatMessageContentAsync(chatHistory, settings, kernel); } - public class WeatherPlugin + private Kernel CreateAndInitializeKernel(bool importHelperPlugin = false) { - [KernelFunction, Description("Get current temperature.")] - public Task GetCurrentTemperatureAsync(WeatherParameters parameters) - { - if (parameters.City.Name == "Dublin" && (parameters.City.Country == "Ireland" || parameters.City.Country == "IE")) - { - return Task.FromResult(42.8); // 42.8 Fahrenheit. - } + var OpenAIConfiguration = this._configuration.GetSection("OpenAI").Get(); + Assert.NotNull(OpenAIConfiguration); + Assert.NotNull(OpenAIConfiguration.ChatModelId!); + Assert.NotNull(OpenAIConfiguration.ApiKey); - throw new NotSupportedException($"Weather in {parameters.City.Name} ({parameters.City.Country}) is not supported."); - } + var kernelBuilder = base.CreateKernelBuilder(); - [KernelFunction, Description("Convert temperature from Fahrenheit to Celsius.")] - public Task ConvertTemperatureAsync(double temperatureInFahrenheit) + kernelBuilder.AddOpenAIChatCompletion( + modelId: OpenAIConfiguration.ChatModelId, + apiKey: OpenAIConfiguration.ApiKey); + + var kernel = kernelBuilder.Build(); + + if (importHelperPlugin) { - double temperatureInCelsius = (temperatureInFahrenheit - 32) * 5 / 9; - return Task.FromResult(temperatureInCelsius); + kernel.ImportPluginFromFunctions("HelperFunctions", + [ + kernel.CreateFunctionFromMethod(() => DateTime.UtcNow.ToString("R"), "GetCurrentUtcTime", "Retrieves the current time in UTC."), + kernel.CreateFunctionFromMethod((string cityName) => + { + return cityName switch + { + "Boston" => "61 and rainy", + _ => "31 and snowing", + }; + }, "Get_Weather_For_City", "Gets the current weather for the specified city"), + kernel.CreateFunctionFromMethod((WeatherParameters parameters) => + { + if (parameters.City.Name == "Dublin" && (parameters.City.Country == "Ireland" || 
parameters.City.Country == "IE")) + { + return Task.FromResult(42.8); // 42.8 Fahrenheit. + } + + throw new NotSupportedException($"Weather in {parameters.City.Name} ({parameters.City.Country}) is not supported."); + }, "Get_Current_Temperature", "Get current temperature."), + kernel.CreateFunctionFromMethod((double temperatureInFahrenheit) => + { + double temperatureInCelsius = (temperatureInFahrenheit - 32) * 5 / 9; + return Task.FromResult(temperatureInCelsius); + }, "Convert_Temperature_From_Fahrenheit_To_Celsius", "Convert temperature from Fahrenheit to Celsius.") + ]); } + + return kernel; } public record WeatherParameters(City City); @@ -802,8 +917,6 @@ public class City public string Country { get; set; } = string.Empty; } - #region private - private sealed class FakeFunctionFilter : IFunctionInvocationFilter { private readonly Func, Task>? _onFunctionInvocation; @@ -818,36 +931,10 @@ public Task OnFunctionInvocationAsync(FunctionInvocationContext context, Func dateMatchingLastDayName 'Tuesday' => Tuesday, 16 May, 2023")] - public string DateMatchingLastDayName( - [Description("The day name to match")] DayOfWeek input, - IFormatProvider? 
formatProvider = null) - { - DateTimeOffset dateTime = this._timeProvider.GetUtcNow(); - - // Walk backwards from the previous day for up to a week to find the matching day - for (int i = 1; i <= 7; ++i) - { - dateTime = dateTime.AddDays(-1); - if (dateTime.DayOfWeek == input) - { - break; - } - } - - return dateTime.ToString("D", formatProvider); - } - } + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); } diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_NonStreamingTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_NonStreamingTests.cs new file mode 100644 index 000000000000..4d8f3ac7914d --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_NonStreamingTests.cs @@ -0,0 +1,169 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Text.Json; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.TextGeneration; +using OpenAI.Chat; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.OpenAI; + +#pragma warning disable xUnit1004 // Contains test methods used in manual verification. Disable warning for this file only. 
+ +public sealed class OpenAIChatCompletionNonStreamingTests : BaseIntegrationTest +{ + [Fact] + public async Task ChatCompletionShouldUseChatSystemPromptAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var chatCompletion = kernel.Services.GetRequiredService(); + + var settings = new OpenAIPromptExecutionSettings { ChatSystemPrompt = "Reply \"I don't know\" to every question." }; + + // Act + var result = await chatCompletion.GetChatMessageContentAsync("What is the capital of France?", settings, kernel); + + // Assert + Assert.Contains("I don't know", result.Content); + } + + [Fact] + public async Task ChatCompletionShouldUseChatHistoryAndReturnMetadataAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var chatCompletion = kernel.Services.GetRequiredService(); + + var chatHistory = new ChatHistory("Reply \"I don't know\" to every question."); + chatHistory.AddUserMessage("What is the capital of France?"); + + // Act + var result = await chatCompletion.GetChatMessageContentAsync(chatHistory, null, kernel); + + // Assert + Assert.Contains("I don't know", result.Content); + Assert.NotNull(result.Metadata); + + Assert.True(result.Metadata.TryGetValue("Id", out object? id)); + Assert.NotNull(id); + + Assert.True(result.Metadata.TryGetValue("CreatedAt", out object? createdAt)); + Assert.NotNull(createdAt); + + Assert.True(result.Metadata.ContainsKey("SystemFingerprint")); + + Assert.True(result.Metadata.TryGetValue("Usage", out object? 
usageObject)); + Assert.NotNull(usageObject); + + var jsonObject = JsonSerializer.SerializeToElement(usageObject); + Assert.True(jsonObject.TryGetProperty("InputTokens", out JsonElement promptTokensJson)); + Assert.True(promptTokensJson.TryGetInt32(out int promptTokens)); + Assert.NotEqual(0, promptTokens); + + Assert.True(jsonObject.TryGetProperty("OutputTokens", out JsonElement completionTokensJson)); + Assert.True(completionTokensJson.TryGetInt32(out int completionTokens)); + Assert.NotEqual(0, completionTokens); + + Assert.True(result.Metadata.TryGetValue("FinishReason", out object? finishReason)); + Assert.Equal("Stop", finishReason); + + Assert.True(result.Metadata.TryGetValue("ContentTokenLogProbabilities", out object? logProbabilityInfo)); + Assert.Empty((logProbabilityInfo as IReadOnlyList)!); + } + + [Fact] + public async Task TextGenerationShouldUseChatSystemPromptAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var textGeneration = kernel.Services.GetRequiredService(); + + var settings = new OpenAIPromptExecutionSettings { ChatSystemPrompt = "Reply \"I don't know\" to every question." }; + + // Act + var result = await textGeneration.GetTextContentAsync("What is the capital of France?", settings, kernel); + + // Assert + Assert.Contains("I don't know", result.Text); + } + + [Fact] + public async Task TextGenerationShouldReturnMetadataAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var textGeneration = kernel.Services.GetRequiredService(); + + // Act + var result = await textGeneration.GetTextContentAsync("Reply \"I don't know\" to every question. What is the capital of France?", null, kernel); + + // Assert + Assert.Contains("I don't know", result.Text); + Assert.NotNull(result.Metadata); + + Assert.True(result.Metadata.TryGetValue("Id", out object? id)); + Assert.NotNull(id); + + Assert.True(result.Metadata.TryGetValue("CreatedAt", out object? 
createdAt)); + Assert.NotNull(createdAt); + + Assert.True(result.Metadata.ContainsKey("SystemFingerprint")); + + Assert.True(result.Metadata.TryGetValue("Usage", out object? usageObject)); + Assert.NotNull(usageObject); + + var jsonObject = JsonSerializer.SerializeToElement(usageObject); + Assert.True(jsonObject.TryGetProperty("InputTokens", out JsonElement promptTokensJson)); + Assert.True(promptTokensJson.TryGetInt32(out int promptTokens)); + Assert.NotEqual(0, promptTokens); + + Assert.True(jsonObject.TryGetProperty("OutputTokens", out JsonElement completionTokensJson)); + Assert.True(completionTokensJson.TryGetInt32(out int completionTokens)); + Assert.NotEqual(0, completionTokens); + + Assert.True(result.Metadata.TryGetValue("FinishReason", out object? finishReason)); + Assert.Equal("Stop", finishReason); + + Assert.True(result.Metadata.TryGetValue("ContentTokenLogProbabilities", out object? logProbabilityInfo)); + Assert.Empty((logProbabilityInfo as IReadOnlyList)!); + } + + #region internals + + private Kernel CreateAndInitializeKernel() + { + var OpenAIConfiguration = this._configuration.GetSection("OpenAI").Get(); + Assert.NotNull(OpenAIConfiguration); + Assert.NotNull(OpenAIConfiguration.ChatModelId!); + Assert.NotNull(OpenAIConfiguration.ApiKey); + + var kernelBuilder = base.CreateKernelBuilder(); + + kernelBuilder.AddOpenAIChatCompletion( + modelId: OpenAIConfiguration.ChatModelId, + apiKey: OpenAIConfiguration.ApiKey); + + return kernelBuilder.Build(); + } + + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + #endregion +} diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_StreamingTests.cs 
b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_StreamingTests.cs new file mode 100644 index 000000000000..1c09606cf932 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_StreamingTests.cs @@ -0,0 +1,176 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Text; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.TextGeneration; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.OpenAI; + +#pragma warning disable xUnit1004 // Contains test methods used in manual verification. Disable warning for this file only. + +public sealed class OpenAIChatCompletionStreamingTests : BaseIntegrationTest +{ + [Fact] + public async Task ChatCompletionShouldUseChatSystemPromptAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var chatCompletion = kernel.Services.GetRequiredService(); + + var settings = new OpenAIPromptExecutionSettings { ChatSystemPrompt = "Reply \"I don't know\" to every question." 
}; + + var stringBuilder = new StringBuilder(); + + // Act + await foreach (var update in chatCompletion.GetStreamingChatMessageContentsAsync("What is the capital of France?", settings, kernel)) + { + stringBuilder.Append(update.Content); + } + + // Assert + Assert.Contains("I don't know", stringBuilder.ToString()); + } + + [Fact] + public async Task ChatCompletionShouldUseChatHistoryAndReturnMetadataAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var chatCompletion = kernel.Services.GetRequiredService(); + + var chatHistory = new ChatHistory("Reply \"I don't know\" to every question."); + chatHistory.AddUserMessage("What is the capital of France?"); + + var stringBuilder = new StringBuilder(); + var metadata = new Dictionary(); + + // Act + await foreach (var update in chatCompletion.GetStreamingChatMessageContentsAsync(chatHistory, null, kernel)) + { + stringBuilder.Append(update.Content); + + foreach (var key in update.Metadata!.Keys) + { + if (!metadata.TryGetValue(key, out var value) || value is null) + { + metadata[key] = update.Metadata[key]; + } + } + } + + // Assert + Assert.Contains("I don't know", stringBuilder.ToString()); + Assert.NotNull(metadata); + + Assert.True(metadata.TryGetValue("Id", out object? id)); + Assert.NotNull(id); + + Assert.True(metadata.TryGetValue("CreatedAt", out object? createdAt)); + Assert.NotNull(createdAt); + + Assert.True(metadata.ContainsKey("SystemFingerprint")); + + Assert.True(metadata.TryGetValue("FinishReason", out object? finishReason)); + Assert.Equal("Stop", finishReason); + } + + [Fact] + public async Task TextGenerationShouldUseChatSystemPromptAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var textGeneration = kernel.Services.GetRequiredService(); + + var settings = new OpenAIPromptExecutionSettings { ChatSystemPrompt = "Reply \"I don't know\" to every question." 
}; + + var stringBuilder = new StringBuilder(); + + // Act + await foreach (var update in textGeneration.GetStreamingTextContentsAsync("What is the capital of France?", settings, kernel)) + { + stringBuilder.Append(update); + } + + // Assert + Assert.Contains("I don't know", stringBuilder.ToString()); + } + + [Fact] + public async Task TextGenerationShouldReturnMetadataAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var textGeneration = kernel.Services.GetRequiredService(); + + // Act + var stringBuilder = new StringBuilder(); + var metadata = new Dictionary(); + + // Act + await foreach (var update in textGeneration.GetStreamingTextContentsAsync("What is the capital of France?", null, kernel)) + { + stringBuilder.Append(update); + + foreach (var key in update.Metadata!.Keys) + { + if (!metadata.TryGetValue(key, out var value) || value is null) + { + metadata[key] = update.Metadata[key]; + } + } + } + + // Assert + Assert.NotNull(metadata); + + Assert.True(metadata.TryGetValue("Id", out object? id)); + Assert.NotNull(id); + + Assert.True(metadata.TryGetValue("CreatedAt", out object? createdAt)); + Assert.NotNull(createdAt); + + Assert.True(metadata.ContainsKey("SystemFingerprint")); + + Assert.True(metadata.TryGetValue("FinishReason", out object? 
finishReason)); + Assert.Equal("Stop", finishReason); + } + + #region internals + + private Kernel CreateAndInitializeKernel() + { + var OpenAIConfiguration = this._configuration.GetSection("OpenAI").Get(); + Assert.NotNull(OpenAIConfiguration); + Assert.NotNull(OpenAIConfiguration.ChatModelId!); + Assert.NotNull(OpenAIConfiguration.ApiKey); + + var kernelBuilder = base.CreateKernelBuilder(); + + kernelBuilder.AddOpenAIChatCompletion( + modelId: OpenAIConfiguration.ChatModelId, + apiKey: OpenAIConfiguration.ApiKey); + + return kernelBuilder.Build(); + } + + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + #endregion +} diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAICompletionTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAICompletionTests.cs deleted file mode 100644 index 675661b76d83..000000000000 --- a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAICompletionTests.cs +++ /dev/null @@ -1,668 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Collections.Generic; -using System.Linq; -using System.Net; -using System.Net.Http; -using System.Text; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Azure.AI.OpenAI; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Http.Resilience; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using SemanticKernel.IntegrationTests.TestSettings; -using Xunit; -using Xunit.Abstractions; - -namespace SemanticKernel.IntegrationTests.Connectors.OpenAI; - -#pragma warning disable xUnit1004 // Contains test methods used in manual verification. Disable warning for this file only. - -public sealed class OpenAICompletionTests(ITestOutputHelper output) : IDisposable -{ - private const string InputParameterName = "input"; - private readonly IKernelBuilder _kernelBuilder = Kernel.CreateBuilder(); - private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() - .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) - .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) - .AddEnvironmentVariables() - .AddUserSecrets() - .Build(); - - [Theory(Skip = "OpenAI will often throttle requests. 
This test is for manual verification.")] - [InlineData("Where is the most famous fish market in Seattle, Washington, USA?", "Pike Place Market")] - public async Task OpenAITestAsync(string prompt, string expectedAnswerContains) - { - // Arrange - var openAIConfiguration = this._configuration.GetSection("OpenAI").Get(); - Assert.NotNull(openAIConfiguration); - - this._kernelBuilder.Services.AddSingleton(this._logger); - Kernel target = this._kernelBuilder - .AddOpenAITextGeneration( - serviceId: openAIConfiguration.ServiceId, - modelId: openAIConfiguration.ModelId, - apiKey: openAIConfiguration.ApiKey) - .Build(); - - IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "ChatPlugin"); - - // Act - FunctionResult actual = await target.InvokeAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt }); - - // Assert - Assert.Contains(expectedAnswerContains, actual.GetValue(), StringComparison.OrdinalIgnoreCase); - } - - [Theory(Skip = "OpenAI will often throttle requests. 
This test is for manual verification.")] - [InlineData("Where is the most famous fish market in Seattle, Washington, USA?", "Pike Place Market")] - public async Task OpenAIChatAsTextTestAsync(string prompt, string expectedAnswerContains) - { - // Arrange - this._kernelBuilder.Services.AddSingleton(this._logger); - IKernelBuilder builder = this._kernelBuilder; - - this.ConfigureChatOpenAI(builder); - - Kernel target = builder.Build(); - - IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "ChatPlugin"); - - // Act - FunctionResult actual = await target.InvokeAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt }); - - // Assert - Assert.Contains(expectedAnswerContains, actual.GetValue(), StringComparison.OrdinalIgnoreCase); - } - - [Fact(Skip = "Skipping while we investigate issue with GitHub actions.")] - public async Task CanUseOpenAiChatForTextGenerationAsync() - { - // Note: we use OpenAI Chat Completion and GPT 3.5 Turbo - this._kernelBuilder.Services.AddSingleton(this._logger); - IKernelBuilder builder = this._kernelBuilder; - this.ConfigureChatOpenAI(builder); - - Kernel target = builder.Build(); - - var func = target.CreateFunctionFromPrompt( - "List the two planets after '{{$input}}', excluding moons, using bullet points.", - new OpenAIPromptExecutionSettings()); - - var result = await func.InvokeAsync(target, new() { [InputParameterName] = "Jupiter" }); - - Assert.NotNull(result); - Assert.Contains("Saturn", result.GetValue(), StringComparison.InvariantCultureIgnoreCase); - Assert.Contains("Uranus", result.GetValue(), StringComparison.InvariantCultureIgnoreCase); - } - - [Theory] - [InlineData(false, "Where is the most famous fish market in Seattle, Washington, USA?", "Pike Place")] - [InlineData(true, "Where is the most famous fish market in Seattle, Washington, USA?", "Pike Place")] - public async Task AzureOpenAIStreamingTestAsync(bool useChatModel, string prompt, string expectedAnswerContains) - { - 
// Arrange - this._kernelBuilder.Services.AddSingleton(this._logger); - var builder = this._kernelBuilder; - - if (useChatModel) - { - this.ConfigureAzureOpenAIChatAsText(builder); - } - else - { - this.ConfigureAzureOpenAI(builder); - } - - Kernel target = builder.Build(); - - IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "ChatPlugin"); - - StringBuilder fullResult = new(); - // Act - await foreach (var content in target.InvokeStreamingAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt })) - { - if (content is StreamingChatMessageContent messageContent) - { - Assert.NotNull(messageContent.Role); - } - - fullResult.Append(content); - } - - // Assert - Assert.Contains(expectedAnswerContains, fullResult.ToString(), StringComparison.OrdinalIgnoreCase); - } - - [Theory] - [InlineData(false, "Where is the most famous fish market in Seattle, Washington, USA?", "Pike Place")] - [InlineData(true, "Where is the most famous fish market in Seattle, Washington, USA?", "Pike Place")] - public async Task AzureOpenAITestAsync(bool useChatModel, string prompt, string expectedAnswerContains) - { - // Arrange - this._kernelBuilder.Services.AddSingleton(this._logger); - var builder = this._kernelBuilder; - - if (useChatModel) - { - this.ConfigureAzureOpenAIChatAsText(builder); - } - else - { - this.ConfigureAzureOpenAI(builder); - } - - Kernel target = builder.Build(); - - IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "ChatPlugin"); - - // Act - FunctionResult actual = await target.InvokeAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt }); - - // Assert - Assert.Contains(expectedAnswerContains, actual.GetValue(), StringComparison.OrdinalIgnoreCase); - } - - // If the test fails, please note that SK retry logic may not be fully integrated into the underlying code using Azure SDK - [Theory] - [InlineData("Where is the most famous fish market in Seattle, Washington, 
USA?", "Resilience event occurred")] - public async Task OpenAIHttpRetryPolicyTestAsync(string prompt, string expectedOutput) - { - OpenAIConfiguration? openAIConfiguration = this._configuration.GetSection("OpenAI").Get(); - Assert.NotNull(openAIConfiguration); - - this._kernelBuilder.Services.AddSingleton(this._testOutputHelper); - this._kernelBuilder - .AddOpenAITextGeneration( - serviceId: openAIConfiguration.ServiceId, - modelId: openAIConfiguration.ModelId, - apiKey: "INVALID_KEY"); // Use an invalid API key to force a 401 Unauthorized response - this._kernelBuilder.Services.ConfigureHttpClientDefaults(c => - { - // Use a standard resiliency policy, augmented to retry on 401 Unauthorized for this example - c.AddStandardResilienceHandler().Configure(o => - { - o.Retry.ShouldHandle = args => ValueTask.FromResult(args.Outcome.Result?.StatusCode is HttpStatusCode.Unauthorized); - }); - }); - Kernel target = this._kernelBuilder.Build(); - - IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "SummarizePlugin"); - - // Act - await Assert.ThrowsAsync(() => target.InvokeAsync(plugins["SummarizePlugin"]["Summarize"], new() { [InputParameterName] = prompt })); - - // Assert - Assert.Contains(expectedOutput, this._testOutputHelper.GetLogs(), StringComparison.OrdinalIgnoreCase); - } - - // If the test fails, please note that SK retry logic may not be fully integrated into the underlying code using Azure SDK - [Theory] - [InlineData("Where is the most famous fish market in Seattle, Washington, USA?", "Resilience event occurred")] - public async Task AzureOpenAIHttpRetryPolicyTestAsync(string prompt, string expectedOutput) - { - this._kernelBuilder.Services.AddSingleton(this._testOutputHelper); - IKernelBuilder builder = this._kernelBuilder; - - var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); - Assert.NotNull(azureOpenAIConfiguration); - - // Use an invalid API key to force a 401 Unauthorized response - 
builder.AddAzureOpenAITextGeneration( - deploymentName: azureOpenAIConfiguration.DeploymentName, - modelId: azureOpenAIConfiguration.ModelId, - endpoint: azureOpenAIConfiguration.Endpoint, - apiKey: "INVALID_KEY"); - - builder.Services.ConfigureHttpClientDefaults(c => - { - // Use a standard resiliency policy, augmented to retry on 401 Unauthorized for this example - c.AddStandardResilienceHandler().Configure(o => - { - o.Retry.ShouldHandle = args => ValueTask.FromResult(args.Outcome.Result?.StatusCode is HttpStatusCode.Unauthorized); - }); - }); - - Kernel target = builder.Build(); - - IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "SummarizePlugin"); - - // Act - await Assert.ThrowsAsync(() => target.InvokeAsync(plugins["SummarizePlugin"]["Summarize"], new() { [InputParameterName] = prompt })); - - // Assert - Assert.Contains(expectedOutput, this._testOutputHelper.GetLogs(), StringComparison.OrdinalIgnoreCase); - } - - [Theory] - [InlineData(false)] - [InlineData(true)] - public async Task AzureOpenAIShouldReturnMetadataAsync(bool useChatModel) - { - // Arrange - this._kernelBuilder.Services.AddSingleton(this._logger); - - if (useChatModel) - { - this.ConfigureAzureOpenAIChatAsText(this._kernelBuilder); - } - else - { - this.ConfigureAzureOpenAI(this._kernelBuilder); - } - - var kernel = this._kernelBuilder.Build(); - - var plugin = TestHelpers.ImportSamplePlugins(kernel, "FunPlugin"); - - // Act - var result = await kernel.InvokeAsync(plugin["FunPlugin"]["Limerick"]); - - // Assert - Assert.NotNull(result.Metadata); - - // Usage - Assert.True(result.Metadata.TryGetValue("Usage", out object? 
usageObject)); - Assert.NotNull(usageObject); - - var jsonObject = JsonSerializer.SerializeToElement(usageObject); - Assert.True(jsonObject.TryGetProperty("PromptTokens", out JsonElement promptTokensJson)); - Assert.True(promptTokensJson.TryGetInt32(out int promptTokens)); - Assert.NotEqual(0, promptTokens); - - Assert.True(jsonObject.TryGetProperty("CompletionTokens", out JsonElement completionTokensJson)); - Assert.True(completionTokensJson.TryGetInt32(out int completionTokens)); - Assert.NotEqual(0, completionTokens); - - // ContentFilterResults - Assert.True(result.Metadata.ContainsKey("ContentFilterResults")); - } - - [Fact] - public async Task OpenAIHttpInvalidKeyShouldReturnErrorDetailAsync() - { - // Arrange - OpenAIConfiguration? openAIConfiguration = this._configuration.GetSection("OpenAI").Get(); - Assert.NotNull(openAIConfiguration); - - // Use an invalid API key to force a 401 Unauthorized response - this._kernelBuilder.Services.AddSingleton(this._logger); - Kernel target = this._kernelBuilder - .AddOpenAITextGeneration( - modelId: openAIConfiguration.ModelId, - apiKey: "INVALID_KEY", - serviceId: openAIConfiguration.ServiceId) - .Build(); - - IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "SummarizePlugin"); - - // Act and Assert - var ex = await Assert.ThrowsAsync(() => target.InvokeAsync(plugins["SummarizePlugin"]["Summarize"], new() { [InputParameterName] = "Any" })); - - Assert.Equal(HttpStatusCode.Unauthorized, ((HttpOperationException)ex).StatusCode); - } - - [Fact] - public async Task AzureOpenAIHttpInvalidKeyShouldReturnErrorDetailAsync() - { - // Arrange - var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); - Assert.NotNull(azureOpenAIConfiguration); - - this._kernelBuilder.Services.AddSingleton(this._testOutputHelper); - Kernel target = this._kernelBuilder - .AddAzureOpenAITextGeneration( - deploymentName: azureOpenAIConfiguration.DeploymentName, - modelId: 
azureOpenAIConfiguration.ModelId, - endpoint: azureOpenAIConfiguration.Endpoint, - apiKey: "INVALID_KEY", - serviceId: azureOpenAIConfiguration.ServiceId) - .Build(); - - IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "SummarizePlugin"); - - // Act and Assert - var ex = await Assert.ThrowsAsync(() => target.InvokeAsync(plugins["SummarizePlugin"]["Summarize"], new() { [InputParameterName] = "Any" })); - - Assert.Equal(HttpStatusCode.Unauthorized, ((HttpOperationException)ex).StatusCode); - } - - [Fact] - public async Task AzureOpenAIHttpExceededMaxTokensShouldReturnErrorDetailAsync() - { - var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); - Assert.NotNull(azureOpenAIConfiguration); - - // Arrange - this._kernelBuilder.Services.AddSingleton(this._testOutputHelper); - Kernel target = this._kernelBuilder - .AddAzureOpenAITextGeneration( - deploymentName: azureOpenAIConfiguration.DeploymentName, - modelId: azureOpenAIConfiguration.ModelId, - endpoint: azureOpenAIConfiguration.Endpoint, - apiKey: azureOpenAIConfiguration.ApiKey, - serviceId: azureOpenAIConfiguration.ServiceId) - .Build(); - - IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "SummarizePlugin"); - - // Act - // Assert - await Assert.ThrowsAsync(() => plugins["SummarizePlugin"]["Summarize"].InvokeAsync(target, new() { [InputParameterName] = string.Join('.', Enumerable.Range(1, 40000)) })); - } - - [Theory(Skip = "This test is for manual verification.")] - [InlineData("\n", AIServiceType.OpenAI)] - [InlineData("\r\n", AIServiceType.OpenAI)] - [InlineData("\n", AIServiceType.AzureOpenAI)] - [InlineData("\r\n", AIServiceType.AzureOpenAI)] - public async Task CompletionWithDifferentLineEndingsAsync(string lineEnding, AIServiceType service) - { - // Arrange - var prompt = - "Given a json input and a request. Apply the request on the json input and return the result. 
" + - $"Put the result in between tags{lineEnding}" + - $$"""Input:{{lineEnding}}{"name": "John", "age": 30}{{lineEnding}}{{lineEnding}}Request:{{lineEnding}}name"""; - - const string ExpectedAnswerContains = "John"; - - this._kernelBuilder.Services.AddSingleton(this._logger); - Kernel target = this._kernelBuilder.Build(); - - this._serviceConfiguration[service](target); - - IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "ChatPlugin"); - - // Act - FunctionResult actual = await target.InvokeAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt }); - - // Assert - Assert.Contains(ExpectedAnswerContains, actual.GetValue(), StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public async Task AzureOpenAIInvokePromptTestAsync() - { - // Arrange - this._kernelBuilder.Services.AddSingleton(this._logger); - var builder = this._kernelBuilder; - this.ConfigureAzureOpenAI(builder); - Kernel target = builder.Build(); - - var prompt = "Where is the most famous fish market in Seattle, Washington, USA?"; - - // Act - FunctionResult actual = await target.InvokePromptAsync(prompt, new(new OpenAIPromptExecutionSettings() { MaxTokens = 150 })); - - // Assert - Assert.Contains("Pike Place", actual.GetValue(), StringComparison.OrdinalIgnoreCase); - Assert.NotNull(actual.Metadata); - } - - [Fact] - public async Task AzureOpenAIInvokePromptWithMultipleResultsTestAsync() - { - // Arrange - this._kernelBuilder.Services.AddSingleton(this._logger); - var builder = this._kernelBuilder; - this.ConfigureAzureOpenAIChatAsText(builder); - Kernel target = builder.Build(); - - var prompt = "Where is the most famous fish market in Seattle, Washington, USA?"; - - var executionSettings = new OpenAIPromptExecutionSettings() { MaxTokens = 150, ResultsPerPrompt = 3 }; - - // Act - FunctionResult actual = await target.InvokePromptAsync(prompt, new(executionSettings)); - - // Assert - Assert.Null(actual.Metadata); - - var chatMessageContents = 
actual.GetValue>(); - - Assert.NotNull(chatMessageContents); - Assert.Equal(executionSettings.ResultsPerPrompt, chatMessageContents.Count); - - foreach (var chatMessageContent in chatMessageContents) - { - Assert.NotNull(chatMessageContent.Metadata); - Assert.Contains("Pike Place", chatMessageContent.Content, StringComparison.OrdinalIgnoreCase); - } - } - - [Fact] - public async Task AzureOpenAIDefaultValueTestAsync() - { - // Arrange - this._kernelBuilder.Services.AddSingleton(this._logger); - var builder = this._kernelBuilder; - this.ConfigureAzureOpenAI(builder); - Kernel target = builder.Build(); - - IReadOnlyKernelPluginCollection plugin = TestHelpers.ImportSamplePlugins(target, "FunPlugin"); - - // Act - FunctionResult actual = await target.InvokeAsync(plugin["FunPlugin"]["Limerick"]); - - // Assert - Assert.Contains("Bob", actual.GetValue(), StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public async Task MultipleServiceLoadPromptConfigTestAsync() - { - // Arrange - this._kernelBuilder.Services.AddSingleton(this._logger); - var builder = this._kernelBuilder; - this.ConfigureAzureOpenAI(builder); - this.ConfigureInvalidAzureOpenAI(builder); - - Kernel target = builder.Build(); - - var prompt = "Where is the most famous fish market in Seattle, Washington, USA?"; - var defaultPromptModel = new PromptTemplateConfig(prompt) { Name = "FishMarket1" }; - var azurePromptModel = PromptTemplateConfig.FromJson(""" - { - "name": "FishMarket2", - "execution_settings": { - "azure-gpt-35-turbo-instruct": { - "max_tokens": 256 - } - } - } - """); - azurePromptModel.Template = prompt; - - var defaultFunc = target.CreateFunctionFromPrompt(defaultPromptModel); - var azureFunc = target.CreateFunctionFromPrompt(azurePromptModel); - - // Act - await Assert.ThrowsAsync(() => target.InvokeAsync(defaultFunc)); - - FunctionResult azureResult = await target.InvokeAsync(azureFunc); - - // Assert - Assert.Contains("Pike Place", azureResult.GetValue(), 
StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public async Task ChatSystemPromptIsNotIgnoredAsync() - { - // Arrange - var settings = new OpenAIPromptExecutionSettings { ChatSystemPrompt = "Reply \"I don't know\" to every question." }; - - this._kernelBuilder.Services.AddSingleton(this._logger); - var builder = this._kernelBuilder; - this.ConfigureAzureOpenAIChatAsText(builder); - Kernel target = builder.Build(); - - // Act - var result = await target.InvokePromptAsync("Where is the most famous fish market in Seattle, Washington, USA?", new(settings)); - - // Assert - Assert.Contains("I don't know", result.ToString(), StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public async Task SemanticKernelVersionHeaderIsSentAsync() - { - // Arrange - var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); - Assert.NotNull(azureOpenAIConfiguration); - Assert.NotNull(azureOpenAIConfiguration.ChatDeploymentName); - Assert.NotNull(azureOpenAIConfiguration.ApiKey); - Assert.NotNull(azureOpenAIConfiguration.Endpoint); - Assert.NotNull(azureOpenAIConfiguration.ServiceId); - - using var defaultHandler = new HttpClientHandler(); - using var httpHeaderHandler = new HttpHeaderHandler(defaultHandler); - using var httpClient = new HttpClient(httpHeaderHandler); - this._kernelBuilder.Services.AddSingleton(this._logger); - var builder = this._kernelBuilder; - builder.AddAzureOpenAIChatCompletion( - deploymentName: azureOpenAIConfiguration.ChatDeploymentName, - modelId: azureOpenAIConfiguration.ChatModelId, - endpoint: azureOpenAIConfiguration.Endpoint, - apiKey: azureOpenAIConfiguration.ApiKey, - serviceId: azureOpenAIConfiguration.ServiceId, - httpClient: httpClient); - Kernel target = builder.Build(); - - // Act - var result = await target.InvokePromptAsync("Where is the most famous fish market in Seattle, Washington, USA?"); - - // Assert - Assert.NotNull(httpHeaderHandler.RequestHeaders); - 
Assert.True(httpHeaderHandler.RequestHeaders.TryGetValues("Semantic-Kernel-Version", out var values)); - } - - [Theory(Skip = "This test is for manual verification.")] - [InlineData(null, null)] - [InlineData(false, null)] - [InlineData(true, 2)] - [InlineData(true, 5)] - public async Task LogProbsDataIsReturnedWhenRequestedAsync(bool? logprobs, int? topLogprobs) - { - // Arrange - var settings = new OpenAIPromptExecutionSettings { Logprobs = logprobs, TopLogprobs = topLogprobs }; - - this._kernelBuilder.Services.AddSingleton(this._logger); - var builder = this._kernelBuilder; - this.ConfigureAzureOpenAIChatAsText(builder); - Kernel target = builder.Build(); - - // Act - var result = await target.InvokePromptAsync("Hi, can you help me today?", new(settings)); - - var logProbabilityInfo = result.Metadata?["LogProbabilityInfo"] as ChatChoiceLogProbabilityInfo; - - // Assert - if (logprobs is true) - { - Assert.NotNull(logProbabilityInfo); - Assert.Equal(topLogprobs, logProbabilityInfo.TokenLogProbabilityResults[0].TopLogProbabilityEntries.Count); - } - else - { - Assert.Null(logProbabilityInfo); - } - } - - #region internals - - private readonly XunitLogger _logger = new(output); - private readonly RedirectOutput _testOutputHelper = new(output); - - private readonly Dictionary> _serviceConfiguration = []; - - public void Dispose() - { - this._logger.Dispose(); - this._testOutputHelper.Dispose(); - } - - private void ConfigureChatOpenAI(IKernelBuilder kernelBuilder) - { - var openAIConfiguration = this._configuration.GetSection("OpenAI").Get(); - - Assert.NotNull(openAIConfiguration); - Assert.NotNull(openAIConfiguration.ChatModelId); - Assert.NotNull(openAIConfiguration.ApiKey); - Assert.NotNull(openAIConfiguration.ServiceId); - - kernelBuilder.AddOpenAIChatCompletion( - modelId: openAIConfiguration.ChatModelId, - apiKey: openAIConfiguration.ApiKey, - serviceId: openAIConfiguration.ServiceId); - } - - private void ConfigureAzureOpenAI(IKernelBuilder kernelBuilder) - 
{ - var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); - - Assert.NotNull(azureOpenAIConfiguration); - Assert.NotNull(azureOpenAIConfiguration.DeploymentName); - Assert.NotNull(azureOpenAIConfiguration.Endpoint); - Assert.NotNull(azureOpenAIConfiguration.ApiKey); - Assert.NotNull(azureOpenAIConfiguration.ServiceId); - - kernelBuilder.AddAzureOpenAITextGeneration( - deploymentName: azureOpenAIConfiguration.DeploymentName, - modelId: azureOpenAIConfiguration.ModelId, - endpoint: azureOpenAIConfiguration.Endpoint, - apiKey: azureOpenAIConfiguration.ApiKey, - serviceId: azureOpenAIConfiguration.ServiceId); - } - private void ConfigureInvalidAzureOpenAI(IKernelBuilder kernelBuilder) - { - var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); - - Assert.NotNull(azureOpenAIConfiguration); - Assert.NotNull(azureOpenAIConfiguration.DeploymentName); - Assert.NotNull(azureOpenAIConfiguration.Endpoint); - - kernelBuilder.AddAzureOpenAITextGeneration( - deploymentName: azureOpenAIConfiguration.DeploymentName, - modelId: azureOpenAIConfiguration.ModelId, - endpoint: azureOpenAIConfiguration.Endpoint, - apiKey: "invalid-api-key", - serviceId: $"invalid-{azureOpenAIConfiguration.ServiceId}"); - } - - private void ConfigureAzureOpenAIChatAsText(IKernelBuilder kernelBuilder) - { - var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); - - Assert.NotNull(azureOpenAIConfiguration); - Assert.NotNull(azureOpenAIConfiguration.ChatDeploymentName); - Assert.NotNull(azureOpenAIConfiguration.ApiKey); - Assert.NotNull(azureOpenAIConfiguration.Endpoint); - Assert.NotNull(azureOpenAIConfiguration.ServiceId); - - kernelBuilder.AddAzureOpenAIChatCompletion( - deploymentName: azureOpenAIConfiguration.ChatDeploymentName, - modelId: azureOpenAIConfiguration.ChatModelId, - endpoint: azureOpenAIConfiguration.Endpoint, - apiKey: azureOpenAIConfiguration.ApiKey, - serviceId: azureOpenAIConfiguration.ServiceId); - 
} - - private sealed class HttpHeaderHandler(HttpMessageHandler innerHandler) : DelegatingHandler(innerHandler) - { - public System.Net.Http.Headers.HttpRequestHeaders? RequestHeaders { get; private set; } - - protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - this.RequestHeaders = request.Headers; - return await base.SendAsync(request, cancellationToken); - } - } - - #endregion -} diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIFileServiceTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIFileServiceTests.cs index 30b0c3d1115b..b0dc71c09eb7 100644 --- a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIFileServiceTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIFileServiceTests.cs @@ -10,19 +10,19 @@ using Microsoft.SemanticKernel.Connectors.OpenAI; using SemanticKernel.IntegrationTests.TestSettings; using Xunit; -using Xunit.Abstractions; namespace SemanticKernel.IntegrationTests.Connectors.OpenAI; #pragma warning disable xUnit1004 // Contains test methods used in manual verification. Disable warning for this file only. -public sealed class OpenAIFileServiceTests(ITestOutputHelper output) : IDisposable +[Obsolete("This class is deprecated and will be removed in a future version.")] +public sealed class OpenAIFileServiceTests { private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() - .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) .AddEnvironmentVariables() - .AddUserSecrets() + .AddUserSecrets() .Build(); [Theory(Skip = "OpenAI will often throttle requests. 
This test is for manual verification.")] @@ -120,15 +120,6 @@ private static async Task> GetFilesAsync #region internals - private readonly XunitLogger _logger = new(output); - private readonly RedirectOutput _testOutputHelper = new(output); - - public void Dispose() - { - this._logger.Dispose(); - this._testOutputHelper.Dispose(); - } - private OpenAIFileService CreateOpenAIFileService() { var openAIConfiguration = this._configuration.GetSection("OpenAI").Get(); @@ -137,7 +128,7 @@ private OpenAIFileService CreateOpenAIFileService() Assert.NotNull(openAIConfiguration.ApiKey); Assert.NotNull(openAIConfiguration.ServiceId); - return new(openAIConfiguration.ApiKey, openAIConfiguration.ServiceId, loggerFactory: this._logger); + return new(openAIConfiguration.ApiKey, openAIConfiguration.ServiceId); } private OpenAIFileService CreateAzureOpenAIFileService() @@ -149,7 +140,7 @@ private OpenAIFileService CreateAzureOpenAIFileService() Assert.NotNull(azureOpenAIConfiguration.ApiKey); Assert.NotNull(azureOpenAIConfiguration.ServiceId); - return new(new Uri(azureOpenAIConfiguration.Endpoint), azureOpenAIConfiguration.ApiKey, azureOpenAIConfiguration.ServiceId, loggerFactory: this._logger); + return new(new Uri(azureOpenAIConfiguration.Endpoint), azureOpenAIConfiguration.ApiKey, azureOpenAIConfiguration.ServiceId); } #endregion diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextEmbeddingTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextEmbeddingTests.cs index 74f63fa3fabd..bccc92bfa0f3 100644 --- a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextEmbeddingTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextEmbeddingTests.cs @@ -13,7 +13,7 @@ public sealed class OpenAITextEmbeddingTests { private const int AdaVectorLength = 1536; private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() - .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) + .AddJsonFile(path: 
"testsettings.json", optional: true, reloadOnChange: true) .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) .AddEnvironmentVariables() .AddUserSecrets() @@ -60,49 +60,4 @@ public async Task OpenAIWithDimensionsAsync(int? dimensions, int expectedVectorL // Assert Assert.Equal(expectedVectorLength, result.Length); } - - [Theory] - [InlineData("test sentence")] - public async Task AzureOpenAITestAsync(string testInputString) - { - // Arrange - AzureOpenAIConfiguration? azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAIEmbeddings").Get(); - Assert.NotNull(azureOpenAIConfiguration); - - var embeddingGenerator = new AzureOpenAITextEmbeddingGenerationService(azureOpenAIConfiguration.DeploymentName, - azureOpenAIConfiguration.Endpoint, - azureOpenAIConfiguration.ApiKey); - - // Act - var singleResult = await embeddingGenerator.GenerateEmbeddingAsync(testInputString); - var batchResult = await embeddingGenerator.GenerateEmbeddingsAsync([testInputString, testInputString, testInputString]); - - // Assert - Assert.Equal(AdaVectorLength, singleResult.Length); - Assert.Equal(3, batchResult.Count); - } - - [Theory] - [InlineData(null, 3072)] - [InlineData(1024, 1024)] - public async Task AzureOpenAIWithDimensionsAsync(int? dimensions, int expectedVectorLength) - { - // Arrange - const string TestInputString = "test sentence"; - - AzureOpenAIConfiguration? 
azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAIEmbeddings").Get(); - Assert.NotNull(azureOpenAIConfiguration); - - var embeddingGenerator = new AzureOpenAITextEmbeddingGenerationService( - "text-embedding-3-large", - azureOpenAIConfiguration.Endpoint, - azureOpenAIConfiguration.ApiKey, - dimensions: dimensions); - - // Act - var result = await embeddingGenerator.GenerateEmbeddingAsync(TestInputString); - - // Assert - Assert.Equal(expectedVectorLength, result.Length); - } } diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextToAudioTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextToAudioTests.cs index e35c357cf375..c2818abe2502 100644 --- a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextToAudioTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextToAudioTests.cs @@ -12,13 +12,13 @@ namespace SemanticKernel.IntegrationTests.Connectors.OpenAI; public sealed class OpenAITextToAudioTests { private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() - .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) .AddEnvironmentVariables() .AddUserSecrets() .Build(); - [Fact(Skip = "OpenAI will often throttle requests. This test is for manual verification.")] + [Fact]//(Skip = "OpenAI will often throttle requests. This test is for manual verification.")] public async Task OpenAITextToAudioTestAsync() { // Arrange @@ -38,28 +38,4 @@ public async Task OpenAITextToAudioTestAsync() var audioData = result.Data!.Value; Assert.False(audioData.IsEmpty); } - - [Fact] - public async Task AzureOpenAITextToAudioTestAsync() - { - // Arrange - AzureOpenAIConfiguration? 
azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAITextToAudio").Get(); - Assert.NotNull(azureOpenAIConfiguration); - - var kernel = Kernel.CreateBuilder() - .AddAzureOpenAITextToAudio( - azureOpenAIConfiguration.DeploymentName, - azureOpenAIConfiguration.Endpoint, - azureOpenAIConfiguration.ApiKey) - .Build(); - - var service = kernel.GetRequiredService(); - - // Act - var result = await service.GetAudioContentAsync("The sun rises in the east and sets in the west."); - - // Assert - var audioData = result.Data!.Value; - Assert.False(audioData.IsEmpty); - } } diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextToImageTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextToImageTests.cs index e133f91ee547..85512760dcd0 100644 --- a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextToImageTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextToImageTests.cs @@ -11,72 +11,50 @@ namespace SemanticKernel.IntegrationTests.Connectors.OpenAI; public sealed class OpenAITextToImageTests { private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() - .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) .AddEnvironmentVariables() - .AddUserSecrets() + .AddUserSecrets() .Build(); - [Fact(Skip = "This test is for manual verification.")] - public async Task OpenAITextToImageTestAsync() + [Theory(Skip = "This test is for manual verification.")] + [InlineData("dall-e-2", 512, 512)] + [InlineData("dall-e-3", 1024, 1024)] + public async Task OpenAITextToImageByModelTestAsync(string modelId, int width, int height) { // Arrange OpenAIConfiguration? 
openAIConfiguration = this._configuration.GetSection("OpenAITextToImage").Get(); Assert.NotNull(openAIConfiguration); var kernel = Kernel.CreateBuilder() - .AddOpenAITextToImage(apiKey: openAIConfiguration.ApiKey) + .AddOpenAITextToImage(apiKey: openAIConfiguration.ApiKey, modelId: modelId) .Build(); var service = kernel.GetRequiredService(); // Act - var result = await service.GenerateImageAsync("The sun rises in the east and sets in the west.", 512, 512); + var result = await service.GenerateImageAsync("The sun rises in the east and sets in the west.", width, height); // Assert Assert.NotNull(result); Assert.NotEmpty(result); } - [Fact(Skip = "This test is for manual verification.")] - public async Task OpenAITextToImageByModelTestAsync() + [Fact] + public async Task OpenAITextToImageUseDallE2ByDefaultAsync() { // Arrange OpenAIConfiguration? openAIConfiguration = this._configuration.GetSection("OpenAITextToImage").Get(); Assert.NotNull(openAIConfiguration); var kernel = Kernel.CreateBuilder() - .AddOpenAITextToImage(apiKey: openAIConfiguration.ApiKey, modelId: openAIConfiguration.ModelId) + .AddOpenAITextToImage(apiKey: openAIConfiguration.ApiKey, modelId: null) .Build(); var service = kernel.GetRequiredService(); // Act - var result = await service.GenerateImageAsync("The sun rises in the east and sets in the west.", 1024, 1024); - - // Assert - Assert.NotNull(result); - Assert.NotEmpty(result); - } - - [Fact(Skip = "This test is for manual verification.")] - public async Task AzureOpenAITextToImageTestAsync() - { - // Arrange - AzureOpenAIConfiguration? 
azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAITextToImage").Get(); - Assert.NotNull(azureOpenAIConfiguration); - - var kernel = Kernel.CreateBuilder() - .AddAzureOpenAITextToImage( - azureOpenAIConfiguration.DeploymentName, - azureOpenAIConfiguration.Endpoint, - azureOpenAIConfiguration.ApiKey) - .Build(); - - var service = kernel.GetRequiredService(); - - // Act - var result = await service.GenerateImageAsync("The sun rises in the east and sets in the west.", 1024, 1024); + var result = await service.GenerateImageAsync("The sun rises in the east and sets in the west.", 256, 256); // Assert Assert.NotNull(result); diff --git a/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithChatRolesStreamingTest.json b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithChatRolesStreamingTest.json new file mode 100644 index 000000000000..1a85a5330b24 --- /dev/null +++ b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithChatRolesStreamingTest.json @@ -0,0 +1,21 @@ +{ + "messages": [ + { + "content": "Can you help me tell the time in Seattle right now?", + "role": "user" + }, + { + "content": "Sure! 
The time in Seattle is currently 3:00 PM.", + "role": "assistant" + }, + { + "content": "What about New York?", + "role": "user" + } + ], + "model": "Dummy", + "stream": true, + "stream_options": { + "include_usage": true + } +} \ No newline at end of file diff --git a/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithChatRolesTest.json b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithChatRolesTest.json index 397d351c0f50..959c4f62fe15 100644 --- a/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithChatRolesTest.json +++ b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithChatRolesTest.json @@ -13,10 +13,5 @@ "role": "user" } ], - "temperature": 1, - "top_p": 1, - "n": 1, - "presence_penalty": 0, - "frequency_penalty": 0, "model": "Dummy" } \ No newline at end of file diff --git a/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithComplexObjectsStreamingTest.json b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithComplexObjectsStreamingTest.json new file mode 100644 index 000000000000..02f714872433 --- /dev/null +++ b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithComplexObjectsStreamingTest.json @@ -0,0 +1,13 @@ +{ + "messages": [ + { + "content": "Can you help me tell the time in Seattle right now?", + "role": "user" + } + ], + "model": "Dummy", + "stream": true, + "stream_options": { + "include_usage": true + } +} \ No newline at end of file diff --git a/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithComplexObjectsTest.json b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithComplexObjectsTest.json index 8445e850bbb4..8d23881d66ff 100644 --- a/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithComplexObjectsTest.json +++ b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithComplexObjectsTest.json @@ -5,10 +5,5 @@ "role": "user" } ], - "temperature": 1, - "top_p": 1, - "n": 1, - "presence_penalty": 0, - "frequency_penalty": 0, "model": "Dummy" } \ No newline at end of file diff 
--git a/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithHelperFunctionsStreamingTest.json b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithHelperFunctionsStreamingTest.json new file mode 100644 index 000000000000..f9472d3f2da0 --- /dev/null +++ b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithHelperFunctionsStreamingTest.json @@ -0,0 +1,17 @@ +{ + "messages": [ + { + "content": "The current time is Sun, 04 Jun 1989 12:11:13 GMT", + "role": "system" + }, + { + "content": "Can you help me tell the time in Seattle right now?", + "role": "user" + } + ], + "model": "Dummy", + "stream": true, + "stream_options": { + "include_usage": true + } +} \ No newline at end of file diff --git a/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithHelperFunctionsTest.json b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithHelperFunctionsTest.json index 571ddbcd55c6..cc0b8acb9f2e 100644 --- a/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithHelperFunctionsTest.json +++ b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithHelperFunctionsTest.json @@ -9,10 +9,5 @@ "role": "user" } ], - "temperature": 1, - "top_p": 1, - "n": 1, - "presence_penalty": 0, - "frequency_penalty": 0, "model": "Dummy" } \ No newline at end of file diff --git a/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithSimpleVariableStreamingTest.json b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithSimpleVariableStreamingTest.json new file mode 100644 index 000000000000..02f714872433 --- /dev/null +++ b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithSimpleVariableStreamingTest.json @@ -0,0 +1,13 @@ +{ + "messages": [ + { + "content": "Can you help me tell the time in Seattle right now?", + "role": "user" + } + ], + "model": "Dummy", + "stream": true, + "stream_options": { + "include_usage": true + } +} \ No newline at end of file diff --git a/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithSimpleVariableTest.json 
b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithSimpleVariableTest.json index 8445e850bbb4..8d23881d66ff 100644 --- a/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithSimpleVariableTest.json +++ b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithSimpleVariableTest.json @@ -5,10 +5,5 @@ "role": "user" } ], - "temperature": 1, - "top_p": 1, - "n": 1, - "presence_penalty": 0, - "frequency_penalty": 0, "model": "Dummy" } \ No newline at end of file diff --git a/dotnet/src/IntegrationTests/CrossLanguage/Data/SimplePromptStreamingTest.json b/dotnet/src/IntegrationTests/CrossLanguage/Data/SimplePromptStreamingTest.json new file mode 100644 index 000000000000..02f714872433 --- /dev/null +++ b/dotnet/src/IntegrationTests/CrossLanguage/Data/SimplePromptStreamingTest.json @@ -0,0 +1,13 @@ +{ + "messages": [ + { + "content": "Can you help me tell the time in Seattle right now?", + "role": "user" + } + ], + "model": "Dummy", + "stream": true, + "stream_options": { + "include_usage": true + } +} \ No newline at end of file diff --git a/dotnet/src/IntegrationTests/CrossLanguage/Data/SimplePromptTest.json b/dotnet/src/IntegrationTests/CrossLanguage/Data/SimplePromptTest.json index 8445e850bbb4..8d23881d66ff 100644 --- a/dotnet/src/IntegrationTests/CrossLanguage/Data/SimplePromptTest.json +++ b/dotnet/src/IntegrationTests/CrossLanguage/Data/SimplePromptTest.json @@ -5,10 +5,5 @@ "role": "user" } ], - "temperature": 1, - "top_p": 1, - "n": 1, - "presence_penalty": 0, - "frequency_penalty": 0, "model": "Dummy" } \ No newline at end of file diff --git a/dotnet/src/IntegrationTests/CrossLanguage/KernelRequestTracer.cs b/dotnet/src/IntegrationTests/CrossLanguage/KernelRequestTracer.cs index bbc55dfabfda..1621ffdfbfa8 100644 --- a/dotnet/src/IntegrationTests/CrossLanguage/KernelRequestTracer.cs +++ b/dotnet/src/IntegrationTests/CrossLanguage/KernelRequestTracer.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. 
using System; +using System.IO; using System.Net; using System.Net.Http; using System.Text; @@ -39,6 +40,7 @@ internal sealed class KernelRequestTracer : IDisposable ] }"; + private MemoryStream? _memoryDummyResponse; private HttpClient? _httpClient; private HttpMessageHandlerStub? _httpMessageHandlerStub; @@ -134,17 +136,17 @@ private void DisposeHttpResources() { this._httpClient?.Dispose(); this._httpMessageHandlerStub?.Dispose(); + this._memoryDummyResponse?.Dispose(); } private void ResetHttpComponents() { this.DisposeHttpResources(); - + this._memoryDummyResponse = new MemoryStream(Encoding.UTF8.GetBytes(DummyResponse)); this._httpMessageHandlerStub = new HttpMessageHandlerStub(); this._httpMessageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) { - Content = new StringContent(DummyResponse, - Encoding.UTF8, "application/json") + Content = new StreamContent(this._memoryDummyResponse) }; this._httpClient = new HttpClient(this._httpMessageHandlerStub); } diff --git a/dotnet/src/IntegrationTests/CrossLanguage/PromptWithChatRolesTest.cs b/dotnet/src/IntegrationTests/CrossLanguage/PromptWithChatRolesTest.cs index 1e43ec9a4f93..fe12882d2dca 100644 --- a/dotnet/src/IntegrationTests/CrossLanguage/PromptWithChatRolesTest.cs +++ b/dotnet/src/IntegrationTests/CrossLanguage/PromptWithChatRolesTest.cs @@ -30,15 +30,13 @@ public async Task PromptWithChatRolesAsync(bool isInline, bool isStreaming, stri JsonNode? obtainedObject = JsonNode.Parse(requestContent); Assert.NotNull(obtainedObject); - string expected = await File.ReadAllTextAsync("./CrossLanguage/Data/PromptWithChatRolesTest.json"); + string expected = await File.ReadAllTextAsync(isStreaming + ? "./CrossLanguage/Data/PromptWithChatRolesStreamingTest.json" + : "./CrossLanguage/Data/PromptWithChatRolesTest.json"); + JsonNode? 
expectedObject = JsonNode.Parse(expected); Assert.NotNull(expectedObject); - if (isStreaming) - { - expectedObject["stream"] = true; - } - Assert.True(JsonNode.DeepEquals(obtainedObject, expectedObject)); } } diff --git a/dotnet/src/IntegrationTests/CrossLanguage/PromptWithComplexObjectsTest.cs b/dotnet/src/IntegrationTests/CrossLanguage/PromptWithComplexObjectsTest.cs index 87fb3e1c888d..b8a9a9b275ea 100644 --- a/dotnet/src/IntegrationTests/CrossLanguage/PromptWithComplexObjectsTest.cs +++ b/dotnet/src/IntegrationTests/CrossLanguage/PromptWithComplexObjectsTest.cs @@ -39,15 +39,13 @@ public async Task PromptWithComplexObjectsAsync(bool isInline, bool isStreaming, JsonNode? obtainedObject = JsonNode.Parse(requestContent); Assert.NotNull(obtainedObject); - string expected = await File.ReadAllTextAsync("./CrossLanguage/Data/PromptWithComplexObjectsTest.json"); + string expected = await File.ReadAllTextAsync(isStreaming + ? "./CrossLanguage/Data/PromptWithComplexObjectsStreamingTest.json" + : "./CrossLanguage/Data/PromptWithComplexObjectsTest.json"); + JsonNode? expectedObject = JsonNode.Parse(expected); Assert.NotNull(expectedObject); - if (isStreaming) - { - expectedObject["stream"] = true; - } - Assert.True(JsonNode.DeepEquals(obtainedObject, expectedObject)); } } diff --git a/dotnet/src/IntegrationTests/CrossLanguage/PromptWithHelperFunctionsTest.cs b/dotnet/src/IntegrationTests/CrossLanguage/PromptWithHelperFunctionsTest.cs index 12d7166e0bb5..ab192c2429cc 100644 --- a/dotnet/src/IntegrationTests/CrossLanguage/PromptWithHelperFunctionsTest.cs +++ b/dotnet/src/IntegrationTests/CrossLanguage/PromptWithHelperFunctionsTest.cs @@ -37,7 +37,10 @@ public async Task PromptWithHelperFunctionsAsync(bool isInline, bool isStreaming JsonNode? 
obtainedObject = JsonNode.Parse(requestContent); Assert.NotNull(obtainedObject); - string expected = await File.ReadAllTextAsync("./CrossLanguage/Data/PromptWithHelperFunctionsTest.json"); + string expected = await File.ReadAllTextAsync(isStreaming + ? "./CrossLanguage/Data/PromptWithHelperFunctionsStreamingTest.json" + : "./CrossLanguage/Data/PromptWithHelperFunctionsTest.json"); + JsonNode? expectedObject = JsonNode.Parse(expected); Assert.NotNull(expectedObject); diff --git a/dotnet/src/IntegrationTests/CrossLanguage/PromptWithSimpleVariableTest.cs b/dotnet/src/IntegrationTests/CrossLanguage/PromptWithSimpleVariableTest.cs index 80fa3bd5ae3e..af23d6b462ea 100644 --- a/dotnet/src/IntegrationTests/CrossLanguage/PromptWithSimpleVariableTest.cs +++ b/dotnet/src/IntegrationTests/CrossLanguage/PromptWithSimpleVariableTest.cs @@ -34,7 +34,10 @@ public async Task PromptWithSimpleVariableAsync(bool isInline, bool isStreaming, JsonNode? obtainedObject = JsonNode.Parse(requestContent); Assert.NotNull(obtainedObject); - string expected = await File.ReadAllTextAsync("./CrossLanguage/Data/PromptWithSimpleVariableTest.json"); + string expected = await File.ReadAllTextAsync(isStreaming + ? "./CrossLanguage/Data/PromptWithSimpleVariableStreamingTest.json" + : "./CrossLanguage/Data/PromptWithSimpleVariableTest.json"); + JsonNode? expectedObject = JsonNode.Parse(expected); Assert.NotNull(expectedObject); diff --git a/dotnet/src/IntegrationTests/CrossLanguage/SimplePromptTest.cs b/dotnet/src/IntegrationTests/CrossLanguage/SimplePromptTest.cs index d9cfa268ca49..46580dce8135 100644 --- a/dotnet/src/IntegrationTests/CrossLanguage/SimplePromptTest.cs +++ b/dotnet/src/IntegrationTests/CrossLanguage/SimplePromptTest.cs @@ -30,7 +30,10 @@ public async Task SimplePromptAsync(bool isInline, bool isStreaming, string temp JsonNode? 
obtainedObject = JsonNode.Parse(requestContent); Assert.NotNull(obtainedObject); - string expected = await File.ReadAllTextAsync("./CrossLanguage/Data/SimplePromptTest.json"); + string expected = await File.ReadAllTextAsync(isStreaming + ? "./CrossLanguage/Data/SimplePromptStreamingTest.json" + : "./CrossLanguage/Data/SimplePromptTest.json"); + JsonNode? expectedObject = JsonNode.Parse(expected); Assert.NotNull(expectedObject); diff --git a/dotnet/src/IntegrationTests/CrossLanguage/YamlPromptTest.cs b/dotnet/src/IntegrationTests/CrossLanguage/YamlPromptTest.cs index 084bcefbfd5f..8b0805165437 100644 --- a/dotnet/src/IntegrationTests/CrossLanguage/YamlPromptTest.cs +++ b/dotnet/src/IntegrationTests/CrossLanguage/YamlPromptTest.cs @@ -13,11 +13,11 @@ public class YamlPromptTest { [Theory] [InlineData(false, "./CrossLanguage/Data/SimplePromptTest.yaml", "./CrossLanguage/Data/SimplePromptTest.json")] - [InlineData(true, "./CrossLanguage/Data/SimplePromptTest.yaml", "./CrossLanguage/Data/SimplePromptTest.json")] + [InlineData(true, "./CrossLanguage/Data/SimplePromptTest.yaml", "./CrossLanguage/Data/SimplePromptStreamingTest.json")] [InlineData(false, "./CrossLanguage/Data/PromptWithChatRolesTest-HB.yaml", "./CrossLanguage/Data/PromptWithChatRolesTest.json")] - [InlineData(true, "./CrossLanguage/Data/PromptWithChatRolesTest-HB.yaml", "./CrossLanguage/Data/PromptWithChatRolesTest.json")] + [InlineData(true, "./CrossLanguage/Data/PromptWithChatRolesTest-HB.yaml", "./CrossLanguage/Data/PromptWithChatRolesStreamingTest.json")] [InlineData(false, "./CrossLanguage/Data/PromptWithSimpleVariableTest.yaml", "./CrossLanguage/Data/PromptWithSimpleVariableTest.json")] - [InlineData(true, "./CrossLanguage/Data/PromptWithSimpleVariableTest.yaml", "./CrossLanguage/Data/PromptWithSimpleVariableTest.json")] + [InlineData(true, "./CrossLanguage/Data/PromptWithSimpleVariableTest.yaml", "./CrossLanguage/Data/PromptWithSimpleVariableStreamingTest.json")] public async Task 
YamlPromptAsync(bool isStreaming, string promptPath, string expectedResultPath) { using var kernelProvider = new KernelRequestTracer(); diff --git a/dotnet/src/IntegrationTests/IntegrationTests.csproj b/dotnet/src/IntegrationTests/IntegrationTests.csproj index 55a6ac6d1006..5686e8e3e96e 100644 --- a/dotnet/src/IntegrationTests/IntegrationTests.csproj +++ b/dotnet/src/IntegrationTests/IntegrationTests.csproj @@ -18,16 +18,22 @@ + + + + + + @@ -61,6 +67,7 @@ + @@ -68,7 +75,6 @@ - @@ -80,8 +86,9 @@ + - + @@ -103,24 +110,39 @@ PreserveNewest + + PreserveNewest + PreserveNewest PreserveNewest + + PreserveNewest + PreserveNewest + + PreserveNewest + PreserveNewest + + PreserveNewest + PreserveNewest PreserveNewest + + PreserveNewest + PreserveNewest @@ -156,6 +178,10 @@ + + + + Always diff --git a/dotnet/src/IntegrationTests/Planners/Handlebars/HandlebarsPlannerTests.cs b/dotnet/src/IntegrationTests/Planners/Handlebars/HandlebarsPlannerTests.cs index e87bbc8d4813..bae2d4b98742 100644 --- a/dotnet/src/IntegrationTests/Planners/Handlebars/HandlebarsPlannerTests.cs +++ b/dotnet/src/IntegrationTests/Planners/Handlebars/HandlebarsPlannerTests.cs @@ -16,13 +16,13 @@ namespace SemanticKernel.IntegrationTests.Planners.Handlebars; public sealed class HandlebarsPlannerTests { - [Theory] - [InlineData(true, "Write a joke and send it in an e-mail to Kai.", "SendEmail", "test")] - public async Task CreatePlanFunctionFlowAsync(bool useChatModel, string goal, string expectedFunction, string expectedPlugin) + [Theory(Skip = "This test is for manual verification.")] + [InlineData("Write a joke and send it in an e-mail to Kai.", "SendEmail", "test")] + public async Task CreatePlanFunctionFlowAsync(string goal, string expectedFunction, string expectedPlugin) { // Arrange bool useEmbeddings = false; - var kernel = this.InitializeKernel(useEmbeddings, useChatModel); + var kernel = this.InitializeKernel(useEmbeddings); kernel.ImportPluginFromType(expectedPlugin); 
TestHelpers.ImportSamplePlugins(kernel, "FunPlugin"); @@ -37,7 +37,7 @@ public async Task CreatePlanFunctionFlowAsync(bool useChatModel, string goal, st ); } - [RetryTheory] + [RetryTheory(Skip = "This test is for manual verification.")] [InlineData("Write a novel about software development that is 3 chapters long.", "NovelChapter", "WriterPlugin")] public async Task CreatePlanWithDefaultsAsync(string goal, string expectedFunction, string expectedPlugin) { @@ -56,8 +56,8 @@ public async Task CreatePlanWithDefaultsAsync(string goal, string expectedFuncti ); } - [Theory] - [InlineData(true, "List each property of the default Qux object.", "## Complex types", """ + [Theory(Skip = "This test is for manual verification.")] + [InlineData("List each property of the default Qux object.", "## Complex types", """ ### Qux: { "type": "Object", @@ -71,11 +71,11 @@ public async Task CreatePlanWithDefaultsAsync(string goal, string expectedFuncti } } """, "GetDefaultQux", "Foo")] - public async Task CreatePlanWithComplexTypesDefinitionsAsync(bool useChatModel, string goal, string expectedSectionHeader, string expectedTypeHeader, string expectedFunction, string expectedPlugin) + public async Task CreatePlanWithComplexTypesDefinitionsAsync(string goal, string expectedSectionHeader, string expectedTypeHeader, string expectedFunction, string expectedPlugin) { // Arrange bool useEmbeddings = false; - var kernel = this.InitializeKernel(useEmbeddings, useChatModel); + var kernel = this.InitializeKernel(useEmbeddings); kernel.ImportPluginFromObject(new Foo()); // Act @@ -103,7 +103,7 @@ public async Task CreatePlanWithComplexTypesDefinitionsAsync(bool useChatModel, ); } - private Kernel InitializeKernel(bool useEmbeddings = false, bool useChatModel = true) + private Kernel InitializeKernel(bool useEmbeddings = false) { AzureOpenAIConfiguration? 
azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); Assert.NotNull(azureOpenAIConfiguration); @@ -113,22 +113,11 @@ private Kernel InitializeKernel(bool useEmbeddings = false, bool useChatModel = IKernelBuilder builder = Kernel.CreateBuilder(); - if (useChatModel) - { - builder.Services.AddAzureOpenAIChatCompletion( - deploymentName: azureOpenAIConfiguration.ChatDeploymentName!, - modelId: azureOpenAIConfiguration.ChatModelId, - endpoint: azureOpenAIConfiguration.Endpoint, - apiKey: azureOpenAIConfiguration.ApiKey); - } - else - { - builder.Services.AddAzureOpenAITextGeneration( - deploymentName: azureOpenAIConfiguration.DeploymentName, - modelId: azureOpenAIConfiguration.ModelId, - endpoint: azureOpenAIConfiguration.Endpoint, - apiKey: azureOpenAIConfiguration.ApiKey); - } + builder.Services.AddAzureOpenAIChatCompletion( + deploymentName: azureOpenAIConfiguration.ChatDeploymentName!, + modelId: azureOpenAIConfiguration.ChatModelId, + endpoint: azureOpenAIConfiguration.Endpoint, + apiKey: azureOpenAIConfiguration.ApiKey); if (useEmbeddings) { diff --git a/dotnet/src/IntegrationTests/PromptTests.cs b/dotnet/src/IntegrationTests/PromptTests.cs index 7b252713d24c..4649b7b47fcd 100644 --- a/dotnet/src/IntegrationTests/PromptTests.cs +++ b/dotnet/src/IntegrationTests/PromptTests.cs @@ -9,7 +9,6 @@ using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.PromptTemplates.Handlebars; -using SemanticKernel.IntegrationTests.Connectors.OpenAI; using SemanticKernel.IntegrationTests.TestSettings; using Xunit; using Xunit.Abstractions; @@ -27,7 +26,7 @@ public PromptTests(ITestOutputHelper output) .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) .AddEnvironmentVariables() - .AddUserSecrets() + .AddUserSecrets() .Build(); this._kernelBuilder = Kernel.CreateBuilder(); @@ -76,14 +75,13 @@ 
private void ConfigureAzureOpenAI(IKernelBuilder kernelBuilder) var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); Assert.NotNull(azureOpenAIConfiguration); - Assert.NotNull(azureOpenAIConfiguration.DeploymentName); + Assert.NotNull(azureOpenAIConfiguration.ChatDeploymentName); Assert.NotNull(azureOpenAIConfiguration.Endpoint); Assert.NotNull(azureOpenAIConfiguration.ApiKey); Assert.NotNull(azureOpenAIConfiguration.ServiceId); - kernelBuilder.AddAzureOpenAITextGeneration( - deploymentName: azureOpenAIConfiguration.DeploymentName, - modelId: azureOpenAIConfiguration.ModelId, + kernelBuilder.AddAzureOpenAIChatCompletion( + deploymentName: azureOpenAIConfiguration.ChatDeploymentName, endpoint: azureOpenAIConfiguration.Endpoint, apiKey: azureOpenAIConfiguration.ApiKey, serviceId: azureOpenAIConfiguration.ServiceId); diff --git a/dotnet/src/IntegrationTests/TestData/serializedChatHistoryV1_15_1.json b/dotnet/src/IntegrationTests/TestData/serializedChatHistoryV1_15_1.json new file mode 100644 index 000000000000..7da4cfe721d4 --- /dev/null +++ b/dotnet/src/IntegrationTests/TestData/serializedChatHistoryV1_15_1.json @@ -0,0 +1,125 @@ +[ + { + "Role": { + "Label": "user" + }, + "Items": [ + { + "$type": "TextContent", + "Text": "Given the current time of day and weather, what is the likely color of the sky in Boston?" 
+ } + ] + }, + { + "Role": { + "Label": "assistant" + }, + "Items": [ + { + "$type": "FunctionCallContent", + "Id": "call_q5FoU2fpfEyZmvC6iqtIXPYQ", + "PluginName": "HelperFunctions", + "FunctionName": "Get_Weather_For_City", + "Arguments": { + "cityName": "Boston" + } + } + ], + "ModelId": "gpt-4", + "Metadata": { + "Id": "chatcmpl-9lf5Qgx7xquKec3tc6lTn27y8Lmkz", + "Created": "2024-07-16T16:13:00+00:00", + "PromptFilterResults": [], + "SystemFingerprint": null, + "Usage": { + "CompletionTokens": 23, + "PromptTokens": 196, + "TotalTokens": 219 + }, + "ContentFilterResults": null, + "FinishReason": "tool_calls", + "FinishDetails": null, + "LogProbabilityInfo": null, + "Index": 0, + "Enhancements": null, + "ChatResponseMessage.FunctionToolCalls": [ + { + "Name": "HelperFunctions-Get_Weather_For_City", + "Arguments": "{\n \u0022cityName\u0022: \u0022Boston\u0022\n}", + "Id": "call_q5FoU2fpfEyZmvC6iqtIXPYQ" + } + ] + } + }, + { + "Role": { + "Label": "tool" + }, + "Items": [ + { + "$type": "TextContent", + "Text": "61 and rainy", + "Metadata": { + "ChatCompletionsToolCall.Id": "call_q5FoU2fpfEyZmvC6iqtIXPYQ" + } + }, + { + "$type": "FunctionResultContent", + "CallId": "call_q5FoU2fpfEyZmvC6iqtIXPYQ", + "PluginName": "HelperFunctions", + "FunctionName": "Get_Weather_For_City", + "Result": "61 and rainy" + } + ], + "Metadata": { + "ChatCompletionsToolCall.Id": "call_q5FoU2fpfEyZmvC6iqtIXPYQ" + } + }, + { + "Role": { + "Label": "assistant" + }, + "Items": [ + { + "$type": "TextContent", + "Text": "Given the current weather in Boston is 61\u00B0F and rainy, the likely color of the sky would be gray or overcast due to the presence of rain clouds.", + "ModelId": "gpt-4", + "Metadata": { + "Id": "chatcmpl-9lf5RibNr9h4bzq7JJjUXj6ITz7wN", + "Created": "2024-07-16T16:13:01+00:00", + "PromptFilterResults": [], + "SystemFingerprint": null, + "Usage": { + "CompletionTokens": 34, + "PromptTokens": 237, + "TotalTokens": 271 + }, + "ContentFilterResults": null, + "FinishReason": 
"stop", + "FinishDetails": null, + "LogProbabilityInfo": null, + "Index": 0, + "Enhancements": null + } + } + ], + "ModelId": "gpt-4", + "Metadata": { + "Id": "chatcmpl-9lf5RibNr9h4bzq7JJjUXj6ITz7wN", + "Created": "2024-07-16T16:13:01+00:00", + "PromptFilterResults": [], + "SystemFingerprint": null, + "Usage": { + "CompletionTokens": 34, + "PromptTokens": 237, + "TotalTokens": 271 + }, + "ContentFilterResults": null, + "FinishReason": "stop", + "FinishDetails": null, + "LogProbabilityInfo": null, + "Index": 0, + "Enhancements": null + } + } +] \ No newline at end of file diff --git a/dotnet/src/IntegrationTests/TestHelpers.cs b/dotnet/src/IntegrationTests/TestHelpers.cs index e790aa1ca26b..5b42d2884377 100644 --- a/dotnet/src/IntegrationTests/TestHelpers.cs +++ b/dotnet/src/IntegrationTests/TestHelpers.cs @@ -5,6 +5,7 @@ using System.Linq; using System.Reflection; using Microsoft.SemanticKernel; +using Xunit; namespace SemanticKernel.IntegrationTests; @@ -52,4 +53,13 @@ internal static IReadOnlyKernelPluginCollection ImportSamplePromptFunctions(Kern from pluginName in pluginNames select kernel.ImportPluginFromPromptDirectory(Path.Combine(parentDirectory, pluginName))); } + + internal static void AssertChatErrorExcuseMessage(string content) + { + string[] errors = ["error", "difficult", "unable"]; + + var matchesAny = errors.Any(e => content.Contains(e, StringComparison.InvariantCultureIgnoreCase)); + + Assert.True(matchesAny); + } } diff --git a/dotnet/src/IntegrationTests/testsettings.json b/dotnet/src/IntegrationTests/testsettings.json index fd551e3b5d84..40c064f078c5 100644 --- a/dotnet/src/IntegrationTests/testsettings.json +++ b/dotnet/src/IntegrationTests/testsettings.json @@ -2,12 +2,13 @@ "OpenAI": { "ServiceId": "gpt-3.5-turbo-instruct", "ModelId": "gpt-3.5-turbo-instruct", + "ChatModelId": "gpt-4o", "ApiKey": "" }, "AzureOpenAI": { "ServiceId": "azure-gpt-35-turbo-instruct", "DeploymentName": "gpt-35-turbo-instruct", - "ChatDeploymentName": "gpt-4", + 
"ChatDeploymentName": "gpt-4o", "Endpoint": "", "ApiKey": "" }, @@ -44,6 +45,17 @@ "Endpoint": "", "ApiKey": "" }, + "OpenAITextToImage": { + "ServiceId": "dall-e-2", + "ModelId": "dall-e-2", + "ApiKey": "" + }, + "AzureOpenAITextToImage": { + "ServiceId": "azure-dalle3", + "DeploymentName": "dall-e-3", + "Endpoint": "", + "ApiKey": "" + }, "HuggingFace": { "ApiKey": "" }, diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/RequestFailedExceptionExtensions.cs b/dotnet/src/InternalUtilities/openai/Extensions/ClientResultExceptionExtensions.cs similarity index 65% rename from dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/RequestFailedExceptionExtensions.cs rename to dotnet/src/InternalUtilities/openai/Extensions/ClientResultExceptionExtensions.cs index 51f99aa1c0cb..feca5e79618c 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/RequestFailedExceptionExtensions.cs +++ b/dotnet/src/InternalUtilities/openai/Extensions/ClientResultExceptionExtensions.cs @@ -1,21 +1,22 @@ // Copyright (c) Microsoft. All rights reserved. +using System.ClientModel; +using System.Diagnostics.CodeAnalysis; using System.Net; -using Azure; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel; /// -/// Provides extension methods for the class. +/// Provides extension methods for the class. /// -internal static class RequestFailedExceptionExtensions +[ExcludeFromCodeCoverage] +internal static class ClientResultExceptionExtensions { /// - /// Converts a to an . + /// Converts a to an . /// - /// The original . + /// The original . /// An instance. 
- public static HttpOperationException ToHttpOperationException(this RequestFailedException exception) + public static HttpOperationException ToHttpOperationException(this ClientResultException exception) { const int NoResponseReceived = 0; @@ -23,7 +24,7 @@ public static HttpOperationException ToHttpOperationException(this RequestFailed try { - responseContent = exception.GetRawResponse()?.Content?.ToString(); + responseContent = exception.GetRawResponse()?.Content.ToString(); } #pragma warning disable CA1031 // Do not catch general exception types catch { } // We want to suppress any exceptions that occur while reading the content, ensuring that an HttpOperationException is thrown instead. diff --git a/dotnet/src/InternalUtilities/openai/OpenAIUtilities.props b/dotnet/src/InternalUtilities/openai/OpenAIUtilities.props new file mode 100644 index 000000000000..e865b7fe40e9 --- /dev/null +++ b/dotnet/src/InternalUtilities/openai/OpenAIUtilities.props @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/dotnet/src/InternalUtilities/openai/Policies/GeneratedActionPipelinePolicy.cs b/dotnet/src/InternalUtilities/openai/Policies/GeneratedActionPipelinePolicy.cs new file mode 100644 index 000000000000..8ee5865edc2c --- /dev/null +++ b/dotnet/src/InternalUtilities/openai/Policies/GeneratedActionPipelinePolicy.cs @@ -0,0 +1,39 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Threading.Tasks; + +/// +/// Generic action pipeline policy for processing messages. 
+/// +[ExcludeFromCodeCoverage] +internal sealed class GenericActionPipelinePolicy : PipelinePolicy +{ + private readonly Action _processMessageAction; + + internal GenericActionPipelinePolicy(Action processMessageAction) + { + this._processMessageAction = processMessageAction; + } + + public override void Process(PipelineMessage message, IReadOnlyList pipeline, int currentIndex) + { + this._processMessageAction(message); + if (currentIndex < pipeline.Count - 1) + { + pipeline[currentIndex + 1].Process(message, pipeline, currentIndex + 1); + } + } + + public override async ValueTask ProcessAsync(PipelineMessage message, IReadOnlyList pipeline, int currentIndex) + { + this._processMessageAction(message); + if (currentIndex < pipeline.Count - 1) + { + await pipeline[currentIndex + 1].ProcessAsync(message, pipeline, currentIndex + 1).ConfigureAwait(false); + } + } +} diff --git a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs new file mode 100644 index 000000000000..e86c1b77f4c1 --- /dev/null +++ b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs @@ -0,0 +1,129 @@ +// Copyright (c) Microsoft. All rights reserved. +using System.Collections.ObjectModel; +using System.Diagnostics; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents.OpenAI; +using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI.Files; + +/// +/// Base class for samples that demonstrate the usage of agents. +/// +public abstract class BaseAgentsTest(ITestOutputHelper output) : BaseTest(output) +{ + /// + /// Metadata key to indicate the assistant as created for a sample. + /// + protected const string AssistantSampleMetadataKey = "sksample"; + + /// + /// Metadata to indicate the assistant as created for a sample. + /// + /// + /// While the samples do attempt delete the assistants it creates, it is possible + /// that some assistants may remain. 
This metadata can be used to identify and sample + /// agents for clean-up. + /// + protected static readonly ReadOnlyDictionary AssistantSampleMetadata = + new(new Dictionary + { + { AssistantSampleMetadataKey, bool.TrueString } + }); + + /// + /// Provide a according to the configuration settings. + /// + protected OpenAIClientProvider GetClientProvider() + => + this.UseOpenAIConfig ? + OpenAIClientProvider.ForOpenAI(this.ApiKey) : + OpenAIClientProvider.ForAzureOpenAI(this.ApiKey, new Uri(this.Endpoint!)); + + /// + /// Common method to write formatted agent chat content to the console. + /// + protected void WriteAgentChatMessage(ChatMessageContent message) + { + // Include ChatMessageContent.AuthorName in output, if present. + string authorExpression = message.Role == AuthorRole.User ? string.Empty : $" - {message.AuthorName ?? "*"}"; + // Include TextContent (via ChatMessageContent.Content), if present. + string contentExpression = string.IsNullOrWhiteSpace(message.Content) ? string.Empty : message.Content; + bool isCode = message.Metadata?.ContainsKey(OpenAIAssistantAgent.CodeInterpreterMetadataKey) ?? false; + string codeMarker = isCode ? "\n [CODE]\n" : " "; + Console.WriteLine($"\n# {message.Role}{authorExpression}:{codeMarker}{contentExpression}"); + + // Provide visibility for inner content (that isn't TextContent). + foreach (KernelContent item in message.Items) + { + if (item is AnnotationContent annotation) + { + Console.WriteLine($" [{item.GetType().Name}] {annotation.Quote}: File #{annotation.FileId}"); + } + else if (item is FileReferenceContent fileReference) + { + Console.WriteLine($" [{item.GetType().Name}] File #{fileReference.FileId}"); + } + else if (item is ImageContent image) + { + Console.WriteLine($" [{item.GetType().Name}] {image.Uri?.ToString() ?? image.DataUri ?? 
$"{image.Data?.Length} bytes"}"); + } + else if (item is FunctionCallContent functionCall) + { + Console.WriteLine($" [{item.GetType().Name}] {functionCall.Id}"); + } + else if (item is FunctionResultContent functionResult) + { + Console.WriteLine($" [{item.GetType().Name}] {functionResult.CallId}"); + } + } + } + + protected async Task DownloadResponseContentAsync(FileClient client, ChatMessageContent message) + { + foreach (KernelContent item in message.Items) + { + if (item is AnnotationContent annotation) + { + await this.DownloadFileContentAsync(client, annotation.FileId!); + } + } + } + + protected async Task DownloadResponseImageAsync(FileClient client, ChatMessageContent message) + { + foreach (KernelContent item in message.Items) + { + if (item is FileReferenceContent fileReference) + { + await this.DownloadFileContentAsync(client, fileReference.FileId, launchViewer: true); + } + } + } + + private async Task DownloadFileContentAsync(FileClient client, string fileId, bool launchViewer = false) + { + OpenAIFileInfo fileInfo = client.GetFile(fileId); + if (fileInfo.Purpose == OpenAIFilePurpose.AssistantsOutput) + { + string filePath = Path.Combine(Path.GetTempPath(), Path.GetFileName(fileInfo.Filename)); + if (launchViewer) + { + filePath = Path.ChangeExtension(filePath, ".png"); + } + + BinaryData content = await client.DownloadFileAsync(fileId); + File.WriteAllBytes(filePath, content.ToArray()); + Console.WriteLine($" File #{fileId} saved to: {filePath}"); + + if (launchViewer) + { + Process.Start( + new ProcessStartInfo + { + FileName = "cmd.exe", + Arguments = $"/C start {filePath}" + }); + } + } + } +} diff --git a/dotnet/src/InternalUtilities/samples/SamplesInternalUtilities.props b/dotnet/src/InternalUtilities/samples/SamplesInternalUtilities.props index 0c47e16d8d93..df5205c40a82 100644 --- a/dotnet/src/InternalUtilities/samples/SamplesInternalUtilities.props +++ b/dotnet/src/InternalUtilities/samples/SamplesInternalUtilities.props @@ -1,5 +1,8 @@ - + 
+ \ No newline at end of file diff --git a/dotnet/src/InternalUtilities/src/Diagnostics/ModelDiagnostics.cs b/dotnet/src/InternalUtilities/src/Diagnostics/ModelDiagnostics.cs index 3425d187e4fd..e091939f0cf3 100644 --- a/dotnet/src/InternalUtilities/src/Diagnostics/ModelDiagnostics.cs +++ b/dotnet/src/InternalUtilities/src/Diagnostics/ModelDiagnostics.cs @@ -39,7 +39,7 @@ internal static class ModelDiagnostics /// Start a text completion activity for a given model. /// The activity will be tagged with the a set of attributes specified by the semantic conventions. /// - public static Activity? StartCompletionActivity( + internal static Activity? StartCompletionActivity( Uri? endpoint, string modelName, string modelProvider, @@ -52,7 +52,7 @@ internal static class ModelDiagnostics /// Start a chat completion activity for a given model. /// The activity will be tagged with the a set of attributes specified by the semantic conventions. /// - public static Activity? StartCompletionActivity( + internal static Activity? StartCompletionActivity( Uri? endpoint, string modelName, string modelProvider, @@ -65,20 +65,20 @@ internal static class ModelDiagnostics /// Set the text completion response for a given activity. /// The activity will be enriched with the response attributes specified by the semantic conventions. /// - public static void SetCompletionResponse(this Activity activity, IEnumerable completions, int? promptTokens = null, int? completionTokens = null) + internal static void SetCompletionResponse(this Activity activity, IEnumerable completions, int? promptTokens = null, int? completionTokens = null) => SetCompletionResponse(activity, completions, promptTokens, completionTokens, completions => $"[{string.Join(", ", completions)}]"); /// /// Set the chat completion response for a given activity. /// The activity will be enriched with the response attributes specified by the semantic conventions. 
/// - public static void SetCompletionResponse(this Activity activity, IEnumerable completions, int? promptTokens = null, int? completionTokens = null) + internal static void SetCompletionResponse(this Activity activity, IEnumerable completions, int? promptTokens = null, int? completionTokens = null) => SetCompletionResponse(activity, completions, promptTokens, completionTokens, ToOpenAIFormat); /// /// Notify the end of streaming for a given activity. /// - public static void EndStreaming( + internal static void EndStreaming( this Activity activity, IEnumerable? contents, IEnumerable? toolCalls = null, @@ -98,7 +98,7 @@ public static void EndStreaming( /// The activity to set the response id /// The response id /// The activity with the response id set for chaining - public static Activity SetResponseId(this Activity activity, string responseId) => activity.SetTag(ModelDiagnosticsTags.ResponseId, responseId); + internal static Activity SetResponseId(this Activity activity, string responseId) => activity.SetTag(ModelDiagnosticsTags.ResponseId, responseId); /// /// Set the prompt token usage for a given activity. @@ -106,7 +106,7 @@ public static void EndStreaming( /// The activity to set the prompt token usage /// The number of prompt tokens used /// The activity with the prompt token usage set for chaining - public static Activity SetPromptTokenUsage(this Activity activity, int promptTokens) => activity.SetTag(ModelDiagnosticsTags.PromptToken, promptTokens); + internal static Activity SetPromptTokenUsage(this Activity activity, int promptTokens) => activity.SetTag(ModelDiagnosticsTags.PromptToken, promptTokens); /// /// Set the completion token usage for a given activity. 
@@ -114,13 +114,13 @@ public static void EndStreaming( /// The activity to set the completion token usage /// The number of completion tokens used /// The activity with the completion token usage set for chaining - public static Activity SetCompletionTokenUsage(this Activity activity, int completionTokens) => activity.SetTag(ModelDiagnosticsTags.CompletionToken, completionTokens); + internal static Activity SetCompletionTokenUsage(this Activity activity, int completionTokens) => activity.SetTag(ModelDiagnosticsTags.CompletionToken, completionTokens); /// /// Check if model diagnostics is enabled /// Model diagnostics is enabled if either EnableModelDiagnostics or EnableSensitiveEvents is set to true and there are listeners. /// - public static bool IsModelDiagnosticsEnabled() + internal static bool IsModelDiagnosticsEnabled() { return (s_enableDiagnostics || s_enableSensitiveEvents) && s_activitySource.HasListeners(); } @@ -129,7 +129,9 @@ public static bool IsModelDiagnosticsEnabled() /// Check if sensitive events are enabled. /// Sensitive events are enabled if EnableSensitiveEvents is set to true and there are listeners. /// - public static bool IsSensitiveEventsEnabled() => s_enableSensitiveEvents && s_activitySource.HasListeners(); + internal static bool IsSensitiveEventsEnabled() => s_enableSensitiveEvents && s_activitySource.HasListeners(); + + internal static bool HasListeners() => s_activitySource.HasListeners(); #region Private private static void AddOptionalTags(Activity? activity, TPromptExecutionSettings? executionSettings) diff --git a/dotnet/src/InternalUtilities/test/AssertExtensions.cs b/dotnet/src/InternalUtilities/test/AssertExtensions.cs index cf201d169366..4caf63589cbc 100644 --- a/dotnet/src/InternalUtilities/test/AssertExtensions.cs +++ b/dotnet/src/InternalUtilities/test/AssertExtensions.cs @@ -1,7 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. 
using System; -using Xunit; +using Assert = Xunit.Assert; namespace SemanticKernel.UnitTests; diff --git a/dotnet/src/InternalUtilities/test/MoqExtensions.cs b/dotnet/src/InternalUtilities/test/MoqExtensions.cs new file mode 100644 index 000000000000..8fb435e288f9 --- /dev/null +++ b/dotnet/src/InternalUtilities/test/MoqExtensions.cs @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using Microsoft.Extensions.Logging; +using Moq; + +#pragma warning disable CS8620 // Argument cannot be used for parameter due to differences in the nullability of reference types. + +internal static class MoqExtensions +{ + public static void VerifyLog(this Mock> logger, LogLevel logLevel, string message, Times times) + { + logger.Verify( + x => x.Log( + It.Is(l => l == logLevel), + It.IsAny(), + It.Is((v, t) => v.ToString()!.Contains(message)), + It.IsAny(), + It.IsAny>()), + times); + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/TextToImage/ITextToImageService.cs b/dotnet/src/SemanticKernel.Abstractions/AI/TextToImage/ITextToImageService.cs index c4c967445a6b..7370a6eb38ef 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/TextToImage/ITextToImageService.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/TextToImage/ITextToImageService.cs @@ -16,7 +16,7 @@ public interface ITextToImageService : IAIService /// /// Generate an image matching the given description /// - /// Image description + /// Image generation prompt /// Image width in pixels /// Image height in pixels /// The containing services, plugins, and other state for use throughout the operation. 
diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/AnnotationContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/AnnotationContent.cs index f9e6f9f3d71f..fd27b35a4b0f 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Contents/AnnotationContent.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/AnnotationContent.cs @@ -44,7 +44,7 @@ public AnnotationContent() /// Initializes a new instance of the class. /// /// The model ID used to generate the content. - /// Inner content, + /// Inner content /// Additional metadata public AnnotationContent( string? modelId = null, diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/FileReferenceContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/FileReferenceContent.cs index 16ac0cd7828e..925d74d0c731 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Contents/FileReferenceContent.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/FileReferenceContent.cs @@ -28,7 +28,7 @@ public FileReferenceContent() /// /// The identifier of the referenced file. /// The model ID used to generate the content. 
- /// Inner content, + /// Inner content /// Additional metadata public FileReferenceContent( string fileId, diff --git a/dotnet/src/SemanticKernel.Abstractions/SemanticKernel.Abstractions.csproj b/dotnet/src/SemanticKernel.Abstractions/SemanticKernel.Abstractions.csproj index 81e196b63b91..2c2ed1b1aad1 100644 --- a/dotnet/src/SemanticKernel.Abstractions/SemanticKernel.Abstractions.csproj +++ b/dotnet/src/SemanticKernel.Abstractions/SemanticKernel.Abstractions.csproj @@ -8,6 +8,10 @@ true + + rc + + diff --git a/dotnet/src/SemanticKernel.Core/SemanticKernel.Core.csproj b/dotnet/src/SemanticKernel.Core/SemanticKernel.Core.csproj index 7eeee98743d5..ff9c1e8986c4 100644 --- a/dotnet/src/SemanticKernel.Core/SemanticKernel.Core.csproj +++ b/dotnet/src/SemanticKernel.Core/SemanticKernel.Core.csproj @@ -11,6 +11,10 @@ true + + rc + + diff --git a/dotnet/src/SemanticKernel.MetaPackage/SemanticKernel.MetaPackage.csproj b/dotnet/src/SemanticKernel.MetaPackage/SemanticKernel.MetaPackage.csproj index cd5be49a67cb..86cbde81153c 100644 --- a/dotnet/src/SemanticKernel.MetaPackage/SemanticKernel.MetaPackage.csproj +++ b/dotnet/src/SemanticKernel.MetaPackage/SemanticKernel.MetaPackage.csproj @@ -4,6 +4,9 @@ $(AssemblyName) net8.0;netstandard2.0 + + rc + @@ -13,6 +16,6 @@ Empowers app owners to integrate cutting-edge LLM technology quickly and easily - + - \ No newline at end of file + diff --git a/dotnet/src/SemanticKernel.UnitTests/Extensions/ClientResultExceptionExtensionsTests.cs b/dotnet/src/SemanticKernel.UnitTests/Extensions/ClientResultExceptionExtensionsTests.cs new file mode 100644 index 000000000000..f7a4e947ec38 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Extensions/ClientResultExceptionExtensionsTests.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System.ClientModel; +using System.ClientModel.Primitives; +using Xunit; + +namespace SemanticKernel.UnitTests.Utilities.OpenAI; + +public class ClientResultExceptionExtensionsTests +{ + [Fact] + public void ItCanRecoverFromResponseErrorAndConvertsToHttpOperationExceptionWithDefaultData() + { + // Arrange + var exception = new ClientResultException("message", ClientPipeline.Create().CreateMessage().Response); + + // Act + var httpOperationException = exception.ToHttpOperationException(); + + // Assert + Assert.NotNull(httpOperationException); + Assert.Equal(exception, httpOperationException.InnerException); + Assert.Equal(exception.Message, httpOperationException.Message); + Assert.Null(httpOperationException.ResponseContent); + Assert.Null(httpOperationException.StatusCode); + } + + [Fact] + public void ItCanProvideResponseContentAndStatusCode() + { + // Arrange + using var pipelineResponse = new MockPipelineResponse(); + + pipelineResponse.SetContent("content"); + pipelineResponse.SetStatus(200); + + var exception = new ClientResultException("message", pipelineResponse); + + // Act + var httpOperationException = exception.ToHttpOperationException(); + + // Assert + Assert.NotNull(httpOperationException); + Assert.NotNull(httpOperationException.StatusCode); + Assert.Equal(exception, httpOperationException.InnerException); + Assert.Equal(exception.Message, httpOperationException.Message); + Assert.Equal(pipelineResponse.Content.ToString(), httpOperationException.ResponseContent); + Assert.Equal(pipelineResponse.Status, (int)httpOperationException.StatusCode!); + } + + [Fact] + public void ItProvideStatusForResponsesWithoutContent() + { + // Arrange + using var pipelineResponse = new MockPipelineResponse(); + + pipelineResponse.SetStatus(200); + + var exception = new ClientResultException("message", pipelineResponse); + + // Act + var httpOperationException = exception.ToHttpOperationException(); + + // Assert + Assert.NotNull(httpOperationException); + 
Assert.NotNull(httpOperationException.StatusCode); + Assert.Empty(httpOperationException.ResponseContent!); + Assert.Equal(exception, httpOperationException.InnerException); + Assert.Equal(exception.Message, httpOperationException.Message); + Assert.Equal(pipelineResponse.Status, (int)httpOperationException.StatusCode!); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelBuilderTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelBuilderTests.cs index dc9db68b5836..31ceeac6015a 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelBuilderTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelBuilderTests.cs @@ -7,6 +7,7 @@ using Microsoft.Extensions.DependencyInjection; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; using Microsoft.SemanticKernel.Connectors.OpenAI; using Microsoft.SemanticKernel.TextGeneration; using Xunit; @@ -109,7 +110,7 @@ public void ItBuildsServicesIntoKernel() { var builder = Kernel.CreateBuilder() .AddOpenAIChatCompletion(modelId: "abcd", apiKey: "efg", serviceId: "openai") - .AddAzureOpenAITextGeneration(deploymentName: "hijk", modelId: "qrs", endpoint: "https://lmnop", apiKey: "tuv", serviceId: "azureopenai"); + .AddAzureOpenAIChatCompletion(deploymentName: "hijk", modelId: "qrs", endpoint: "https://lmnop", apiKey: "tuv", serviceId: "azureopenai"); builder.Services.AddSingleton(CultureInfo.InvariantCulture); builder.Services.AddSingleton(CultureInfo.CurrentCulture); @@ -118,10 +119,10 @@ public void ItBuildsServicesIntoKernel() Kernel kernel = builder.Build(); Assert.IsType(kernel.GetRequiredService("openai")); - Assert.IsType(kernel.GetRequiredService("azureopenai")); + Assert.IsType(kernel.GetRequiredService("azureopenai")); Assert.Equal(2, kernel.GetAllServices().Count()); - Assert.Single(kernel.GetAllServices()); + Assert.Equal(2, kernel.GetAllServices().Count()); Assert.Equal(3, 
kernel.GetAllServices().Count()); } diff --git a/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj b/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj index e929fe1ca82f..af4542f55a2b 100644 --- a/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj +++ b/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj @@ -28,16 +28,18 @@ + - + + diff --git a/dotnet/src/SemanticKernel.UnitTests/Utilities/OpenAI/GenericActionPipelinePolicyTests.cs b/dotnet/src/SemanticKernel.UnitTests/Utilities/OpenAI/GenericActionPipelinePolicyTests.cs new file mode 100644 index 000000000000..ca36f300b1c2 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Utilities/OpenAI/GenericActionPipelinePolicyTests.cs @@ -0,0 +1,39 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.ClientModel.Primitives; +using Xunit; + +namespace SemanticKernel.UnitTests.Utilities.OpenAI; + +public class GenericActionPipelinePolicyTests +{ + [Fact] + public void ItCanBeInstantiated() + { + // Act + var addHeaderRequestPolicy = new GenericActionPipelinePolicy((message) => { }); + + // Assert + Assert.NotNull(addHeaderRequestPolicy); + } + + [Fact] + public void ItProcessAddsHeaderToRequest() + { + // Arrange + var headerName = "headerName"; + var headerValue = "headerValue"; + var sut = new GenericActionPipelinePolicy((message) => { message.Request.Headers.Add(headerName, headerValue); }); + + var pipeline = ClientPipeline.Create(); + var message = pipeline.CreateMessage(); + + // Act + sut.Process(message, [sut], 0); + + // Assert + message.Request.Headers.TryGetValue(headerName, out var value); + Assert.NotNull(value); + Assert.Equal(headerValue, value); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Utilities/OpenAI/MockPipelineResponse.cs b/dotnet/src/SemanticKernel.UnitTests/Utilities/OpenAI/MockPipelineResponse.cs new file mode 100644 index 000000000000..d147f1c98df1 --- /dev/null +++ 
b/dotnet/src/SemanticKernel.UnitTests/Utilities/OpenAI/MockPipelineResponse.cs @@ -0,0 +1,151 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.ClientModel.Primitives; +using System.IO; +using System.Text; +using System.Threading; +using System.Threading.Tasks; + +namespace SemanticKernel.UnitTests.Utilities.OpenAI; + +public class MockPipelineResponse : PipelineResponse +{ + private int _status; + private string _reasonPhrase; + private Stream? _contentStream; + private BinaryData? _bufferedContent; + + private readonly PipelineResponseHeaders _headers; + + private bool _disposed; + + public MockPipelineResponse(int status = 0, string reasonPhrase = "") + { + this._status = status; + this._reasonPhrase = reasonPhrase; + this._headers = new MockResponseHeaders(); + } + + public override int Status => this._status; + + public void SetStatus(int value) => this._status = value; + + public override string ReasonPhrase => this._reasonPhrase; + + public void SetReasonPhrase(string value) => this._reasonPhrase = value; + + public void SetContent(byte[] content) + { + this.ContentStream = new MemoryStream(content, 0, content.Length, false, true); + } + + public MockPipelineResponse SetContent(string content) + { + this.SetContent(Encoding.UTF8.GetBytes(content)); + return this; + } + + public override Stream? 
ContentStream + { + get => this._contentStream; + set => this._contentStream = value; + } + + public override BinaryData Content + { + get + { + if (this._contentStream is null) + { + return new BinaryData(Array.Empty()); + } + + if (this.ContentStream is not MemoryStream memoryContent) + { + throw new InvalidOperationException("The response is not buffered."); + } + + if (memoryContent.TryGetBuffer(out ArraySegment segment)) + { + return new BinaryData(segment.AsMemory()); + } + return new BinaryData(memoryContent.ToArray()); + } + } + + protected override PipelineResponseHeaders HeadersCore + => this._headers; + + public sealed override void Dispose() + { + this.Dispose(true); + + GC.SuppressFinalize(this); + } + + protected void Dispose(bool disposing) + { + if (disposing && !this._disposed) + { + Stream? content = this._contentStream; + if (content != null) + { + this._contentStream = null; + content.Dispose(); + } + + this._disposed = true; + } + } + + public override BinaryData BufferContent(CancellationToken cancellationToken = default) + { + if (this._bufferedContent is not null) + { + return this._bufferedContent; + } + + if (this._contentStream is null) + { + this._bufferedContent = new BinaryData(Array.Empty()); + return this._bufferedContent; + } + + MemoryStream bufferStream = new(); + this._contentStream.CopyTo(bufferStream); + this._contentStream.Dispose(); + this._contentStream = bufferStream; + + // Less efficient FromStream method called here because it is a mock. + // For intended production implementation, see HttpClientTransportResponse. 
+ this._bufferedContent = BinaryData.FromStream(bufferStream); + return this._bufferedContent; + } + + public override async ValueTask BufferContentAsync(CancellationToken cancellationToken = default) + { + if (this._bufferedContent is not null) + { + return this._bufferedContent; + } + + if (this._contentStream is null) + { + this._bufferedContent = new BinaryData(Array.Empty()); + return this._bufferedContent; + } + + MemoryStream bufferStream = new(); + + await this._contentStream.CopyToAsync(bufferStream, cancellationToken).ConfigureAwait(false); + await this._contentStream.DisposeAsync().ConfigureAwait(false); + + this._contentStream = bufferStream; + + // Less efficient FromStream method called here because it is a mock. + // For intended production implementation, see HttpClientTransportResponse. + this._bufferedContent = BinaryData.FromStream(bufferStream); + return this._bufferedContent; + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Utilities/OpenAI/MockResponseHeaders.cs b/dotnet/src/SemanticKernel.UnitTests/Utilities/OpenAI/MockResponseHeaders.cs new file mode 100644 index 000000000000..01d698512be5 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Utilities/OpenAI/MockResponseHeaders.cs @@ -0,0 +1,32 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; + +namespace SemanticKernel.UnitTests.Utilities.OpenAI; + +public class MockResponseHeaders : PipelineResponseHeaders +{ + private readonly Dictionary _headers; + + public MockResponseHeaders() + { + this._headers = new Dictionary(); + } + + public override IEnumerator> GetEnumerator() + { + throw new NotImplementedException(); + } + + public override bool TryGetValue(string name, out string? value) + { + return this._headers.TryGetValue(name, out value); + } + + public override bool TryGetValues(string name, out IEnumerable? 
values) + { + throw new NotImplementedException(); + } +} diff --git a/python/Makefile b/python/Makefile index 060ee561bfa1..1e165a1539ba 100644 --- a/python/Makefile +++ b/python/Makefile @@ -71,4 +71,4 @@ clean: rm -rf .venv build: - uvx --from build pyproject-build --installer uv \ No newline at end of file + uvx --from build pyproject-build --installer uv