diff --git a/.github/workflows/copilot-chat-tests.yml b/.github/workflows/copilot-chat-tests.yml index c9059026cd14..0f0d4db293cb 100644 --- a/.github/workflows/copilot-chat-tests.yml +++ b/.github/workflows/copilot-chat-tests.yml @@ -2,7 +2,7 @@ name: Copilot Chat Tests on: workflow_dispatch: push: - branches: ["main", "feature*"] + branches: ["main"] paths: - "samples/apps/copilot-chat-app/**" diff --git a/.github/workflows/dotnet-integration-tests.yml b/.github/workflows/dotnet-integration-tests.yml index 3f665860e0b6..a6919209f1de 100644 --- a/.github/workflows/dotnet-integration-tests.yml +++ b/.github/workflows/dotnet-integration-tests.yml @@ -23,24 +23,24 @@ jobs: runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v3 - if: ${{ github.event_name == 'merge_group' }} + if: ${{ github.event_name != 'pull_request' }} with: clean: true - name: Setup .NET uses: actions/setup-dotnet@v3 - if: ${{ github.event_name == 'merge_group' }} + if: ${{ github.event_name != 'pull_request' }} with: dotnet-version: 6.0.x - name: Find projects shell: bash - if: ${{ github.event_name == 'merge_group' }} + if: ${{ github.event_name != 'pull_request' }} run: echo "projects=$(find ./dotnet -type f -name "*IntegrationTests.csproj" | tr '\n' ' ')" >> $GITHUB_ENV - name: Integration Tests shell: bash - if: ${{ github.event_name == 'merge_group' }} + if: ${{ github.event_name != 'pull_request' }} env: # Set Azure credentials secret as an input AzureOpenAI__Label: azure-text-davinci-003 AzureOpenAIEmbedding__Label: azure-text-embedding-ada-002 @@ -62,4 +62,4 @@ jobs: with: name: dotnet-testresults-${{ matrix.configuration }} path: ./TestResults - if: ${{ github.event_name == 'merge_group' && always() }} + if: ${{ github.event_name != 'pull_request' && always() }} diff --git a/.github/workflows/python-integration-tests.yml b/.github/workflows/python-integration-tests.yml index 8c62855400c6..a8507300cd7a 100644 --- a/.github/workflows/python-integration-tests.yml +++ 
b/.github/workflows/python-integration-tests.yml @@ -38,13 +38,13 @@ jobs: export HNSWLIB_NO_NATIVE=1 python -m pip install --upgrade pip setuptools wheel python -m pip install poetry pytest - cd python && poetry install --with hugging_face --with chromadb --with weaviate + cd python && poetry install - name: Install dependencies with hnswlib native enabled if: matrix.os != 'macos-latest' || matrix.python-version != '3.11' run: | python -m pip install --upgrade pip setuptools wheel python -m pip install poetry pytest - cd python && poetry install --with hugging_face --with chromadb --with weaviate + cd python && poetry install - name: Run Integration Tests shell: bash env: # Set Azure credentials secret as an input @@ -64,6 +64,8 @@ jobs: Pinecone__ApiKey: ${{ secrets.PINECONE__APIKEY }} Pinecone__Environment: ${{ secrets.PINECONE__ENVIRONMENT }} Postgres__Connectionstr: ${{secrets.POSTGRES__CONNECTIONSTR}} + AZURE_COGNITIVE_SEARCH_ADMIN_KEY: ${{secrets.AZURE_COGNITIVE_SEARCH_ADMIN_KEY}} + AZURE_COGNITIVE_SEARCH_ENDPOINT: ${{secrets.AZURE_COGNITIVE_SEARCH_ENDPOINT}} run: | cd python poetry run pytest ./tests/integration diff --git a/.github/workflows/python-unit-tests.yml b/.github/workflows/python-unit-tests.yml index 9b9225b9a102..c125341d0296 100644 --- a/.github/workflows/python-unit-tests.yml +++ b/.github/workflows/python-unit-tests.yml @@ -3,9 +3,9 @@ name: Python Unit Tests on: workflow_dispatch: pull_request: - branches: [ "main", "feature*" ] + branches: ["main", "feature*"] paths: - - 'python/**' + - "python/**" jobs: python-unit-tests: @@ -14,19 +14,19 @@ jobs: fail-fast: false matrix: python-version: ["3.8", "3.9", "3.10", "3.11"] - os: [ ubuntu-latest, windows-latest, macos-latest ] + os: [ubuntu-latest, windows-latest, macos-latest] steps: - - uses: actions/checkout@v3 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - 
python -m pip install poetry pytest - cd python - poetry install --without chromadb --without hugging_face - - name: Test with pytest - run: | - cd python && poetry run pytest ./tests/unit + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install poetry pytest + cd python + poetry install --without chromadb --without hugging_face --without azure_cognitive_search --without weaviate --without pinecone --without postgres + - name: Test with pytest + run: | + cd python && poetry run pytest ./tests/unit diff --git a/.gitignore b/.gitignore index 07c766a8acbf..9df6bdb43c5e 100644 --- a/.gitignore +++ b/.gitignore @@ -479,3 +479,6 @@ swa-cli.config.json **/copilot-chat-app/webapp/build **/copilot-chat-app/webapp/node_modules **/copilot-chat-app/webapi/data/eng.traineddata + +# Semantic Kernel Tools +/.semantic-kernel diff --git a/.vscode/launch.json b/.vscode/launch.json index 186e934244e9..83c63b4f5199 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -34,6 +34,14 @@ "name": ".NET Core Attach", "type": "coreclr", "request": "attach" + }, + { + "cwd":"${workspaceFolder}/python", + "name": "Python: Test Module", + "type": "python", + "request": "launch", + "module": "pytest", + "args": ["${file}"], } ] } diff --git a/FEATURE_MATRIX.md b/FEATURE_MATRIX.md index 0bed9866230e..a4459402fb38 100644 --- a/FEATURE_MATRIX.md +++ b/FEATURE_MATRIX.md @@ -1,93 +1,5 @@ # Semantic Kernel Feature Matrix by Language -**Legend** +This document can be found on the Semantic Kernel Documentation site on [Supported Languages.](https://learn.microsoft.com/en-us/semantic-kernel/get-started/supported-languages) - ✅ - Feature implemented - 🔄 - Feature partially implemented (see associated Note column) - ❌ - Feature not implemented - -## AI Services - -| | C# | Python | Java | Notes | 
-|-----------------------------------|:----:|:------:|:----:|-------| -| Text Generation | ✅ | ✅ | ✅ | Example: text-davinci-003 | -| Text Embeddings | ✅ | ✅ | ✅ | Example: text-embeddings-ada-002 | -| Chat Completion | ✅ | ✅ | ❌ | Example: GPT-4, GPT-3.5-turbo | -| Image Generation | ✅ | ❌ | ❌ | Example: Dall-E 2 | - -## AI Service Endpoints - -| | C# | Python | Java | Notes | -|-----------------------------------|:---:|:------:|:----:|-------| -| OpenAI | ✅ | ✅ | ✅ | | -| Azure OpenAI | ✅ | ✅ | ✅ | | -| Hugging Face Inference API | 🔄 | ❌ | ❌ | Coming soon to Python, not all scenarios are covered for .NET | -| Hugging Face Local | ❌ | ✅ | ❌ | | -| Custom | ✅ | 🔄 | ❌ | Requires to define the service schema in the application | - -## Tokenizers - -| | C# | Python | Java | Notes | -|-----------------------------------|:---:|:------:|:----:|-------| -| GPT2 | ✅ | ✅ | ✅ | | -| GPT3 | ✅ | ❌ | ❌ | | -| tiktoken | ❌ | ❌ | ❌ | Coming soon. Can be added manually to Python via `pip install tiktoken` | - -## Core Skills - -| | C# | Python | Java | Notes | -|-----------------------------------|:---:|:------:|:----:|-------| -| TextMemory Skill | ✅ | ✅ | 🔄 | | -| ConversationSummary Skill | ✅ | ✅ | ❌ | | -| FileIO Skill | ✅ | ✅ | ✅ | | -| Http Skill | ✅ | ✅ | ✅ | | -| Math Skill | ✅ | ✅ | ✅ | | -| Text Skill | ✅ | ✅ | 🔄 | | -| Time Skill | ✅ | ✅ | ✅ | | -| Wait Skill | ✅ | ✅ | ✅ | | - -## Planning - -| | C# | Python | Java | Notes | -|-----------------------------------|:---:|:------:|:----:|-------| -| Plan | ✅ | 🔄 | ❌ | Plan object model to be completed | -| BasicPlanner | ❌ | ✅ | ❌ | | -| ActionPlanner | ✅ | ❌ | 🔄 | | -| SequentialPlanner | ✅ | ❌ | 🔄 | | - -## Memory Connectors, Vector storage - -| | C# | Python | Java | Notes | -|---------------|:---:|:------:|:----:|-------| -| Azure Search | ✅ | 🔄 | ❌ | Azure Cognitive Search coming soon | -| Weaviate | ✅ | ✅ | ❌ | Currently supported on Python 3.9+, 3.8 coming soon | -| Chroma | ✅ | ✅ | ❌ | | -| Qdrant | ✅ | ❌ | ❌ | | -| 
Pinecone | ✅ | ✅ | ❌ | | -| Milvus | ❌ | ❌ | ❌ | Coming soon | -| Sqlite | ✅ | ❌ | ❌ | Vector optimization requires [sqlite-vss](https://github.com/asg017/sqlite-vss) | -| Postgres | ✅ | ✅ | ❌ | Vector optimization requires [pgvector](https://github.com/pgvector/pgvector) | -| CosmosDB | ✅ | ❌ | ❌ | CosmosDB is not optimized for vector storage | -| Redis | ✅ | ❌ | ❌ | Vector optimization requires [RediSearch](https://redis.io/docs/stack/search) | - -## Connectors and Skill Libraries - -| | C# | Python | Java | Notes | -|---------------------------------------|:---:|:------:|:----:|-------| -| MsGraph | ✅ | ❌ | ❌ | Contains connectors for OneDrive, Outlook, ToDos, and Organization Hierarchies | -| Document and Data Loading Skills | ✅ | ❌ | ❌ | Pdf, csv, docx, pptx. Currently only supports Word documents | -| OpenAPI | ✅ | ❌ | ❌ | | -| Web Search Skills (i.e. Bing, Google) | ✅ | ❌ | ❌ | | -| Text Chunkers | 🔄 | 🔄 | ❌ | | - -# Design Choices - -The overall architecture of the core kernel is consistent across all languages, -however, the code follows common paradigms and style of each language. - -During the initial development phase, many Python best practices have been ignored -in the interest of velocity and feature parity. The project is now going through -a refactoring exercise to increase code quality. - -To make the SDK as lightweight as possible, the core packages have -a minimal set of external dependencies. \ No newline at end of file +To make an update on the page, file a PR on the [docs repo.](https://github.com/MicrosoftDocs/semantic-kernel-docs/blob/main/semantic-kernel/get-started/supported-languages.md) \ No newline at end of file diff --git a/README.md b/README.md index 4298994ed035..327f5cedb9cf 100644 --- a/README.md +++ b/README.md @@ -63,7 +63,7 @@ Semantic Kernel is available to explore AI and build apps with C# and Python: -See the [Feature Matrix](FEATURE_MATRIX.md) to see a breakdown of feature parity between C# and Python. 
+See the [Feature Matrix](https://learn.microsoft.com/en-us/semantic-kernel/get-started/supported-languages) to see a breakdown of feature parity between our currently supported languages. The quickest way to get started with the basics is to get an API key (OpenAI or Azure OpenAI) diff --git a/dotnet/Directory.Packages.props b/dotnet/Directory.Packages.props index beb68e08acf2..87ba2e1f7a8d 100644 --- a/dotnet/Directory.Packages.props +++ b/dotnet/Directory.Packages.props @@ -7,13 +7,15 @@ - + + + @@ -29,6 +31,8 @@ + + diff --git a/dotnet/SK-dotnet.sln b/dotnet/SK-dotnet.sln index 48df300b8aca..79ae7ae61185 100644 --- a/dotnet/SK-dotnet.sln +++ b/dotnet/SK-dotnet.sln @@ -35,6 +35,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Directory.Packages.props = Directory.Packages.props ..\.github\workflows\dotnet-format.yml = ..\.github\workflows\dotnet-format.yml ..\README.md = ..\README.md + ..\nuget.config = ..\nuget.config EndProjectSection EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SemanticKernel.UnitTests", "src\SemanticKernel.UnitTests\SemanticKernel.UnitTests.csproj", "{37E39C68-5A40-4E63-9D3C-0C66AD98DFCB}" @@ -140,8 +141,12 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Skills.Core", "src\Skills\S EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "NCalcSkills", "samples\NCalcSkills\NCalcSkills.csproj", "{E6EDAB8F-3406-4DBF-9AAB-DF40DC2CA0FA}" EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.AI.Oobabooga", "src\Connectors\Connectors.AI.Oobabooga\Connectors.AI.Oobabooga.csproj", "{677F1381-7830-4115-9C1A-58B282629DC6}" +EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Planning.StepwisePlanner", "src\Extensions\Planning.StepwisePlanner\Planning.StepwisePlanner.csproj", "{4762BCAF-E1C5-4714-B88D-E50FA333C50E}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ApplicationInsightsExample", 
"samples\ApplicationInsightsExample\ApplicationInsightsExample.csproj", "{C754950A-E16C-4F96-9CC7-9328E361B5AF}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -341,12 +346,24 @@ Global {E6EDAB8F-3406-4DBF-9AAB-DF40DC2CA0FA}.Publish|Any CPU.ActiveCfg = Release|Any CPU {E6EDAB8F-3406-4DBF-9AAB-DF40DC2CA0FA}.Release|Any CPU.ActiveCfg = Release|Any CPU {E6EDAB8F-3406-4DBF-9AAB-DF40DC2CA0FA}.Release|Any CPU.Build.0 = Release|Any CPU + {677F1381-7830-4115-9C1A-58B282629DC6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {677F1381-7830-4115-9C1A-58B282629DC6}.Debug|Any CPU.Build.0 = Debug|Any CPU + {677F1381-7830-4115-9C1A-58B282629DC6}.Publish|Any CPU.ActiveCfg = Publish|Any CPU + {677F1381-7830-4115-9C1A-58B282629DC6}.Publish|Any CPU.Build.0 = Publish|Any CPU + {677F1381-7830-4115-9C1A-58B282629DC6}.Release|Any CPU.ActiveCfg = Release|Any CPU + {677F1381-7830-4115-9C1A-58B282629DC6}.Release|Any CPU.Build.0 = Release|Any CPU {4762BCAF-E1C5-4714-B88D-E50FA333C50E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {4762BCAF-E1C5-4714-B88D-E50FA333C50E}.Debug|Any CPU.Build.0 = Debug|Any CPU {4762BCAF-E1C5-4714-B88D-E50FA333C50E}.Publish|Any CPU.ActiveCfg = Publish|Any CPU {4762BCAF-E1C5-4714-B88D-E50FA333C50E}.Publish|Any CPU.Build.0 = Publish|Any CPU {4762BCAF-E1C5-4714-B88D-E50FA333C50E}.Release|Any CPU.ActiveCfg = Release|Any CPU {4762BCAF-E1C5-4714-B88D-E50FA333C50E}.Release|Any CPU.Build.0 = Release|Any CPU + {C754950A-E16C-4F96-9CC7-9328E361B5AF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C754950A-E16C-4F96-9CC7-9328E361B5AF}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C754950A-E16C-4F96-9CC7-9328E361B5AF}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {C754950A-E16C-4F96-9CC7-9328E361B5AF}.Publish|Any CPU.Build.0 = Debug|Any CPU + {C754950A-E16C-4F96-9CC7-9328E361B5AF}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C754950A-E16C-4F96-9CC7-9328E361B5AF}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection 
GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -396,7 +413,9 @@ Global {1C19D805-3573-4477-BF07-40180FCDE1BD} = {958AD708-F048-4FAF-94ED-D2F2B92748B9} {0D0C4DAD-E6BC-4504-AE3A-EEA4E35920C1} = {9ECD1AA0-75B3-4E25-B0B5-9F0945B64974} {E6EDAB8F-3406-4DBF-9AAB-DF40DC2CA0FA} = {FA3720F1-C99A-49B2-9577-A940257098BF} + {677F1381-7830-4115-9C1A-58B282629DC6} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C} {4762BCAF-E1C5-4714-B88D-E50FA333C50E} = {078F96B4-09E1-4E0E-B214-F71A4F4BF633} + {C754950A-E16C-4F96-9CC7-9328E361B5AF} = {FA3720F1-C99A-49B2-9577-A940257098BF} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {FBDC56A3-86AD-4323-AA0F-201E59123B83} diff --git a/dotnet/SK-dotnet.sln.DotSettings b/dotnet/SK-dotnet.sln.DotSettings index 94c269cd2a4a..4d5e6137e95a 100644 --- a/dotnet/SK-dotnet.sln.DotSettings +++ b/dotnet/SK-dotnet.sln.DotSettings @@ -202,8 +202,10 @@ public void It$SOMENAME$() True True True + True True True + True True True True diff --git a/dotnet/samples/ApplicationInsightsExample/ApplicationInsightsExample.csproj b/dotnet/samples/ApplicationInsightsExample/ApplicationInsightsExample.csproj new file mode 100644 index 000000000000..13720b96d9a2 --- /dev/null +++ b/dotnet/samples/ApplicationInsightsExample/ApplicationInsightsExample.csproj @@ -0,0 +1,27 @@ + + + + net6.0 + LatestMajor + Exe + 10 + enable + disable + false + + CA1050;CA1707;CA2007;VSTHRD111 + 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 + + + + + + + + + + + + + + diff --git a/dotnet/samples/ApplicationInsightsExample/Program.cs b/dotnet/samples/ApplicationInsightsExample/Program.cs new file mode 100644 index 000000000000..112800797fbb --- /dev/null +++ b/dotnet/samples/ApplicationInsightsExample/Program.cs @@ -0,0 +1,192 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Concurrent; +using System.Diagnostics; +using System.Diagnostics.Metrics; +using System.Threading.Tasks; +using Microsoft.ApplicationInsights; +using Microsoft.ApplicationInsights.DataContracts; +using Microsoft.ApplicationInsights.Extensibility; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.ApplicationInsights; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Planning; +using Microsoft.SemanticKernel.Planning.Sequential; + +/// +/// Example of telemetry in Semantic Kernel using Application Insights within console application. +/// +public sealed class Program +{ + /// + /// Log level to be used by . + /// + /// + /// is set by default. + /// will enable logging with more detailed information, including sensitive data. Should not be used in production. + /// + private static LogLevel LogLevel = LogLevel.Information; + + public static async Task Main() + { + var serviceProvider = GetServiceProvider(); + + var telemetryClient = serviceProvider.GetRequiredService(); + var logger = serviceProvider.GetRequiredService>(); + + using var meterListener = new MeterListener(); + using var activityListener = new ActivityListener(); + + ConfigureMetering(meterListener, telemetryClient); + ConfigureTracing(activityListener, telemetryClient); + + var kernel = GetKernel(logger); + var planner = GetPlanner(kernel, logger); + + try + { + using var operation = telemetryClient.StartOperation("ApplicationInsights.Example"); + + Console.WriteLine("Operation/Trace ID:"); + Console.WriteLine(Activity.Current?.TraceId); + + var plan = await planner.CreatePlanAsync("Write a poem about John Doe, then translate it into Italian."); + + Console.WriteLine("Original plan:"); + Console.WriteLine(plan.ToPlanString()); + + var result = await kernel.RunAsync(plan); + + Console.WriteLine("Result:"); + Console.WriteLine(result.Result); + } + finally + { + // Explicitly 
call Flush() followed by sleep is required in console apps. + // This is to ensure that even if application terminates, telemetry is sent to the back-end. + telemetryClient.Flush(); + await Task.Delay(5000); + } + } + + private static ServiceProvider GetServiceProvider() + { + var services = new ServiceCollection(); + + ConfigureApplicationInsightsTelemetry(services); + + return services.BuildServiceProvider(); + } + + private static void ConfigureApplicationInsightsTelemetry(ServiceCollection services) + { + string instrumentationKey = Env.Var("ApplicationInsights__InstrumentationKey"); + + services.AddLogging(loggingBuilder => + { + loggingBuilder.AddFilter(typeof(Program).FullName, LogLevel); + loggingBuilder.SetMinimumLevel(LogLevel); + }); + + services.AddApplicationInsightsTelemetryWorkerService(options => + { + options.ConnectionString = $"InstrumentationKey={instrumentationKey}"; + }); + } + + private static IKernel GetKernel(ILogger logger) + { + string folder = RepoFiles.SampleSkillsPath(); + + var kernel = new KernelBuilder() + .WithLogger(logger) + .WithAzureChatCompletionService( + Env.Var("AzureOpenAI__ChatDeploymentName"), + Env.Var("AzureOpenAI__Endpoint"), + Env.Var("AzureOpenAI__ApiKey")) + .Build(); + + kernel.ImportSemanticSkillFromDirectory(folder, "SummarizeSkill", "WriterSkill"); + + return kernel; + } + + private static ISequentialPlanner GetPlanner( + IKernel kernel, + ILogger logger, + int maxTokens = 1024) + { + var plannerConfig = new SequentialPlannerConfig { MaxTokens = maxTokens }; + + return new SequentialPlanner(kernel, plannerConfig).WithInstrumentation(logger); + } + + /// + /// Example of metering configuration in Application Insights + /// using to attach for recordings. + /// + /// Instance of for metering configuration. + /// Instance of Application Insights . 
+ private static void ConfigureMetering(MeterListener meterListener, TelemetryClient telemetryClient) + { + meterListener.InstrumentPublished = (instrument, listener) => + { + // Subscribe to all metrics in Semantic Kernel + if (instrument.Meter.Name.StartsWith("Microsoft.SemanticKernel", StringComparison.Ordinal)) + { + listener.EnableMeasurementEvents(instrument); + } + }; + + MeasurementCallback measurementCallback = (instrument, measurement, tags, state) => + { + telemetryClient.GetMetric(instrument.Name).TrackValue(measurement); + }; + + meterListener.SetMeasurementEventCallback(measurementCallback); + + meterListener.Start(); + } + + /// + /// Example of advanced distributed tracing configuration in Application Insights + /// using to attach for events. + /// + /// Instance of for tracing configuration. + /// Instance of Application Insights . + private static void ConfigureTracing(ActivityListener activityListener, TelemetryClient telemetryClient) + { + var operations = new ConcurrentDictionary>(); + + // For more detailed tracing we need to attach Activity entity to Application Insights operation manually. + Action activityStarted = activity => + { + var operation = telemetryClient.StartOperation(activity); + operation.Telemetry.Type = activity.Kind.ToString(); + + operations.TryAdd(activity.TraceId.ToString(), operation); + }; + + // We also need to manually stop Application Insights operation when Activity entity is stopped. 
+ Action activityStopped = activity => + { + if (operations.TryRemove(activity.TraceId.ToString(), out var operation)) + { + telemetryClient.StopOperation(operation); + } + }; + + // Subscribe to all traces in Semantic Kernel + activityListener.ShouldListenTo = + activitySource => activitySource.Name.StartsWith("Microsoft.SemanticKernel", StringComparison.Ordinal); + + activityListener.Sample = (ref ActivityCreationOptions _) => ActivitySamplingResult.AllData; + activityListener.SampleUsingParentId = (ref ActivityCreationOptions _) => ActivitySamplingResult.AllData; + activityListener.ActivityStarted = activityStarted; + activityListener.ActivityStopped = activityStopped; + + ActivitySource.AddActivityListener(activityListener); + } +} diff --git a/dotnet/samples/ApplicationInsightsExample/RepoUtils/Env.cs b/dotnet/samples/ApplicationInsightsExample/RepoUtils/Env.cs new file mode 100644 index 000000000000..e24da8fd7b20 --- /dev/null +++ b/dotnet/samples/ApplicationInsightsExample/RepoUtils/Env.cs @@ -0,0 +1,36 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using Microsoft.Extensions.Configuration; + +#pragma warning disable CA1812 // instantiated by AddUserSecrets +internal sealed class Env +#pragma warning restore CA1812 +{ + /// + /// Simple helper used to load env vars and secrets like credentials, + /// to avoid hard coding them in the sample code + /// + /// Secret name / Env var name + /// Value found in Secret Manager or Environment Variable + internal static string Var(string name) + { + var configuration = new ConfigurationBuilder() + .AddUserSecrets() + .Build(); + + var value = configuration[name]; + if (!string.IsNullOrEmpty(value)) + { + return value; + } + + value = Environment.GetEnvironmentVariable(name); + if (string.IsNullOrEmpty(value)) + { + throw new ArgumentException($"Secret / Env var not set: {name}"); + } + + return value; + } +} diff --git a/dotnet/samples/ApplicationInsightsExample/RepoUtils/RepoFiles.cs b/dotnet/samples/ApplicationInsightsExample/RepoUtils/RepoFiles.cs new file mode 100644 index 000000000000..dc15dfed4472 --- /dev/null +++ b/dotnet/samples/ApplicationInsightsExample/RepoUtils/RepoFiles.cs @@ -0,0 +1,39 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.IO; +using System.Reflection; + +internal static class RepoFiles +{ + /// + /// Scan the local folders from the repo, looking for "samples/skills" folder. 
+ /// + /// The full path to samples/skills + public static string SampleSkillsPath() + { + const string Parent = "samples"; + const string Folder = "skills"; + + bool SearchPath(string pathToFind, out string result, int maxAttempts = 10) + { + var currDir = Path.GetFullPath(Assembly.GetExecutingAssembly().Location); + bool found; + do + { + result = Path.Join(currDir, pathToFind); + found = Directory.Exists(result); + currDir = Path.GetFullPath(Path.Combine(currDir, "..")); + } while (maxAttempts-- > 0 && !found); + + return found; + } + + if (!SearchPath(Parent + Path.DirectorySeparatorChar + Folder, out string path) + && !SearchPath(Folder, out path)) + { + throw new DirectoryNotFoundException("Skills directory not found. The app needs the skills from the repo to work."); + } + + return path; + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Example01_NativeFunctions.cs b/dotnet/samples/KernelSyntaxExamples/Example01_NativeFunctions.cs index de95f6d8d791..6c68f07d41f7 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example01_NativeFunctions.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example01_NativeFunctions.cs @@ -1,12 +1,13 @@ // Copyright (c) Microsoft. All rights reserved. 
using System; +using System.Threading.Tasks; using Microsoft.SemanticKernel.Skills.Core; // ReSharper disable once InconsistentNaming public static class Example01_NativeFunctions { - public static void Run() + public static Task RunAsync() { Console.WriteLine("======== Functions ========"); @@ -17,5 +18,7 @@ public static void Run() var result = text.Uppercase("ciao!"); Console.WriteLine(result); + + return Task.CompletedTask; } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example04_CombineLLMPromptsAndNativeCode.cs b/dotnet/samples/KernelSyntaxExamples/Example04_CombineLLMPromptsAndNativeCode.cs index fa6d81ac27b6..19afded9e935 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example04_CombineLLMPromptsAndNativeCode.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example04_CombineLLMPromptsAndNativeCode.cs @@ -14,14 +14,30 @@ public static async Task RunAsync() { Console.WriteLine("======== LLMPrompts ========"); + string openAIApiKey = TestConfiguration.OpenAI.ApiKey; + + if (openAIApiKey == null) + { + Console.WriteLine("OpenAI credentials not found. Skipping example."); + return; + } + IKernel kernel = new KernelBuilder() .WithLogger(ConsoleLogger.Log) - .WithOpenAITextCompletionService("text-davinci-002", Env.Var("OPENAI_API_KEY"), serviceId: "text-davinci-002") - .WithOpenAITextCompletionService("text-davinci-003", Env.Var("OPENAI_API_KEY")) + .WithOpenAITextCompletionService("text-davinci-002", openAIApiKey, serviceId: "text-davinci-002") + .WithOpenAITextCompletionService("text-davinci-003", openAIApiKey) .Build(); // Load native skill - using var bingConnector = new BingConnector(Env.Var("BING_API_KEY")); + string bingApiKey = TestConfiguration.Bing.ApiKey; + + if (bingApiKey == null) + { + Console.WriteLine("Bing credentials not found. 
Skipping example."); + return; + } + + var bingConnector = new BingConnector(bingApiKey); var bing = new WebSearchEngineSkill(bingConnector); var search = kernel.ImportSkill(bing, "bing"); @@ -33,7 +49,7 @@ public static async Task RunAsync() "SummarizeSkill"); // Run - var ask = "What's the tallest building in South America?"; + var ask = "What's the tallest building in South America"; var result1 = await kernel.RunAsync( ask, diff --git a/dotnet/samples/KernelSyntaxExamples/Example05_InlineFunctionDefinition.cs b/dotnet/samples/KernelSyntaxExamples/Example05_InlineFunctionDefinition.cs index 7f30437a83e3..6b6ed5392cf4 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example05_InlineFunctionDefinition.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example05_InlineFunctionDefinition.cs @@ -12,6 +12,15 @@ public static async Task RunAsync() { Console.WriteLine("======== Inline Function Definition ========"); + string openAIModelId = TestConfiguration.OpenAI.ModelId; + string openAIApiKey = TestConfiguration.OpenAI.ApiKey; + + if (openAIModelId == null || openAIApiKey == null) + { + Console.WriteLine("OpenAI credentials not found. 
Skipping example."); + return; + } + /* * Example: normally you would place prompt templates in a folder to separate * C# code from natural language code, but you can also define a semantic @@ -20,7 +29,9 @@ public static async Task RunAsync() IKernel kernel = new KernelBuilder() .WithLogger(ConsoleLogger.Log) - .WithOpenAITextCompletionService("text-davinci-003", Env.Var("OPENAI_API_KEY")) + .WithOpenAITextCompletionService( + modelId: openAIModelId, + apiKey: openAIApiKey) .Build(); // Function defined using few-shot design pattern diff --git a/dotnet/samples/KernelSyntaxExamples/Example06_TemplateLanguage.cs b/dotnet/samples/KernelSyntaxExamples/Example06_TemplateLanguage.cs index 5903c2b96888..f5049ff060a8 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example06_TemplateLanguage.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example06_TemplateLanguage.cs @@ -18,9 +18,20 @@ public static async Task RunAsync() { Console.WriteLine("======== TemplateLanguage ========"); + string openAIModelId = TestConfiguration.OpenAI.ModelId; + string openAIApiKey = TestConfiguration.OpenAI.ApiKey; + + if (openAIModelId == null || openAIApiKey == null) + { + Console.WriteLine("OpenAI credentials not found. 
Skipping example."); + return; + } + IKernel kernel = Kernel.Builder .WithLogger(ConsoleLogger.Log) - .WithOpenAITextCompletionService("text-davinci-003", Env.Var("OPENAI_API_KEY")) + .WithOpenAITextCompletionService( + modelId: openAIModelId, + apiKey: openAIApiKey) .Build(); // Load native skill into the kernel skill collection, sharing its functions with prompt templates diff --git a/dotnet/samples/KernelSyntaxExamples/Example07_BingAndGoogleSkills.cs b/dotnet/samples/KernelSyntaxExamples/Example07_BingAndGoogleSkills.cs index b96ae2f6c6e8..a71a30991518 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example07_BingAndGoogleSkills.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example07_BingAndGoogleSkills.cs @@ -21,37 +21,68 @@ public static class Example07_BingAndGoogleSkills { public static async Task RunAsync() { + string openAIModelId = TestConfiguration.OpenAI.ModelId; + string openAIApiKey = TestConfiguration.OpenAI.ApiKey; + + if (openAIModelId == null || openAIApiKey == null) + { + Console.WriteLine("OpenAI credentials not found. Skipping example."); + return; + } + IKernel kernel = new KernelBuilder() .WithLogger(ConsoleLogger.Log) - .WithOpenAITextCompletionService("text-davinci-003", Env.Var("OPENAI_API_KEY")) + .WithOpenAITextCompletionService( + modelId: openAIModelId, + apiKey: openAIApiKey) .Build(); // Load Bing skill - using var bingConnector = new BingConnector(Env.Var("BING_API_KEY")); - kernel.ImportSkill(new WebSearchEngineSkill(bingConnector), "bing"); + string bingApiKey = TestConfiguration.Bing.ApiKey; + + if (bingApiKey == null) + { + Console.WriteLine("Bing credentials not found. 
Skipping example."); + } + else + { + var bingConnector = new BingConnector(bingApiKey); + var bing = new WebSearchEngineSkill(bingConnector); + var search = kernel.ImportSkill(bing, "bing"); + await Example1Async(kernel, "bing"); + await Example2Async(kernel); + } // Load Google skill - using var googleConnector = new GoogleConnector(Env.Var("GOOGLE_API_KEY"), Env.Var("GOOGLE_SEARCH_ENGINE_ID")); - kernel.ImportSkill(new WebSearchEngineSkill(googleConnector), "google"); + string googleApiKey = TestConfiguration.Google.ApiKey; + string googleSearchEngineId = TestConfiguration.Google.SearchEngineId; - await Example1Async(kernel); - await Example2Async(kernel); + if (googleApiKey == null || googleSearchEngineId == null) + { + Console.WriteLine("Google credentials not found. Skipping example."); + } + else + { + using var googleConnector = new GoogleConnector( + apiKey: googleApiKey, + searchEngineId: googleSearchEngineId); + var google = new WebSearchEngineSkill(googleConnector); + var search = kernel.ImportSkill(new WebSearchEngineSkill(googleConnector), "google"); + await Example1Async(kernel, "google"); + } } - private static async Task Example1Async(IKernel kernel) + private static async Task Example1Async(IKernel kernel, string searchSkillId) { Console.WriteLine("======== Bing and Google Search Skill ========"); // Run var question = "What's the largest building in the world?"; - var bingResult = await kernel.Func("bing", "search").InvokeAsync(question); - var googleResult = await kernel.Func("google", "search").InvokeAsync(question); + var result = await kernel.Func(searchSkillId, "search").InvokeAsync(question); Console.WriteLine(question); - Console.WriteLine("----"); - Console.WriteLine(bingResult); - Console.WriteLine("----"); - Console.WriteLine(googleResult); + Console.WriteLine($"----{searchSkillId}----"); + Console.WriteLine(result); /* OUTPUT: @@ -92,7 +123,7 @@ [EXAMPLE 2] * The smallest positive number is 1. 
[EXAMPLE 3] -Question: what's Ferrari stock price ? Who is the current number one female tennis player in the world? +Question: what's Ferrari stock price? Who is the current number one female tennis player in the world? Answer: {{ '{{' }} bing.search ""what\\'s Ferrari stock price?"" {{ '}}' }}. {{ '{{' }} bing.search ""Who is the current number one female tennis player in the world?"" {{ '}}' }}. diff --git a/dotnet/samples/KernelSyntaxExamples/Example08_RetryHandler.cs b/dotnet/samples/KernelSyntaxExamples/Example08_RetryHandler.cs index c8e5098d6cb8..fe2c7bf5267f 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example08_RetryHandler.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example08_RetryHandler.cs @@ -33,12 +33,12 @@ public static async Task RunAsync() await RunRetryHandlerConfigAsync(new HttpRetryConfig() { MaxRetryCount = 3, UseExponentialBackoff = true }); } - private static async Task RunRetryHandlerConfigAsync(HttpRetryConfig? config = null) + private static async Task RunRetryHandlerConfigAsync(HttpRetryConfig? httpConfig = null) { var kernelBuilder = Kernel.Builder.WithLogger(InfoLogger.Log); - if (config != null) + if (httpConfig != null) { - kernelBuilder = kernelBuilder.Configure(c => c.SetDefaultHttpRetryConfig(config)); + kernelBuilder = kernelBuilder.Configure(c => c.SetDefaultHttpRetryConfig(httpConfig)); } // Add 401 to the list of retryable status codes @@ -46,7 +46,7 @@ private static async Task RunRetryHandlerConfigAsync(HttpRetryConfig? config = n // purposes we are doing so as it's easy to trigger when using an invalid key. 
kernelBuilder = kernelBuilder.Configure(c => c.DefaultHttpRetryConfig.RetryableStatusCodes.Add(HttpStatusCode.Unauthorized)); - // OpenAI settings - you can set the OPENAI_API_KEY to an invalid value to see the retry policy in play + // OpenAI settings - you can set the OpenAI.ApiKey to an invalid value to see the retry policy in play kernelBuilder = kernelBuilder.WithOpenAITextCompletionService("text-davinci-003", "BAD_KEY"); var kernel = kernelBuilder.Build(); @@ -58,7 +58,7 @@ private static IKernel InitializeKernel() { var kernel = Kernel.Builder .WithLogger(InfoLogger.Log) - // OpenAI settings - you can set the OPENAI_API_KEY to an invalid value to see the retry policy in play + // OpenAI settings - you can set the OpenAI.ApiKey to an invalid value to see the retry policy in play .WithOpenAITextCompletionService("text-davinci-003", "BAD_KEY") .Build(); @@ -75,7 +75,7 @@ private static async Task RunRetryPolicyBuilderAsync(Type retryHandlerFactoryTyp { var kernel = Kernel.Builder.WithLogger(InfoLogger.Log) .WithRetryHandlerFactory((Activator.CreateInstance(retryHandlerFactoryType) as IDelegatingHandlerFactory)!) 
- // OpenAI settings - you can set the OPENAI_API_KEY to an invalid value to see the retry policy in play + // OpenAI settings - you can set the OpenAI.ApiKey to an invalid value to see the retry policy in play .WithOpenAITextCompletionService("text-davinci-003", "BAD_KEY") .Build(); @@ -96,7 +96,7 @@ private static async Task ImportAndExecuteSkillAsync(IKernel kernel) var question = "How popular is Polly library?"; InfoLogger.Log.LogInformation("Question: {0}", question); - // To see the retry policy in play, you can set the OPENAI_API_KEY to an invalid value + // To see the retry policy in play, you can set the OpenAI.ApiKey to an invalid value var answer = await kernel.RunAsync(question, qaSkill["Question"]); InfoLogger.Log.LogInformation("Answer: {0}", answer); } diff --git a/dotnet/samples/KernelSyntaxExamples/Example09_FunctionTypes.cs b/dotnet/samples/KernelSyntaxExamples/Example09_FunctionTypes.cs index 883a3787b228..dba44c5bd935 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example09_FunctionTypes.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example09_FunctionTypes.cs @@ -20,7 +20,7 @@ public static async Task RunAsync() var kernel = Kernel.Builder .WithLogger(ConsoleLogger.Log) - .WithOpenAITextCompletionService("text-davinci-003", Env.Var("OPENAI_API_KEY")) + .WithOpenAITextCompletionService(TestConfiguration.OpenAI.ModelId, TestConfiguration.OpenAI.ApiKey) .Build(); // Load native skill into the kernel skill collection, sharing its functions with prompt templates diff --git a/dotnet/samples/KernelSyntaxExamples/Example10_DescribeAllSkillsAndFunctions.cs b/dotnet/samples/KernelSyntaxExamples/Example10_DescribeAllSkillsAndFunctions.cs index 3664e9954ad9..4e185cf547ce 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example10_DescribeAllSkillsAndFunctions.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example10_DescribeAllSkillsAndFunctions.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Concurrent; using System.Collections.Generic; +using 
System.Threading.Tasks; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.SkillDefinition; using Microsoft.SemanticKernel.Skills.Core; @@ -17,12 +18,14 @@ public static class Example10_DescribeAllSkillsAndFunctions /// list of parameters, parameters descriptions, etc. /// See the end of the file for a sample of what the output looks like. /// - public static void Run() + public static Task RunAsync() { Console.WriteLine("======== Describe all skills and functions ========"); var kernel = Kernel.Builder - .WithOpenAITextCompletionService("text-davinci-003", "none") + .WithOpenAITextCompletionService( + modelId: TestConfiguration.OpenAI.ModelId, + apiKey: TestConfiguration.OpenAI.ApiKey) .Build(); // Import a native skill @@ -73,6 +76,8 @@ public static void Run() Console.WriteLine("Skill: " + skill.Key); foreach (FunctionView func in skill.Value) { PrintFunction(func); } } + + return Task.CompletedTask; } private static void PrintFunction(FunctionView func) diff --git a/dotnet/samples/KernelSyntaxExamples/Example12_SequentialPlanner.cs b/dotnet/samples/KernelSyntaxExamples/Example12_SequentialPlanner.cs index 67bcb373d815..15f1b557db00 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example12_SequentialPlanner.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example12_SequentialPlanner.cs @@ -70,9 +70,9 @@ private static async Task PoetrySamplesAsync() var kernel = new KernelBuilder() .WithLogger(ConsoleLogger.Log) .WithAzureTextCompletionService( - Env.Var("AZURE_OPENAI_DEPLOYMENT_NAME"), - Env.Var("AZURE_OPENAI_ENDPOINT"), - Env.Var("AZURE_OPENAI_KEY")) + TestConfiguration.AzureOpenAI.DeploymentName, + TestConfiguration.AzureOpenAI.Endpoint, + TestConfiguration.AzureOpenAI.ApiKey) .Build(); string folder = RepoFiles.SampleSkillsPath(); @@ -92,7 +92,7 @@ private static async Task PoetrySamplesAsync() // - WriterSkill.Translate language='Italian' INPUT='' => Console.WriteLine("Original plan:"); - Console.WriteLine(plan.ToPlanString()); + 
Console.WriteLine(plan.ToPlanWithGoalString()); var result = await kernel.RunAsync(plan); @@ -124,7 +124,7 @@ private static async Task EmailSamplesAsync() // - email.SendEmail INPUT='$TRANSLATED_SUMMARY' email_address='$EMAIL_ADDRESS' => Console.WriteLine("Original plan:"); - Console.WriteLine(plan.ToPlanString()); + Console.WriteLine(plan.ToPlanWithGoalString()); var input = "Once upon a time, in a faraway kingdom, there lived a kind and just king named Arjun. " + @@ -166,7 +166,7 @@ private static async Task BookSamplesAsync() // - WriterSkill.NovelChapter chapterIndex='3' previousChapter='$CHAPTER_2_SYNOPSIS' INPUT='$CHAPTER_3_SYNOPSIS' theme='Children's mystery' => RESULT__CHAPTER_3 Console.WriteLine("Original plan:"); - Console.WriteLine(originalPlan.ToPlanString()); + Console.WriteLine(originalPlan.ToPlanWithGoalString()); Stopwatch sw = new(); sw.Start(); @@ -180,13 +180,13 @@ private static async Task MemorySampleAsync() var kernel = new KernelBuilder() .WithLogger(ConsoleLogger.Log) .WithAzureChatCompletionService( - Env.Var("AZURE_OPENAI_CHAT_DEPLOYMENT_NAME"), - Env.Var("AZURE_OPENAI_CHAT_ENDPOINT"), - Env.Var("AZURE_OPENAI_CHAT_KEY")) + TestConfiguration.AzureOpenAI.ChatDeploymentName, + TestConfiguration.AzureOpenAI.Endpoint, + TestConfiguration.AzureOpenAI.ApiKey) .WithAzureTextEmbeddingGenerationService( - Env.Var("AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME"), - Env.Var("AZURE_OPENAI_EMBEDDINGS_ENDPOINT"), - Env.Var("AZURE_OPENAI_EMBEDDINGS_KEY")) + TestConfiguration.AzureOpenAIEmbeddings.DeploymentName, + TestConfiguration.AzureOpenAIEmbeddings.Endpoint, + TestConfiguration.AzureOpenAIEmbeddings.ApiKey) .WithMemoryStorage(new VolatileMemoryStore()) .Build(); @@ -215,7 +215,7 @@ private static async Task MemorySampleAsync() var plan = await planner.CreatePlanAsync(goal); Console.WriteLine("Original plan:"); - Console.WriteLine(plan.ToPlanString()); + Console.WriteLine(plan.ToPlanWithGoalString()); } private static IKernel InitializeKernelAndPlanner(out 
SequentialPlanner planner, int maxTokens = 1024) @@ -223,9 +223,9 @@ private static IKernel InitializeKernelAndPlanner(out SequentialPlanner planner, var kernel = new KernelBuilder() .WithLogger(ConsoleLogger.Log) .WithAzureChatCompletionService( - Env.Var("AZURE_OPENAI_CHAT_DEPLOYMENT_NAME"), - Env.Var("AZURE_OPENAI_CHAT_ENDPOINT"), - Env.Var("AZURE_OPENAI_CHAT_KEY")) + TestConfiguration.AzureOpenAI.ChatDeploymentName, + TestConfiguration.AzureOpenAI.Endpoint, + TestConfiguration.AzureOpenAI.ApiKey) .Build(); planner = new SequentialPlanner(kernel, new SequentialPlannerConfig { MaxTokens = maxTokens }); diff --git a/dotnet/samples/KernelSyntaxExamples/Example13_ConversationSummarySkill.cs b/dotnet/samples/KernelSyntaxExamples/Example13_ConversationSummarySkill.cs index 96fb77d7c261..b8fb1b24fdc3 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example13_ConversationSummarySkill.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example13_ConversationSummarySkill.cs @@ -181,9 +181,9 @@ private static IKernel InitializeKernel() IKernel kernel = Kernel.Builder .WithLogger(ConsoleLogger.Log) .WithAzureTextCompletionService( - Env.Var("AZURE_OPENAI_DEPLOYMENT_NAME"), - Env.Var("AZURE_OPENAI_ENDPOINT"), - Env.Var("AZURE_OPENAI_KEY")) + TestConfiguration.AzureOpenAI.DeploymentName, + TestConfiguration.AzureOpenAI.Endpoint, + TestConfiguration.AzureOpenAI.ApiKey) .Build(); return kernel; diff --git a/dotnet/samples/KernelSyntaxExamples/Example14_SemanticMemory.cs b/dotnet/samples/KernelSyntaxExamples/Example14_SemanticMemory.cs index f0811d8b4ea0..a26db827821f 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example14_SemanticMemory.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example14_SemanticMemory.cs @@ -4,6 +4,7 @@ using System.Collections.Generic; using System.Threading.Tasks; using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.Memory.AzureCognitiveSearch; using Microsoft.SemanticKernel.Memory; using RepoUtils; @@ -35,7 +36,8 @@ public static async 
Task RunAsync() var kernelWithACS = Kernel.Builder .WithLogger(ConsoleLogger.Log) - .WithAzureCognitiveSearchMemory(Env.Var("ACS_ENDPOINT"), Env.Var("ACS_API_KEY")) + .WithOpenAITextEmbeddingGenerationService("text-embedding-ada-002", TestConfiguration.OpenAI.ApiKey) + .WithMemoryStorage(new AzureCognitiveSearchMemoryStore(TestConfiguration.ACS.Endpoint, TestConfiguration.ACS.ApiKey)) .Build(); await RunExampleAsync(kernelWithACS); @@ -55,7 +57,7 @@ public static async Task RunAsync() var kernelWithCustomDb = Kernel.Builder .WithLogger(ConsoleLogger.Log) - .WithOpenAITextEmbeddingGenerationService("ada", "text-embedding-ada-002", Env.Var("OPENAI_API_KEY")) + .WithOpenAITextEmbeddingGenerationService("ada", "text-embedding-ada-002", TestConfiguration.OpenAI.ApiKey) .WithMemoryStorage(new VolatileMemoryStore()) .Build(); @@ -113,6 +115,7 @@ private static async Task SearchMemoryAsync(IKernel kernel, string query) Console.WriteLine($"Result {++i}:"); Console.WriteLine(" URL: : " + memory.Metadata.Id); Console.WriteLine(" Title : " + memory.Metadata.Description); + Console.WriteLine(" Relevance: " + memory.Relevance); Console.WriteLine(); } @@ -136,11 +139,11 @@ private static async Task StoreMemoryAsync(IKernel kernel) { await kernel.Memory.SaveReferenceAsync( collection: MemoryCollectionName, - description: entry.Value, - text: entry.Value, + externalSourceName: "GitHub", externalId: entry.Key, - externalSourceName: "GitHub" - ); + description: entry.Value, + text: entry.Value); + Console.Write($" #{++i} saved."); } diff --git a/dotnet/samples/KernelSyntaxExamples/Example15_MemorySkill.cs b/dotnet/samples/KernelSyntaxExamples/Example15_MemorySkill.cs index 5d00f4751d88..7e02970e2b7a 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example15_MemorySkill.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example15_MemorySkill.cs @@ -17,8 +17,8 @@ public static async Task RunAsync() { var kernel = Kernel.Builder .WithLogger(ConsoleLogger.Log) - 
.WithOpenAITextCompletionService("text-davinci-003", Env.Var("OPENAI_API_KEY")) - .WithOpenAITextEmbeddingGenerationService("text-embedding-ada-002", Env.Var("OPENAI_API_KEY")) + .WithOpenAITextCompletionService("text-davinci-003", TestConfiguration.OpenAI.ApiKey) + .WithOpenAITextEmbeddingGenerationService("text-embedding-ada-002", TestConfiguration.OpenAI.ApiKey) .WithMemoryStorage(new VolatileMemoryStore()) .Build(); diff --git a/dotnet/samples/KernelSyntaxExamples/Example17_ChatGPT.cs b/dotnet/samples/KernelSyntaxExamples/Example17_ChatGPT.cs index d8a316265075..f76e2db32717 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example17_ChatGPT.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example17_ChatGPT.cs @@ -5,7 +5,6 @@ using System.Threading.Tasks; using Microsoft.SemanticKernel.AI.ChatCompletion; using Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletion; -using RepoUtils; /** * The following example shows how to use Semantic Kernel with OpenAI ChatGPT API @@ -51,7 +50,7 @@ private static async Task OpenAIChatSampleAsync() { Console.WriteLine("======== Open AI - ChatGPT ========"); - OpenAIChatCompletion openAIChatCompletion = new("gpt-3.5-turbo", Env.Var("OPENAI_API_KEY")); + OpenAIChatCompletion openAIChatCompletion = new("gpt-3.5-turbo", TestConfiguration.OpenAI.ApiKey); await StartChatAsync(openAIChatCompletion); } @@ -61,9 +60,9 @@ private static async Task AzureOpenAIChatSampleAsync() Console.WriteLine("======== Azure Open AI - ChatGPT ========"); AzureChatCompletion azureChatCompletion = new( - Env.Var("AZURE_OPENAI_CHAT_DEPLOYMENT_NAME"), - Env.Var("AZURE_OPENAI_ENDPOINT"), - Env.Var("AZURE_OPENAI_KEY")); + TestConfiguration.AzureOpenAI.ChatDeploymentName, + TestConfiguration.AzureOpenAI.Endpoint, + TestConfiguration.AzureOpenAI.ApiKey); await StartChatAsync(azureChatCompletion); } @@ -85,7 +84,7 @@ private static async Task StartChatAsync(IChatCompletion chatGPT) await MessageOutputAsync(chatHistory); // Second user message - 
chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion?"); + chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion"); await MessageOutputAsync(chatHistory); // Second bot assistant message diff --git a/dotnet/samples/KernelSyntaxExamples/Example18_DallE.cs b/dotnet/samples/KernelSyntaxExamples/Example18_DallE.cs index e2482cd7583e..46994b737eaa 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example18_DallE.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example18_DallE.cs @@ -28,9 +28,9 @@ private static async Task OpenAIDallEAsync() IKernel kernel = new KernelBuilder() .WithLogger(ConsoleLogger.Log) // Add your image generation service - .WithOpenAIImageGenerationService(Env.Var("OPENAI_API_KEY")) + .WithOpenAIImageGenerationService(TestConfiguration.OpenAI.ApiKey) // Add your chat completion service - .WithOpenAIChatCompletionService("gpt-3.5-turbo", Env.Var("OPENAI_API_KEY")) + .WithOpenAIChatCompletionService("gpt-3.5-turbo", TestConfiguration.OpenAI.ApiKey) .Build(); IImageGeneration dallE = kernel.GetService(); @@ -97,9 +97,9 @@ public static async Task AzureOpenAIDallEAsync() IKernel kernel = new KernelBuilder() .WithLogger(ConsoleLogger.Log) // Add your image generation service - .WithAzureOpenAIImageGenerationService(Env.Var("AZURE_OPENAI_ENDPOINT"), Env.Var("AZURE_OPENAI_API_KEY")) + .WithAzureOpenAIImageGenerationService(TestConfiguration.AzureOpenAI.Endpoint, TestConfiguration.AzureOpenAI.ApiKey) // Add your chat completion service - .WithAzureChatCompletionService("gpt-35-turbo", Env.Var("AZURE_OPENAI_ENDPOINT"), Env.Var("AZURE_OPENAI_API_KEY")) + .WithAzureChatCompletionService("gpt-35-turbo", TestConfiguration.AzureOpenAI.Endpoint, TestConfiguration.AzureOpenAI.ApiKey) .Build(); IImageGeneration dallE = kernel.GetService(); diff --git a/dotnet/samples/KernelSyntaxExamples/Example19_Qdrant.cs 
b/dotnet/samples/KernelSyntaxExamples/Example19_Qdrant.cs index c87e1c5dd558..8b310de302e8 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example19_Qdrant.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example19_Qdrant.cs @@ -14,13 +14,13 @@ public static class Example19_Qdrant public static async Task RunAsync() { - QdrantMemoryStore memoryStore = new(Env.Var("QDRANT_ENDPOINT"), 1536, ConsoleLogger.Log); + QdrantMemoryStore memoryStore = new(TestConfiguration.Qdrant.Endpoint, 1536, ConsoleLogger.Log); IKernel kernel = Kernel.Builder .WithLogger(ConsoleLogger.Log) - .WithOpenAITextCompletionService("text-davinci-003", Env.Var("OPENAI_API_KEY")) - .WithOpenAITextEmbeddingGenerationService("text-embedding-ada-002", Env.Var("OPENAI_API_KEY")) + .WithOpenAITextCompletionService("text-davinci-003", TestConfiguration.OpenAI.ApiKey) + .WithOpenAITextEmbeddingGenerationService("text-embedding-ada-002", TestConfiguration.OpenAI.ApiKey) .WithMemoryStorage(memoryStore) - //.WithQdrantMemoryStore(Env.Var("QDRANT_ENDPOINT"), 1536) // This method offers an alternative approach to registering Qdrant memory store. + //.WithQdrantMemoryStore(TestConfiguration.Qdrant.Endpoint, 1536) // This method offers an alternative approach to registering Qdrant memory store. 
.Build(); Console.WriteLine("== Printing Collections in DB =="); diff --git a/dotnet/samples/KernelSyntaxExamples/Example20_HuggingFace.cs b/dotnet/samples/KernelSyntaxExamples/Example20_HuggingFace.cs index 500b2965b34e..4f4590d075b3 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example20_HuggingFace.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example20_HuggingFace.cs @@ -18,7 +18,9 @@ public static async Task RunAsync() IKernel kernel = new KernelBuilder() .WithLogger(ConsoleLogger.Log) - .WithHuggingFaceTextCompletionService("gpt2", apiKey: Env.Var("HF_API_KEY")) + .WithHuggingFaceTextCompletionService( + model: TestConfiguration.HuggingFace.ApiKey, + apiKey: TestConfiguration.HuggingFace.ApiKey) .Build(); const string FunctionDefinition = "Question: {{$input}}; Answer:"; diff --git a/dotnet/samples/KernelSyntaxExamples/Example22_OpenApiSkill_AzureKeyVault.cs b/dotnet/samples/KernelSyntaxExamples/Example22_OpenApiSkill_AzureKeyVault.cs index 153921eda7bd..d2aa41ac2789 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example22_OpenApiSkill_AzureKeyVault.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example22_OpenApiSkill_AzureKeyVault.cs @@ -20,8 +20,8 @@ public static async Task RunAsync() // To run this example, you must register a client application with the Microsoft identity platform. // Instructions here: https://learn.microsoft.com/en-us/azure/active-directory/develop/quickstart-register-app var authenticationProvider = new InteractiveMsalAuthenticationProvider( - Env.Var("AZURE_KEYVAULT_CLIENTID"), - Env.Var("AZURE_KEYVAULT_TENANTID"), + TestConfiguration.KeyVault.ClientId, + TestConfiguration.KeyVault.TenantId, new[] { "https://vault.azure.net/.default" }, new Uri("http://localhost")); @@ -49,7 +49,7 @@ public static async Task GetSecretFromAzureKeyVaultWithRetryAsync(InteractiveMsa // Add arguments for required parameters, arguments for optional ones can be skipped. 
var contextVariables = new ContextVariables(); - contextVariables.Set("server-url", "https://.vault.azure.net"); + contextVariables.Set("server-url", TestConfiguration.KeyVault.Endpoint); contextVariables.Set("secret-name", ""); contextVariables.Set("api-version", "7.0"); @@ -73,7 +73,7 @@ public static async Task AddSecretToAzureKeyVaultAsync(InteractiveMsalAuthentica // Add arguments for required parameters, arguments for optional ones can be skipped. var contextVariables = new ContextVariables(); - contextVariables.Set("server-url", "https://.vault.azure.net"); + contextVariables.Set("server-url", TestConfiguration.KeyVault.Endpoint); contextVariables.Set("secret-name", ""); contextVariables.Set("api-version", "7.0"); contextVariables.Set("payload", JsonSerializer.Serialize(new { value = "", attributes = new { enabled = true } })); diff --git a/dotnet/samples/KernelSyntaxExamples/Example23_OpenApiSkill_Github.cs b/dotnet/samples/KernelSyntaxExamples/Example23_OpenApiSkill_Github.cs index 6cf3043c60af..9ab3f35199d8 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example23_OpenApiSkill_Github.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example23_OpenApiSkill_Github.cs @@ -14,7 +14,7 @@ /// /// Import and run GitHub Functions using OpenAPI Skill. /// To use this example, run: -/// dotnet user-secrets set "GITHUB_PERSONAL_ACCESS_TOKEN" "github_pat_..." +/// dotnet user-secrets set "Github.PAT" "github_pat_..." /// Make sure your GitHub PAT has read permissions set for Pull Requests. 
/// Creating a PAT: https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token /// @@ -23,7 +23,7 @@ public static class Example23_OpenApiSkill_GitHub { public static async Task RunAsync() { - var authenticationProvider = new BearerAuthenticationProvider(() => { return Task.FromResult(Env.Var("GITHUB_PERSONAL_ACCESS_TOKEN")); }); + var authenticationProvider = new BearerAuthenticationProvider(() => { return Task.FromResult(TestConfiguration.Github.PAT); }); Console.WriteLine("== Example22_c_OpenApiSkill_GitHub =="); var firstPRNumber = await ListPullRequestsFromGitHubAsync(authenticationProvider); await GetPullRequestFromGitHubAsync(authenticationProvider, firstPRNumber); diff --git a/dotnet/samples/KernelSyntaxExamples/Example24_OpenApiSkill_Jira.cs b/dotnet/samples/KernelSyntaxExamples/Example24_OpenApiSkill_Jira.cs index 92b2b6b9da81..215d080707eb 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example24_OpenApiSkill_Jira.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example24_OpenApiSkill_Jira.cs @@ -26,13 +26,13 @@ public static async Task RunAsync() var contextVariables = new ContextVariables(); // Change to a jira instance you have access to with your authentication credentials - string serverUrl = "https://.atlassian.net/rest/api/latest/"; + string serverUrl = $"https://{TestConfiguration.Jira.Domain}.atlassian.net/rest/api/latest/"; contextVariables.Set("server-url", serverUrl); IDictionary jiraSkills; var tokenProvider = new BasicAuthenticationProvider(() => { - string s = Env.Var("MY_EMAIL_ADDRESS") + ":" + Env.Var("JIRA_API_KEY"); + string s = $"{TestConfiguration.Jira.Email}:{TestConfiguration.Jira.ApiKey}"; return Task.FromResult(s); }); diff --git a/dotnet/samples/KernelSyntaxExamples/Example28_ActionPlanner.cs b/dotnet/samples/KernelSyntaxExamples/Example28_ActionPlanner.cs index 9a9fd7b1f2b2..69c8be80523b 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example28_ActionPlanner.cs +++ 
b/dotnet/samples/KernelSyntaxExamples/Example28_ActionPlanner.cs @@ -15,7 +15,7 @@ public static async Task RunAsync() Console.WriteLine("======== Action Planner ========"); var kernel = new KernelBuilder() .WithLogger(ConsoleLogger.Log) - .WithOpenAITextCompletionService("text-davinci-002", Env.Var("OPENAI_API_KEY"))// Note: Action Planner works with old models like text-davinci-002 + .WithOpenAITextCompletionService("text-davinci-002", TestConfiguration.OpenAI.ApiKey)// Note: Action Planner works with old models like text-davinci-002 .Build(); string folder = RepoFiles.SampleSkillsPath(); diff --git a/dotnet/samples/KernelSyntaxExamples/Example29_Tokenizer.cs b/dotnet/samples/KernelSyntaxExamples/Example29_Tokenizer.cs index 8659658dc2d5..84bad7fe3a61 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example29_Tokenizer.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example29_Tokenizer.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using System.Threading.Tasks; using Microsoft.SemanticKernel.Connectors.AI.OpenAI.Tokenizers; // ReSharper disable once InconsistentNaming @@ -14,7 +15,7 @@ /// public static class Example29_Tokenizer { - public static void Run() + public static Task RunAsync() { // Example 1 string sentence = "Some text on one line"; @@ -85,5 +86,7 @@ two lines Tokens: 7 --- */ + + return Task.CompletedTask; } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example30_ChatWithPrompts.cs b/dotnet/samples/KernelSyntaxExamples/Example30_ChatWithPrompts.cs index f438a0570a26..5db59b8afde2 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example30_ChatWithPrompts.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example30_ChatWithPrompts.cs @@ -65,7 +65,7 @@ public static async Task RunAsync() // Usual kernel initialization, with GPT 3.5 Turbo IKernel kernel = new KernelBuilder() .WithLogger(ConsoleLogger.Log) - .WithOpenAIChatCompletionService("gpt-3.5-turbo", Env.Var("OPENAI_API_KEY"), serviceId: "chat") + 
.WithOpenAIChatCompletionService("gpt-3.5-turbo", TestConfiguration.OpenAI.ApiKey, serviceId: "chat") .Build(); // As an example, we import the time skill, which is used in system prompt to read the current date. diff --git a/dotnet/samples/KernelSyntaxExamples/Example31_CustomPlanner.cs b/dotnet/samples/KernelSyntaxExamples/Example31_CustomPlanner.cs index 2ec4a8af528d..13a7ca482224 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example31_CustomPlanner.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example31_CustomPlanner.cs @@ -114,7 +114,7 @@ private static IDictionary LoadQASkill(IKernel kernel) string folder = RepoFiles.SampleSkillsPath(); kernel.ImportSkill(new TimeSkill(), "time"); #pragma warning disable CA2000 // Dispose objects before losing scope - var bing = new WebSearchEngineSkill(new BingConnector(Env.Var("BING_API_KEY"))); + var bing = new WebSearchEngineSkill(new BingConnector(TestConfiguration.Bing.ApiKey)); #pragma warning restore CA2000 // Dispose objects before losing scope var search = kernel.ImportSkill(bing, "bing"); @@ -126,13 +126,13 @@ private static IKernel InitializeKernel() return new KernelBuilder() .WithLogger(ConsoleLogger.Log) .WithAzureTextCompletionService( - Env.Var("AZURE_OPENAI_DEPLOYMENT_NAME"), - Env.Var("AZURE_OPENAI_ENDPOINT"), - Env.Var("AZURE_OPENAI_KEY")) + TestConfiguration.AzureOpenAI.DeploymentName, + TestConfiguration.AzureOpenAI.Endpoint, + TestConfiguration.AzureOpenAI.ApiKey) .WithAzureTextEmbeddingGenerationService( - Env.Var("AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME"), - Env.Var("AZURE_OPENAI_EMBEDDINGS_ENDPOINT"), - Env.Var("AZURE_OPENAI_EMBEDDINGS_KEY")) + TestConfiguration.AzureOpenAIEmbeddings.DeploymentName, + TestConfiguration.AzureOpenAI.Endpoint, + TestConfiguration.AzureOpenAI.ApiKey) .WithMemoryStorage(new VolatileMemoryStore()) .Build(); } @@ -147,7 +147,7 @@ public async Task RunMarkupAsync(string docString, SKContext context) var plan = docString.FromMarkup("Run a piece of xml markup", context); 
Console.WriteLine("Markup plan:"); - Console.WriteLine(plan.ToPlanString()); + Console.WriteLine(plan.ToPlanWithGoalString()); Console.WriteLine(); var result = await plan.InvokeAsync(); diff --git a/dotnet/samples/KernelSyntaxExamples/Example32_StreamingCompletion.cs b/dotnet/samples/KernelSyntaxExamples/Example32_StreamingCompletion.cs index fd1a6e729309..db362fd8e9b0 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example32_StreamingCompletion.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example32_StreamingCompletion.cs @@ -4,7 +4,6 @@ using System.Threading.Tasks; using Microsoft.SemanticKernel.AI.TextCompletion; using Microsoft.SemanticKernel.Connectors.AI.OpenAI.TextCompletion; -using RepoUtils; /** * The following example shows how to use Semantic Kernel with Text Completion as streaming @@ -23,9 +22,9 @@ private static async Task AzureOpenAITextCompletionStreamAsync() Console.WriteLine("======== Azure OpenAI - Text Completion - Raw Streaming ========"); var textCompletion = new AzureTextCompletion( - Env.Var("AZURE_OPENAI_DEPLOYMENT_NAME"), - Env.Var("AZURE_OPENAI_ENDPOINT"), - Env.Var("AZURE_OPENAI_KEY")); + TestConfiguration.AzureOpenAI.DeploymentName, + TestConfiguration.AzureOpenAI.Endpoint, + TestConfiguration.AzureOpenAI.ApiKey); await TextCompletionStreamAsync(textCompletion); } @@ -34,7 +33,7 @@ private static async Task OpenAITextCompletionStreamAsync() { Console.WriteLine("======== Open AI - Text Completion - Raw Streaming ========"); - var textCompletion = new OpenAITextCompletion("text-davinci-003", Env.Var("OPENAI_API_KEY")); + var textCompletion = new OpenAITextCompletion("text-davinci-003", TestConfiguration.OpenAI.ApiKey); await TextCompletionStreamAsync(textCompletion); } diff --git a/dotnet/samples/KernelSyntaxExamples/Example33_StreamingChat.cs b/dotnet/samples/KernelSyntaxExamples/Example33_StreamingChat.cs index a128023b2ae2..f6ebe85e512e 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example33_StreamingChat.cs +++ 
b/dotnet/samples/KernelSyntaxExamples/Example33_StreamingChat.cs @@ -5,7 +5,6 @@ using System.Threading.Tasks; using Microsoft.SemanticKernel.AI.ChatCompletion; using Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletion; -using RepoUtils; /** * The following example shows how to use Semantic Kernel with Text Completion as streaming @@ -23,7 +22,7 @@ private static async Task OpenAIChatStreamSampleAsync() { Console.WriteLine("======== Open AI - ChatGPT Streaming ========"); - OpenAIChatCompletion openAIChatCompletion = new("gpt-3.5-turbo", Env.Var("OPENAI_API_KEY")); + OpenAIChatCompletion openAIChatCompletion = new("gpt-3.5-turbo", TestConfiguration.OpenAI.ApiKey); await StartStreamingChatAsync(openAIChatCompletion); } @@ -33,9 +32,9 @@ private static async Task AzureOpenAIChatStreamSampleAsync() Console.WriteLine("======== Azure Open AI - ChatGPT Streaming ========"); AzureChatCompletion azureChatCompletion = new( - Env.Var("AZURE_OPENAI_CHAT_DEPLOYMENT_NAME"), - Env.Var("AZURE_OPENAI_ENDPOINT"), - Env.Var("AZURE_OPENAI_KEY")); + TestConfiguration.AzureOpenAI.ChatDeploymentName, + TestConfiguration.AzureOpenAI.Endpoint, + TestConfiguration.AzureOpenAI.ApiKey); await StartStreamingChatAsync(azureChatCompletion); } diff --git a/dotnet/samples/KernelSyntaxExamples/Example36_MultiCompletion.cs b/dotnet/samples/KernelSyntaxExamples/Example36_MultiCompletion.cs index bc954f4a5390..e02bdb5e8ffc 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example36_MultiCompletion.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example36_MultiCompletion.cs @@ -4,7 +4,6 @@ using System.Threading.Tasks; using Microsoft.SemanticKernel.AI.TextCompletion; using Microsoft.SemanticKernel.Connectors.AI.OpenAI.TextCompletion; -using RepoUtils; /** * The following example shows how to use Semantic Kernel with Multiple Results Text Completion as streaming @@ -23,9 +22,9 @@ private static async Task AzureOpenAIMultiTextCompletionAsync() Console.WriteLine("======== Azure OpenAI - Multiple 
Text Completion ========"); var textCompletion = new AzureTextCompletion( - Env.Var("AZURE_OPENAI_DEPLOYMENT_NAME"), - Env.Var("AZURE_OPENAI_ENDPOINT"), - Env.Var("AZURE_OPENAI_KEY")); + TestConfiguration.AzureOpenAI.DeploymentName, + TestConfiguration.AzureOpenAI.Endpoint, + TestConfiguration.AzureOpenAI.ApiKey); await TextCompletionAsync(textCompletion); } @@ -36,7 +35,7 @@ private static async Task OpenAIMultiTextCompletionAsync() ITextCompletion textCompletion = new OpenAITextCompletion( "text-davinci-003", - Env.Var("OPENAI_API_KEY")); + TestConfiguration.OpenAI.ApiKey); await TextCompletionAsync(textCompletion); } diff --git a/dotnet/samples/KernelSyntaxExamples/Example37_MultiStreamingCompletion.cs b/dotnet/samples/KernelSyntaxExamples/Example37_MultiStreamingCompletion.cs index 2cf2123f96af..3c56d0941852 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example37_MultiStreamingCompletion.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example37_MultiStreamingCompletion.cs @@ -5,7 +5,6 @@ using System.Threading.Tasks; using Microsoft.SemanticKernel.AI.TextCompletion; using Microsoft.SemanticKernel.Connectors.AI.OpenAI.TextCompletion; -using RepoUtils; /** * The following example shows how to use Semantic Kernel with Multiple Results Text Completion as streaming @@ -26,9 +25,9 @@ private static async Task AzureOpenAIMultiTextCompletionStreamAsync() Console.WriteLine("======== Azure OpenAI - Multiple Text Completion - Raw Streaming ========"); var textCompletion = new AzureTextCompletion( - Env.Var("AZURE_OPENAI_DEPLOYMENT_NAME"), - Env.Var("AZURE_OPENAI_ENDPOINT"), - Env.Var("AZURE_OPENAI_KEY")); + TestConfiguration.AzureOpenAI.DeploymentName, + TestConfiguration.AzureOpenAI.Endpoint, + TestConfiguration.AzureOpenAI.ApiKey); await TextCompletionStreamAsync(textCompletion); } @@ -39,7 +38,7 @@ private static async Task OpenAITextCompletionStreamAsync() ITextCompletion textCompletion = new OpenAITextCompletion( "text-davinci-003", - Env.Var("OPENAI_API_KEY")); + 
TestConfiguration.OpenAI.ApiKey); await TextCompletionStreamAsync(textCompletion); } diff --git a/dotnet/samples/KernelSyntaxExamples/Example38_Pinecone.cs b/dotnet/samples/KernelSyntaxExamples/Example38_Pinecone.cs index 7ee2c94ebbf0..722b772e4edc 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example38_Pinecone.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example38_Pinecone.cs @@ -22,15 +22,15 @@ public static class Example38_Pinecone public static async Task RunAsync() { - string apiKey = Env.Var("PINECONE_API_KEY"); - string pineconeEnvironment = Env.Var("PINECONE_ENVIRONMENT"); + string apiKey = TestConfiguration.Pinecone.ApiKey; + string pineconeEnvironment = TestConfiguration.Pinecone.Environment; PineconeMemoryStore memoryStore = new(pineconeEnvironment, apiKey); IKernel kernel = Kernel.Builder .WithLogger(ConsoleLogger.Log) - .WithOpenAITextCompletionService("text-davinci-003", Env.Var("OPENAI_API_KEY")) - .WithOpenAITextEmbeddingGenerationService("text-embedding-ada-002", Env.Var("OPENAI_API_KEY")) + .WithOpenAITextCompletionService(TestConfiguration.OpenAI.ModelId, TestConfiguration.OpenAI.ApiKey) + .WithOpenAITextEmbeddingGenerationService(TestConfiguration.OpenAI.EmbeddingModelId, TestConfiguration.OpenAI.ApiKey) .WithMemoryStorage(memoryStore) //.WithPineconeMemoryStore(pineconeEnvironment, apiKey) // This method offers an alternative approach to registering Pinecone memory storage. 
.Build(); diff --git a/dotnet/samples/KernelSyntaxExamples/Example39_Postgres.cs b/dotnet/samples/KernelSyntaxExamples/Example39_Postgres.cs index c8e28a37bb27..3fd2ba4ef4a3 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example39_Postgres.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example39_Postgres.cs @@ -16,7 +16,7 @@ public static class Example39_Postgres public static async Task RunAsync() { - NpgsqlDataSourceBuilder dataSourceBuilder = new(Env.Var("POSTGRES_CONNECTIONSTRING")); + NpgsqlDataSourceBuilder dataSourceBuilder = new(TestConfiguration.Postgres.ConnectionString); dataSourceBuilder.UseVector(); await using NpgsqlDataSource dataSource = dataSourceBuilder.Build(); @@ -24,8 +24,12 @@ public static async Task RunAsync() IKernel kernel = Kernel.Builder .WithLogger(ConsoleLogger.Log) - .WithOpenAITextCompletionService("text-davinci-003", Env.Var("OPENAI_API_KEY")) - .WithOpenAITextEmbeddingGenerationService("text-embedding-ada-002", Env.Var("OPENAI_API_KEY")) + .WithOpenAITextCompletionService( + modelId: TestConfiguration.OpenAI.ModelId, + apiKey: TestConfiguration.OpenAI.ApiKey) + .WithOpenAITextEmbeddingGenerationService( + modelId: TestConfiguration.OpenAI.EmbeddingModelId, + apiKey: TestConfiguration.OpenAI.ApiKey) .WithMemoryStorage(memoryStore) //.WithPostgresMemoryStore(dataSource, vectorSize: 1536, schema: "public") // This method offers an alternative approach to registering Postgres memory store. 
.Build(); diff --git a/dotnet/samples/KernelSyntaxExamples/Example40_DIContainer.cs b/dotnet/samples/KernelSyntaxExamples/Example40_DIContainer.cs index 7b7189dce318..619fc2d27d46 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example40_DIContainer.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example40_DIContainer.cs @@ -43,7 +43,7 @@ private static async Task UseKernelInDIPowerAppAsync() { return Kernel.Builder .WithLogger(serviceProvider.GetRequiredService()) - .WithOpenAITextCompletionService("text-davinci-002", Env.Var("OPENAI_API_KEY")) + .WithOpenAITextCompletionService("text-davinci-002", TestConfiguration.OpenAI.ApiKey) .Build(); }); @@ -72,7 +72,7 @@ private static async Task UseKernelInDIPowerApp_AdvancedScenarioAsync() //Registering AI services Kernel is going to use var aiServicesCollection = new AIServiceCollection(); - aiServicesCollection.SetService(() => new OpenAITextCompletion("text-davinci-002", Env.Var("OPENAI_API_KEY"))); + aiServicesCollection.SetService(() => new OpenAITextCompletion("text-davinci-002", TestConfiguration.OpenAI.ApiKey)); //Registering Kernel dependencies var collection = new ServiceCollection(); diff --git a/dotnet/samples/KernelSyntaxExamples/Example41_HttpClientUsage.cs b/dotnet/samples/KernelSyntaxExamples/Example41_HttpClientUsage.cs index cc72cdeab1aa..68a45b0cae6b 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example41_HttpClientUsage.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example41_HttpClientUsage.cs @@ -2,6 +2,7 @@ using System; using System.Net.Http; +using System.Threading.Tasks; using Microsoft.Extensions.DependencyInjection; using Microsoft.SemanticKernel; @@ -11,46 +12,53 @@ public static class Example41_HttpClientUsage { - public static void Run() + public static Task RunAsync() { //Examples showing how to use HttpClient. - UseDefaultHttpClientAsync(); + UseDefaultHttpClient(); - UseCustomHttpClientAsync(); + UseCustomHttpClient(); //Examples showing how to use HttpClientFactory. 
- UseBasicRegistrationWithHttpClientFactoryAsync(); + UseBasicRegistrationWithHttpClientFactory(); - UseNamedRegistrationWitHttpClientFactoryAsync(); + UseNamedRegistrationWitHttpClientFactory(); + + return Task.CompletedTask; } /// /// Demonstrates the usage of the default HttpClient provided by the SK SDK. /// - private static void UseDefaultHttpClientAsync() + private static void UseDefaultHttpClient() { var kernel = Kernel.Builder - .WithOpenAITextCompletionService("", "") // If you need to use the default HttpClient from the SK SDK, simply omit the argument for the httpMessageInvoker parameter. + .WithOpenAITextCompletionService( + modelId: TestConfiguration.OpenAI.ModelId, + apiKey: TestConfiguration.OpenAI.ApiKey) // If you need to use the default HttpClient from the SK SDK, simply omit the argument for the httpMessageInvoker parameter. .Build(); } /// /// Demonstrates the usage of a custom HttpClient. /// - private static void UseCustomHttpClientAsync() + private static void UseCustomHttpClient() { using var httpClient = new HttpClient(); // If you need to use a custom HttpClient, simply pass it as an argument for the httpClient parameter. var kernel = Kernel.Builder - .WithOpenAITextCompletionService("", "", httpClient: httpClient) + .WithOpenAITextCompletionService( + modelId: TestConfiguration.OpenAI.ModelId, + apiKey: TestConfiguration.OpenAI.ApiKey, + httpClient: httpClient) .Build(); } /// /// Demonstrates the "basic usage" approach for HttpClientFactory. 
/// - private static void UseBasicRegistrationWithHttpClientFactoryAsync() + private static void UseBasicRegistrationWithHttpClientFactory() { //More details - https://learn.microsoft.com/en-us/dotnet/core/extensions/httpclient-factory#basic-usage var serviceCollection = new ServiceCollection(); @@ -61,7 +69,10 @@ private static void UseBasicRegistrationWithHttpClientFactoryAsync() var factory = sp.GetRequiredService(); var kernel = Kernel.Builder - .WithOpenAITextCompletionService("", "", httpClient: factory.CreateClient()) + .WithOpenAITextCompletionService( + modelId: TestConfiguration.OpenAI.ModelId, + apiKey: TestConfiguration.OpenAI.ApiKey, + httpClient: factory.CreateClient()) .Build(); return kernel; @@ -71,7 +82,7 @@ private static void UseBasicRegistrationWithHttpClientFactoryAsync() /// /// Demonstrates the "named clients" approach for HttpClientFactory. /// - private static void UseNamedRegistrationWitHttpClientFactoryAsync() + private static void UseNamedRegistrationWitHttpClientFactory() { // More details https://learn.microsoft.com/en-us/dotnet/core/extensions/httpclient-factory#named-clients @@ -89,7 +100,10 @@ private static void UseNamedRegistrationWitHttpClientFactoryAsync() var factory = sp.GetRequiredService(); var kernel = Kernel.Builder - .WithOpenAITextCompletionService("", "", httpClient: factory.CreateClient("test-client")) + .WithOpenAITextCompletionService( + modelId: TestConfiguration.OpenAI.ModelId, + apiKey: TestConfiguration.OpenAI.ApiKey, + httpClient: factory.CreateClient("test-client")) .Build(); return kernel; diff --git a/dotnet/samples/KernelSyntaxExamples/Example42_KernelBuilder.cs b/dotnet/samples/KernelSyntaxExamples/Example42_KernelBuilder.cs index b18e65d1f358..43a34b2e0b2b 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example42_KernelBuilder.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example42_KernelBuilder.cs @@ -28,8 +28,13 @@ // ReSharper disable once InconsistentNaming public static class Example42_KernelBuilder 
{ - public static void Run() + public static Task RunAsync() { + string azureOpenAIKey = TestConfiguration.AzureOpenAI.ApiKey; + string azureOpenAIEndpoint = TestConfiguration.AzureOpenAI.Endpoint; + string azureOpenAITextCompletionDeployment = TestConfiguration.AzureOpenAI.DeploymentName; + string azureOpenAIEmbeddingDeployment = TestConfiguration.AzureOpenAIEmbeddings.DeploymentName; + #pragma warning disable CA1852 // Seal internal types IKernel kernel1 = Kernel.Builder.Build(); #pragma warning restore CA1852 // Seal internal types @@ -65,21 +70,30 @@ public static void Run() // Manually setup all the dependencies used internally by the kernel var logger = NullLogger.Instance; var memoryStorage = new VolatileMemoryStore(); - var textEmbeddingGenerator = new AzureTextEmbeddingGeneration("modelId", "https://...", "apiKey", logger: logger); + var textEmbeddingGenerator = new AzureTextEmbeddingGeneration( + modelId: azureOpenAIEmbeddingDeployment, + endpoint: azureOpenAIEndpoint, + apiKey: azureOpenAIKey, + logger: logger); using var memory = new SemanticTextMemory(memoryStorage, textEmbeddingGenerator); var skills = new SkillCollection(); var templateEngine = new PromptTemplateEngine(logger); - var config = new KernelConfig(); + var kernelConfig = new KernelConfig(); using var httpHandler = new DefaultHttpRetryHandler(new HttpRetryConfig(), logger); using var httpClient = new HttpClient(httpHandler); var aiServices = new AIServiceCollection(); - ITextCompletion Factory() => new AzureTextCompletion("deploymentName", "https://...", "apiKey", httpClient, logger); + ITextCompletion Factory() => new AzureTextCompletion( + modelId: azureOpenAITextCompletionDeployment, + endpoint: azureOpenAIEndpoint, + apiKey: azureOpenAIKey, + httpClient, + logger); aiServices.SetService("foo", Factory); IAIServiceProvider aiServiceProvider = aiServices.Build(); // Create kernel manually injecting all the dependencies - using var kernel3 = new Kernel(skills, aiServiceProvider, 
templateEngine, memory, config, logger); + using var kernel3 = new Kernel(skills, aiServiceProvider, templateEngine, memory, kernelConfig, logger); // ========================================================================================================== // The kernel builder purpose is to simplify this process, automating how dependencies @@ -89,7 +103,10 @@ public static void Run() var kernel4 = Kernel.Builder .WithLogger(NullLogger.Instance) .WithMemory(memory) - .WithAzureTextCompletionService("deploymentName", "https://...", "apiKey") + .WithAzureTextCompletionService( + deploymentName: azureOpenAITextCompletionDeployment, + endpoint: azureOpenAIEndpoint, + apiKey: azureOpenAIKey) .Build(); // Example: how to use a custom memory storage and custom embedding generator @@ -102,15 +119,25 @@ public static void Run() var kernel6 = Kernel.Builder .WithLogger(NullLogger.Instance) .WithMemoryStorage(memoryStorage) // Custom memory storage - .WithAzureTextCompletionService("myName1", "completionDeploymentName", "https://...", "apiKey") // This will be used when using AI completions - .WithAzureTextEmbeddingGenerationService("myName2", "embeddingsDeploymentName", "https://...", "apiKey") // This will be used when indexing memory records + .WithAzureTextCompletionService( + deploymentName: azureOpenAITextCompletionDeployment, + endpoint: azureOpenAIEndpoint, + apiKey: azureOpenAIKey) // This will be used when using AI completions + .WithAzureTextEmbeddingGenerationService( + deploymentName: azureOpenAIEmbeddingDeployment, + endpoint: azureOpenAIEndpoint, + apiKey: azureOpenAIKey) // This will be used when indexing memory records .Build(); // ========================================================================================================== // The AI services are defined with the builder var kernel7 = Kernel.Builder - .WithAzureTextCompletionService("myName1", "completionDeploymentName", "https://...", "apiKey", true) + .WithAzureTextCompletionService( + 
deploymentName: azureOpenAITextCompletionDeployment, + endpoint: azureOpenAIEndpoint, + apiKey: azureOpenAIKey, + setAsDefault: true) .Build(); // ========================================================================================================== @@ -136,6 +163,8 @@ public static void Run() .Build(); var kernel10 = Kernel.Builder.WithRetryHandlerFactory(new RetryThreeTimesFactory()).Build(); + + return Task.CompletedTask; } // Example of a basic custom retry handler diff --git a/dotnet/samples/KernelSyntaxExamples/Example43_GetModelResult.cs b/dotnet/samples/KernelSyntaxExamples/Example43_GetModelResult.cs index 9d1573dbcab2..3942d7564c0c 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example43_GetModelResult.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example43_GetModelResult.cs @@ -19,7 +19,9 @@ public static async Task RunAsync() Console.WriteLine("======== Inline Function Definition + Result ========"); IKernel kernel = new KernelBuilder() - .WithOpenAITextCompletionService("text-davinci-003", Env.Var("OPENAI_API_KEY")) + .WithOpenAITextCompletionService( + modelId: TestConfiguration.OpenAI.ModelId, + apiKey: TestConfiguration.OpenAI.ApiKey) .Build(); // Function defined using few-shot design pattern @@ -50,14 +52,10 @@ Be creative and be funny. Let your imagination run wild. 
Console.WriteLine(textResult.ModelResults.LastOrDefault()?.GetOpenAITextResult()?.Usage.AsJson()); Console.WriteLine(); - // Using the Kernel RunAsync - textResult = await kernel.RunAsync("sorry I forgot your birthday", excuseFunction); - Console.WriteLine(textResult); - Console.WriteLine(textResult.ModelResults.LastOrDefault()?.GetOpenAITextResult()?.Usage.AsJson()); - Console.WriteLine(); - // Using Chat Completion directly - var chatCompletion = new OpenAIChatCompletion("gpt-3.5-turbo", Env.Var("OPENAI_API_KEY")); + var chatCompletion = new OpenAIChatCompletion( + modelId: TestConfiguration.OpenAI.ChatModelId, + apiKey: TestConfiguration.OpenAI.ApiKey); var prompt = FunctionDefinition.Replace("{{$input}}", $"Translate this date {DateTimeOffset.Now:f} to French format", StringComparison.InvariantCultureIgnoreCase); IReadOnlyList completionResults = await chatCompletion.GetCompletionsAsync(prompt, new CompleteRequestSettings() { MaxTokens = 100, Temperature = 0.4, TopP = 1 }); diff --git a/dotnet/samples/KernelSyntaxExamples/Example44_MultiChatCompletion.cs b/dotnet/samples/KernelSyntaxExamples/Example44_MultiChatCompletion.cs index 18b09aacf455..8494ebac5613 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example44_MultiChatCompletion.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example44_MultiChatCompletion.cs @@ -5,7 +5,6 @@ using System.Threading.Tasks; using Microsoft.SemanticKernel.AI.ChatCompletion; using Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletion; -using RepoUtils; /** * The following example shows how to use Semantic Kernel with Multiple Results Text Completion as streaming @@ -24,9 +23,9 @@ private static async Task AzureOpenAIMultiChatCompletionAsync() Console.WriteLine("======== Azure OpenAI - Multiple Chat Completion ========"); AzureChatCompletion azureChatCompletion = new( - Env.Var("AZURE_OPENAI_CHAT_DEPLOYMENT_NAME"), - Env.Var("AZURE_OPENAI_ENDPOINT"), - Env.Var("AZURE_OPENAI_KEY")); + 
TestConfiguration.AzureOpenAI.ChatDeploymentName, + TestConfiguration.AzureOpenAI.Endpoint, + TestConfiguration.AzureOpenAI.ApiKey); await RunChatAsync(azureChatCompletion); } @@ -35,7 +34,7 @@ private static async Task OpenAIMultiChatCompletionAsync() { Console.WriteLine("======== Open AI - Multiple Chat Completion ========"); - OpenAIChatCompletion openAIChatCompletion = new("gpt-3.5-turbo", Env.Var("OPENAI_API_KEY")); + OpenAIChatCompletion openAIChatCompletion = new(modelId: TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey); await RunChatAsync(openAIChatCompletion); } diff --git a/dotnet/samples/KernelSyntaxExamples/Example45_MultiStreamingChatCompletion.cs b/dotnet/samples/KernelSyntaxExamples/Example45_MultiStreamingChatCompletion.cs index 963dac04727c..58de8c812cfd 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example45_MultiStreamingChatCompletion.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example45_MultiStreamingChatCompletion.cs @@ -7,7 +7,6 @@ using System.Threading.Tasks; using Microsoft.SemanticKernel.AI.ChatCompletion; using Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletion; -using RepoUtils; /** * The following example shows how to use Semantic Kernel with Multiple Results Text Completion as streaming @@ -28,9 +27,9 @@ private static async Task AzureOpenAIMultiStreamingChatCompletionAsync() Console.WriteLine("======== Azure OpenAI - Multiple Chat Completion - Raw Streaming ========"); AzureChatCompletion azureChatCompletion = new( - Env.Var("AZURE_OPENAI_CHAT_DEPLOYMENT_NAME"), - Env.Var("AZURE_OPENAI_ENDPOINT"), - Env.Var("AZURE_OPENAI_KEY")); + TestConfiguration.AzureOpenAI.ChatDeploymentName, + TestConfiguration.AzureOpenAI.Endpoint, + TestConfiguration.AzureOpenAI.ApiKey); await StreamingChatCompletionAsync(azureChatCompletion); } @@ -39,7 +38,9 @@ private static async Task OpenAIMultiStreamingChatCompletionAsync() { Console.WriteLine("======== Open AI - Multiple Text Completion - Raw Streaming ========"); - 
OpenAIChatCompletion openAIChatCompletion = new("gpt-3.5-turbo", Env.Var("OPENAI_API_KEY")); + OpenAIChatCompletion openAIChatCompletion = new( + modelId: TestConfiguration.OpenAI.ChatModelId, + apiKey: TestConfiguration.OpenAI.ApiKey); await StreamingChatCompletionAsync(openAIChatCompletion); } diff --git a/dotnet/samples/KernelSyntaxExamples/Example46_Weaviate.cs b/dotnet/samples/KernelSyntaxExamples/Example46_Weaviate.cs index 0faa0c27d83a..3976c5d20327 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example46_Weaviate.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example46_Weaviate.cs @@ -14,13 +14,17 @@ public static class Example46_Weaviate public static async Task RunAsync() { - string endpoint = Env.Var("WEAVIATE_ENDPOINT"); - string apiKey = Env.Var("WEAVIATE_APIKEY"); - using WeaviateMemoryStore memoryStore = new(endpoint, apiKey, ConsoleLogger.Log); + string endpoint = TestConfiguration.Weaviate.Endpoint; + string apiKey = TestConfiguration.Weaviate.ApiKey; + WeaviateMemoryStore memoryStore = new(endpoint, apiKey, ConsoleLogger.Log); IKernel kernel = Kernel.Builder .WithLogger(ConsoleLogger.Log) - .WithOpenAITextCompletionService("text-davinci-003", Env.Var("OPENAI_API_KEY")) - .WithOpenAITextEmbeddingGenerationService("text-embedding-ada-002", Env.Var("OPENAI_API_KEY")) + .WithOpenAITextCompletionService( + modelId: TestConfiguration.OpenAI.ModelId, + apiKey: TestConfiguration.OpenAI.ApiKey) + .WithOpenAITextEmbeddingGenerationService( + modelId: TestConfiguration.OpenAI.EmbeddingModelId, + apiKey: TestConfiguration.OpenAI.ApiKey) .WithMemoryStorage(memoryStore) //.WithWeaviateMemoryStore(endpoint, apiKey) // This method offers an alternative approach to registering Weaviate memory store. 
.Build(); diff --git a/dotnet/samples/KernelSyntaxExamples/Example47_Redis.cs b/dotnet/samples/KernelSyntaxExamples/Example47_Redis.cs index 655280bfbd93..6cbc90c7e365 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example47_Redis.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example47_Redis.cs @@ -15,14 +15,18 @@ public static class Example47_Redis public static async Task RunAsync() { - string configuration = Env.Var("REDIS_CONFIGURATION"); + string configuration = TestConfiguration.Redis.Configuration; await using ConnectionMultiplexer connectionMultiplexer = await ConnectionMultiplexer.ConnectAsync(configuration); IDatabase database = connectionMultiplexer.GetDatabase(); RedisMemoryStore memoryStore = new(database, vectorSize: 1536); IKernel kernel = Kernel.Builder .WithLogger(ConsoleLogger.Log) - .WithOpenAITextCompletionService("text-davinci-003", Env.Var("OPENAI_API_KEY")) - .WithOpenAITextEmbeddingGenerationService("text-embedding-ada-002", Env.Var("OPENAI_API_KEY")) + .WithOpenAITextCompletionService( + modelId: TestConfiguration.OpenAI.ModelId, + apiKey: TestConfiguration.OpenAI.ApiKey) + .WithOpenAITextEmbeddingGenerationService( + modelId: TestConfiguration.OpenAI.EmbeddingModelId, + apiKey: TestConfiguration.OpenAI.ApiKey) .WithMemoryStorage(memoryStore) .Build(); diff --git a/dotnet/samples/KernelSyntaxExamples/Example48_GroundednessChecks.cs b/dotnet/samples/KernelSyntaxExamples/Example48_GroundednessChecks.cs index e9074ab6e608..f82f7fb8d385 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example48_GroundednessChecks.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example48_GroundednessChecks.cs @@ -61,9 +61,9 @@ public static async Task GroundednessCheckingSkill() var kernel = new KernelBuilder() .WithLogger(ConsoleLogger.Log) .WithAzureTextCompletionService( - Env.Var("AZURE_OPENAI_DEPLOYMENT_NAME"), - Env.Var("AZURE_OPENAI_ENDPOINT"), - Env.Var("AZURE_OPENAI_API_KEY")) + TestConfiguration.AzureOpenAI.DeploymentName, + 
TestConfiguration.AzureOpenAI.Endpoint, + TestConfiguration.AzureOpenAI.ApiKey) .Build(); string folder = RepoFiles.SampleSkillsPath(); @@ -126,9 +126,9 @@ which are not grounded in the original. var kernel = new KernelBuilder() .WithLogger(ConsoleLogger.Log) .WithAzureTextCompletionService( - Env.Var("AZURE_OPENAI_DEPLOYMENT_NAME"), - Env.Var("AZURE_OPENAI_ENDPOINT"), - Env.Var("AZURE_OPENAI_KEY")) + TestConfiguration.AzureOpenAI.DeploymentName, + TestConfiguration.AzureOpenAI.Endpoint, + TestConfiguration.AzureOpenAI.ApiKey) .Build(); string folder = RepoFiles.SampleSkillsPath(); @@ -138,10 +138,10 @@ which are not grounded in the original. kernel.ImportSkill(new TextSkill()); - var config = new SequentialPlannerConfig { }; - var planner = new SequentialPlanner(kernel, config); + var plannerConfig = new SequentialPlannerConfig { }; + var planner = new SequentialPlanner(kernel, plannerConfig); var plan = await planner.CreatePlanAsync(ask); - Console.WriteLine(plan.ToPlanString()); + Console.WriteLine(plan.ToPlanWithGoalString()); var results = await plan.InvokeAsync(s_groundingText); Console.WriteLine(results.Result); diff --git a/dotnet/samples/KernelSyntaxExamples/Example49_LogitBias.cs b/dotnet/samples/KernelSyntaxExamples/Example49_LogitBias.cs index b615b3c28653..80fff5937296 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example49_LogitBias.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example49_LogitBias.cs @@ -5,7 +5,6 @@ using System.Threading.Tasks; using Microsoft.SemanticKernel.AI.ChatCompletion; using Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletion; -using RepoUtils; /** * Logit_bias is an optional parameter that modifies the likelihood of specified tokens appearing in a Completion. 
@@ -16,7 +15,7 @@ public static class Example49_LogitBias { public static async Task RunAsync() { - OpenAIChatCompletion chatCompletion = new("gpt-3.5-turbo", Env.Var("OPENAI_API_KEY")); + OpenAIChatCompletion chatCompletion = new("gpt-3.5-turbo", TestConfiguration.OpenAI.ApiKey); // To use Logit Bias you will need to know the token ids of the words you want to use. // Getting the token ids using the GPT Tokenizer: https://platform.openai.com/tokenizer @@ -49,7 +48,7 @@ public static async Task RunAsync() chatHistory.AddAssistantMessage(reply); await MessageOutputAsync(chatHistory); - chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion?"); + chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion"); await MessageOutputAsync(chatHistory); reply = await chatCompletion.GenerateMessageAsync(chatHistory, settings); diff --git a/dotnet/samples/KernelSyntaxExamples/Example50_Chroma.cs b/dotnet/samples/KernelSyntaxExamples/Example50_Chroma.cs index 3b56d98a43f4..e22d196a6b73 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example50_Chroma.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example50_Chroma.cs @@ -14,14 +14,18 @@ public static class Example50_Chroma public static async Task RunAsync() { - string endpoint = Env.Var("CHROMA_ENDPOINT"); + string endpoint = TestConfiguration.Chroma.Endpoint; var memoryStore = new ChromaMemoryStore(endpoint); IKernel kernel = Kernel.Builder .WithLogger(ConsoleLogger.Log) - .WithOpenAITextCompletionService("text-davinci-003", Env.Var("OPENAI_API_KEY")) - .WithOpenAITextEmbeddingGenerationService("text-embedding-ada-002", Env.Var("OPENAI_API_KEY")) + .WithOpenAITextCompletionService( + modelId: TestConfiguration.OpenAI.ModelId, + apiKey: TestConfiguration.OpenAI.ApiKey) + .WithOpenAITextEmbeddingGenerationService( + modelId: TestConfiguration.OpenAI.EmbeddingModelId, + apiKey: 
TestConfiguration.OpenAI.ApiKey) .WithMemoryStorage(memoryStore) //.WithChromaMemoryStore(endpoint) // This method offers an alternative approach to registering Chroma memory store. .Build(); diff --git a/dotnet/samples/KernelSyntaxExamples/Example51_StepwisePlanner.cs b/dotnet/samples/KernelSyntaxExamples/Example51_StepwisePlanner.cs index 780e4f14ab27..5a6c452f00b0 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example51_StepwisePlanner.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example51_StepwisePlanner.cs @@ -36,23 +36,23 @@ public static async Task RunAsync() } } - public static async Task RunTextCompletion(string question) + private static async Task RunTextCompletion(string question) { Console.WriteLine("RunTextCompletion"); var kernel = GetKernel(); await RunWithQuestion(kernel, question); } - public static async Task RunChatCompletion(string question) + private static async Task RunChatCompletion(string question) { Console.WriteLine("RunChatCompletion"); var kernel = GetKernel(true); await RunWithQuestion(kernel, question); } - public static async Task RunWithQuestion(IKernel kernel, string question) + private static async Task RunWithQuestion(IKernel kernel, string question) { - using var bingConnector = new BingConnector(Env.Var("BING_API_KEY")); + var bingConnector = new BingConnector(TestConfiguration.Bing.ApiKey); var webSearchEngineSkill = new WebSearchEngineSkill(bingConnector); kernel.ImportSkill(webSearchEngineSkill, "WebSearch"); @@ -64,12 +64,12 @@ public static async Task RunWithQuestion(IKernel kernel, string question) Stopwatch sw = new(); Console.WriteLine("Question: " + question); - var config = new Microsoft.SemanticKernel.Planning.Stepwise.StepwisePlannerConfig(); - config.ExcludedFunctions.Add("TranslateMathProblem"); - config.MinIterationTimeMs = 1500; - config.MaxTokens = 4000; + var plannerConfig = new Microsoft.SemanticKernel.Planning.Stepwise.StepwisePlannerConfig(); + plannerConfig.ExcludedFunctions.Add("TranslateMathProblem"); 
+ plannerConfig.MinIterationTimeMs = 1500; + plannerConfig.MaxTokens = 4000; - StepwisePlanner planner = new(kernel, config); + StepwisePlanner planner = new(kernel, plannerConfig); sw.Start(); var plan = planner.CreatePlan(question); @@ -95,18 +95,18 @@ private static IKernel GetKernel(bool useChat = false) if (useChat) { builder.WithAzureChatCompletionService( - Env.Var("AZURE_OPENAI_CHAT_DEPLOYMENT_NAME"), - Env.Var("AZURE_OPENAI_ENDPOINT"), - Env.Var("AZURE_OPENAI_KEY"), + TestConfiguration.AzureOpenAI.ChatDeploymentName, + TestConfiguration.AzureOpenAI.Endpoint, + TestConfiguration.AzureOpenAI.ApiKey, alsoAsTextCompletion: true, setAsDefault: true); } else { builder.WithAzureTextCompletionService( - Env.Var("AZURE_OPENAI_DEPLOYMENT_NAME"), - Env.Var("AZURE_OPENAI_ENDPOINT"), - Env.Var("AZURE_OPENAI_KEY")); + TestConfiguration.AzureOpenAI.DeploymentName, + TestConfiguration.AzureOpenAI.Endpoint, + TestConfiguration.AzureOpenAI.ApiKey); } var kernel = builder diff --git a/dotnet/samples/KernelSyntaxExamples/Example52_ApimAuth.cs b/dotnet/samples/KernelSyntaxExamples/Example52_ApimAuth.cs new file mode 100644 index 000000000000..86b38031e36c --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Example52_ApimAuth.cs @@ -0,0 +1,100 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Azure.AI.OpenAI; +using Azure.Core; +using Azure.Core.Pipeline; +using Azure.Identity; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using RepoUtils; + +// ReSharper disable once InconsistentNaming +public static class Example52_ApimAuth +{ + public static async Task RunAsync() + { + // Azure API Management details + // For more information see 'Protect your Azure OpenAI API keys with Azure API Management' here: https://learn.microsoft.com/en-us/semantic-kernel/deploy/ + var apimUri = new Uri(Env.Var("Apim__Endpoint")); + var subscriptionKey = Env.Var("Apim__SubscriptionKey"); + + // Use interactive browser login + string[] scopes = new string[] { "https://cognitiveservices.azure.com/.default" }; + var credential = new InteractiveBrowserCredential(); + var requestContext = new TokenRequestContext(scopes); + var accessToken = await credential.GetTokenAsync(requestContext); + + // Create HttpClient and include subscription key as a default header + var httpClient = new HttpClient(); + httpClient.DefaultRequestHeaders.Add("Ocp-Apim-Subscription-Key", subscriptionKey); + + // Configure OpenAIClient to use + // - Custom HttpClient with subscription key header + // - Diagnostics to log error response headers from APIM to aid problem determination + // - Authentication using BearerTokenCredential retrieved via interactive browser login + var clientOptions = new OpenAIClientOptions() + { + Transport = new HttpClientTransport(httpClient), + Diagnostics = + { + LoggedHeaderNames = { "ErrorSource", "ErrorReason", "ErrorMessage", "ErrorScope", "ErrorSection", "ErrorStatusCode" }, + } + }; + var openAIClient = new OpenAIClient(apimUri, new BearerTokenCredential(accessToken), clientOptions); + + // Create logger factory with default level as warning + using ILoggerFactory loggerFactory = LoggerFactory.Create(builder => + { + builder + 
.SetMinimumLevel(LogLevel.Warning) + .AddConsole(); + }); + + // Example: how to use a custom OpenAIClient and configure Azure OpenAI + var kernel = Kernel.Builder + .WithLogger(loggerFactory.CreateLogger()) + .WithAzureTextCompletionService("text-davinci-003", openAIClient) + .Build(); + + // Load semantic skill defined with prompt templates + string folder = RepoFiles.SampleSkillsPath(); + + var funSkill = kernel.ImportSemanticSkillFromDirectory( + folder, + "FunSkill"); + + // Run + var result = await kernel.RunAsync( + "I have no homework", + funSkill["Excuses"] + ); + Console.WriteLine(result); + + httpClient.Dispose(); + } +} + +public class BearerTokenCredential : TokenCredential +{ + private readonly AccessToken _accessToken; + + // Constructor that takes a Bearer token string and its expiration date + public BearerTokenCredential(AccessToken accessToken) + { + this._accessToken = accessToken; + } + + public override AccessToken GetToken(TokenRequestContext requestContext, CancellationToken cancellationToken) + { + return this._accessToken; + } + + public override ValueTask GetTokenAsync(TokenRequestContext requestContext, CancellationToken cancellationToken) + { + return new ValueTask(this._accessToken); + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/KernelSyntaxExamples.csproj b/dotnet/samples/KernelSyntaxExamples/KernelSyntaxExamples.csproj index 0660b619b123..244cadfa2ea4 100644 --- a/dotnet/samples/KernelSyntaxExamples/KernelSyntaxExamples.csproj +++ b/dotnet/samples/KernelSyntaxExamples/KernelSyntaxExamples.csproj @@ -16,6 +16,9 @@ + + + @@ -37,6 +40,7 @@ + diff --git a/dotnet/samples/KernelSyntaxExamples/Program.cs b/dotnet/samples/KernelSyntaxExamples/Program.cs index cfba43232baf..4e2ea73ef816 100644 --- a/dotnet/samples/KernelSyntaxExamples/Program.cs +++ b/dotnet/samples/KernelSyntaxExamples/Program.cs @@ -1,164 +1,113 @@ // Copyright (c) Microsoft. All rights reserved. 
using System; +using System.Threading; using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Reliability; +using RepoUtils; public static class Program { // ReSharper disable once InconsistentNaming public static async Task Main() { - Example01_NativeFunctions.Run(); - Console.WriteLine("== DONE =="); - - await Example02_Pipeline.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example03_Variables.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example04_CombineLLMPromptsAndNativeCode.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example05_InlineFunctionDefinition.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example06_TemplateLanguage.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example07_BingAndGoogleSkills.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example08_RetryHandler.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example09_FunctionTypes.RunAsync(); - Console.WriteLine("== DONE =="); - - Example10_DescribeAllSkillsAndFunctions.Run(); - Console.WriteLine("== DONE =="); - - await Example11_WebSearchQueries.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example12_SequentialPlanner.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example13_ConversationSummarySkill.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example14_SemanticMemory.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example15_MemorySkill.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example16_CustomLLM.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example17_ChatGPT.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example18_DallE.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example19_Qdrant.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example20_HuggingFace.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example21_ChatGptPlugins.RunAsync(); - Console.WriteLine("== DONE =="); - - await 
Example22_OpenApiSkill_AzureKeyVault.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example23_OpenApiSkill_GitHub.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example24_OpenApiSkill_Jira.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example25_ReadOnlyMemoryStore.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example26_AADAuth.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example27_SemanticFunctionsUsingChatGPT.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example28_ActionPlanner.RunAsync(); - Console.WriteLine("== DONE =="); - - Example29_Tokenizer.Run(); - Console.WriteLine("== DONE =="); - - await Example30_ChatWithPrompts.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example31_CustomPlanner.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example32_StreamingCompletion.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example33_StreamingChat.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example34_CustomChatModel.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example35_GrpcSkills.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example36_MultiCompletion.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example37_MultiStreamingCompletion.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example38_Pinecone.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example39_Postgres.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example40_DIContainer.RunAsync(); - Console.WriteLine("== DONE =="); - - Example41_HttpClientUsage.Run(); - Console.WriteLine("== DONE =="); - - Example42_KernelBuilder.Run(); - Console.WriteLine("== DONE =="); - - await Example43_GetModelResult.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example44_MultiChatCompletion.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example45_MultiStreamingChatCompletion.RunAsync(); - Console.WriteLine("== DONE =="); - - await 
Example46_Weaviate.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example47_Redis.RunAsync(); - Console.WriteLine("== DONE =="); - - await Example48_GroundednessChecks.RunAsync(); - Console.WriteLine("== DONE =="); + // Load configuration from environment variables or user secrets. + LoadUserSecrets(); + + // Execution canceled if the user presses Ctrl+C. + using CancellationTokenSource cancellationTokenSource = new(); + CancellationToken cancelToken = cancellationTokenSource.ConsoleCancellationToken(); + + // Run examples + await Example01_NativeFunctions.RunAsync().SafeWaitAsync(cancelToken); + await Example02_Pipeline.RunAsync().SafeWaitAsync(cancelToken); + await Example03_Variables.RunAsync().SafeWaitAsync(cancelToken); + await Example04_CombineLLMPromptsAndNativeCode.RunAsync().SafeWaitAsync(cancelToken); + await Example05_InlineFunctionDefinition.RunAsync().SafeWaitAsync(cancelToken); + await Example06_TemplateLanguage.RunAsync().SafeWaitAsync(cancelToken); + await Example07_BingAndGoogleSkills.RunAsync().SafeWaitAsync(cancelToken); + await Example08_RetryHandler.RunAsync().SafeWaitAsync(cancelToken); + await Example09_FunctionTypes.RunAsync().SafeWaitAsync(cancelToken); + await Example10_DescribeAllSkillsAndFunctions.RunAsync().SafeWaitAsync(cancelToken); + await Example11_WebSearchQueries.RunAsync().SafeWaitAsync(cancelToken); + await Example12_SequentialPlanner.RunAsync().SafeWaitAsync(cancelToken); + await Example13_ConversationSummarySkill.RunAsync().SafeWaitAsync(cancelToken); + await Example14_SemanticMemory.RunAsync().SafeWaitAsync(cancelToken); + await Example15_MemorySkill.RunAsync().SafeWaitAsync(cancelToken); + await Example16_CustomLLM.RunAsync().SafeWaitAsync(cancelToken); + await Example17_ChatGPT.RunAsync().SafeWaitAsync(cancelToken); + await Example18_DallE.RunAsync().SafeWaitAsync(cancelToken); + await Example19_Qdrant.RunAsync().SafeWaitAsync(cancelToken); + await Example20_HuggingFace.RunAsync().SafeWaitAsync(cancelToken); + 
await Example21_ChatGptPlugins.RunAsync().SafeWaitAsync(cancelToken); + await Example22_OpenApiSkill_AzureKeyVault.RunAsync().SafeWaitAsync(cancelToken); + await Example23_OpenApiSkill_GitHub.RunAsync().SafeWaitAsync(cancelToken); + await Example24_OpenApiSkill_Jira.RunAsync().SafeWaitAsync(cancelToken); + await Example25_ReadOnlyMemoryStore.RunAsync().SafeWaitAsync(cancelToken); + await Example26_AADAuth.RunAsync().SafeWaitAsync(cancelToken); + await Example27_SemanticFunctionsUsingChatGPT.RunAsync().SafeWaitAsync(cancelToken); + await Example28_ActionPlanner.RunAsync().SafeWaitAsync(cancelToken); + await Example29_Tokenizer.RunAsync().SafeWaitAsync(cancelToken); + await Example30_ChatWithPrompts.RunAsync().SafeWaitAsync(cancelToken); + await Example31_CustomPlanner.RunAsync().SafeWaitAsync(cancelToken); + await Example32_StreamingCompletion.RunAsync().SafeWaitAsync(cancelToken); + await Example33_StreamingChat.RunAsync().SafeWaitAsync(cancelToken); + await Example34_CustomChatModel.RunAsync().SafeWaitAsync(cancelToken); + await Example35_GrpcSkills.RunAsync().SafeWaitAsync(cancelToken); + await Example36_MultiCompletion.RunAsync().SafeWaitAsync(cancelToken); + await Example37_MultiStreamingCompletion.RunAsync().SafeWaitAsync(cancelToken); + await Example38_Pinecone.RunAsync().SafeWaitAsync(cancelToken); + await Example39_Postgres.RunAsync().SafeWaitAsync(cancelToken); + await Example40_DIContainer.RunAsync().SafeWaitAsync(cancelToken); + await Example41_HttpClientUsage.RunAsync().SafeWaitAsync(cancelToken); + await Example42_KernelBuilder.RunAsync().SafeWaitAsync(cancelToken); + await Example43_GetModelResult.RunAsync().SafeWaitAsync(cancelToken); + await Example44_MultiChatCompletion.RunAsync().SafeWaitAsync(cancelToken); + await Example45_MultiStreamingChatCompletion.RunAsync().SafeWaitAsync(cancelToken); + await Example46_Weaviate.RunAsync().SafeWaitAsync(cancelToken); + await Example47_Redis.RunAsync().SafeWaitAsync(cancelToken); + await 
Example48_GroundednessChecks.RunAsync().SafeWaitAsync(cancelToken); + await Example49_LogitBias.RunAsync().SafeWaitAsync(cancelToken); + await Example50_Chroma.RunAsync().SafeWaitAsync(cancelToken); + await Example51_StepwisePlanner.RunAsync().SafeWaitAsync(cancelToken); + await Example52_ApimAuth.RunAsync().SafeWaitAsync(cancelToken); + } - await Example49_LogitBias.RunAsync(); - Console.WriteLine("== DONE =="); + private static void LoadUserSecrets() + { + IConfigurationRoot configRoot = new ConfigurationBuilder() + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + TestConfiguration.Initialize(configRoot); + } - await Example50_Chroma.RunAsync(); - Console.WriteLine("== DONE =="); + private static CancellationToken ConsoleCancellationToken(this CancellationTokenSource tokenSource) + { + Console.CancelKeyPress += (s, e) => + { + Console.WriteLine("Canceling..."); + tokenSource.Cancel(); + e.Cancel = true; + }; + + return tokenSource.Token; + } - await Example51_StepwisePlanner.RunAsync(); - Console.WriteLine("== DONE =="); + private static async Task SafeWaitAsync(this Task task, + CancellationToken cancellationToken = default) + { + try + { + await task.WaitAsync(cancellationToken); + Console.WriteLine("== DONE =="); + } + catch (ConfigurationNotFoundException ex) + { + Console.WriteLine($"{ex.Message}. Skipping example."); + } + + cancellationToken.ThrowIfCancellationRequested(); } } diff --git a/dotnet/samples/KernelSyntaxExamples/README.md b/dotnet/samples/KernelSyntaxExamples/README.md index 723f282eb78c..81562e86e582 100644 --- a/dotnet/samples/KernelSyntaxExamples/README.md +++ b/dotnet/samples/KernelSyntaxExamples/README.md @@ -12,52 +12,131 @@ to avoid the risk of leaking secrets into the repository, branches and pull requ You can also use environment variables if you prefer. To set your secrets with Secret Manager: - ``` cd dotnet/samples/KernelSyntaxExamples -dotnet user-secrets set "BING_API_KEY" "..." 
-dotnet user-secrets set "OPENAI_API_KEY" "..." -dotnet user-secrets set "AZURE_OPENAI_SERVICE_ID" "..." -dotnet user-secrets set "AZURE_OPENAI_DEPLOYMENT_NAME" "..." -dotnet user-secrets set "AZURE_OPENAI_CHAT_DEPLOYMENT_NAME" "..." -dotnet user-secrets set "AZURE_OPENAI_ENDPOINT" "https://... .openai.azure.com/" -dotnet user-secrets set "AZURE_OPENAI_KEY" "..." -dotnet user-secrets set "AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME" "..." -dotnet user-secrets set "AZURE_OPENAI_EMBEDDINGS_ENDPOINT" "https://... .openai.azure.com/" -dotnet user-secrets set "AZURE_OPENAI_EMBEDDINGS_KEY" "..." -dotnet user-secrets set "ACS_ENDPOINT" "https://... .search.windows.net" -dotnet user-secrets set "ACS_API_KEY" "..." -dotnet user-secrets set "QDRANT_ENDPOINT" "..." -dotnet user-secrets set "QDRANT_PORT" "..." -dotnet user-secrets set "WEAVIATE_SCHEME" "..." -dotnet user-secrets set "WEAVIATE_ENDPOINT" "..." -dotnet user-secrets set "WEAVIATE_PORT" "..." -dotnet user-secrets set "WEAVIATE_APIKEY" "..." -dotnet user-secrets set "GITHUB_PERSONAL_ACCESS_TOKEN" "github_pat_..." -dotnet user-secrets set "POSTGRES_CONNECTIONSTRING" "..." -dotnet user-secrets set "REDIS_CONFIGURATION" "..." +dotnet user-secrets init + +dotnet user-secrets set "OpenAI:ModelId" "..." +dotnet user-secrets set "OpenAI:ChatModelId" "..." +dotnet user-secrets set "OpenAI:EmbeddingModelId" "..." +dotnet user-secrets set "OpenAI:ApiKey" "..." + +dotnet user-secrets set "AzureOpenAI:ServiceId" "..." +dotnet user-secrets set "AzureOpenAI:DeploymentName" "..." +dotnet user-secrets set "AzureOpenAI:ChatDeploymentName" "..." +dotnet user-secrets set "AzureOpenAI:Endpoint" "https://... .openai.azure.com/" +dotnet user-secrets set "AzureOpenAI:ApiKey" "..." + +dotnet user-secrets set "AzureOpenAIEmbeddings:DeploymentName" "..." +dotnet user-secrets set "AzureOpenAIEmbeddings:Endpoint" "https://... .openai.azure.com/" +dotnet user-secrets set "AzureOpenAIEmbeddings:ApiKey" "..." 
+ +dotnet user-secrets set "ACS:Endpoint" "https://... .search.windows.net" +dotnet user-secrets set "ACS:ApiKey" "..." + +dotnet user-secrets set "Qdrant:Endpoint" "..." +dotnet user-secrets set "Qdrant:Port" "..." + +dotnet user-secrets set "Weaviate:Scheme" "..." +dotnet user-secrets set "Weaviate:Endpoint" "..." +dotnet user-secrets set "Weaviate:Port" "..." +dotnet user-secrets set "Weaviate:ApiKey" "..." + +dotnet user-secrets set "KeyVault:Endpoint" "..." +dotnet user-secrets set "KeyVault:ClientId" "..." +dotnet user-secrets set "KeyVault:TenantId" "..." + +dotnet user-secrets set "HuggingFace:ApiKey" "..." +dotnet user-secrets set "HuggingFace:ModelId" "..." + +dotnet user-secrets set "Pinecone:ApiKey" "..." +dotnet user-secrets set "Pinecone:Environment" "..." + +dotnet user-secrets set "Jira:ApiKey" "..." +dotnet user-secrets set "Jira:Email" "..." +dotnet user-secrets set "Jira:Domain" "..." + +dotnet user-secrets set "Bing:ApiKey" "..." + +dotnet user-secrets set "Google:ApiKey" "..." +dotnet user-secrets set "Google:SearchEngineId" "..." + +dotnet user-secrets set "Github:PAT" "github_pat_..." + +dotnet user-secrets set "Apim:Endpoint" "https://apim...azure-api.net/" +dotnet user-secrets set "Apim:SubscriptionKey" "..." + +dotnet user-secrets set "Postgres:ConnectionString" "..." +dotnet user-secrets set "Redis:Configuration" "..." 
``` To set your secrets with environment variables, use these names: +``` +# OpenAI +OpenAI__ModelId +OpenAI__ChatModelId +OpenAI__EmbeddingModelId +OpenAI__ApiKey + +# Azure OpenAI +AzureOpenAI__ServiceId +AzureOpenAI__DeploymentName +AzureOpenAI__ChatDeploymentName +AzureOpenAI__Endpoint +AzureOpenAI__ApiKey + +AzureOpenAIEmbeddings__DeploymentName +AzureOpenAIEmbeddings__Endpoint +AzureOpenAIEmbeddings__ApiKey + +# Azure Cognitive Search +ACS__Endpoint +ACS__ApiKey + +# Qdrant +Qdrant__Endpoint +Qdrant__Port + +# Weaviate +Weaviate__Scheme +Weaviate__Endpoint +Weaviate__Port +Weaviate__ApiKey + +# Azure Key Vault +KeyVault__Endpoint +KeyVault__ClientId +KeyVault__TenantId + +# Hugging Face +HuggingFace__ApiKey +HuggingFace__ModelId -* BING_API_KEY -* OPENAI_API_KEY -* AZURE_OPENAI_SERVICE_ID -* AZURE_OPENAI_DEPLOYMENT_NAME -* AZURE_OPENAI_ENDPOINT -* AZURE_OPENAI_KEY -* ACS_ENDPOINT -* ACS_API_KEY -* QDRANT_ENDPOINT -* QDRANT_PORT -* WEAVIATE_SCHEME -* WEAVIATE_ENDPOINT -* WEAVIATE_PORT -* WEAVIATE_APIKEY -* GITHUB_PERSONAL_ACCESS_TOKEN -* POSTGRES_CONNECTIONSTRING -* REDIS_CONFIGURATION -* AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME -* AZURE_OPENAI_EMBEDDINGS_ENDPOINT -* AZURE_OPENAI_EMBEDDINGS_KEY +# Pinecone +Pinecone__ApiKey +Pinecone__Environment + +# Jira +Jira__ApiKey +Jira__Email +Jira__Domain + +# Bing +Bing__ApiKey + +# Google +Google__ApiKey +Google__SearchEngineId + +# Github +Github__PAT + +# Azure API Management (APIM) +Apim__Endpoint +Apim__SubscriptionKey + +# Other +Postgres__ConnectionString +Redis__Configuration +``` diff --git a/dotnet/samples/KernelSyntaxExamples/Reliability/ConfigurationNotFoundException.cs b/dotnet/samples/KernelSyntaxExamples/Reliability/ConfigurationNotFoundException.cs new file mode 100644 index 000000000000..2ce154ea7668 --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Reliability/ConfigurationNotFoundException.cs @@ -0,0 +1,31 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; + +namespace Reliability; +public sealed class ConfigurationNotFoundException : Exception +{ + public string? Section { get; } + public string? Key { get; } + + public ConfigurationNotFoundException(string section, string key) + : base($"Configuration key '{section}:{key}' not found") + { + this.Section = section; + this.Key = key; + } + + public ConfigurationNotFoundException(string section) + : base($"Configuration section '{section}' not found") + { + this.Section = section; + } + + public ConfigurationNotFoundException() : base() + { + } + + public ConfigurationNotFoundException(string? message, Exception? innerException) : base(message, innerException) + { + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/RepoUtils/PlanExtensions.cs b/dotnet/samples/KernelSyntaxExamples/RepoUtils/PlanExtensions.cs index eb92d176a534..792faf150ebb 100644 --- a/dotnet/samples/KernelSyntaxExamples/RepoUtils/PlanExtensions.cs +++ b/dotnet/samples/KernelSyntaxExamples/RepoUtils/PlanExtensions.cs @@ -1,41 +1,15 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Linq; using Microsoft.SemanticKernel.Planning; namespace RepoUtils; internal static class PlanExtensions { - internal static string ToPlanString(this Plan originalPlan, string indent = " ") + internal static string ToPlanWithGoalString(this Plan plan, string indent = " ") { - string goalHeader = $"{indent}Goal: {originalPlan.Description}\n\n{indent}Steps:\n"; + string goalHeader = $"{indent}Goal: {plan.Description}\n\n{indent}Steps:\n"; - string stepItems = string.Join("\n", originalPlan.Steps.Select(step => - { - if (step.Steps.Count == 0) - { - string skillName = step.SkillName; - string stepName = step.Name; - - string parameters = string.Join(" ", step.Parameters.Select(param => $"{param.Key}='{param.Value}'")); - if (!string.IsNullOrEmpty(parameters)) - { - parameters = $" {parameters}"; - } - - string? 
outputs = step.Outputs.FirstOrDefault(); - if (!string.IsNullOrEmpty(outputs)) - { - outputs = $" => {outputs}"; - } - - return $"{indent}{indent}- {string.Join(".", skillName, stepName)}{parameters}{outputs}"; - } - - return step.ToPlanString(indent + indent); - })); - - return goalHeader + stepItems; + return goalHeader + plan.ToPlanString(); } } diff --git a/dotnet/samples/KernelSyntaxExamples/Skills/EmailSkill.cs b/dotnet/samples/KernelSyntaxExamples/Skills/EmailSkill.cs index c340aa445624..0a029cdae571 100644 --- a/dotnet/samples/KernelSyntaxExamples/Skills/EmailSkill.cs +++ b/dotnet/samples/KernelSyntaxExamples/Skills/EmailSkill.cs @@ -20,7 +20,9 @@ public string GetEmailAddress( [Description("The name of the person whose email address needs to be found.")] string input, ILogger? logger = null) { - logger?.LogDebug("Returning hard coded email for {0}", input); + // Sensitive data, logging as trace, disabled by default + logger?.LogTrace("Returning hard coded email for {0}", input); + return "johndoe1234@example.com"; } } diff --git a/dotnet/samples/KernelSyntaxExamples/TestConfiguration.cs b/dotnet/samples/KernelSyntaxExamples/TestConfiguration.cs new file mode 100644 index 000000000000..1c2ff6f60078 --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/TestConfiguration.cs @@ -0,0 +1,158 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Runtime.CompilerServices; +using Microsoft.Extensions.Configuration; +using Reliability; + +public sealed class TestConfiguration +{ + private IConfigurationRoot _configRoot; + private static TestConfiguration? 
s_instance; + + private TestConfiguration(IConfigurationRoot configRoot) + { + this._configRoot = configRoot; + } + + public static void Initialize(IConfigurationRoot configRoot) + { + s_instance = new TestConfiguration(configRoot); + } + + public static OpenAIConfig OpenAI => LoadSection(); + public static AzureOpenAIConfig AzureOpenAI => LoadSection(); + public static AzureOpenAIEmbeddingsConfig AzureOpenAIEmbeddings => LoadSection(); + public static ACSConfig ACS => LoadSection(); + public static QdrantConfig Qdrant => LoadSection(); + public static WeaviateConfig Weaviate => LoadSection(); + public static KeyVaultConfig KeyVault => LoadSection(); + public static HuggingFaceConfig HuggingFace => LoadSection(); + public static PineconeConfig Pinecone => LoadSection(); + public static BingConfig Bing => LoadSection(); + public static GoogleConfig Google => LoadSection(); + public static GithubConfig Github => LoadSection(); + public static PostgresConfig Postgres => LoadSection(); + public static RedisConfig Redis => LoadSection(); + public static JiraConfig Jira => LoadSection(); + public static ChromaConfig Chroma => LoadSection(); + + private static T LoadSection([CallerMemberName] string? caller = null) + { + if (s_instance == null) + { + throw new InvalidOperationException( + "TestConfiguration must be initialized with a call to Initialize(IConfigurationRoot) before accessing configuration values."); + } + + if (string.IsNullOrEmpty(caller)) + { + throw new ArgumentNullException(nameof(caller)); + } + return s_instance._configRoot.GetSection(caller).Get() ?? + throw new ConfigurationNotFoundException(section: caller); + } + +#pragma warning disable CS8618 // Non-nullable field must contain a non-null value when exiting constructor. 
+ public class OpenAIConfig + { + public string ModelId { get; set; } + public string ChatModelId { get; set; } + public string EmbeddingModelId { get; set; } + public string ApiKey { get; set; } + } + + public class AzureOpenAIConfig + { + public string ServiceId { get; set; } + public string DeploymentName { get; set; } + public string ChatDeploymentName { get; set; } + public string Endpoint { get; set; } + public string ApiKey { get; set; } + } + + public class AzureOpenAIEmbeddingsConfig + { + public string DeploymentName { get; set; } + public string Endpoint { get; set; } + public string ApiKey { get; set; } + } + + public class ACSConfig + { + public string Endpoint { get; set; } + public string ApiKey { get; set; } + } + + public class QdrantConfig + { + public string Endpoint { get; set; } + public string Port { get; set; } + } + + public class WeaviateConfig + { + public string Scheme { get; set; } + public string Endpoint { get; set; } + public string Port { get; set; } + public string ApiKey { get; set; } + } + + public class KeyVaultConfig + { + public string Endpoint { get; set; } + public string ClientId { get; set; } + public string TenantId { get; set; } + } + + public class HuggingFaceConfig + { + public string ApiKey { get; set; } + public string ModelId { get; set; } + } + + public class PineconeConfig + { + public string ApiKey { get; set; } + public string Environment { get; set; } + } + + public class BingConfig + { + public string ApiKey { get; set; } + } + + public class GoogleConfig + { + public string ApiKey { get; set; } + public string SearchEngineId { get; set; } + } + + public class GithubConfig + { + public string PAT { get; set; } + } + + public class PostgresConfig + { + public string ConnectionString { get; set; } + } + + public class RedisConfig + { + public string Configuration { get; set; } + } + + public class JiraConfig + { + public string ApiKey { get; set; } + public string Email { get; set; } + public string Domain { get; 
set; } + } + + public class ChromaConfig + { + public string Endpoint { get; set; } + } +#pragma warning restore CS8618 // Non-nullable field must contain a non-null value when exiting constructor. +} diff --git a/dotnet/src/Connectors/Connectors.AI.HuggingFace/TextCompletion/HuggingFaceTextCompletion.cs b/dotnet/src/Connectors/Connectors.AI.HuggingFace/TextCompletion/HuggingFaceTextCompletion.cs index 103d2200e058..fbfe4c330207 100644 --- a/dotnet/src/Connectors/Connectors.AI.HuggingFace/TextCompletion/HuggingFaceTextCompletion.cs +++ b/dotnet/src/Connectors/Connectors.AI.HuggingFace/TextCompletion/HuggingFaceTextCompletion.cs @@ -16,7 +16,9 @@ namespace Microsoft.SemanticKernel.Connectors.AI.HuggingFace.TextCompletion; /// /// HuggingFace text completion service. /// -public sealed class HuggingFaceTextCompletion : ITextCompletion, IDisposable +#pragma warning disable CA1001 // Types that own disposable fields should be disposable. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. +public sealed class HuggingFaceTextCompletion : ITextCompletion +#pragma warning restore CA1001 // Types that own disposable fields should be disposable. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. { private const string HttpUserAgent = "Microsoft-Semantic-Kernel"; private const string HuggingFaceApiEndpoint = "https://api-inference.huggingface.co/models"; @@ -24,27 +26,8 @@ public sealed class HuggingFaceTextCompletion : ITextCompletion, IDisposable private readonly string _model; private readonly string? _endpoint; private readonly HttpClient _httpClient; - private readonly bool _disposeHttpClient = true; private readonly string? 
_apiKey; - /// - /// Initializes a new instance of the class. - /// - /// Endpoint for service API call. - /// Model to use for service API call. - /// Instance of to setup specific scenarios. - [Obsolete("This constructor is deprecated and will be removed in one of the next SK SDK versions. Please use one of the alternative constructors.")] - public HuggingFaceTextCompletion(Uri endpoint, string model, HttpClientHandler httpClientHandler) - { - Verify.NotNull(endpoint); - Verify.NotNullOrWhiteSpace(model); - - this._endpoint = endpoint.AbsoluteUri; - this._model = model; - - this._httpClient = new(httpClientHandler); - } - /// /// Initializes a new instance of the class. /// Using default implementation. @@ -60,39 +43,6 @@ public HuggingFaceTextCompletion(Uri endpoint, string model) this._model = model; this._httpClient = new HttpClient(NonDisposableHttpClientHandler.Instance, disposeHandler: false); - this._disposeHttpClient = false; // Disposal is unnecessary as a non-disposable handler is used. - } - - /// - /// Initializes a new instance of the class. - /// Using HuggingFace API for service call, see https://huggingface.co/docs/api-inference/index. - /// - /// HuggingFace API key, see https://huggingface.co/docs/api-inference/quicktour#running-inference-with-api-requests. - /// Model to use for service API call. - /// Instance of to setup specific scenarios. - /// Endpoint for service API call. - [Obsolete("This constructor is deprecated and will be removed in one of the next SK SDK versions. Please use one of the alternative constructors.")] - public HuggingFaceTextCompletion(string apiKey, string model, HttpClientHandler httpClientHandler, string endpoint = HuggingFaceApiEndpoint) - : this(new Uri(endpoint), model, httpClientHandler) - { - Verify.NotNullOrWhiteSpace(apiKey); - this._apiKey = apiKey; - } - - /// - /// Initializes a new instance of the class. - /// Using HuggingFace API for service call, see https://huggingface.co/docs/api-inference/index. 
- /// Using default implementation. - /// - /// HuggingFace API key, see https://huggingface.co/docs/api-inference/quicktour#running-inference-with-api-requests. - /// Model to use for service API call. - /// Endpoint for service API call. - [Obsolete("This constructor is deprecated and will be removed in one of the next SK SDK versions. Please use one of the alternative constructors.")] - public HuggingFaceTextCompletion(string apiKey, string model, string endpoint = HuggingFaceApiEndpoint) - : this(new Uri(endpoint), model) - { - Verify.NotNullOrWhiteSpace(apiKey); - this._apiKey = apiKey; } /// @@ -112,7 +62,6 @@ public HuggingFaceTextCompletion(string model, string? apiKey = null, HttpClient this._apiKey = apiKey; this._httpClient = httpClient ?? new HttpClient(NonDisposableHttpClientHandler.Instance, disposeHandler: false); this._endpoint = endpoint; - this._disposeHttpClient = false; // Disposal is unnecessary as we either use a non-disposable handler or utilize a custom HTTP client that we should not dispose. 
} /// @@ -136,16 +85,6 @@ public async Task> GetCompletionsAsync( return await this.ExecuteGetCompletionsAsync(text, cancellationToken).ConfigureAwait(false); } - /// - [Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions.")] - public void Dispose() - { - if (this._disposeHttpClient) - { - this._httpClient.Dispose(); - } - } - #region private ================================================================================ private async Task> ExecuteGetCompletionsAsync(string text, CancellationToken cancellationToken = default) diff --git a/dotnet/src/Connectors/Connectors.AI.HuggingFace/TextEmbedding/HuggingFaceTextEmbeddingGeneration.cs b/dotnet/src/Connectors/Connectors.AI.HuggingFace/TextEmbedding/HuggingFaceTextEmbeddingGeneration.cs index 95ec3e914216..922d2d1802a3 100644 --- a/dotnet/src/Connectors/Connectors.AI.HuggingFace/TextEmbedding/HuggingFaceTextEmbeddingGeneration.cs +++ b/dotnet/src/Connectors/Connectors.AI.HuggingFace/TextEmbedding/HuggingFaceTextEmbeddingGeneration.cs @@ -16,32 +16,15 @@ namespace Microsoft.SemanticKernel.Connectors.AI.HuggingFace.TextEmbedding; /// /// HuggingFace embedding generation service. /// -public sealed class HuggingFaceTextEmbeddingGeneration : ITextEmbeddingGeneration, IDisposable +#pragma warning disable CA1001 // Types that own disposable fields should be disposable. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. +public sealed class HuggingFaceTextEmbeddingGeneration : ITextEmbeddingGeneration +#pragma warning restore CA1001 // Types that own disposable fields should be disposable. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. 
{ private const string HttpUserAgent = "Microsoft-Semantic-Kernel"; private readonly string _model; private readonly string? _endpoint; private readonly HttpClient _httpClient; - private readonly bool _disposeHttpClient = true; - - /// - /// Initializes a new instance of the class. - /// - /// Endpoint for service API call. - /// Model to use for service API call. - /// Instance of to setup specific scenarios. - [Obsolete("This constructor is deprecated and will be removed in one of the next SK SDK versions. Please use one of the alternative constructors.")] - public HuggingFaceTextEmbeddingGeneration(Uri endpoint, string model, HttpClientHandler httpClientHandler) - { - Verify.NotNull(endpoint); - Verify.NotNullOrWhiteSpace(model); - - this._endpoint = endpoint.AbsoluteUri; - this._model = model; - - this._httpClient = new(httpClientHandler); - } /// /// Initializes a new instance of the class. @@ -58,7 +41,6 @@ public HuggingFaceTextEmbeddingGeneration(Uri endpoint, string model) this._model = model; this._httpClient = new HttpClient(NonDisposableHttpClientHandler.Instance, disposeHandler: false); - this._disposeHttpClient = false; // Disposal is unnecessary as we either use a non-disposable handler or utilize a custom HTTP client that we should not dispose. } /// @@ -75,7 +57,6 @@ public HuggingFaceTextEmbeddingGeneration(string model, string endpoint) this._endpoint = endpoint; this._httpClient = new HttpClient(NonDisposableHttpClientHandler.Instance, disposeHandler: false); - this._disposeHttpClient = false; // Disposal is unnecessary as we either use a non-disposable handler or utilize a custom HTTP client that we should not dispose. } /// @@ -99,8 +80,6 @@ public HuggingFaceTextEmbeddingGeneration(string model, HttpClient httpClient, s AIException.ErrorCodes.InvalidConfiguration, "The HttpClient BaseAddress and endpoint are both null or empty. 
Please ensure at least one is provided."); } - - this._disposeHttpClient = false; // We should not dispose custom HTTP clients. } /// @@ -109,16 +88,6 @@ public async Task>> GenerateEmbeddingsAsync(IList return await this.ExecuteEmbeddingRequestAsync(data, cancellationToken).ConfigureAwait(false); } - /// - [Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions.")] - public void Dispose() - { - if (this._disposeHttpClient) - { - this._httpClient.Dispose(); - } - } - #region private ================================================================================ /// diff --git a/dotnet/src/Connectors/Connectors.AI.Oobabooga/Connectors.AI.Oobabooga.csproj b/dotnet/src/Connectors/Connectors.AI.Oobabooga/Connectors.AI.Oobabooga.csproj new file mode 100644 index 000000000000..6daa5aaab4c1 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AI.Oobabooga/Connectors.AI.Oobabooga.csproj @@ -0,0 +1,28 @@ + + + + + Microsoft.SemanticKernel.Connectors.AI.Oobabooga + $(AssemblyName) + netstandard2.0 + + + + + + + + + Semantic Kernel - Oobabooga Connector + Semantic Kernel connector for the oobabooga text-generation-webui open source project. Contains a client for text completion. + + + + + + + + + + + diff --git a/dotnet/src/Connectors/Connectors.AI.Oobabooga/TextCompletion/OobaboogaInvalidResponseException.cs b/dotnet/src/Connectors/Connectors.AI.Oobabooga/TextCompletion/OobaboogaInvalidResponseException.cs new file mode 100644 index 000000000000..a2e8e51d2a57 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AI.Oobabooga/TextCompletion/OobaboogaInvalidResponseException.cs @@ -0,0 +1,16 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel.AI; + +namespace Microsoft.SemanticKernel.Connectors.AI.Oobabooga.TextCompletion; + +#pragma warning disable RCS1194 // Implement exception constructors. +internal sealed class OobaboogaInvalidResponseException : AIException +{ + public T? 
ResponseData { get; } + + public OobaboogaInvalidResponseException(T? responseData, string? message = null) : base(ErrorCodes.InvalidResponseContent, message) + { + this.ResponseData = responseData; + } +} diff --git a/dotnet/src/Connectors/Connectors.AI.Oobabooga/TextCompletion/OobaboogaTextCompletion.cs b/dotnet/src/Connectors/Connectors.AI.Oobabooga/TextCompletion/OobaboogaTextCompletion.cs new file mode 100644 index 000000000000..e8d41d7b9411 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AI.Oobabooga/TextCompletion/OobaboogaTextCompletion.cs @@ -0,0 +1,475 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Net.Http; +using System.Net.WebSockets; +using System.Runtime.CompilerServices; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.AI; +using Microsoft.SemanticKernel.AI.TextCompletion; +using Microsoft.SemanticKernel.Diagnostics; + +namespace Microsoft.SemanticKernel.Connectors.AI.Oobabooga.TextCompletion; + +/// +/// Oobabooga text completion service API. +/// Adapted from +/// +public sealed class OobaboogaTextCompletion : ITextCompletion +{ + public const string HttpUserAgent = "Microsoft-Semantic-Kernel"; + public const string BlockingUriPath = "/api/v1/generate"; + private const string StreamingUriPath = "/api/v1/stream"; + + private readonly UriBuilder _blockingUri; + private readonly UriBuilder _streamingUri; + private readonly HttpClient _httpClient; + private readonly Func _webSocketFactory; + private readonly bool _useWebSocketsPooling; + private readonly int _maxNbConcurrentWebSockets; + private readonly SemaphoreSlim? _concurrentSemaphore; + private readonly ConcurrentBag? 
_activeConnections; + private readonly ConcurrentBag _webSocketPool = new(); + private readonly int _keepAliveWebSocketsDuration; + private readonly ILogger? _logger; + private long _lastCallTicks = long.MaxValue; + + /// + /// Controls the size of the buffer used to received websocket packets + /// + public int WebSocketBufferSize { get; set; } = 2048; + + /// + /// Initializes a new instance of the class. + /// + /// The service API endpoint to which requests should be sent. + /// The port used for handling blocking requests. Default value is 5000 + /// The port used for handling streaming requests. Default value is 5005 + /// You can optionally set a hard limit on the max number of concurrent calls to the either of the completion methods by providing a . Calls in excess will wait for existing consumers to release the semaphore + /// Optional. The HTTP client used for making blocking API requests. If not specified, a default client will be used. + /// If true, websocket clients will be recycled in a reusable pool as long as concurrent calls are detected + /// if websocket pooling is enabled, you can provide an optional CancellationToken to properly dispose of the clean up tasks when disposing of the connector + /// When pooling is enabled, pooled websockets are flushed on a regular basis when no more connections are made. This is the time to keep them in pool before flushing + /// The WebSocket factory used for making streaming API requests. Note that only when pooling is enabled will websocket be recycled and reused for the specified duration. Otherwise, a new websocket is created for each call and closed and disposed afterwards, to prevent data corruption from concurrent calls. + /// Application logger + public OobaboogaTextCompletion(Uri endpoint, + int blockingPort = 5000, + int streamingPort = 5005, + SemaphoreSlim? concurrentSemaphore = null, + HttpClient? httpClient = null, + bool useWebSocketsPooling = true, + CancellationToken? 
webSocketsCleanUpCancellationToken = default, + int keepAliveWebSocketsDuration = 100, + Func? webSocketFactory = null, + ILogger? logger = null) + { + Verify.NotNull(endpoint); + this._blockingUri = new UriBuilder(endpoint) + { + Port = blockingPort, + Path = BlockingUriPath + }; + this._streamingUri = new(endpoint) + { + Port = streamingPort, + Path = StreamingUriPath + }; + if (this._streamingUri.Uri.Scheme.StartsWith("http", StringComparison.OrdinalIgnoreCase)) + { + this._streamingUri.Scheme = (this._streamingUri.Scheme == "https") ? "wss" : "ws"; + } + + this._httpClient = httpClient ?? new HttpClient(NonDisposableHttpClientHandler.Instance, disposeHandler: false); + this._useWebSocketsPooling = useWebSocketsPooling; + this._keepAliveWebSocketsDuration = keepAliveWebSocketsDuration; + this._logger = logger; + if (webSocketFactory != null) + { + this._webSocketFactory = () => + { + var webSocket = webSocketFactory(); + this.SetWebSocketOptions(webSocket); + return webSocket; + }; + } + else + { + this._webSocketFactory = () => + { + ClientWebSocket webSocket = new(); + this.SetWebSocketOptions(webSocket); + return webSocket; + }; + } + + // if a hard limit is defined, we use a semaphore to limit the number of concurrent calls, otherwise, we use a stack to track active connections + if (concurrentSemaphore != null) + { + this._concurrentSemaphore = concurrentSemaphore; + this._maxNbConcurrentWebSockets = concurrentSemaphore.CurrentCount; + } + else + { + this._activeConnections = new(); + this._maxNbConcurrentWebSockets = 0; + } + + if (this._useWebSocketsPooling) + { + this.StartCleanupTask(webSocketsCleanUpCancellationToken ?? 
CancellationToken.None); + } + } + + /// + public async IAsyncEnumerable GetStreamingCompletionsAsync( + string text, + CompleteRequestSettings requestSettings, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + await this.StartConcurrentCallAsync(cancellationToken).ConfigureAwait(false); + + var completionRequest = this.CreateOobaboogaRequest(text, requestSettings); + + var requestJson = JsonSerializer.Serialize(completionRequest); + + var requestBytes = Encoding.UTF8.GetBytes(requestJson); + + ClientWebSocket? clientWebSocket = null; + try + { + // if pooling is enabled, web socket is going to be recycled for reuse, if not it will be properly disposed of after the call +#pragma warning disable CA2000 // Dispose objects before losing scope + if (!this._useWebSocketsPooling || !this._webSocketPool.TryTake(out clientWebSocket)) + { + clientWebSocket = this._webSocketFactory(); + } +#pragma warning restore CA2000 // Dispose objects before losing scope + if (clientWebSocket.State == WebSocketState.None) + { + await clientWebSocket.ConnectAsync(this._streamingUri.Uri, cancellationToken).ConfigureAwait(false); + } + + var sendSegment = new ArraySegment(requestBytes); + await clientWebSocket.SendAsync(sendSegment, WebSocketMessageType.Text, true, cancellationToken).ConfigureAwait(false); + + TextCompletionStreamingResult streamingResult = new(); + + var processingTask = this.ProcessWebSocketMessagesAsync(clientWebSocket, streamingResult, cancellationToken); + + yield return streamingResult; + + // Await the processing task to make sure it's finished before continuing + await processingTask.ConfigureAwait(false); + } + finally + { + if (clientWebSocket != null) + { + if (this._useWebSocketsPooling && clientWebSocket.State == WebSocketState.Open) + { + this._webSocketPool.Add(clientWebSocket); + } + else + { + await this.DisposeClientGracefullyAsync(clientWebSocket).ConfigureAwait(false); + } + } + + this.FinishConcurrentCall(); + } + } + + /// 
+ public async Task> GetCompletionsAsync( + string text, + CompleteRequestSettings requestSettings, + CancellationToken cancellationToken = default) + { + try + { + await this.StartConcurrentCallAsync(cancellationToken).ConfigureAwait(false); + + var completionRequest = this.CreateOobaboogaRequest(text, requestSettings); + + using var stringContent = new StringContent( + JsonSerializer.Serialize(completionRequest), + Encoding.UTF8, + "application/json"); + + using var httpRequestMessage = new HttpRequestMessage() + { + Method = HttpMethod.Post, + RequestUri = this._blockingUri.Uri, + Content = stringContent + }; + httpRequestMessage.Headers.Add("User-Agent", HttpUserAgent); + + using var response = await this._httpClient.SendAsync(httpRequestMessage, cancellationToken).ConfigureAwait(false); + response.EnsureSuccessStatusCode(); + + var body = await response.Content.ReadAsStringAsync().ConfigureAwait(false); + + TextCompletionResponse? completionResponse = JsonSerializer.Deserialize(body); + + if (completionResponse is null) + { + throw new OobaboogaInvalidResponseException(body, "Unexpected response from Oobabooga API"); + } + + return completionResponse.Results.Select(completionText => new TextCompletionResult(completionText)).ToList(); + } + catch (Exception e) when (e is not AIException && !e.IsCriticalException()) + { + throw new AIException( + AIException.ErrorCodes.UnknownError, + $"Something went wrong: {e.Message}", e); + } + finally + { + this.FinishConcurrentCall(); + } + } + + #region private ================================================================================ + + /// + /// Creates an Oobabooga request, mapping CompleteRequestSettings fields to their Oobabooga API counter parts + /// + /// The text to complete. + /// The request settings. + /// An Oobabooga TextCompletionRequest object with the text and completion parameters. 
+ private TextCompletionRequest CreateOobaboogaRequest(string text, CompleteRequestSettings requestSettings) + { + if (string.IsNullOrWhiteSpace(text)) + { + throw new ArgumentNullException(nameof(text)); + } + + // Prepare the request using the provided parameters. + return new TextCompletionRequest() + { + Prompt = text, + MaxNewTokens = requestSettings.MaxTokens, + Temperature = requestSettings.Temperature, + TopP = requestSettings.TopP, + RepetitionPenalty = GetRepetitionPenalty(requestSettings), + StoppingStrings = requestSettings.StopSequences.ToList() + }; + } + + /// + /// Sets the options for the , either persistent and provided by the ctor, or transient if none provided. + /// + private void SetWebSocketOptions(ClientWebSocket clientWebSocket) + { + clientWebSocket.Options.SetRequestHeader("User-Agent", HttpUserAgent); + } + + /// + /// Converts the semantic-kernel presence penalty, scaled -2:+2 with default 0 for no penalty to the Oobabooga repetition penalty, strictly positive with default 1 for no penalty. See and subsequent links for more details. + /// + private static double GetRepetitionPenalty(CompleteRequestSettings requestSettings) + { + return 1 + requestSettings.PresencePenalty / 2; + } + + /// + /// That method is responsible for processing the websocket messages that build a streaming response object. 
It is crucial that it is run asynchronously to prevent a deadlock with results iteration + /// + private async Task ProcessWebSocketMessagesAsync(ClientWebSocket clientWebSocket, TextCompletionStreamingResult streamingResult, CancellationToken cancellationToken) + { + var buffer = new byte[this.WebSocketBufferSize]; + var finishedProcessing = false; + while (!finishedProcessing && !cancellationToken.IsCancellationRequested) + { + MemoryStream messageStream = new(); + WebSocketReceiveResult result; + do + { + var segment = new ArraySegment(buffer); + result = await clientWebSocket.ReceiveAsync(segment, cancellationToken).ConfigureAwait(false); + await messageStream.WriteAsync(buffer, 0, result.Count, cancellationToken).ConfigureAwait(false); + } while (!result.EndOfMessage); + + messageStream.Seek(0, SeekOrigin.Begin); + + if (result.MessageType == WebSocketMessageType.Text) + { + string messageText; + using (var reader = new StreamReader(messageStream, Encoding.UTF8)) + { + messageText = await reader.ReadToEndAsync().ConfigureAwait(false); + } + + var responseObject = JsonSerializer.Deserialize(messageText); + + if (responseObject is null) + { + throw new OobaboogaInvalidResponseException(messageText, "Unexpected response from Oobabooga API"); + } + + switch (responseObject.Event) + { + case TextCompletionStreamingResponse.ResponseObjectTextStreamEvent: + streamingResult.AppendResponse(responseObject); + break; + case TextCompletionStreamingResponse.ResponseObjectStreamEndEvent: + streamingResult.SignalStreamEnd(); + if (!this._useWebSocketsPooling) + { + await clientWebSocket.CloseAsync(WebSocketCloseStatus.NormalClosure, "Acknowledge stream-end oobabooga message", CancellationToken.None).ConfigureAwait(false); + } + + finishedProcessing = true; + break; + default: + break; + } + } + else if (result.MessageType == WebSocketMessageType.Close) + { + await clientWebSocket.CloseOutputAsync(WebSocketCloseStatus.NormalClosure, "Acknowledge Close frame", 
CancellationToken.None).ConfigureAwait(false); + finishedProcessing = true; + } + + if (clientWebSocket.State != WebSocketState.Open) + { + finishedProcessing = true; + } + } + } + + /// + /// Starts a concurrent call, either by taking a semaphore slot or by pushing a value on the active connections stack + /// + /// + private async Task StartConcurrentCallAsync(CancellationToken cancellationToken) + { + if (this._concurrentSemaphore != null) + { + await this._concurrentSemaphore!.WaitAsync(cancellationToken).ConfigureAwait(false); + } + else + { + this._activeConnections!.Add(true); + } + } + + /// + /// Gets the number of concurrent calls, either by reading the semaphore count or by reading the active connections stack count + /// + /// + private int GetCurrentConcurrentCallsNb() + { + if (this._concurrentSemaphore != null) + { + return this._maxNbConcurrentWebSockets - this._concurrentSemaphore!.CurrentCount; + } + + return this._activeConnections!.Count; + } + + /// + /// Ends a concurrent call, either by releasing a semaphore slot or by popping a value from the active connections stack + /// + private void FinishConcurrentCall() + { + if (this._concurrentSemaphore != null) + { + this._concurrentSemaphore!.Release(); + } + else + { + this._activeConnections!.TryTake(out _); + } + + Interlocked.Exchange(ref this._lastCallTicks, DateTime.UtcNow.Ticks); + } + + private void StartCleanupTask(CancellationToken cancellationToken) + { + Task.Factory.StartNew( + async () => + { + while (!cancellationToken.IsCancellationRequested) + { + await this.FlushWebSocketClientsAsync(cancellationToken).ConfigureAwait(false); + } + }, + cancellationToken, + TaskCreationOptions.LongRunning, + TaskScheduler.Default); + } + + /// + /// Flushes the web socket clients that have been idle for too long + /// + /// + private async Task FlushWebSocketClientsAsync(CancellationToken cancellationToken) + { + // In the cleanup task, make sure you handle OperationCanceledException appropriately 
+ // and make frequent checks on whether cancellation is requested. + try + { + if (!cancellationToken.IsCancellationRequested) + { + await Task.Delay(this._keepAliveWebSocketsDuration, cancellationToken).ConfigureAwait(false); + + // If another call was made during the delay, do not proceed with flushing + if (DateTime.UtcNow.Ticks - Interlocked.Read(ref this._lastCallTicks) < TimeSpan.FromMilliseconds(this._keepAliveWebSocketsDuration).Ticks) + { + return; + } + + while (this.GetCurrentConcurrentCallsNb() == 0 && this._webSocketPool.TryTake(out ClientWebSocket clientToDispose)) + { + await this.DisposeClientGracefullyAsync(clientToDispose).ConfigureAwait(false); + } + } + } + catch (OperationCanceledException exception) + { + this._logger?.LogTrace(message: "FlushWebSocketClientsAsync cleaning task was cancelled", exception: exception); + while (this._webSocketPool.TryTake(out ClientWebSocket clientToDispose)) + { + await this.DisposeClientGracefullyAsync(clientToDispose).ConfigureAwait(false); + } + } + } + + /// + /// Closes and disposes of a client web socket after use + /// + private async Task DisposeClientGracefullyAsync(ClientWebSocket clientWebSocket) + { + try + { + if (clientWebSocket.State == WebSocketState.Open) + { + await clientWebSocket.CloseAsync(WebSocketCloseStatus.NormalClosure, "Closing client before disposal", CancellationToken.None).ConfigureAwait(false); + } + } + catch (OperationCanceledException exception) + { + this._logger?.LogTrace(message: "Closing client web socket before disposal was cancelled", exception: exception); + } + catch (WebSocketException exception) + { + this._logger?.LogTrace(message: "Closing client web socket before disposal raised web socket exception", exception: exception); + } + finally + { + clientWebSocket.Dispose(); + } + } + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.AI.Oobabooga/TextCompletion/TextCompletionRequest.cs 
b/dotnet/src/Connectors/Connectors.AI.Oobabooga/TextCompletion/TextCompletionRequest.cs new file mode 100644 index 000000000000..8adcc088187a --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AI.Oobabooga/TextCompletion/TextCompletionRequest.cs @@ -0,0 +1,177 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.AI.Oobabooga.TextCompletion; + +/// +/// HTTP schema to perform oobabooga completion request. Contains many parameters, some of which are specific to certain kinds of models. +/// See and subsequent links for additional information. +/// +[Serializable] +public sealed class TextCompletionRequest +{ + /// + /// The prompt text to complete. + /// + [JsonPropertyName("prompt")] + public string Prompt { get; set; } = string.Empty; + + /// + /// The maximum number of tokens to generate, ignoring the number of tokens in the prompt. + /// + [JsonPropertyName("max_new_tokens")] + public int? MaxNewTokens { get; set; } + + /// + /// Determines whether or not to use sampling; use greedy decoding if false. + /// + [JsonPropertyName("do_sample")] + public bool DoSample { get; set; } = true; + + /// + /// Modulates the next token probabilities. A value of 0 implies deterministic output (only the most likely token is used). Higher values increase randomness. + /// + [JsonPropertyName("temperature")] + public double Temperature { get; set; } + + /// + /// If set to a value less than 1, only the most probable tokens with cumulative probability less than this value are kept for generation. + /// + [JsonPropertyName("top_p")] + public double TopP { get; set; } + + /// + /// Measures how similar the conditional probability of predicting a target token is to the expected conditional probability of predicting a random token, given the generated text. 
+ /// + [JsonPropertyName("typical_p")] + public double TypicalP { get; set; } = 1; + + /// + /// Sets a probability floor below which tokens are excluded from being sampled. + /// + [JsonPropertyName("epsilon_cutoff")] + public double EpsilonCutoff { get; set; } + + /// + /// Used with top_p, top_k, and epsilon_cutoff set to 0. This parameter hybridizes locally typical sampling and epsilon sampling. + /// + [JsonPropertyName("eta_cutoff")] + public double EtaCutoff { get; set; } + + /// + /// Controls Tail Free Sampling (value between 0 and 1) + /// + [JsonPropertyName("tfs")] + public double Tfs { get; set; } = 1; + + /// + /// Top A Sampling is a way to pick the next word in a sentence based on how important it is in the context. Top-A considers the probability of the most likely token, and sets a limit based on its percentage. After this, remaining tokens are compared to this limit. If their probability is too low, they are removed from the pool​. + /// + [JsonPropertyName("top_a")] + public double TopA { get; set; } + + /// + /// Exponential penalty factor for repeating prior tokens. 1 means no penalty, higher value = less repetition. + /// + [JsonPropertyName("repetition_penalty")] + public double RepetitionPenalty { get; set; } = 1.18; + + /// + ///When using "top k", you select the top k most likely words to come next based on their probability of occurring, where k is a fixed number that you specify. You can use Top_K to control the amount of diversity in the model output​ + /// + [JsonPropertyName("top_k")] + public int TopK { get; set; } + + /// + /// Minimum length of the sequence to be generated. + /// + [JsonPropertyName("min_length")] + public int MinLength { get; set; } + + /// + /// If set to a value greater than 0, all ngrams of that size can only occur once. + /// + [JsonPropertyName("no_repeat_ngram_size")] + public int NoRepeatNgramSize { get; set; } + + /// + /// Number of beams for beam search. 1 means no beam search. 
+ /// + [JsonPropertyName("num_beams")] + public int NumBeams { get; set; } = 1; + + /// + /// The values balance the model confidence and the degeneration penalty in contrastive search decoding. + /// + [JsonPropertyName("penalty_alpha")] + public int PenaltyAlpha { get; set; } + + /// + /// Exponential penalty to the length that is used with beam-based generation + /// + [JsonPropertyName("length_penalty")] + public double LengthPenalty { get; set; } = 1; + + /// + /// Controls the stopping condition for beam-based methods, like beam-search. It accepts the following values: True, where the generation stops as soon as there are num_beams complete candidates; False, where an heuristic is applied and the generation stops when is it very unlikely to find better candidates. + /// + [JsonPropertyName("early_stopping")] + public bool EarlyStopping { get; set; } + + /// + /// Parameter used for mirostat sampling in Llama.cpp, controlling perplexity during text (default: 0, 0 = disabled, 1 = Mirostat, 2 = Mirostat 2.0) + /// + [JsonPropertyName("mirostat_mode")] + public int MirostatMode { get; set; } + + /// + /// Set the Mirostat target entropy, parameter tau (default: 5.0) + /// + [JsonPropertyName("mirostat_tau")] + public int MirostatTau { get; set; } = 5; + + /// + /// Set the Mirostat learning rate, parameter eta (default: 0.1) + /// + [JsonPropertyName("mirostat_eta")] + public double MirostatEta { get; set; } = 0.1; + + /// + /// Random seed to control sampling, used when DoSample is True. + /// + [JsonPropertyName("seed")] + public int Seed { get; set; } = -1; + + /// + /// Controls whether to add beginning of a sentence token + /// + [JsonPropertyName("add_bos_token")] + public bool AddBosToken { get; set; } = true; + + /// + /// The leftmost tokens are removed if the prompt exceeds this length. Most models require this to be at most 2048. 
+ /// + [JsonPropertyName("truncation_length")] + public int TruncationLength { get; set; } = 2048; + + /// + /// Forces the model to never end the generation prematurely. + /// + [JsonPropertyName("ban_eos_token")] + public bool BanEosToken { get; set; } = true; + + /// + /// Some specific models need this unset. + /// + [JsonPropertyName("skip_special_tokens")] + public bool SkipSpecialTokens { get; set; } = true; + + /// + /// In addition to the defaults. Written between "" and separated by commas. For instance: "\nYour Assistant:", "\nThe assistant:" + /// + [JsonPropertyName("stopping_strings")] + public List StoppingStrings { get; set; } = new List(); +} diff --git a/dotnet/src/Connectors/Connectors.AI.Oobabooga/TextCompletion/TextCompletionResponse.cs b/dotnet/src/Connectors/Connectors.AI.Oobabooga/TextCompletion/TextCompletionResponse.cs new file mode 100644 index 000000000000..e5058fe77cb2 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AI.Oobabooga/TextCompletion/TextCompletionResponse.cs @@ -0,0 +1,30 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.AI.Oobabooga.TextCompletion; + +/// +/// HTTP Schema for Oobabooga completion response. Contains a list of results. Adapted from +/// +public sealed class TextCompletionResponse +{ + /// + /// A field used by Oobabooga to return results from the blocking API. + /// + [JsonPropertyName("results")] + public List Results { get; set; } = new(); +} + +/// +/// HTTP Schema for an single Oobabooga result as part of a completion response. +/// +public sealed class TextCompletionResponseText +{ + /// + /// Completed text. + /// + [JsonPropertyName("text")] + public string? 
Text { get; set; } = string.Empty; +} diff --git a/dotnet/src/Connectors/Connectors.AI.Oobabooga/TextCompletion/TextCompletionResult.cs b/dotnet/src/Connectors/Connectors.AI.Oobabooga/TextCompletion/TextCompletionResult.cs new file mode 100644 index 000000000000..95097f9736ec --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AI.Oobabooga/TextCompletion/TextCompletionResult.cs @@ -0,0 +1,28 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.AI.TextCompletion; +using Microsoft.SemanticKernel.Orchestration; + +namespace Microsoft.SemanticKernel.Connectors.AI.Oobabooga.TextCompletion; + +/// +/// Oobabooga implementation of . Actual response object is stored in a ModelResult instance, and completion text is simply passed forward. +/// +internal sealed class TextCompletionResult : ITextResult +{ + private readonly ModelResult _responseData; + + public TextCompletionResult(TextCompletionResponseText responseData) + { + this._responseData = new ModelResult(responseData); + } + + public ModelResult ModelResult => this._responseData; + + public Task GetCompletionAsync(CancellationToken cancellationToken = default) + { + return Task.FromResult(this._responseData.GetResult().Text ?? string.Empty); + } +} diff --git a/dotnet/src/Connectors/Connectors.AI.Oobabooga/TextCompletion/TextCompletionStreamingResponse.cs b/dotnet/src/Connectors/Connectors.AI.Oobabooga/TextCompletion/TextCompletionStreamingResponse.cs new file mode 100644 index 000000000000..33d9abf68401 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AI.Oobabooga/TextCompletion/TextCompletionStreamingResponse.cs @@ -0,0 +1,32 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.AI.Oobabooga.TextCompletion; + +/// +/// HTTP Schema for streaming completion response. 
Adapted from +/// +public sealed class TextCompletionStreamingResponse +{ + public const string ResponseObjectTextStreamEvent = "text_stream"; + public const string ResponseObjectStreamEndEvent = "stream_end"; + + /// + /// A field used by Oobabooga to signal the type of websocket message sent, e.g. "text_stream" or "stream_end". + /// + [JsonPropertyName("event")] + public string Event { get; set; } = string.Empty; + + /// + /// A field used by Oobabooga to signal the number of messages sent, starting with 0 and incremented on each message. + /// + [JsonPropertyName("message_num")] + public int MessageNum { get; set; } + + /// + /// A field used by Oobabooga with the text chunk sent in the websocket message. + /// + [JsonPropertyName("text")] + public string Text { get; set; } = string.Empty; +} diff --git a/dotnet/src/Connectors/Connectors.AI.Oobabooga/TextCompletion/TextCompletionStreamingResult.cs b/dotnet/src/Connectors/Connectors.AI.Oobabooga/TextCompletion/TextCompletionStreamingResult.cs new file mode 100644 index 000000000000..0575e6434cc2 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AI.Oobabooga/TextCompletion/TextCompletionStreamingResult.cs @@ -0,0 +1,66 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Collections.Generic; +using System.Runtime.CompilerServices; +using System.Text; +using System.Threading; +using System.Threading.Channels; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.AI.TextCompletion; +using Microsoft.SemanticKernel.Orchestration; + +namespace Microsoft.SemanticKernel.Connectors.AI.Oobabooga.TextCompletion; + +internal sealed class TextCompletionStreamingResult : ITextStreamingResult +{ + private readonly List _modelResponses; + private readonly Channel _responseChannel; + + public ModelResult ModelResult { get; } + + public TextCompletionStreamingResult() + { + this._modelResponses = new(); + this.ModelResult = new ModelResult(this._modelResponses); + this._responseChannel = Channel.CreateUnbounded(new UnboundedChannelOptions() + { + SingleReader = true, + SingleWriter = true, + AllowSynchronousContinuations = false + }); + } + + public void AppendResponse(TextCompletionStreamingResponse response) + { + this._modelResponses.Add(response); + this._responseChannel.Writer.TryWrite(response.Text); + } + + public void SignalStreamEnd() + { + this._responseChannel.Writer.Complete(); + } + + public async Task GetCompletionAsync(CancellationToken cancellationToken = default) + { + StringBuilder resultBuilder = new(); + + await foreach (var chunk in this.GetCompletionStreamingAsync(cancellationToken)) + { + resultBuilder.Append(chunk); + } + + return resultBuilder.ToString(); + } + + public async IAsyncEnumerable GetCompletionStreamingAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + { + while (await this._responseChannel.Reader.WaitToReadAsync(cancellationToken).ConfigureAwait(false)) + { + while (this._responseChannel.Reader.TryRead(out string? 
chunk)) + { + yield return chunk; + } + } + } +} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/AzureOpenAIClientBase.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/AzureOpenAIClientBase.cs index 2f5ee1acc290..4663f1bd8cf1 100644 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/AzureOpenAIClientBase.cs +++ b/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/AzureOpenAIClientBase.cs @@ -2,6 +2,7 @@ using System; using System.Net.Http; +using System.Runtime.CompilerServices; using Azure; using Azure.AI.OpenAI; using Azure.Core; @@ -19,7 +20,7 @@ public abstract class AzureOpenAIClientBase : ClientBase private protected override OpenAIClient Client { get; } /// - /// Creates a new AzureTextCompletion client instance using API Key auth + /// Creates a new Azure OpenAI client instance using API Key auth /// /// Azure OpenAI model ID or deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart @@ -31,7 +32,7 @@ private protected AzureOpenAIClientBase( string endpoint, string apiKey, HttpClient? httpClient = null, - ILogger? logger = null) + ILogger? logger = null) : base(logger) { Verify.NotNullOrWhiteSpace(modelId); Verify.NotNullOrWhiteSpace(endpoint); @@ -50,19 +51,19 @@ private protected AzureOpenAIClientBase( } /// - /// Creates a new AzureTextCompletion client instance supporting AAD auth + /// Creates a new Azure OpenAI client instance supporting AAD auth /// /// Azure OpenAI model ID or deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart /// Token credential, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. /// Custom for HTTP requests. 
- /// Application logger + /// Application logger private protected AzureOpenAIClientBase( string modelId, string endpoint, TokenCredential credential, HttpClient? httpClient = null, - ILogger? log = null) + ILogger? logger = null) : base(logger) { Verify.NotNullOrWhiteSpace(modelId); Verify.NotNullOrWhiteSpace(endpoint); @@ -77,4 +78,31 @@ private protected AzureOpenAIClientBase( this.ModelId = modelId; this.Client = new OpenAIClient(new Uri(endpoint), credential, options); } + + /// + /// Creates a new Azure OpenAI client instance using the specified OpenAIClient + /// + /// Azure OpenAI model ID or deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Custom . + /// Application logger + private protected AzureOpenAIClientBase( + string modelId, + OpenAIClient openAIClient, + ILogger? logger = null) + { + Verify.NotNullOrWhiteSpace(modelId); + Verify.NotNull(openAIClient); + + this.ModelId = modelId; + this.Client = openAIClient; + } + + /// + /// Logs Azure OpenAI action details. + /// + /// Caller member name. Populated automatically by runtime. + private protected void LogActionDetails([CallerMemberName] string? callerMemberName = default) + { + this.Logger.LogInformation("Action: {Action}. 
Azure OpenAI Deployment Name: {DeploymentName}.", callerMemberName, this.ModelId); + } } diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ClientBase.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ClientBase.cs index 8fb4870c79b1..8e7bf450d1ed 100644 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ClientBase.cs +++ b/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ClientBase.cs @@ -8,6 +8,8 @@ using System.Threading.Tasks; using Azure; using Azure.AI.OpenAI; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; using Microsoft.SemanticKernel.AI; using Microsoft.SemanticKernel.AI.ChatCompletion; using Microsoft.SemanticKernel.AI.Embeddings; @@ -25,7 +27,10 @@ public abstract class ClientBase private const int MaxResultsPerPrompt = 128; // Prevent external inheritors - private protected ClientBase() { } + private protected ClientBase(ILogger? logger = null) + { + this.Logger = logger ?? NullLogger.Instance; + } /// /// Model Id or Deployment Name @@ -37,6 +42,11 @@ private protected ClientBase() { } /// private protected abstract OpenAIClient Client { get; } + /// + /// Logger instance + /// + private protected ILogger Logger { get; set; } + /// /// Creates completions for the prompt and settings. /// diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/OpenAIClientBase.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/OpenAIClientBase.cs index 76784dc29dfb..25750230dbf7 100644 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/OpenAIClientBase.cs +++ b/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/OpenAIClientBase.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System.Net.Http; +using System.Runtime.CompilerServices; using Azure.AI.OpenAI; using Azure.Core; using Azure.Core.Pipeline; @@ -29,7 +30,7 @@ private protected OpenAIClientBase( string apiKey, string? organization = null, HttpClient? httpClient = null, - ILogger? 
logger = null) + ILogger? logger = null) : base(logger) { Verify.NotNullOrWhiteSpace(modelId); Verify.NotNullOrWhiteSpace(apiKey); @@ -49,4 +50,13 @@ private protected OpenAIClientBase( this.Client = new OpenAIClient(apiKey, options); } + + /// + /// Logs OpenAI action details. + /// + /// Caller member name. Populated automatically by runtime. + private protected void LogActionDetails([CallerMemberName] string? callerMemberName = default) + { + this.Logger.LogInformation("Action: {Action}. OpenAI Model ID: {ModelId}.", callerMemberName, this.ModelId); + } } diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletion/AzureChatCompletion.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletion/AzureChatCompletion.cs index 17440235f18e..27838f2a8f49 100644 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletion/AzureChatCompletion.cs +++ b/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletion/AzureChatCompletion.cs @@ -4,6 +4,7 @@ using System.Net.Http; using System.Threading; using System.Threading.Tasks; +using Azure.AI.OpenAI; using Azure.Core; using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel.AI.ChatCompletion; @@ -52,12 +53,26 @@ public AzureChatCompletion( { } + /// + /// Creates a new AzureChatCompletion client instance using the specified OpenAIClient + /// + /// Azure OpenAI model ID or deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Custom . + /// Application logger + public AzureChatCompletion( + string modelId, + OpenAIClient openAIClient, + ILogger? logger = null) : base(modelId, openAIClient, logger) + { + } + /// public Task> GetChatCompletionsAsync( ChatHistory chat, ChatRequestSettings? 
requestSettings = null, CancellationToken cancellationToken = default) { + this.LogActionDetails(); return this.InternalGetChatResultsAsync(chat, requestSettings, cancellationToken); } @@ -67,6 +82,7 @@ public IAsyncEnumerable GetStreamingChatCompletionsAsync( ChatRequestSettings? requestSettings = null, CancellationToken cancellationToken = default) { + this.LogActionDetails(); return this.InternalGetChatStreamingResultsAsync(chat, requestSettings, cancellationToken); } @@ -82,6 +98,7 @@ public IAsyncEnumerable GetStreamingCompletionsAsync( CompleteRequestSettings? requestSettings = null, CancellationToken cancellationToken = default) { + this.LogActionDetails(); return this.InternalGetChatStreamingResultsAsTextAsync(text, requestSettings, cancellationToken); } @@ -91,6 +108,7 @@ public Task> GetCompletionsAsync( CompleteRequestSettings? requestSettings = null, CancellationToken cancellationToken = default) { + this.LogActionDetails(); return this.InternalGetChatResultsAsTextAsync(text, requestSettings, cancellationToken); } } diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletion/OpenAIChatCompletion.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletion/OpenAIChatCompletion.cs index 897bfaa8fb89..d444125bff98 100644 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletion/OpenAIChatCompletion.cs +++ b/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletion/OpenAIChatCompletion.cs @@ -41,6 +41,7 @@ public Task> GetChatCompletionsAsync( ChatRequestSettings? requestSettings = null, CancellationToken cancellationToken = default) { + this.LogActionDetails(); return this.InternalGetChatResultsAsync(chat, requestSettings, cancellationToken); } @@ -50,6 +51,7 @@ public IAsyncEnumerable GetStreamingChatCompletionsAsync( ChatRequestSettings? 
requestSettings = null, CancellationToken cancellationToken = default) { + this.LogActionDetails(); return this.InternalGetChatStreamingResultsAsync(chat, requestSettings, cancellationToken); } @@ -65,6 +67,7 @@ public IAsyncEnumerable GetStreamingCompletionsAsync( CompleteRequestSettings? requestSettings = null, CancellationToken cancellationToken = default) { + this.LogActionDetails(); return this.InternalGetChatStreamingResultsAsTextAsync(text, requestSettings, cancellationToken); } @@ -74,6 +77,7 @@ public Task> GetCompletionsAsync( CompleteRequestSettings? requestSettings = null, CancellationToken cancellationToken = default) { + this.LogActionDetails(); return this.InternalGetChatResultsAsTextAsync(text, requestSettings, cancellationToken); } } diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/CustomClient/OpenAIClientBase.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/CustomClient/OpenAIClientBase.cs index 70f60fae670b..4dcadc3c02ff 100644 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/CustomClient/OpenAIClientBase.cs +++ b/dotnet/src/Connectors/Connectors.AI.OpenAI/CustomClient/OpenAIClientBase.cs @@ -93,7 +93,7 @@ private protected async Task> ExecuteImageGenerationRequestAsync( } catch (Exception ex) when (ex is NotSupportedException or JsonException) { - this._log.LogTrace("Unable to extract error from response body content. Exception: {0}:{1}", ex.GetType(), ex.Message); + this._log.LogError(ex, "Unable to extract error from response body content. 
Exception: {0}:{1}", ex.GetType(), ex.Message); } return null; @@ -159,7 +159,7 @@ private protected async Task ExecuteRequestAsync(string url response = await this._httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); } - this._log.LogTrace("HTTP response: {0} {1}", (int)response.StatusCode, response.StatusCode.ToString("G")); + this._log.LogDebug("HTTP response: {0} {1}", (int)response.StatusCode, response.StatusCode.ToString("G")); if (response.IsSuccessStatusCode) { diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/OpenAIKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/OpenAIKernelBuilderExtensions.cs index c543ab66390f..591373ef5261 100644 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/OpenAIKernelBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.AI.OpenAI/OpenAIKernelBuilderExtensions.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System.Net.Http; +using Azure.AI.OpenAI; using Azure.Core; using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel.AI.ChatCompletion; @@ -88,6 +89,32 @@ public static KernelBuilder WithAzureTextCompletionService(this KernelBuilder bu return builder; } + /// + /// Adds an Azure OpenAI text completion service to the list. + /// See https://learn.microsoft.com/azure/cognitive-services/openai for service details. + /// + /// The instance + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Custom . + /// A local identifier for the given AI service + /// Whether the service should be the default for its type. + /// Self instance + public static KernelBuilder WithAzureTextCompletionService(this KernelBuilder builder, + string deploymentName, + OpenAIClient openAIClient, + string? 
serviceId = null, + bool setAsDefault = false) + { + builder.WithAIService(serviceId, (parameters) => + new AzureTextCompletion( + deploymentName, + openAIClient, + parameters.Logger), + setAsDefault); + + return builder; + } + /// /// Adds the OpenAI text completion service to the list. /// See https://platform.openai.com/docs for service details. diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/OpenAIKernelConfigExtensions.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/OpenAIKernelConfigExtensions.cs deleted file mode 100644 index 5ddc8a5696f2..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/OpenAIKernelConfigExtensions.cs +++ /dev/null @@ -1,414 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Net.Http; -using Azure.Core; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.AI.ChatCompletion; -using Microsoft.SemanticKernel.AI.Embeddings; -using Microsoft.SemanticKernel.AI.ImageGeneration; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.ImageGeneration; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.TextCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.TextEmbedding; -using Microsoft.SemanticKernel.Reliability; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using NS of KernelConfig -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -public static class OpenAIKernelConfigExtensions -{ - #region Text Completion - - /// - /// Adds an Azure OpenAI text completion service to the list. - /// See https://learn.microsoft.com/azure/cognitive-services/openai for service details. 
- /// - /// The kernel config instance - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// A local identifier for the given AI service - /// Custom for HTTP requests. - /// Application logger - /// Self instance - [Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions. Please use the corresponding extension method in the KernelBuilder class instead.")] - public static KernelConfig AddAzureTextCompletionService(this KernelConfig config, - string deploymentName, - string endpoint, - string apiKey, - string? serviceId = null, - HttpClient? httpClient = null, - ILogger? logger = null) - { - ITextCompletion Factory(IKernel kernel) => new AzureTextCompletion( - deploymentName, - endpoint, - apiKey, - httpClient ?? kernel.Config.HttpHandlerFactory.CreateHttpClient(kernel.Log), - logger ?? kernel.Log); - - config.AddTextCompletionService(Factory, serviceId); - - return config; - } - - /// - /// Adds an Azure OpenAI text completion service to the list. - /// See https://learn.microsoft.com/azure/cognitive-services/openai for service details. - /// - /// The kernel config instance - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// A local identifier for the given AI service - /// Custom for HTTP requests. - /// Application logger - /// Self instance - [Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions. 
Please use the corresponding extension method in the KernelBuilder class instead.")] - public static KernelConfig AddAzureTextCompletionService(this KernelConfig config, - string deploymentName, - string endpoint, - TokenCredential credentials, - string? serviceId = null, - HttpClient? httpClient = null, - ILogger? logger = null) - { - ITextCompletion Factory(IKernel kernel) => new AzureTextCompletion( - deploymentName, - endpoint, - credentials, - httpClient ?? kernel.Config.HttpHandlerFactory.CreateHttpClient(kernel.Log), - logger ?? kernel.Log); - - config.AddTextCompletionService(Factory, serviceId); - - return config; - } - - /// - /// Adds the OpenAI text completion service to the list. - /// See https://platform.openai.com/docs for service details. - /// - /// The kernel config instance - /// OpenAI model name, see https://platform.openai.com/docs/models - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// Custom for HTTP requests. - /// Application logger - /// Self instance - [Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions. Please use the corresponding extension method in the KernelBuilder class instead.")] - public static KernelConfig AddOpenAITextCompletionService(this KernelConfig config, - string modelId, - string apiKey, - string? orgId = null, - string? serviceId = null, - HttpClient? httpClient = null, - ILogger? logger = null) - { - ITextCompletion Factory(IKernel kernel) => new OpenAITextCompletion( - modelId, - apiKey, - orgId, - httpClient ?? kernel.Config.HttpHandlerFactory.CreateHttpClient(kernel.Log), - logger ?? kernel.Log); - - config.AddTextCompletionService(Factory, serviceId); - - return config; - } - - #endregion - - #region Text Embedding - - /// - /// Adds an Azure OpenAI text embeddings service to the list. 
- /// See https://learn.microsoft.com/azure/cognitive-services/openai for service details. - /// - /// The kernel config instance - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// A local identifier for the given AI service - /// Custom for HTTP requests. - /// Application logger - /// Self instance - [Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions. Please use the corresponding extension method in the KernelBuilder class instead.")] - public static KernelConfig AddAzureTextEmbeddingGenerationService(this KernelConfig config, - string deploymentName, - string endpoint, - string apiKey, - string? serviceId = null, - HttpClient? httpClient = null, - ILogger? logger = null) - { - IEmbeddingGeneration Factory(IKernel kernel) => new AzureTextEmbeddingGeneration( - deploymentName, - endpoint, - apiKey, - httpClient ?? kernel.Config.HttpHandlerFactory.CreateHttpClient(kernel.Log), - logger ?? kernel.Log); - - config.AddTextEmbeddingGenerationService(Factory, serviceId); - - return config; - } - - /// - /// Adds an Azure OpenAI text embeddings service to the list. - /// See https://learn.microsoft.com/azure/cognitive-services/openai for service details. - /// - /// The kernel config instance - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// A local identifier for the given AI service - /// Custom for HTTP requests. 
- /// Application logger - /// Self instance - [Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions. Please use the corresponding extension method in the KernelBuilder class instead.")] - public static KernelConfig AddAzureTextEmbeddingGenerationService(this KernelConfig config, - string deploymentName, - string endpoint, - TokenCredential credentials, - string? serviceId = null, - HttpClient? httpClient = null, - ILogger? logger = null) - { - IEmbeddingGeneration Factory(IKernel kernel) => new AzureTextEmbeddingGeneration( - deploymentName, - endpoint, - credentials, - httpClient ?? kernel.Config.HttpHandlerFactory.CreateHttpClient(kernel.Log), - logger ?? kernel.Log); - - config.AddTextEmbeddingGenerationService(Factory, serviceId); - - return config; - } - - /// - /// Adds the OpenAI text embeddings service to the list. - /// See https://platform.openai.com/docs for service details. - /// - /// The kernel config instance - /// OpenAI model name, see https://platform.openai.com/docs/models - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// Custom for HTTP requests. - /// Application logger - /// Self instance - [Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions. Please use the corresponding extension method in the KernelBuilder class instead.")] - public static KernelConfig AddOpenAITextEmbeddingGenerationService(this KernelConfig config, - string modelId, - string apiKey, - string? orgId = null, - string? serviceId = null, - HttpClient? httpClient = null, - ILogger? logger = null) - { - IEmbeddingGeneration Factory(IKernel kernel) => new OpenAITextEmbeddingGeneration( - modelId, - apiKey, - orgId, - httpClient ?? kernel.Config.HttpHandlerFactory.CreateHttpClient(kernel.Log), - logger ?? 
kernel.Log); - - config.AddTextEmbeddingGenerationService(Factory, serviceId); - - return config; - } - - #endregion - - #region Chat Completion - - /// - /// Adds the Azure OpenAI ChatGPT completion service to the list. - /// See https://platform.openai.com/docs for service details. - /// - /// The kernel config instance - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Whether to use the service also for text completion, if supported - /// A local identifier for the given AI service - /// Custom for HTTP requests. - /// Application logger - /// Self instance - [Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions. Please use the corresponding extension method in the KernelBuilder class instead.")] - public static KernelConfig AddAzureChatCompletionService(this KernelConfig config, - string deploymentName, - string endpoint, - string apiKey, - bool alsoAsTextCompletion = true, - string? serviceId = null, - HttpClient? httpClient = null, - ILogger? logger = null) - { - IChatCompletion Factory(IKernel kernel) => new AzureChatCompletion( - deploymentName, endpoint, apiKey, kernel.Config.HttpHandlerFactory.CreateHttpClient(kernel.Log), kernel.Log); - - config.AddChatCompletionService(Factory, serviceId); - - // If the class implements the text completion interface, allow to use it also for semantic functions - if (alsoAsTextCompletion && typeof(ITextCompletion).IsAssignableFrom(typeof(AzureChatCompletion))) - { - ITextCompletion TextServiceFactory(IKernel kernel) => new AzureChatCompletion( - deploymentName, - endpoint, - apiKey, - httpClient ?? kernel.Config.HttpHandlerFactory.CreateHttpClient(kernel.Log), - logger ?? 
kernel.Log); - - config.AddTextCompletionService(TextServiceFactory, serviceId); - } - - return config; - } - - /// - /// Adds the Azure OpenAI ChatGPT completion service to the list. - /// See https://platform.openai.com/docs for service details. - /// - /// The kernel config instance - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// Whether to use the service also for text completion, if supported - /// A local identifier for the given AI service - /// Custom for HTTP requests. - /// Application logger - /// Self instance - [Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions. Please use the corresponding extension method in the KernelBuilder class instead.")] - public static KernelConfig AddAzureChatCompletionService(this KernelConfig config, - string deploymentName, - string endpoint, - TokenCredential credentials, - bool alsoAsTextCompletion = true, - string? serviceId = null, - HttpClient? httpClient = null, - ILogger? logger = null) - { - IChatCompletion Factory(IKernel kernel) => new AzureChatCompletion( - deploymentName, - endpoint, - credentials, - httpClient ?? kernel.Config.HttpHandlerFactory.CreateHttpClient(kernel.Log), - logger ?? kernel.Log); - - config.AddChatCompletionService(Factory, serviceId); - - // If the class implements the text completion interface, allow to use it also for semantic functions - if (alsoAsTextCompletion && typeof(ITextCompletion).IsAssignableFrom(typeof(AzureChatCompletion))) - { - ITextCompletion TextServiceFactory(IKernel kernel) => new AzureChatCompletion( - deploymentName, - endpoint, - credentials, - httpClient ?? kernel.Config.HttpHandlerFactory.CreateHttpClient(kernel.Log), - logger ?? 
kernel.Log); - - config.AddTextCompletionService(TextServiceFactory, serviceId); - } - - return config; - } - - /// - /// Adds the OpenAI ChatGPT completion service to the list. - /// See https://platform.openai.com/docs for service details. - /// - /// The kernel config instance - /// OpenAI model name, see https://platform.openai.com/docs/models - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// Whether to use the service also for text completion, if supported - /// A local identifier for the given AI service - /// Custom for HTTP requests. - /// Application logger - /// Self instance - [Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions. Please use the corresponding extension method in the KernelBuilder class instead.")] - public static KernelConfig AddOpenAIChatCompletionService(this KernelConfig config, - string modelId, - string apiKey, - string? orgId = null, - bool alsoAsTextCompletion = true, - string? serviceId = null, - HttpClient? httpClient = null, - ILogger? logger = null) - { - IChatCompletion Factory(IKernel kernel) => new OpenAIChatCompletion( - modelId, - apiKey, - orgId, - httpClient ?? kernel.Config.HttpHandlerFactory.CreateHttpClient(kernel.Log), - logger ?? kernel.Log); - - config.AddChatCompletionService(Factory, serviceId); - - // If the class implements the text completion interface, allow to use it also for semantic functions - if (alsoAsTextCompletion && typeof(ITextCompletion).IsAssignableFrom(typeof(OpenAIChatCompletion))) - { - ITextCompletion TextServiceFactory(IKernel kernel) => new OpenAIChatCompletion( - modelId, - apiKey, - orgId, - httpClient ?? kernel.Config.HttpHandlerFactory.CreateHttpClient(kernel.Log), - logger ?? 
kernel.Log); - - config.AddTextCompletionService(TextServiceFactory, serviceId); - } - - return config; - } - - #endregion - - #region Images - - /// - /// Add the OpenAI DallE image generation service to the list - /// - /// The kernel config instance - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// Custom for HTTP requests. - /// Application logger - /// Self instance - [Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions. Please use the corresponding extension method in the KernelBuilder class instead.")] - public static KernelConfig AddOpenAIImageGenerationService(this KernelConfig config, - string apiKey, - string? orgId = null, - string? serviceId = null, - HttpClient? httpClient = null, - ILogger? logger = null) - { - IImageGeneration Factory(IKernel kernel) => new OpenAIImageGeneration( - apiKey, - orgId, - httpClient ?? kernel.Config.HttpHandlerFactory.CreateHttpClient(kernel.Log), - logger ?? kernel.Log); - - config.AddImageGenerationService(Factory, serviceId); - - return config; - } - - #endregion - - private static HttpClient CreateHttpClient(this IDelegatingHandlerFactory handlerFactory, - ILogger? 
logger) - { - var retryHandler = handlerFactory.Create(logger); - retryHandler.InnerHandler = new HttpClientHandler { CheckCertificateRevocationList = true }; - return new HttpClient(retryHandler); - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/TextCompletion/AzureTextCompletion.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/TextCompletion/AzureTextCompletion.cs index 3d45ca19722d..26350604742c 100644 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/TextCompletion/AzureTextCompletion.cs +++ b/dotnet/src/Connectors/Connectors.AI.OpenAI/TextCompletion/AzureTextCompletion.cs @@ -4,6 +4,7 @@ using System.Net.Http; using System.Threading; using System.Threading.Tasks; +using Azure.AI.OpenAI; using Azure.Core; using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel.AI.TextCompletion; @@ -51,12 +52,26 @@ public AzureTextCompletion( { } + /// + /// Creates a new AzureTextCompletion client instance using the specified OpenAIClient + /// + /// Azure OpenAI model ID or deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Custom . + /// Application logger + public AzureTextCompletion( + string modelId, + OpenAIClient openAIClient, + ILogger? 
logger = null) : base(modelId, openAIClient, logger) + { + } + /// public IAsyncEnumerable GetStreamingCompletionsAsync( string text, CompleteRequestSettings requestSettings, CancellationToken cancellationToken = default) { + this.LogActionDetails(); return this.InternalGetTextStreamingResultsAsync(text, requestSettings, cancellationToken); } @@ -66,6 +81,7 @@ public Task> GetCompletionsAsync( CompleteRequestSettings requestSettings, CancellationToken cancellationToken = default) { + this.LogActionDetails(); return this.InternalGetTextResultsAsync(text, requestSettings, cancellationToken); } } diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/TextCompletion/OpenAITextCompletion.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/TextCompletion/OpenAITextCompletion.cs index 8636156d4180..0c9bcc0f28e4 100644 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/TextCompletion/OpenAITextCompletion.cs +++ b/dotnet/src/Connectors/Connectors.AI.OpenAI/TextCompletion/OpenAITextCompletion.cs @@ -40,6 +40,7 @@ public IAsyncEnumerable GetStreamingCompletionsAsync( CompleteRequestSettings requestSettings, CancellationToken cancellationToken = default) { + this.LogActionDetails(); return this.InternalGetTextStreamingResultsAsync(text, requestSettings, cancellationToken); } @@ -49,6 +50,7 @@ public Task> GetCompletionsAsync( CompleteRequestSettings requestSettings, CancellationToken cancellationToken = default) { + this.LogActionDetails(); return this.InternalGetTextResultsAsync(text, requestSettings, cancellationToken); } } diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/TextEmbedding/AzureTextEmbeddingGeneration.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/TextEmbedding/AzureTextEmbeddingGeneration.cs index 08333a0367ea..3f7b50959610 100644 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/TextEmbedding/AzureTextEmbeddingGeneration.cs +++ b/dotnet/src/Connectors/Connectors.AI.OpenAI/TextEmbedding/AzureTextEmbeddingGeneration.cs @@ -60,6 +60,7 @@ public Task>> 
GenerateEmbeddingsAsync( IList data, CancellationToken cancellationToken = default) { + this.LogActionDetails(); return this.InternalGetEmbeddingsAsync(data, cancellationToken); } } diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/TextEmbedding/OpenAITextEmbeddingGeneration.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/TextEmbedding/OpenAITextEmbeddingGeneration.cs index 737f20907d3f..4f41aae08b9f 100644 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/TextEmbedding/OpenAITextEmbeddingGeneration.cs +++ b/dotnet/src/Connectors/Connectors.AI.OpenAI/TextEmbedding/OpenAITextEmbeddingGeneration.cs @@ -43,6 +43,7 @@ public Task>> GenerateEmbeddingsAsync( IList data, CancellationToken cancellationToken = default) { + this.LogActionDetails(); return this.InternalGetEmbeddingsAsync(data, cancellationToken); } } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/AzureCognitiveSearchMemory.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/AzureCognitiveSearchMemory.cs deleted file mode 100644 index 3403ca1a7f9c..000000000000 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/AzureCognitiveSearchMemory.cs +++ /dev/null @@ -1,373 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Concurrent; -using System.Collections.Generic; -using System.Net.Http; -using System.Runtime.CompilerServices; -using System.Text; -using System.Text.RegularExpressions; -using System.Threading; -using System.Threading.Tasks; -using Azure; -using Azure.Core; -using Azure.Core.Pipeline; -using Azure.Search.Documents; -using Azure.Search.Documents.Indexes; -using Azure.Search.Documents.Indexes.Models; -using Azure.Search.Documents.Models; -using Microsoft.SemanticKernel.Memory; - -namespace Microsoft.SemanticKernel.Connectors.Memory.AzureCognitiveSearch; - -/// -/// Semantic Memory implementation using Azure Cognitive Search. 
-/// For more information about Azure Cognitive Search see https://learn.microsoft.com/azure/search/search-what-is-azure-search -/// -public class AzureCognitiveSearchMemory : ISemanticTextMemory -{ - private readonly SearchIndexClient _adminClient; - - private readonly ConcurrentDictionary _clientsByIndex = new(); - - /// - /// Create a new instance of semantic memory using Azure Cognitive Search. - /// - /// Azure Cognitive Search URI, e.g. "https://contoso.search.windows.net" - /// The Api key used to authenticate requests against the Search service. - /// Custom for HTTP requests. - public AzureCognitiveSearchMemory(string endpoint, string apiKey, HttpClient? httpClient = null) - { - var options = new SearchClientOptions(); - - if (httpClient != null) - { - options.Transport = new HttpClientTransport(httpClient); - } - - AzureKeyCredential credentials = new(apiKey); - this._adminClient = new SearchIndexClient(new Uri(endpoint), credentials, options); - } - - /// - /// Create a new instance of semantic memory using Azure Cognitive Search. - /// - /// Azure Cognitive Search URI, e.g. "https://contoso.search.windows.net" - /// The token credential used to authenticate requests against the Search service. - /// Custom for HTTP requests. - public AzureCognitiveSearchMemory(string endpoint, TokenCredential credentials, HttpClient? httpClient = null) - { - var options = new SearchClientOptions(); - - if (httpClient != null) - { - options.Transport = new HttpClientTransport(httpClient); - } - - this._adminClient = new SearchIndexClient(new Uri(endpoint), credentials, options); - } - - /// - public Task SaveInformationAsync( - string collection, - string text, - string id, - string? description = null, - string? 
additionalMetadata = null, - CancellationToken cancellationToken = default) - { - collection = NormalizeIndexName(collection); - - AzureCognitiveSearchRecord record = new() - { - Id = EncodeId(id), - Text = text, - Description = description, - AdditionalMetadata = additionalMetadata, - IsReference = false, - }; - - return this.UpsertRecordAsync(collection, record, cancellationToken); - } - - /// - public Task SaveReferenceAsync( - string collection, - string text, - string externalId, - string externalSourceName, - string? description = null, - string? additionalMetadata = null, - CancellationToken cancellationToken = default) - { - collection = NormalizeIndexName(collection); - - AzureCognitiveSearchRecord record = new() - { - Id = EncodeId(externalId), - Text = text, - Description = description, - AdditionalMetadata = additionalMetadata, - ExternalSourceName = externalSourceName, - IsReference = true, - }; - - return this.UpsertRecordAsync(collection, record, cancellationToken); - } - - /// - public async Task GetAsync( - string collection, - string key, - bool withEmbedding = false, - CancellationToken cancellationToken = default) - { - collection = NormalizeIndexName(collection); - - var client = this.GetSearchClient(collection); - - Response? 
result; - try - { - result = await client - .GetDocumentAsync(EncodeId(key), cancellationToken: cancellationToken) - .ConfigureAwait(false); - } - catch (RequestFailedException e) when (e.Status == 404) - { - // Index not found, no data to return - return null; - } - - if (result?.Value == null) - { - throw new AzureCognitiveSearchMemoryException("Memory read returned null"); - } - - return new MemoryQueryResult(ToMemoryRecordMetadata(result.Value), 1, null); - } - - /// - public async IAsyncEnumerable SearchAsync( - string collection, - string query, - int limit = 1, - double minRelevanceScore = 0.7, - bool withEmbeddings = false, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - collection = NormalizeIndexName(collection); - - var client = this.GetSearchClient(collection); - - // TODO: use vectors - var options = new SearchOptions - { - QueryType = SearchQueryType.Semantic, - SemanticConfigurationName = "default", - QueryLanguage = "en-us", - Size = limit, - }; - - Response>? searchResult = null; - try - { - searchResult = await client - .SearchAsync(query, options, cancellationToken: cancellationToken) - .ConfigureAwait(false); - } - catch (RequestFailedException e) when (e.Status == 404) - { - // Index not found, no data to return - } - - if (searchResult != null) - { - await foreach (SearchResult? doc in searchResult.Value.GetResultsAsync()) - { - if (doc.RerankerScore < minRelevanceScore) { break; } - - yield return new MemoryQueryResult(ToMemoryRecordMetadata(doc.Document), doc.RerankerScore ?? 
1, null); - } - } - } - - /// - public async Task RemoveAsync(string collection, string key, CancellationToken cancellationToken = default) - { - collection = NormalizeIndexName(collection); - - var records = new List { new() { Id = EncodeId(key) } }; - - var client = this.GetSearchClient(collection); - try - { - await client.DeleteDocumentsAsync(records, cancellationToken: cancellationToken).ConfigureAwait(false); - } - catch (RequestFailedException e) when (e.Status == 404) - { - // Index not found, no data to delete - } - } - - /// - public async Task> GetCollectionsAsync(CancellationToken cancellationToken = default) - { - ConfiguredCancelableAsyncEnumerable indexes = this._adminClient.GetIndexesAsync(cancellationToken).ConfigureAwait(false); - - var result = new List(); - await foreach (var index in indexes) - { - result.Add(index.Name); - } - - return result; - } - - #region private ================================================================================ - - /// - /// Index names cannot contain special chars. We use this rule to replace a few common ones - /// with an underscore and reduce the chance of errors. If other special chars are used, we leave it - /// to the service to throw an error. - /// Note: - /// - replacing chars introduces a small chance of conflicts, e.g. "the-user" and "the_user". - /// - we should consider whether making this optional and leave it to the developer to handle. - /// - private static readonly Regex s_replaceIndexNameSymbolsRegex = new(@"[\s|\\|/|.|_|:]"); - - /// - /// Get a search client for the index specified. - /// Note: the index might not exist, but we avoid checking everytime and the extra latency. 
- /// - /// Index name - /// Search client ready to read/write - private SearchClient GetSearchClient(string indexName) - { - // Search an available client from the local cache - if (!this._clientsByIndex.TryGetValue(indexName, out SearchClient client)) - { - client = this._adminClient.GetSearchClient(indexName); - this._clientsByIndex[indexName] = client; - } - - return client; - } - - /// - /// Create a new search index. - /// - /// Index name - /// Task cancellation token - private Task> CreateIndexAsync( - string indexName, - CancellationToken cancellationToken = default) - { - var fieldBuilder = new FieldBuilder(); - var fields = fieldBuilder.Build(typeof(AzureCognitiveSearchRecord)); - var newIndex = new SearchIndex(indexName, fields) - { - SemanticSettings = new SemanticSettings - { - Configurations = - { - // TODO: replace with vector search - new SemanticConfiguration("default", new PrioritizedFields - { - TitleField = new SemanticField { FieldName = "Description" }, - ContentFields = - { - new SemanticField { FieldName = "Text" }, - new SemanticField { FieldName = "AdditionalMetadata" }, - } - }) - } - } - }; - - return this._adminClient.CreateIndexAsync(newIndex, cancellationToken); - } - - private async Task UpsertRecordAsync( - string indexName, - AzureCognitiveSearchRecord record, - CancellationToken cancellationToken = default) - { - var client = this.GetSearchClient(indexName); - - Task> UpsertCode() => client - .MergeOrUploadDocumentsAsync(new List { record }, - new IndexDocumentsOptions { ThrowOnAnyError = true }, - cancellationToken); - - Response? 
result; - try - { - result = await UpsertCode().ConfigureAwait(false); - } - catch (RequestFailedException e) when (e.Status == 404) - { - await this.CreateIndexAsync(indexName, cancellationToken).ConfigureAwait(false); - result = await UpsertCode().ConfigureAwait(false); - } - - if (result == null || result.Value.Results.Count == 0) - { - throw new AzureCognitiveSearchMemoryException("Memory write returned null or an empty set"); - } - - return result.Value.Results[0].Key; - } - - private static MemoryRecordMetadata ToMemoryRecordMetadata(AzureCognitiveSearchRecord data) - { - return new MemoryRecordMetadata( - isReference: data.IsReference, - id: DecodeId(data.Id), - text: data.Text ?? string.Empty, - description: data.Description ?? string.Empty, - externalSourceName: data.ExternalSourceName, - additionalMetadata: data.AdditionalMetadata ?? string.Empty); - } - - /// - /// Normalize index name to match ACS rules. - /// The method doesn't handle all the error scenarios, leaving it to the service - /// to throw an error for edge cases not handled locally. - /// - /// Value to normalize - /// Normalized name - private static string NormalizeIndexName(string indexName) - { - if (indexName.Length > 128) - { - throw new AzureCognitiveSearchMemoryException("The collection name is too long, it cannot exceed 128 chars"); - } - -#pragma warning disable CA1308 // The service expects a lowercase string - indexName = indexName.ToLowerInvariant(); -#pragma warning restore CA1308 - - return s_replaceIndexNameSymbolsRegex.Replace(indexName.Trim(), "-"); - } - - /// - /// ACS keys can contain only letters, digits, underscore, dash, equal sign, recommending - /// to encode values with a URL-safe algorithm. 
- /// - /// Original Id - /// Encoded id - private static string EncodeId(string realId) - { - var bytes = Encoding.UTF8.GetBytes(realId); - return Convert.ToBase64String(bytes); - } - - private static string DecodeId(string encodedId) - { - var bytes = Convert.FromBase64String(encodedId); - return Encoding.UTF8.GetString(bytes); - } - - #endregion -} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/AzureCognitiveSearchMemoryException.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/AzureCognitiveSearchMemoryException.cs index 17f936b46c3a..a21202fa1dff 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/AzureCognitiveSearchMemoryException.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/AzureCognitiveSearchMemoryException.cs @@ -12,19 +12,69 @@ namespace Microsoft.SemanticKernel.Connectors.Memory.AzureCognitiveSearch; public class AzureCognitiveSearchMemoryException : Exception { /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the class with a provided error code and message. /// - /// Exception message. - public AzureCognitiveSearchMemoryException(string? message) : base(message) + /// The error code. + /// The exception message. + public AzureCognitiveSearchMemoryException(ErrorCodes errorCode, string? message) + : this(errorCode, message, innerException: null) { } /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the class with a provided error code, message, and inner exception. /// - /// Exception message. - /// Inner exception. - public AzureCognitiveSearchMemoryException(string? message, Exception? innerException) : base(message, innerException) + /// The error code. + /// A string that describes the error. + /// The exception that is the cause of the current exception. + public AzureCognitiveSearchMemoryException(ErrorCodes errorCode, string? message, Exception? 
innerException) + : base(GetDefaultMessage(errorCode, message, innerException), innerException) { + this.ErrorCode = errorCode; + } + + /// + /// Gets the error code for this exception. + /// + public ErrorCodes ErrorCode { get; } + + /// Translate the error code into a default message. + private static string GetDefaultMessage(ErrorCodes errorCode, string? message, Exception? innerException) + { + if (message is not null) { return message; } + + var description = errorCode.ToString("G"); + return innerException is not null ? $"{description}: {innerException.Message}" : description; + } + + /// + /// Error codes for the Qdrant connector exceptions. + /// + public enum ErrorCodes + { + /// + /// Unknown error. + /// + UnknownError = -1, + + /// + /// Invalid embedding size, the value must be greater than zero + /// + InvalidEmbeddingSize, + + /// + /// Invalid index name + /// + InvalidIndexName, + + /// + /// Read failure + /// + ReadFailure, + + /// + /// Write failure + /// + WriteFailure, } } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/AzureCognitiveSearchMemoryRecord.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/AzureCognitiveSearchMemoryRecord.cs new file mode 100644 index 000000000000..40ece7daea7f --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/AzureCognitiveSearchMemoryRecord.cs @@ -0,0 +1,152 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Text.Json.Serialization; +using Microsoft.SemanticKernel.AI.Embeddings; +using Microsoft.SemanticKernel.Memory; + +namespace Microsoft.SemanticKernel.Connectors.Memory.AzureCognitiveSearch; + +/// +/// Azure Cognitive Search record and index definition. +/// Note: once defined, index cannot be modified. 
+/// +public class AzureCognitiveSearchMemoryRecord +{ + public const string IdField = "Id"; + public const string TextField = "Text"; + public const string EmbeddingField = "Embedding"; + public const string ExternalSourceNameField = "ExternalSourceName"; + public const string DescriptionField = "Description"; + public const string AdditionalMetadataField = "AdditionalMetadata"; + public const string IsReferenceField = "IsReference"; + + /// + /// Record Id. + /// The record is not filterable to save quota, also SK uses only semantic search. + /// + [JsonPropertyName(IdField)] + public string Id { get; set; } = string.Empty; + + /// + /// Content is stored here. + /// + [JsonPropertyName(TextField)] + public string? Text { get; set; } = string.Empty; + + /// + /// Content embedding + /// + [JsonPropertyName(EmbeddingField)] + public List Embedding { get; set; } = Array.Empty().ToList(); + + /// + /// Optional description of the content, e.g. a title. This can be useful when + /// indexing external data without pulling in the entire content. + /// + [JsonPropertyName(DescriptionField)] + public string? Description { get; set; } = string.Empty; + + /// + /// Additional metadata. Currently this is a string, where you could store serialized data as JSON. + /// In future the design might change to allow storing named values and leverage filters. + /// + [JsonPropertyName(AdditionalMetadataField)] + public string? AdditionalMetadata { get; set; } = string.Empty; + + /// + /// Name of the external source, in cases where the content and the Id are + /// referenced to external information. + /// + [JsonPropertyName(ExternalSourceNameField)] + public string ExternalSourceName { get; set; } = string.Empty; + + /// + /// Whether the record references external information. 
+ /// + [JsonPropertyName(IsReferenceField)] + public bool IsReference { get; set; } = false; + + /// + /// Ctor required by JSON deserializer + /// + public AzureCognitiveSearchMemoryRecord() + { + } + + public AzureCognitiveSearchMemoryRecord(string id) + { + this.Id = EncodeId(id); + } + + public AzureCognitiveSearchMemoryRecord( + string id, + string text, + string externalSourceName, + bool isReference, + Embedding embedding, + string? description = null, + string? additionalMetadata = null) + { + this.Id = EncodeId(id); + this.IsReference = isReference; + this.Embedding = embedding.Vector.ToList(); + this.Text = text; + this.ExternalSourceName = externalSourceName; + this.Description = description; + this.AdditionalMetadata = additionalMetadata; + } + + public MemoryRecordMetadata ToMemoryRecordMetadata() + { + return new MemoryRecordMetadata( + isReference: this.IsReference, + id: DecodeId(this.Id), + text: this.Text ?? string.Empty, + description: this.Description ?? string.Empty, + externalSourceName: this.ExternalSourceName, + additionalMetadata: this.AdditionalMetadata ?? string.Empty); + } + + public static AzureCognitiveSearchMemoryRecord FromMemoryRecord(MemoryRecord record) + { + return new AzureCognitiveSearchMemoryRecord( + id: record.Metadata.Id, + text: record.Metadata.Text, + externalSourceName: string.Empty, + isReference: record.Metadata.IsReference, + description: record.Metadata.Description, + additionalMetadata: record.Metadata.AdditionalMetadata, + embedding: record.Embedding + ); + } + + public MemoryRecord ToMemoryRecord(bool withEmbeddings = true) + { + return new MemoryRecord( + metadata: this.ToMemoryRecordMetadata(), + embedding: new Embedding(withEmbeddings ? this.Embedding : Array.Empty()), + key: this.Id); + } + + /// + /// ACS keys can contain only letters, digits, underscore, dash, equal sign, recommending + /// to encode values with a URL-safe algorithm. 
+ /// + /// Original Id + /// Encoded id + protected internal static string EncodeId(string realId) + { + var bytes = Encoding.UTF8.GetBytes(realId); + return Convert.ToBase64String(bytes); + } + + private protected static string DecodeId(string encodedId) + { + var bytes = Convert.FromBase64String(encodedId); + return Encoding.UTF8.GetString(bytes); + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/AzureCognitiveSearchMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/AzureCognitiveSearchMemoryStore.cs new file mode 100644 index 000000000000..e007c5c0a4ad --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/AzureCognitiveSearchMemoryStore.cs @@ -0,0 +1,433 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Text.RegularExpressions; +using System.Threading; +using System.Threading.Tasks; +using Azure; +using Azure.Core; +using Azure.Search.Documents; +using Azure.Search.Documents.Indexes; +using Azure.Search.Documents.Indexes.Models; +using Azure.Search.Documents.Models; +using Microsoft.SemanticKernel.AI.Embeddings; +using Microsoft.SemanticKernel.Memory; + +namespace Microsoft.SemanticKernel.Connectors.Memory.AzureCognitiveSearch; + +public class AzureCognitiveSearchMemoryStore : IMemoryStore +{ + // Note: Azure max length 24 chars + private const string UserAgent = "Semantic-Kernel"; + + /// + /// Create a new instance of memory storage using Azure Cognitive Search. + /// + /// Azure Cognitive Search URI, e.g. 
"https://contoso.search.windows.net" + /// API Key + public AzureCognitiveSearchMemoryStore(string endpoint, string apiKey) + { + AzureKeyCredential credentials = new(apiKey); + this._adminClient = new SearchIndexClient(new Uri(endpoint), credentials, ClientOptions()); + } + + /// + /// Create a new instance of memory storage using Azure Cognitive Search. + /// + /// Azure Cognitive Search URI, e.g. "https://contoso.search.windows.net" + /// Azure service + public AzureCognitiveSearchMemoryStore(string endpoint, TokenCredential credentials) + { + this._adminClient = new SearchIndexClient(new Uri(endpoint), credentials, ClientOptions()); + } + + /// + public Task CreateCollectionAsync(string collectionName, CancellationToken cancellationToken = default) + { + // Indexes are created when sending a record. The creation requires the size of the embeddings. + return Task.CompletedTask; + } + + /// + public IAsyncEnumerable GetCollectionsAsync(CancellationToken cancellationToken = default) + { + return this.GetIndexesAsync(cancellationToken); + } + + /// + public async Task DoesCollectionExistAsync(string collectionName, CancellationToken cancellationToken = default) + { + string normalizeIndexName = this.NormalizeIndexName(collectionName); + + return await this.GetIndexesAsync(cancellationToken) + .AnyAsync(index => string.Equals(index, collectionName, StringComparison.OrdinalIgnoreCase) + || string.Equals(index, normalizeIndexName, StringComparison.OrdinalIgnoreCase), + cancellationToken: cancellationToken).ConfigureAwait(false); + } + + /// + public Task DeleteCollectionAsync(string collectionName, CancellationToken cancellationToken = default) + { + string normalizeIndexName = this.NormalizeIndexName(collectionName); + + return this._adminClient.DeleteIndexAsync(normalizeIndexName, cancellationToken); + } + + /// + public Task UpsertAsync(string collectionName, MemoryRecord record, CancellationToken cancellationToken = default) + { + collectionName = 
this.NormalizeIndexName(collectionName); + return this.UpsertRecordAsync(collectionName, AzureCognitiveSearchMemoryRecord.FromMemoryRecord(record), cancellationToken); + } + + /// + public async IAsyncEnumerable UpsertBatchAsync(string collectionName, IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + collectionName = this.NormalizeIndexName(collectionName); + IList searchRecords = records.Select(AzureCognitiveSearchMemoryRecord.FromMemoryRecord).ToList(); + List result = await this.UpsertBatchAsync(collectionName, searchRecords, cancellationToken).ConfigureAwait(false); + foreach (var x in result) { yield return x; } + } + + /// + public async Task GetAsync(string collectionName, string key, bool withEmbedding = false, CancellationToken cancellationToken = default) + { + collectionName = this.NormalizeIndexName(collectionName); + var client = this.GetSearchClient(collectionName); + Response? result; + try + { + result = await client + .GetDocumentAsync(AzureCognitiveSearchMemoryRecord.EncodeId(key), cancellationToken: cancellationToken) + .ConfigureAwait(false); + } + catch (RequestFailedException e) when (e.Status == 404) + { + // Index not found, no data to return + return null; + } + + if (result?.Value == null) + { + throw new AzureCognitiveSearchMemoryException( + AzureCognitiveSearchMemoryException.ErrorCodes.ReadFailure, + "Memory read returned null"); + } + + return result.Value.ToMemoryRecord(); + } + + /// + public async IAsyncEnumerable GetBatchAsync( + string collectionName, + IEnumerable keys, + bool withEmbeddings = false, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + foreach (var key in keys) + { + var record = await this.GetAsync(collectionName, key, withEmbeddings, cancellationToken).ConfigureAwait(false); + if (record != null) { yield return record; } + } + } + + /// + public async Task<(MemoryRecord, double)?> GetNearestMatchAsync( + string collectionName, + 
Embedding embedding, + double minRelevanceScore = 0, + bool withEmbedding = false, + CancellationToken cancellationToken = default) + { + return await this.GetNearestMatchesAsync(collectionName, embedding, 1, minRelevanceScore, withEmbedding, cancellationToken) + .FirstOrDefaultAsync(cancellationToken) + .ConfigureAwait(false); + } + + /// + public async IAsyncEnumerable<(MemoryRecord, double)> GetNearestMatchesAsync( + string collectionName, + Embedding embedding, + int limit, + double minRelevanceScore = 0, + bool withEmbeddings = false, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + collectionName = this.NormalizeIndexName(collectionName); + + var client = this.GetSearchClient(collectionName); + + SearchQueryVector vectorQuery = new() + { + KNearestNeighborsCount = limit, + Fields = AzureCognitiveSearchMemoryRecord.EmbeddingField, + Value = embedding.Vector.ToList() + }; + + SearchOptions options = new() { Vector = vectorQuery }; + Response>? searchResult = null; + try + { + searchResult = await client + .SearchAsync(null, options, cancellationToken: cancellationToken) + .ConfigureAwait(false); + } + catch (RequestFailedException e) when (e.Status == 404) + { + // Index not found, no data to return + } + + if (searchResult == null) { yield break; } + + await foreach (SearchResult? doc in searchResult.Value.GetResultsAsync()) + { + if (doc == null || doc.Score < minRelevanceScore) { continue; } + + MemoryRecord memoryRecord = doc.Document.ToMemoryRecord(withEmbeddings); + + yield return (memoryRecord, doc.Score ?? 
0); + } + } + + /// + public Task RemoveAsync(string collectionName, string key, CancellationToken cancellationToken = default) + { + return this.RemoveBatchAsync(collectionName, new[] { key }, cancellationToken); + } + + /// + public async Task RemoveBatchAsync(string collectionName, IEnumerable keys, CancellationToken cancellationToken = default) + { + collectionName = this.NormalizeIndexName(collectionName); + + var records = keys.Select(x => new List { new(x) }); + + var client = this.GetSearchClient(collectionName); + try + { + await client.DeleteDocumentsAsync(records, cancellationToken: cancellationToken).ConfigureAwait(false); + } + catch (RequestFailedException e) when (e.Status == 404) + { + // Index not found, no data to delete + } + } + + #region private + + /// + /// Index names cannot contain special chars. We use this rule to replace a few common ones + /// with an underscore and reduce the chance of errors. If other special chars are used, we leave it + /// to the service to throw an error. + /// Note: + /// - replacing chars introduces a small chance of conflicts, e.g. "the-user" and "the_user". + /// - we should consider whether making this optional and leave it to the developer to handle. + /// + private static readonly Regex s_replaceIndexNameSymbolsRegex = new(@"[\s|\\|/|.|_|:]"); + + private readonly ConcurrentDictionary _clientsByIndex = new(); + + private readonly SearchIndexClient _adminClient; + + /// + /// Create a new search index. 
+ /// + /// Index name + /// Size of the embedding vector + /// Task cancellation token + private Task> CreateIndexAsync( + string indexName, + int embeddingSize, + CancellationToken cancellationToken = default) + { + if (embeddingSize < 1) + { + throw new AzureCognitiveSearchMemoryException( + AzureCognitiveSearchMemoryException.ErrorCodes.InvalidEmbeddingSize, + "Invalid embedding size: the value must be greater than zero."); + } + + var configName = "searchConfig"; + var newIndex = new SearchIndex(indexName) + { + Fields = new List + { + new SimpleField(AzureCognitiveSearchMemoryRecord.IdField, SearchFieldDataType.String) { IsKey = true }, + new SearchField(AzureCognitiveSearchMemoryRecord.EmbeddingField, SearchFieldDataType.Collection(SearchFieldDataType.Single)) + { + IsSearchable = true, + VectorSearchDimensions = embeddingSize, + VectorSearchConfiguration = configName + }, + new SearchField(AzureCognitiveSearchMemoryRecord.TextField, SearchFieldDataType.String) { IsFilterable = true, IsFacetable = true }, + new SimpleField(AzureCognitiveSearchMemoryRecord.DescriptionField, SearchFieldDataType.String) { IsFilterable = true, IsFacetable = true }, + new SimpleField(AzureCognitiveSearchMemoryRecord.AdditionalMetadataField, SearchFieldDataType.String) { IsFilterable = true, IsFacetable = true }, + new SimpleField(AzureCognitiveSearchMemoryRecord.ExternalSourceNameField, SearchFieldDataType.String) { IsFilterable = true, IsFacetable = true }, + new SimpleField(AzureCognitiveSearchMemoryRecord.IsReferenceField, SearchFieldDataType.Boolean) { IsFilterable = true, IsFacetable = true }, + }, + VectorSearch = new VectorSearch + { + AlgorithmConfigurations = + { + new HnswVectorSearchAlgorithmConfiguration(configName) + { + Parameters = new HnswParameters { Metric = VectorSearchAlgorithmMetric.Cosine } + } + } + } + }; + + return this._adminClient.CreateIndexAsync(newIndex, cancellationToken); + } + + private async IAsyncEnumerable 
GetIndexesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + { + var indexes = this._adminClient.GetIndexesAsync(cancellationToken).ConfigureAwait(false); + await foreach (SearchIndex? index in indexes) + { + yield return index.Name; + } + } + + private async Task UpsertRecordAsync( + string indexName, + AzureCognitiveSearchMemoryRecord record, + CancellationToken cancellationToken = default) + { + var list = await this.UpsertBatchAsync(indexName, new List { record }, cancellationToken).ConfigureAwait(false); + return list.First(); + } + + private async Task> UpsertBatchAsync( + string indexName, + IList records, + CancellationToken cancellationToken = default) + { + var keys = new List(); + + if (records.Count < 1) { return keys; } + + var embeddingSize = records[0].Embedding.Count; + + var client = this.GetSearchClient(indexName); + + Task> UpsertCode() + { + return client.IndexDocumentsAsync( + IndexDocumentsBatch.Upload(records), + new IndexDocumentsOptions { ThrowOnAnyError = true }, + cancellationToken: cancellationToken); + } + + Response? result; + try + { + result = await UpsertCode().ConfigureAwait(false); + } + catch (RequestFailedException e) when (e.Status == 404) + { + await this.CreateIndexAsync(indexName, embeddingSize, cancellationToken).ConfigureAwait(false); + result = await UpsertCode().ConfigureAwait(false); + } + + if (result == null || result.Value.Results.Count == 0) + { + throw new AzureCognitiveSearchMemoryException( + AzureCognitiveSearchMemoryException.ErrorCodes.WriteFailure, + "Memory write returned null or an empty set"); + } + + return result.Value.Results.Select(x => x.Key).ToList(); + } + + /// + /// Normalize index name to match ACS rules. + /// The method doesn't handle all the error scenarios, leaving it to the service + /// to throw an error for edge cases not handled locally. 
+ /// + /// Value to normalize + /// Normalized name + private string NormalizeIndexName(string indexName) + { + if (indexName.Length > 128) + { + throw new AzureCognitiveSearchMemoryException( + AzureCognitiveSearchMemoryException.ErrorCodes.InvalidIndexName, + "The collection name is too long, it cannot exceed 128 chars."); + } + +#pragma warning disable CA1308 // The service expects a lowercase string + indexName = indexName.ToLowerInvariant(); +#pragma warning restore CA1308 + + return s_replaceIndexNameSymbolsRegex.Replace(indexName.Trim(), "-"); + } + + /// + /// Get a search client for the index specified. + /// Note: the index might not exist, but we avoid checking everytime and the extra latency. + /// + /// Index name + /// Search client ready to read/write + private SearchClient GetSearchClient(string indexName) + { + // Search an available client from the local cache + if (!this._clientsByIndex.TryGetValue(indexName, out SearchClient client)) + { + client = this._adminClient.GetSearchClient(indexName); + this._clientsByIndex[indexName] = client; + } + + return client; + } + + /// + /// Options used by the Azure Cognitive Search client, e.g. User Agent. + /// See also https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/src/DiagnosticsOptions.cs + /// + private static SearchClientOptions ClientOptions() + { + return new SearchClientOptions + { + Diagnostics = + { + IsTelemetryEnabled = IsTelemetryEnabled(), + ApplicationId = UserAgent, + }, + }; + } + + /// + /// Source: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/src/DiagnosticsOptions.cs + /// + private static bool IsTelemetryEnabled() + { + return !EnvironmentVariableToBool(Environment.GetEnvironmentVariable("AZURE_TELEMETRY_DISABLED")) ?? true; + } + + /// + /// Source: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/src/DiagnosticsOptions.cs + /// + private static bool? EnvironmentVariableToBool(string? 
value) + { + if (string.Equals(bool.TrueString, value, StringComparison.OrdinalIgnoreCase) || + string.Equals("1", value, StringComparison.OrdinalIgnoreCase)) + { + return true; + } + + if (string.Equals(bool.FalseString, value, StringComparison.OrdinalIgnoreCase) || + string.Equals("0", value, StringComparison.OrdinalIgnoreCase)) + { + return false; + } + + return null; + } + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/AzureCognitiveSearchRecord.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/AzureCognitiveSearchRecord.cs deleted file mode 100644 index ab3e5678d81d..000000000000 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/AzureCognitiveSearchRecord.cs +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Azure.Search.Documents.Indexes; -using Azure.Search.Documents.Indexes.Models; - -namespace Microsoft.SemanticKernel.Connectors.Memory.AzureCognitiveSearch; - -/// -/// Azure Cognitive Search record and index definition. -/// Note: once defined, index cannot be modified. -/// -public class AzureCognitiveSearchRecord -{ - /// - /// Record Id. - /// The record is not filterable to save quota, also SK uses only semantic search. - /// - [SimpleField(IsKey = true, IsFilterable = false)] - public string Id { get; set; } = string.Empty; - - /// - /// Content is stored here. - /// - [SearchableField(AnalyzerName = LexicalAnalyzerName.Values.StandardLucene)] - public string? Text { get; set; } = string.Empty; - - /// - /// Optional description of the content, e.g. a title. This can be useful when - /// indexing external data without pulling in the entire content. - /// - [SearchableField(AnalyzerName = LexicalAnalyzerName.Values.StandardLucene)] - public string? Description { get; set; } = string.Empty; - - /// - /// Additional metadata. Currently this is a string, where you could store serialized data as JSON. 
- /// In future the design might change to allow storing named values and leverage filters. - /// - [SearchableField(AnalyzerName = LexicalAnalyzerName.Values.StandardLucene)] - public string? AdditionalMetadata { get; set; } = string.Empty; - - /// - /// Name of the external source, in cases where the content and the Id are - /// referenced to external information. - /// - [SimpleField(IsFilterable = false)] - public string ExternalSourceName { get; set; } = string.Empty; - - /// - /// Whether the record references external information. - /// - [SimpleField(IsFilterable = false)] - public bool IsReference { get; set; } = false; - - // TODO: add one more field with the vector, float array, mark it as searchable -} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/AzureSearchServiceKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/AzureSearchServiceKernelBuilderExtensions.cs deleted file mode 100644 index 9d3c9266b837..000000000000 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/AzureSearchServiceKernelBuilderExtensions.cs +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Net.Http; -using Azure.Core; -using Microsoft.SemanticKernel.Connectors.Memory.AzureCognitiveSearch; - -#pragma warning disable IDE0130 -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -/// -/// Provides extension methods for the class to configure Azure Cognitive Search connectors. -/// -public static class AzureSearchServiceKernelBuilderExtensions -{ - /// - /// Registers Azure Cognitive Search Memory Store. - /// - /// The instance - /// Azure Cognitive Search URI, e.g. "https://contoso.search.windows.net" - /// The Api key used to authenticate requests against the Search service. - /// Custom for HTTP requests. 
- /// Self instance - public static KernelBuilder WithAzureCognitiveSearchMemory(this KernelBuilder builder, - string endpoint, - string apiKey, - HttpClient? httpClient = null) - { - builder.WithMemory((parameters) => - { - return new AzureCognitiveSearchMemory( - endpoint, - apiKey, - HttpClientProvider.GetHttpClient(parameters.Config, httpClient, parameters.Logger)); - }); - - return builder; - } - - /// - /// Registers Azure Cognitive Search Memory Store. - /// - /// The instance - /// Azure Cognitive Search URI, e.g. "https://contoso.search.windows.net" - /// The token credential used to authenticate requests against the Search service. - /// Custom for HTTP requests. - /// Self instance - public static KernelBuilder WithAzureCognitiveSearchMemory(this KernelBuilder builder, - string endpoint, - TokenCredential credentials, - HttpClient? httpClient = null) - { - builder.WithMemory((parameters) => - { - return new AzureCognitiveSearchMemory( - endpoint, - credentials, - HttpClientProvider.GetHttpClient(parameters.Config, httpClient, parameters.Logger)); - }); - - return builder; - } -} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/Connectors.Memory.AzureCognitiveSearch.csproj b/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/Connectors.Memory.AzureCognitiveSearch.csproj index e69ff7127129..a9e9143a793c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/Connectors.Memory.AzureCognitiveSearch.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/Connectors.Memory.AzureCognitiveSearch.csproj @@ -15,6 +15,7 @@ + Microsoft.SemanticKernel.Connectors.Memory.AzureCognitiveSearch Semantic Kernel - Azure Cognitive Search Semantic Memory Azure Cognitive Search Semantic Memory connector for Semantic Kernel diff --git a/dotnet/src/Connectors/Connectors.Memory.Chroma/ChromaBooleanConverter.cs b/dotnet/src/Connectors/Connectors.Memory.Chroma/ChromaBooleanConverter.cs new file mode 100644 index 
000000000000..6c4a9fc8fa42 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Chroma/ChromaBooleanConverter.cs @@ -0,0 +1,30 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.Memory.Chroma; + +/// +/// JSON Converter for Chroma boolean values. +/// +public class ChromaBooleanConverter : JsonConverter +{ + /// + public override bool Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + if (!reader.TryGetInt16(out short value)) + { + return false; + } + + return Convert.ToBoolean(value); + } + + /// + public override void Write(Utf8JsonWriter writer, bool value, JsonSerializerOptions options) + { + writer.WriteNumberValue(Convert.ToDecimal(value)); + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Chroma/ChromaClientException.cs b/dotnet/src/Connectors/Connectors.Memory.Chroma/ChromaClientException.cs index 7568718b5191..535e7d3a3afb 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Chroma/ChromaClientException.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Chroma/ChromaClientException.cs @@ -11,7 +11,6 @@ namespace Microsoft.SemanticKernel.Connectors.Memory.Chroma; public class ChromaClientException : Exception { private const string CollectionDoesNotExistErrorFormat = "Collection {0} does not exist"; - private const string DeleteNonExistentCollectionErrorMessage = "list index out of range"; /// /// Initializes a new instance of the class. @@ -43,10 +42,4 @@ public ChromaClientException(string message, Exception innerException) : base(me /// Collection name. public bool CollectionDoesNotExistException(string collectionName) => this.Message.Contains(string.Format(CultureInfo.InvariantCulture, CollectionDoesNotExistErrorFormat, collectionName)); - - /// - /// Checks if Chroma API error means that there was an attempt to delete non-existent collection. 
- /// - public bool DeleteNonExistentCollectionException() => - this.Message.Contains(DeleteNonExistentCollectionErrorMessage); } diff --git a/dotnet/src/Connectors/Connectors.Memory.Chroma/ChromaMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.Chroma/ChromaMemoryStore.cs index 08d80d435eea..723d54330567 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Chroma/ChromaMemoryStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Chroma/ChromaMemoryStore.cs @@ -70,7 +70,7 @@ public async Task DeleteCollectionAsync(string collectionName, CancellationToken { await this._chromaClient.DeleteCollectionAsync(collectionName, cancellationToken).ConfigureAwait(false); } - catch (ChromaClientException e) when (e.DeleteNonExistentCollectionException()) + catch (ChromaClientException e) when (e.CollectionDoesNotExistException(collectionName)) { this._logger.LogError("Cannot delete non-existent collection {0}", collectionName); throw new ChromaMemoryStoreException($"Cannot delete non-existent collection {collectionName}", e); @@ -230,6 +230,11 @@ public async IAsyncEnumerable UpsertBatchAsync(string collectionName, IE private readonly IChromaClient _chromaClient; private readonly List _defaultEmbeddingIncludeTypes = new() { IncludeMetadatas }; + private readonly JsonSerializerOptions _jsonSerializerOptions = new() + { + Converters = { new ChromaBooleanConverter() } + }; + private async Task GetCollectionOrThrowAsync(string collectionName, CancellationToken cancellationToken) { return @@ -292,15 +297,19 @@ private MemoryRecord GetMemoryRecordFromModel(List>? var embeddingsVector = this.GetEmbeddingForMemoryRecord(embeddings, recordIndex); var key = ids?[recordIndex]; - return MemoryRecord.FromJsonMetadata( - json: metadata, + return MemoryRecord.FromMetadata( + metadata: metadata, embedding: embeddingsVector, key: key); } - private string GetMetadataForMemoryRecord(List>? metadatas, int recordIndex) + private MemoryRecordMetadata GetMetadataForMemoryRecord(List>? 
metadatas, int recordIndex) { - return metadatas != null ? JsonSerializer.Serialize(metadatas[recordIndex]) : string.Empty; + var serializedMetadata = metadatas != null ? JsonSerializer.Serialize(metadatas[recordIndex]) : string.Empty; + + return + JsonSerializer.Deserialize(serializedMetadata, this._jsonSerializerOptions) ?? + throw new ChromaMemoryStoreException("Unable to deserialize memory record metadata."); } private Embedding GetEmbeddingForMemoryRecord(List? embeddings, int recordIndex) diff --git a/dotnet/src/Connectors/Connectors.Memory.CosmosDB/CosmosMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.CosmosDB/CosmosMemoryStore.cs index ebfc40d8b04d..3a6d6d9f0a9a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.CosmosDB/CosmosMemoryStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.CosmosDB/CosmosMemoryStore.cs @@ -52,11 +52,11 @@ public static async Task CreateAsync(CosmosClient client, str if (response.StatusCode == HttpStatusCode.Created) { - newStore._log.LogInformation("Created database {0}", newStore._databaseName); + newStore._log.LogDebug("Created database {0}", newStore._databaseName); } else if (response.StatusCode == HttpStatusCode.OK) { - newStore._log.LogInformation("Database {0}", newStore._databaseName); + newStore._log.LogDebug("Database {0}", newStore._databaseName); } else { @@ -84,11 +84,11 @@ public async Task CreateCollectionAsync(string collectionName, CancellationToken if (response.StatusCode == HttpStatusCode.Created) { - this._log.LogInformation("Created collection {0}", collectionName); + this._log.LogDebug("Created collection {0}", collectionName); } else if (response.StatusCode == HttpStatusCode.OK) { - this._log.LogInformation("Collection {0} already exists", collectionName); + this._log.LogDebug("Collection {0} already exists", collectionName); } else { @@ -191,7 +191,7 @@ public async Task UpsertAsync(string collectionName, MemoryRecord record if (response.StatusCode is HttpStatusCode.OK or 
HttpStatusCode.Created) { - this._log.LogInformation("Upserted item to collection {0}", collectionName); + this._log.LogDebug("Upserted item to collection {0}", collectionName); } else { @@ -221,7 +221,7 @@ public async Task RemoveAsync(string collectionName, string key, CancellationTok if (response.StatusCode == HttpStatusCode.OK) { - this._log.LogInformation("Record deleted from {0}", collectionName); + this._log.LogDebug("Record deleted from {0}", collectionName); } else { diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeClient.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeClient.cs index f5fdf2192d0c..8089c1d62cc8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeClient.cs @@ -47,7 +47,7 @@ public PineconeClient(string pineconeEnvironment, string apiKey, ILogger? logger bool includeValues = false, [EnumeratorCancellation] CancellationToken cancellationToken = default) { - this._logger.LogInformation("Searching vectors by id"); + this._logger.LogDebug("Searching vectors by id"); string basePath = await this.GetVectorOperationsApiBasePathAsync(indexName).ConfigureAwait(false); @@ -101,7 +101,7 @@ public PineconeClient(string pineconeEnvironment, string apiKey, ILogger? logger bool includeMetadata = true, [EnumeratorCancellation] CancellationToken cancellationToken = default) { - this._logger.LogInformation("Querying top {0} nearest vectors", query.TopK); + this._logger.LogDebug("Querying top {0} nearest vectors", query.TopK); using HttpRequestMessage request = QueryRequest.QueryIndex(query) .WithMetadata(includeMetadata) @@ -154,7 +154,7 @@ public PineconeClient(string pineconeEnvironment, string apiKey, ILogger? logger Dictionary? 
filter = default, [EnumeratorCancellation] CancellationToken cancellationToken = default) { - this._logger.LogInformation("Searching top {0} nearest vectors with threshold {1}", topK, threshold); + this._logger.LogDebug("Searching top {0} nearest vectors with threshold {1}", topK, threshold); List<(PineconeDocument document, float score)> documents = new(); @@ -203,7 +203,7 @@ public async Task UpsertAsync( string indexNamespace = "", CancellationToken cancellationToken = default) { - this._logger.LogInformation("Upserting vectors"); + this._logger.LogDebug("Upserting vectors"); int totalUpserted = 0; int totalBatches = 0; @@ -239,10 +239,10 @@ public async Task UpsertAsync( totalUpserted += data.UpsertedCount; - this._logger.LogInformation("Upserted batch {0} with {1} vectors", totalBatches, data.UpsertedCount); + this._logger.LogDebug("Upserted batch {0} with {1} vectors", totalBatches, data.UpsertedCount); } - this._logger.LogInformation("Upserted {0} vectors in {1} batches", totalUpserted, totalBatches); + this._logger.LogDebug("Upserted {0} vectors in {1} batches", totalUpserted, totalBatches); return totalUpserted; } @@ -273,7 +273,7 @@ public async Task DeleteAsync( .FromNamespace(indexNamespace) .FilterBy(filter); - this._logger.LogInformation("Delete operation for Index {0}: {1}", indexName, deleteRequest.ToString()); + this._logger.LogDebug("Delete operation for Index {0}: {1}", indexName, deleteRequest.ToString()); string basePath = await this.GetVectorOperationsApiBasePathAsync(indexName).ConfigureAwait(false); @@ -295,7 +295,7 @@ public async Task DeleteAsync( /// public async Task UpdateAsync(string indexName, PineconeDocument document, string indexNamespace = "", CancellationToken cancellationToken = default) { - this._logger.LogInformation("Updating vector: {0}", document.Id); + this._logger.LogDebug("Updating vector: {0}", document.Id); string basePath = await this.GetVectorOperationsApiBasePathAsync(indexName).ConfigureAwait(false); @@ -380,7 
+380,7 @@ public async Task UpdateAsync(string indexName, PineconeDocument document, strin /// public async Task CreateIndexAsync(IndexDefinition indexDefinition, CancellationToken cancellationToken = default) { - this._logger.LogInformation("Creating index {0}", indexDefinition.ToString()); + this._logger.LogDebug("Creating index {0}", indexDefinition.ToString()); string indexName = indexDefinition.Name; @@ -412,7 +412,7 @@ public async Task CreateIndexAsync(IndexDefinition indexDefinition, Cancellation /// public async Task DeleteIndexAsync(string indexName, CancellationToken cancellationToken = default) { - this._logger.LogInformation("Deleting index {0}", indexName); + this._logger.LogDebug("Deleting index {0}", indexName); using HttpRequestMessage request = DeleteIndexRequest.Create(indexName).Build(); @@ -433,13 +433,13 @@ public async Task DeleteIndexAsync(string indexName, CancellationToken cancellat throw; } - this._logger.LogInformation("Index: {0} has been successfully deleted.", indexName); + this._logger.LogDebug("Index: {0} has been successfully deleted.", indexName); } /// public async Task DoesIndexExistAsync(string indexName, CancellationToken cancellationToken = default) { - this._logger.LogInformation("Checking for index {0}", indexName); + this._logger.LogDebug("Checking for index {0}", indexName); List? indexNames = await this.ListIndexesAsync(cancellationToken).ToListAsync(cancellationToken).ConfigureAwait(false); @@ -558,9 +558,14 @@ private string GetIndexOperationsApiBasePath() string responseContent = await response.Content.ReadAsStringAsync().ConfigureAwait(false); - string logMessage = response.IsSuccessStatusCode ? 
"Pinecone responded successfully" : "Pinecone responded with error"; - - this._logger.LogTrace("{0} - {1}", logMessage, responseContent); + if (response.IsSuccessStatusCode) + { + this._logger.LogDebug("Pinecone responded successfully"); + } + else + { + this._logger.LogWarning("Pinecone responded with error"); + } return (response, responseContent); } @@ -572,7 +577,7 @@ private async Task GetIndexHostAsync(string indexName, CancellationToken return indexHost; } - this._logger.LogInformation("Getting index host from Pinecone."); + this._logger.LogDebug("Getting index host from Pinecone."); PineconeIndex? pineconeIndex = await this.DescribeIndexAsync(indexName, cancellationToken).ConfigureAwait(false); @@ -590,7 +595,7 @@ private async Task GetIndexHostAsync(string indexName, CancellationToken $"Host of index {indexName} is unknown."); } - this._logger.LogInformation("Found host {0} for index {1}", pineconeIndex.Status.Host, indexName); + this._logger.LogDebug("Found host {0} for index {1}", pineconeIndex.Status.Host, indexName); this._indexHostMapping.TryAdd(indexName, pineconeIndex.Status.Host); diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantMemoryStore.cs index 38753c57c81e..e6325f4ebe45 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantMemoryStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantMemoryStore.cs @@ -27,20 +27,6 @@ public class QdrantMemoryStore : IMemoryStore /// private readonly ILogger? _logger; - /// - /// Constructor for a memory store backed by a Qdrant Vector Database instance. - /// - /// - /// - /// - /// - [Obsolete("This constructor is deprecated and will be removed in one of the next SK SDK versions. Please use one of the alternative constructors.")] - public QdrantMemoryStore(string host, int port, int vectorSize, ILogger? 
logger = null) - { - this._logger = logger; - this._qdrantClient = new QdrantVectorDbClient(endpoint: host, port: port, vectorSize: vectorSize, log: logger); - } - /// /// Initializes a new instance of the class. /// @@ -77,15 +63,6 @@ public QdrantMemoryStore(IQdrantVectorDbClient client, ILogger? logger = null) this._logger = logger; } - /// - /// Constructor for a memory store backed by a - /// - [Obsolete("This constructor is deprecated and will be removed in one of the next SK SDK versions. Please use one of the alternative constructors.")] - public QdrantMemoryStore(IQdrantVectorDbClient client) - { - this._qdrantClient = client; - } - /// public async Task CreateCollectionAsync(string collectionName, CancellationToken cancellationToken = default) { diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorDbClient.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorDbClient.cs index 0c3e634b2896..bbd70d7cbef3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorDbClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorDbClient.cs @@ -13,7 +13,6 @@ using Microsoft.Extensions.Logging.Abstractions; using Microsoft.SemanticKernel.AI; using Microsoft.SemanticKernel.Connectors.Memory.Qdrant.Diagnostics; -using Microsoft.SemanticKernel.Connectors.Memory.Qdrant.Http; using Microsoft.SemanticKernel.Connectors.Memory.Qdrant.Http.ApiSchema; namespace Microsoft.SemanticKernel.Connectors.Memory.Qdrant; @@ -22,46 +21,10 @@ namespace Microsoft.SemanticKernel.Connectors.Memory.Qdrant; /// An implementation of a client for the Qdrant Vector Database. This class is used to /// connect, create, delete, and get embeddings data from a Qdrant Vector Database instance. /// -#pragma warning disable CA1001 // Types that own disposable fields should be disposable. Explanation - In this case, there is no need to dispose because either the NonDisposableHttpClientHandler or a custom HTTP client is being used. 
+#pragma warning disable CA1001 // Types that own disposable fields should be disposable. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. public sealed class QdrantVectorDbClient : IQdrantVectorDbClient -#pragma warning restore CA1001 // Types that own disposable fields should be disposable. Explanation - In this case, there is no need to dispose because either the NonDisposableHttpClientHandler or a custom HTTP client is being used. +#pragma warning restore CA1001 // Types that own disposable fields should be disposable. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. { - /// - /// The endpoint for the Qdrant service. - /// - [Obsolete("This property is deprecated and will be removed in one of the next SK SDK versions.")] - public string BaseAddress => this._httpClient.BaseAddress.ToString(); - - /// - /// The port for the Qdrant service. - /// - [Obsolete("This property is deprecated and will be removed in one of the next SK SDK versions.")] - public int Port => this._httpClient.BaseAddress.Port; - - /// - /// The constructor for the QdrantVectorDbClient. - /// - /// - /// - /// - /// - /// - [Obsolete("This constructor is deprecated and will be removed in one of the next SK SDK versions. Please use one of the alternative constructors.")] - public QdrantVectorDbClient( - string endpoint, - int vectorSize, - int? port = null, - HttpClient? httpClient = null, - ILogger? log = null) - { - Verify.ArgNotNullOrEmpty(endpoint, "Qdrant endpoint cannot be null or empty"); - - this._vectorSize = vectorSize; - this._logger = log ?? NullLogger.Instance; - this._httpClient = httpClient ?? 
new HttpClient(HttpHandlers.CheckCertificateRevocation); - this._httpClient.BaseAddress = SanitizeEndpoint(endpoint, port); - } - /// /// Initializes a new instance of the class. /// @@ -496,13 +459,14 @@ private static Uri SanitizeEndpoint(string endpoint, int? port = null) HttpResponseMessage response = await this._httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); string responseContent = await response.Content.ReadAsStringAsync().ConfigureAwait(false); + if (response.IsSuccessStatusCode) { - this._logger.LogTrace("Qdrant responded successfully"); + this._logger.LogDebug("Qdrant responded successfully"); } else { - this._logger.LogTrace("Qdrant responded with error"); + this._logger.LogWarning("Qdrant responded with error"); } return (response, responseContent); diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateMemoryStore.cs index 71527bd02dd2..52ceb58a05c9 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateMemoryStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateMemoryStore.cs @@ -32,7 +32,9 @@ namespace Microsoft.SemanticKernel.Connectors.Memory.Weaviate; /// The embedding data persists between subsequent instances and has similarity search capability. /// // ReSharper disable once ClassWithVirtualMembersNeverInherited.Global -public class WeaviateMemoryStore : IMemoryStore, IDisposable +#pragma warning disable CA1001 // Types that own disposable fields should be disposable. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. +public class WeaviateMemoryStore : IMemoryStore +#pragma warning restore CA1001 // Types that own disposable fields should be disposable. No need to dispose the Http client here. 
It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. { /// /// The authorization header name @@ -50,43 +52,10 @@ public class WeaviateMemoryStore : IMemoryStore, IDisposable }; private readonly HttpClient _httpClient; - private readonly bool _isSelfManagedHttpClient; private readonly ILogger _logger; - private bool _disposed; private readonly Uri? _endpoint = null; private string? _apiKey; - /// - /// Constructor for a memory store backed by Weaviate - /// - [Obsolete("This constructor is deprecated and will be removed in one of the next SK SDK versions. Please use one of the alternative constructors.")] - public WeaviateMemoryStore(string scheme, string host, int port, string? apiKey = null, HttpClient? httpClient = null, ILogger? logger = null) - { - Verify.NotNullOrWhiteSpace(scheme); - Verify.NotNullOrWhiteSpace(host, "Host cannot be null or empty"); - - this._logger = logger ?? NullLogger.Instance; - if (httpClient == null) - { - this._httpClient = new(); - this._apiKey = apiKey; - if (!string.IsNullOrEmpty(apiKey)) - { - this._httpClient.DefaultRequestHeaders.Add(AuthorizationHeaderName, apiKey); - } - - // If not passed an HttpClient, then it is the responsibility of this class - // to ensure it is cleared up in the Dispose() method. - this._isSelfManagedHttpClient = true; - } - else - { - this._httpClient = httpClient; - } - - this._httpClient.BaseAddress = new($"{scheme}://{host}:{port}/v1/"); - } - /// /// Initializes a new instance of the class. /// @@ -127,13 +96,6 @@ public WeaviateMemoryStore(HttpClient httpClient, string? apiKey = null, string? 
this._httpClient = httpClient; } - [Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions.")] - public void Dispose() - { - this.Dispose(true); - GC.SuppressFinalize(this); - } - /// public async Task CreateCollectionAsync(string collectionName, CancellationToken cancellationToken = default) { @@ -142,7 +104,7 @@ public async Task CreateCollectionAsync(string collectionName, CancellationToken string className = ToWeaviateFriendlyClassName(collectionName); string description = ToWeaviateFriendlyClassDescription(collectionName); - this._logger.LogTrace("Creating collection: {0}, with class name: {1}", collectionName, className); + this._logger.LogDebug("Creating collection: {0}, with class name: {1}", collectionName, className); using HttpRequestMessage request = CreateClassSchemaRequest.Create(className, description).Build(); @@ -158,7 +120,7 @@ public async Task CreateCollectionAsync(string collectionName, CancellationToken $"Name conflict for collection: {collectionName} with class name: {className}"); } - this._logger.LogTrace("Created collection: {0}, with class name: {1}", collectionName, className); + this._logger.LogDebug("Created collection: {0}, with class name: {1}", collectionName, className); } catch (HttpRequestException e) { @@ -173,7 +135,8 @@ public async Task DoesCollectionExistAsync(string collectionName, Cancella Verify.NotNullOrWhiteSpace(collectionName, "Collection name is empty"); string className = ToWeaviateFriendlyClassName(collectionName); - this._logger.LogTrace("Does collection exist: {0}, with class name: {1}:", collectionName, className); + + this._logger.LogDebug("Does collection exist: {0}, with class name: {1}:", collectionName, className); using HttpRequestMessage request = GetClassRequest.Create(className).Build(); @@ -185,7 +148,7 @@ public async Task DoesCollectionExistAsync(string collectionName, Cancella bool exists = response.StatusCode != HttpStatusCode.NotFound; if (!exists) { - 
this._logger.LogTrace("Collection: {0}, with class name: {1}, does not exist.", collectionName, className); + this._logger.LogDebug("Collection: {0}, with class name: {1}, does not exist.", collectionName, className); } else { @@ -212,7 +175,7 @@ public async Task DoesCollectionExistAsync(string collectionName, Cancella /// public async IAsyncEnumerable GetCollectionsAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) { - this._logger.LogTrace("Listing collections"); + this._logger.LogDebug("Listing collections"); using HttpRequestMessage request = GetSchemaRequest.Create().Build(); string responseContent; @@ -244,7 +207,8 @@ public async Task DeleteCollectionAsync(string collectionName, CancellationToken Verify.NotNullOrWhiteSpace(collectionName, "Collection name is empty"); string className = ToWeaviateFriendlyClassName(collectionName); - this._logger.LogTrace("Deleting collection: {0}, with class name: {1}", collectionName, className); + + this._logger.LogDebug("Deleting collection: {0}, with class name: {1}", collectionName, className); if (await this.DoesCollectionExistAsync(collectionName, cancellationToken).ConfigureAwait(false)) { @@ -275,7 +239,7 @@ public async IAsyncEnumerable UpsertBatchAsync(string collectionName, IE { Verify.NotNullOrWhiteSpace(collectionName, "Collection name is empty"); - this._logger.LogTrace("Upsert vectors"); + this._logger.LogDebug("Upsert vectors"); string className = ToWeaviateFriendlyClassName(collectionName); BatchRequest requestBuilder = BatchRequest.Create(className); @@ -353,7 +317,7 @@ public async IAsyncEnumerable UpsertBatchAsync(string collectionName, IE embedding: new(weaviateObject.Vector ?? 
Array.Empty()), metadata: ToMetadata(weaviateObject)); - this._logger.LogTrace("Vector found with key: {0}", key); + this._logger.LogDebug("Vector found with key: {0}", key); return record; } @@ -383,7 +347,8 @@ public async Task RemoveAsync(string collectionName, string key, CancellationTok Verify.NotNull(key, "Key is NULL"); string className = ToWeaviateFriendlyClassName(collectionName); - this._logger.LogTrace("Deleting vector with key: {0}, from collection {1}, with class name: {2}:", key, collectionName, className); + + this._logger.LogDebug("Deleting vector with key: {0}, from collection {1}, with class name: {2}:", key, collectionName, className); DeleteObjectRequest requestBuilder = new() { @@ -397,7 +362,8 @@ public async Task RemoveAsync(string collectionName, string key, CancellationTok { (HttpResponseMessage response, string _) = await this.ExecuteHttpRequestAsync(request, cancellationToken).ConfigureAwait(false); response.EnsureSuccessStatusCode(); - this._logger.LogTrace("Vector deleted"); + + this._logger.LogDebug("Vector deleted"); } catch (HttpRequestException e) { @@ -420,8 +386,10 @@ public async Task RemoveBatchAsync(string collectionName, IEnumerable ke bool withEmbeddings = false, [EnumeratorCancellation] CancellationToken cancellationToken = default) { - this._logger.LogTrace("Searching top {0} nearest vectors", limit); Verify.NotNull(embedding, "The given vector is NULL"); + + this._logger.LogDebug("Searching top {0} nearest vectors", limit); + string className = ToWeaviateFriendlyClassName(collectionName); using HttpRequestMessage request = new CreateGraphRequest @@ -555,7 +523,9 @@ private static string ToWeaviateFriendlyClassName(string collectionName) HttpResponseMessage response = await this._httpClient.SendAsync(request, cancel).ConfigureAwait(false); string? 
responseContent = await response.Content.ReadAsStringAsync().ConfigureAwait(false); - this._logger.LogTrace("Weaviate responded with {0}", response.StatusCode); + + this._logger.LogDebug("Weaviate responded with {0}", response.StatusCode); + return (response, responseContent); } @@ -577,24 +547,4 @@ private static MemoryRecordMetadata ToMetadata(WeaviateObject weaviateObject) weaviateObject.Properties["sk_additional_metadata"].ToString() ); } - - [Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions.")] - protected virtual void Dispose(bool disposing) - { - if (this._disposed) - { - return; - } - - if (disposing) - { - // Clean-up the HttpClient if we created it. - if (this._isSelfManagedHttpClient) - { - this._httpClient.Dispose(); - } - } - - this._disposed = true; - } } diff --git a/dotnet/src/Connectors/Connectors.UnitTests/ConnectedClient.cs b/dotnet/src/Connectors/Connectors.UnitTests/ConnectedClient.cs new file mode 100644 index 000000000000..b47c192dbd61 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/ConnectedClient.cs @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Net; +using System.Net.WebSockets; + +namespace SemanticKernel.Connectors.UnitTests; + +internal sealed class ConnectedClient +{ + public Guid Id { get; } + public HttpListenerContext Context { get; } + public WebSocket? 
Socket { get; private set; } + + public ConnectedClient(Guid id, HttpListenerContext context) + { + this.Id = id; + this.Context = context; + } + + public void SetSocket(WebSocket socket) + { + this.Socket = socket; + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Connectors.UnitTests.csproj b/dotnet/src/Connectors/Connectors.UnitTests/Connectors.UnitTests.csproj index 8e39171e637a..62baaacb23ed 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/Connectors.UnitTests.csproj +++ b/dotnet/src/Connectors/Connectors.UnitTests/Connectors.UnitTests.csproj @@ -31,6 +31,7 @@ + @@ -56,6 +57,12 @@ Always + + Always + + + Always + diff --git a/dotnet/src/Connectors/Connectors.UnitTests/HuggingFace/TextCompletion/HuggingFaceTextCompletionTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/HuggingFace/TextCompletion/HuggingFaceTextCompletionTests.cs index 07b7dea0a9b7..84f02d98280a 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/HuggingFace/TextCompletion/HuggingFaceTextCompletionTests.cs +++ b/dotnet/src/Connectors/Connectors.UnitTests/HuggingFace/TextCompletion/HuggingFaceTextCompletionTests.cs @@ -31,7 +31,7 @@ public HuggingFaceTextCompletionTests() public async Task SpecifiedModelShouldBeUsedAsync() { //Arrange - using var sut = new HuggingFaceTextCompletion("fake-model", httpClient: this.httpClient); + var sut = new HuggingFaceTextCompletion("fake-model", httpClient: this.httpClient); //Act await sut.GetCompletionsAsync("fake-text", new CompleteRequestSettings()); @@ -44,7 +44,7 @@ public async Task SpecifiedModelShouldBeUsedAsync() public async Task NoAuthorizationHeaderShouldBeAddedIfApiKeyIsNotProvidedAsync() { //Arrange - using var sut = new HuggingFaceTextCompletion("fake-model", apiKey: null, httpClient: this.httpClient); + var sut = new HuggingFaceTextCompletion("fake-model", apiKey: null, httpClient: this.httpClient); //Act await sut.GetCompletionsAsync("fake-text", new CompleteRequestSettings()); @@ -57,7 +57,7 @@ public async Task 
NoAuthorizationHeaderShouldBeAddedIfApiKeyIsNotProvidedAsync() public async Task AuthorizationHeaderShouldBeAddedIfApiKeyIsProvidedAsync() { //Arrange - using var sut = new HuggingFaceTextCompletion("fake-model", apiKey: "fake-api-key", httpClient: this.httpClient); + var sut = new HuggingFaceTextCompletion("fake-model", apiKey: "fake-api-key", httpClient: this.httpClient); //Act await sut.GetCompletionsAsync("fake-text", new CompleteRequestSettings()); @@ -75,7 +75,7 @@ public async Task AuthorizationHeaderShouldBeAddedIfApiKeyIsProvidedAsync() public async Task UserAgentHeaderShouldBeUsedAsync() { //Arrange - using var sut = new HuggingFaceTextCompletion("fake-model", httpClient: this.httpClient); + var sut = new HuggingFaceTextCompletion("fake-model", httpClient: this.httpClient); //Act await sut.GetCompletionsAsync("fake-text", new CompleteRequestSettings()); @@ -93,7 +93,7 @@ public async Task UserAgentHeaderShouldBeUsedAsync() public async Task ProvidedEndpointShouldBeUsedAsync() { //Arrange - using var sut = new HuggingFaceTextCompletion("fake-model", endpoint: "https://fake-random-test-host/fake-path", httpClient: this.httpClient); + var sut = new HuggingFaceTextCompletion("fake-model", endpoint: "https://fake-random-test-host/fake-path", httpClient: this.httpClient); //Act await sut.GetCompletionsAsync("fake-text", new CompleteRequestSettings()); @@ -108,7 +108,7 @@ public async Task HttpClientBaseAddressShouldBeUsedAsync() //Arrange this.httpClient.BaseAddress = new Uri("https://fake-random-test-host/fake-path"); - using var sut = new HuggingFaceTextCompletion("fake-model", httpClient: this.httpClient); + var sut = new HuggingFaceTextCompletion("fake-model", httpClient: this.httpClient); //Act await sut.GetCompletionsAsync("fake-text", new CompleteRequestSettings()); @@ -121,7 +121,7 @@ public async Task HttpClientBaseAddressShouldBeUsedAsync() public async Task DefaultAddressShouldBeUsedAsync() { //Arrange - using var sut = new 
HuggingFaceTextCompletion("fake-model", httpClient: this.httpClient); + var sut = new HuggingFaceTextCompletion("fake-model", httpClient: this.httpClient); //Act await sut.GetCompletionsAsync("fake-text", new CompleteRequestSettings()); @@ -134,7 +134,7 @@ public async Task DefaultAddressShouldBeUsedAsync() public async Task ModelUrlShouldBeBuiltSuccessfullyAsync() { //Arrange - using var sut = new HuggingFaceTextCompletion("fake-model", endpoint: "https://fake-random-test-host/fake-path", httpClient: this.httpClient); + var sut = new HuggingFaceTextCompletion("fake-model", endpoint: "https://fake-random-test-host/fake-path", httpClient: this.httpClient); //Act await sut.GetCompletionsAsync("fake-text", new CompleteRequestSettings()); @@ -147,7 +147,7 @@ public async Task ModelUrlShouldBeBuiltSuccessfullyAsync() public async Task ShouldSendPromptToServiceAsync() { //Arrange - using var sut = new HuggingFaceTextCompletion("fake-model", httpClient: this.httpClient); + var sut = new HuggingFaceTextCompletion("fake-model", httpClient: this.httpClient); //Act await sut.GetCompletionsAsync("fake-text", new CompleteRequestSettings()); @@ -163,7 +163,7 @@ public async Task ShouldSendPromptToServiceAsync() public async Task ShouldHandleServiceResponseAsync() { //Arrange - using var sut = new HuggingFaceTextCompletion("fake-model", endpoint: "https://fake-random-test-host/fake-path", httpClient: this.httpClient); + var sut = new HuggingFaceTextCompletion("fake-model", endpoint: "https://fake-random-test-host/fake-path", httpClient: this.httpClient); //Act var result = await sut.GetCompletionsAsync("fake-text", new CompleteRequestSettings()); diff --git a/dotnet/src/Connectors/Connectors.UnitTests/HuggingFace/TextEmbedding/HuggingFaceEmbeddingGenerationTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/HuggingFace/TextEmbedding/HuggingFaceEmbeddingGenerationTests.cs index 6a3550675ea4..dfdf49ae2dc4 100644 --- 
a/dotnet/src/Connectors/Connectors.UnitTests/HuggingFace/TextEmbedding/HuggingFaceEmbeddingGenerationTests.cs +++ b/dotnet/src/Connectors/Connectors.UnitTests/HuggingFace/TextEmbedding/HuggingFaceEmbeddingGenerationTests.cs @@ -31,7 +31,7 @@ public HuggingFaceEmbeddingGenerationTests() public async Task SpecifiedModelShouldBeUsedAsync() { //Arrange - using var sut = new HuggingFaceTextEmbeddingGeneration("fake-model", this.httpClient, "https://fake-random-test-host/fake-path"); + var sut = new HuggingFaceTextEmbeddingGeneration("fake-model", this.httpClient, "https://fake-random-test-host/fake-path"); //Act await sut.GenerateEmbeddingsAsync(new List()); @@ -44,7 +44,7 @@ public async Task SpecifiedModelShouldBeUsedAsync() public async Task UserAgentHeaderShouldBeUsedAsync() { //Arrange - using var sut = new HuggingFaceTextEmbeddingGeneration("fake-model", this.httpClient, "https://fake-random-test-host/fake-path"); + var sut = new HuggingFaceTextEmbeddingGeneration("fake-model", this.httpClient, "https://fake-random-test-host/fake-path"); //Act await sut.GenerateEmbeddingsAsync(new List()); @@ -62,7 +62,7 @@ public async Task UserAgentHeaderShouldBeUsedAsync() public async Task ProvidedEndpointShouldBeUsedAsync() { //Arrange - using var sut = new HuggingFaceTextEmbeddingGeneration("fake-model", this.httpClient, "https://fake-random-test-host/fake-path"); + var sut = new HuggingFaceTextEmbeddingGeneration("fake-model", this.httpClient, "https://fake-random-test-host/fake-path"); //Act await sut.GenerateEmbeddingsAsync(new List()); @@ -77,7 +77,7 @@ public async Task HttpClientBaseAddressShouldBeUsedAsync() //Arrange this.httpClient.BaseAddress = new Uri("https://fake-random-test-host/fake-path"); - using var sut = new HuggingFaceTextEmbeddingGeneration("fake-model", this.httpClient); + var sut = new HuggingFaceTextEmbeddingGeneration("fake-model", this.httpClient); //Act await sut.GenerateEmbeddingsAsync(new List()); @@ -90,7 +90,7 @@ public async Task 
HttpClientBaseAddressShouldBeUsedAsync() public async Task ModelUrlShouldBeBuiltSuccessfullyAsync() { //Arrange - using var sut = new HuggingFaceTextEmbeddingGeneration("fake-model", this.httpClient, endpoint: "https://fake-random-test-host/fake-path"); + var sut = new HuggingFaceTextEmbeddingGeneration("fake-model", this.httpClient, endpoint: "https://fake-random-test-host/fake-path"); //Act await sut.GenerateEmbeddingsAsync(new List()); @@ -103,7 +103,7 @@ public async Task ModelUrlShouldBeBuiltSuccessfullyAsync() public async Task ShouldSendDataToServiceAsync() { //Arrange - using var sut = new HuggingFaceTextEmbeddingGeneration("fake-model", this.httpClient, "https://fake-random-test-host/fake-path"); + var sut = new HuggingFaceTextEmbeddingGeneration("fake-model", this.httpClient, "https://fake-random-test-host/fake-path"); var data = new List() { "test_string_1", "test_string_2", "test_string_3" }; //Act @@ -120,7 +120,7 @@ public async Task ShouldSendDataToServiceAsync() public async Task ShouldHandleServiceResponseAsync() { //Arrange - using var sut = new HuggingFaceTextEmbeddingGeneration("fake-model", this.httpClient, "https://fake-random-test-host/fake-path"); + var sut = new HuggingFaceTextEmbeddingGeneration("fake-model", this.httpClient, "https://fake-random-test-host/fake-path"); //Act var embeddings = await sut.GenerateEmbeddingsAsync(new List()); diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/AzureCognitiveSearch/AzureCognitiveSearchMemoryTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/AzureCognitiveSearch/AzureCognitiveSearchMemoryTests.cs deleted file mode 100644 index 5d71f9150ee1..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/AzureCognitiveSearch/AzureCognitiveSearchMemoryTests.cs +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Net.Http; -using System.Threading.Tasks; -using Azure.Core; -using Microsoft.SemanticKernel.Connectors.Memory.AzureCognitiveSearch; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.Memory.AzureCognitiveSearch; - -/// -/// Unit tests for class. -/// -public sealed class AzureCognitiveSearchMemoryTests : IDisposable -{ - private HttpMessageHandlerStub messageHandlerStub; - private HttpClient httpClient; - - public AzureCognitiveSearchMemoryTests() - { - this.messageHandlerStub = new HttpMessageHandlerStub(); - - this.httpClient = new HttpClient(this.messageHandlerStub, false); - } - - [Fact] - public async Task CustomHttpClientProvidedToFirstConstructorShouldBeUsed() - { - //Arrange - var sut = new AzureCognitiveSearchMemory("https://fake-random-test-host/fake-path", "fake-api-key", this.httpClient); - - //Act - await sut.GetAsync("fake-collection", "fake-query"); - - //Assert - Assert.StartsWith("https://fake-random-test-host/fake-path/indexes('fake-collection')", this.messageHandlerStub.RequestUri?.AbsoluteUri, StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public async Task CustomHttpClientProvidedToSecondConstructorShouldBeUsed() - { - //Arrange - var credentials = DelegatedTokenCredential.Create((_, __) => new AccessToken("fake-access-token", DateTimeOffset.UtcNow.AddMinutes(15))); - - var sut = new AzureCognitiveSearchMemory("https://fake-random-test-host/fake-path", credentials, this.httpClient); - - //Act - await sut.GetAsync("fake-collection", "fake-key"); - - //Assert - Assert.StartsWith("https://fake-random-test-host/fake-path/indexes('fake-collection')", this.messageHandlerStub.RequestUri?.AbsoluteUri, StringComparison.OrdinalIgnoreCase); - } - - public void Dispose() - { - this.httpClient.Dispose(); - this.messageHandlerStub.Dispose(); - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/AzureCognitiveSearch/AzureSearchServiceKernelBuilderExtensionsTests.cs 
b/dotnet/src/Connectors/Connectors.UnitTests/Memory/AzureCognitiveSearch/AzureSearchServiceKernelBuilderExtensionsTests.cs deleted file mode 100644 index aa8586a01835..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/AzureCognitiveSearch/AzureSearchServiceKernelBuilderExtensionsTests.cs +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Linq; -using System.Net.Http; -using System.Net.Mime; -using System.Text; -using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.Memory.AzureCognitiveSearch; - -public sealed class AzureSearchServiceKernelBuilderExtensionsTests : IDisposable -{ - private HttpMessageHandlerStub messageHandlerStub; - private HttpClient httpClient; - - public AzureSearchServiceKernelBuilderExtensionsTests() - { - this.messageHandlerStub = new HttpMessageHandlerStub(); - - this.httpClient = new HttpClient(this.messageHandlerStub, false); - } - - [Fact] - public async Task AzureCognitiveSearchMemoryStoreShouldBeProperlyInitialized() - { - //Arrange - this.messageHandlerStub.ResponseToReturn.Content = new StringContent("{\"value\": [{\"name\": \"fake-index1\"}]}", Encoding.UTF8, MediaTypeNames.Application.Json); - - var builder = new KernelBuilder(); - builder.WithAzureCognitiveSearchMemory("https://fake-random-test-host/fake-path", "fake-api-key", this.httpClient); - builder.WithAzureTextEmbeddingGenerationService("fake-deployment-name", "https://fake-random-test-host/fake-path1", "fake -api-key"); - var kernel = builder.Build(); //This call triggers the internal factory registered by WithAzureAzureCognitiveSearchMemory method to create an instance of the AzureCognitiveSearchMemory class. - - //Act - await kernel.Memory.GetCollectionsAsync(); //This call triggers a subsequent call to Azure Cognitive Search Memory store. 
- - //Assert - Assert.Equal("https://fake-random-test-host/fake-path/indexes?$select=%2A&api-version=2021-04-30-Preview", this.messageHandlerStub?.RequestUri?.AbsoluteUri); - - var headerValues = Enumerable.Empty(); - var headerExists = this.messageHandlerStub?.RequestHeaders?.TryGetValues("Api-Key", out headerValues); - Assert.True(headerExists); - Assert.Contains(headerValues!, (value) => value == "fake-api-key"); - } - - public void Dispose() - { - this.httpClient.Dispose(); - this.messageHandlerStub.Dispose(); - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Chroma/ChromaMemoryStoreTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Chroma/ChromaMemoryStoreTests.cs index 79a88e3e0f91..7c16caa58040 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Chroma/ChromaMemoryStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Chroma/ChromaMemoryStoreTests.cs @@ -27,6 +27,7 @@ public sealed class ChromaMemoryStoreTests : IDisposable private readonly HttpMessageHandlerStub _messageHandlerStub; private readonly HttpClient _httpClient; private readonly Mock _chromaClientMock; + private readonly JsonSerializerOptions _serializerOptions; public ChromaMemoryStoreTests() { @@ -37,6 +38,11 @@ public ChromaMemoryStoreTests() this._chromaClientMock .Setup(client => client.GetCollectionAsync(CollectionName, CancellationToken.None)) .ReturnsAsync(new ChromaCollectionModel { Id = CollectionId, Name = CollectionName }); + + this._serializerOptions = new JsonSerializerOptions + { + Converters = { new ChromaBooleanConverter() } + }; } [Fact] @@ -102,12 +108,12 @@ public async Task ItThrowsExceptionOnNonExistentCollectionDeletionAsync() { // Arrange const string collectionName = "non-existent-collection"; - const string deleteNonExistentCollectionErrorMessage = "list index out of range"; + const string collectionDoesNotExistErrorMessage = $"Collection {collectionName} does not exist"; const string expectedExceptionMessage = 
$"Cannot delete non-existent collection {collectionName}"; this._chromaClientMock .Setup(client => client.DeleteCollectionAsync(collectionName, CancellationToken.None)) - .Throws(new ChromaClientException(deleteNonExistentCollectionErrorMessage)); + .Throws(new ChromaClientException(collectionDoesNotExistErrorMessage)); var store = new ChromaMemoryStore(this._chromaClientMock.Object); @@ -310,7 +316,7 @@ private MemoryRecord GetRandomMemoryRecord(Embedding? embedding = null) private Dictionary GetEmbeddingMetadataFromMemoryRecord(MemoryRecord memoryRecord) { - var serialized = JsonSerializer.Serialize(memoryRecord.Metadata); + var serialized = JsonSerializer.Serialize(memoryRecord.Metadata, this._serializerOptions); return JsonSerializer.Deserialize>(serialized)!; } diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryStoreTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryStoreTests.cs index 545a2ff3a05c..db8b0b8c2401 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryStoreTests.cs @@ -36,16 +36,6 @@ public class QdrantMemoryStoreTests private readonly Embedding _embedding3 = new(new float[] { 3, 3, 3 }); private readonly Mock> _mockLogger = new(); - [Fact] - [Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions.")] - public void ConnectionCanBeInitialized() - { - // Arrange - var httpMock = new Mock(); - var qdrantClient = new QdrantVectorDbClient("http://localhost", 3, 1000, httpMock.Object); - var db = new QdrantMemoryStore(qdrantClient); - } - [Fact] public async Task ItCreatesNewCollectionAsync() { diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryStoreTests3.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryStoreTests3.cs index b1c42eb16176..39a4982d75ff 100644 --- 
a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryStoreTests3.cs +++ b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryStoreTests3.cs @@ -232,46 +232,6 @@ public async Task ItReturnsEmptyListIfNearestMatchesNotFoundAsync() Assert.Empty(similarityResults); } - [Fact] - [Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions.")] - public async Task ScoredVectorSupportsIntegerIdsObsolete() - { - // Arrange - var payloadId = "payloadId"; - var metadataId = "metadataId"; - var expectedId = 100; - - var scoredPointJsonWithIntegerId = - "{" + - "\"result\": " + - " [{" + - "\"id\": " + expectedId + "," + - "\"version\": 0," + - "\"score\": null," + - "\"payload\": {}," + - "\"vector\": null " + - "}]" + - "}"; - - using (var httpResponseMessage = new HttpResponseMessage { StatusCode = HttpStatusCode.OK, Content = new StringContent(scoredPointJsonWithIntegerId) }) - { - var mockHttpMessageHandler = new Mock(); - mockHttpMessageHandler.Protected() - .Setup>("SendAsync", ItExpr.IsAny(), ItExpr.IsAny()) - .ReturnsAsync(httpResponseMessage); - - //Act - using var httpClient = new HttpClient(mockHttpMessageHandler.Object); - { - var client = new QdrantVectorDbClient("http://localhost", 1536, null, httpClient); - var result = await client.GetVectorByPayloadIdAsync(payloadId, metadataId); - - //Assert - Assert.Equal(result!.PointId, expectedId.ToString(CultureInfo.InvariantCulture)); - } - } - } - [Fact] public async Task ScoredVectorSupportsIntegerIds() { @@ -310,84 +270,4 @@ public async Task ScoredVectorSupportsIntegerIds() } } } - - [Fact] - [Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions.")] - public async Task ScoredVectorSupportsStringIdsObsolete() - { - // Arrange - var payloadId = "payloadId"; - var metadataId = "metadataId"; - var expectedId = Guid.NewGuid().ToString(); - - var scoredPointJsonWithIntegerId = - "{" + - "\"result\": " + - " [{" + - 
"\"id\": \"" + expectedId + "\"," + - "\"version\": 0," + - "\"score\": null," + - "\"payload\": {}," + - "\"vector\": null " + - "}]" + - "}"; - - using (var httpResponseMessage = new HttpResponseMessage { StatusCode = HttpStatusCode.OK, Content = new StringContent(scoredPointJsonWithIntegerId) }) - { - var mockHttpMessageHandler = new Mock(); - mockHttpMessageHandler.Protected() - .Setup>("SendAsync", ItExpr.IsAny(), ItExpr.IsAny()) - .ReturnsAsync(httpResponseMessage); - - //Act - using var httpClient = new HttpClient(mockHttpMessageHandler.Object); - { - var client = new QdrantVectorDbClient("http://localhost", 1536, null, httpClient); - var result = await client.GetVectorByPayloadIdAsync(payloadId, metadataId); - - //Assert - Assert.Equal(result!.PointId, expectedId); - } - } - } - - [Fact] - [Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions.")] - public async Task ScoredVectorSupportsStringIds() - { - // Arrange - var payloadId = "payloadId"; - var metadataId = "metadataId"; - var expectedId = Guid.NewGuid().ToString(); - - var scoredPointJsonWithIntegerId = - "{" + - "\"result\": " + - " [{" + - "\"id\": \"" + expectedId + "\"," + - "\"version\": 0," + - "\"score\": null," + - "\"payload\": {}," + - "\"vector\": null " + - "}]" + - "}"; - - using (var httpResponseMessage = new HttpResponseMessage { StatusCode = HttpStatusCode.OK, Content = new StringContent(scoredPointJsonWithIntegerId) }) - { - var mockHttpMessageHandler = new Mock(); - mockHttpMessageHandler.Protected() - .Setup>("SendAsync", ItExpr.IsAny(), ItExpr.IsAny()) - .ReturnsAsync(httpResponseMessage); - - //Act - using var httpClient = new HttpClient(mockHttpMessageHandler.Object); - { - var client = new QdrantVectorDbClient(httpClient, 1536, "https://fake-random-test-host"); - var result = await client.GetVectorByPayloadIdAsync(payloadId, metadataId); - - //Assert - Assert.Equal(result!.PointId, expectedId); - } - } - } } diff --git 
a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Weaviate/WeaviateMemoryStoreTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Weaviate/WeaviateMemoryStoreTests.cs index c1ecda8a77cc..bf64563e5276 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Weaviate/WeaviateMemoryStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Weaviate/WeaviateMemoryStoreTests.cs @@ -44,7 +44,7 @@ public WeaviateMemoryStoreTests() public async Task NoAuthorizationHeaderShouldBeAddedIfApiKeyIsNotProvidedAsync() { //Arrange - using var sut = new WeaviateMemoryStore(this.httpClient, null, "https://fake-random-test-host/fake-path"); + var sut = new WeaviateMemoryStore(this.httpClient, null, "https://fake-random-test-host/fake-path"); //Act await sut.GetAsync("fake-collection", "fake-key"); @@ -57,7 +57,7 @@ public async Task NoAuthorizationHeaderShouldBeAddedIfApiKeyIsNotProvidedAsync() public async Task AuthorizationHeaderShouldBeAddedIfApiKeyIsProvidedAsync() { //Arrange - using var sut = new WeaviateMemoryStore(this.httpClient, "fake-api-key", "https://fake-random-test-host/fake-path"); + var sut = new WeaviateMemoryStore(this.httpClient, "fake-api-key", "https://fake-random-test-host/fake-path"); //Act await sut.GetAsync("fake-collection", "fake-key"); @@ -75,7 +75,7 @@ public async Task AuthorizationHeaderShouldBeAddedIfApiKeyIsProvidedAsync() public async Task ProvidedEndpointShouldBeUsedAsync() { //Arrange - using var sut = new WeaviateMemoryStore(this.httpClient, "fake-api-key", "https://fake-random-test-host/fake-path/"); + var sut = new WeaviateMemoryStore(this.httpClient, "fake-api-key", "https://fake-random-test-host/fake-path/"); //Act await sut.GetAsync("fake-collection", "fake-key"); @@ -90,7 +90,7 @@ public async Task HttpClientBaseAddressShouldBeUsedAsync() //Arrange this.httpClient.BaseAddress = new Uri("https://fake-random-test-host/fake-path/"); - using var sut = new WeaviateMemoryStore(this.httpClient, "fake-api-key"); + var 
sut = new WeaviateMemoryStore(this.httpClient, "fake-api-key"); //Act await sut.GetAsync("fake-collection", "fake-key"); diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Oobabooga/OobaboogaTestHelper.cs b/dotnet/src/Connectors/Connectors.UnitTests/Oobabooga/OobaboogaTestHelper.cs new file mode 100644 index 000000000000..0df5eda9dd19 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/Oobabooga/OobaboogaTestHelper.cs @@ -0,0 +1,44 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.IO; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Moq; +using Moq.Protected; + +namespace SemanticKernel.Connectors.UnitTests.Oobabooga; + +/// +/// Helper for Oobabooga test purposes. +/// +internal static class OobaboogaTestHelper +{ + /// + /// Reads test response from file for mocking purposes. + /// + /// Name of the file with test response. + internal static string GetTestResponse(string fileName) + { + return File.ReadAllText($"./Oobabooga/TestData/{fileName}"); + } + + /// + /// Returns mocked instance of . + /// + /// Message to return for mocked . + internal static HttpClientHandler GetHttpClientHandlerMock(HttpResponseMessage httpResponseMessage) + { + var httpClientHandler = new Mock(); + + httpClientHandler + .Protected() + .Setup>( + "SendAsync", + ItExpr.IsAny(), + ItExpr.IsAny()) + .ReturnsAsync(httpResponseMessage); + + return httpClientHandler.Object; + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Oobabooga/OobaboogaWebSocketTestServer.cs b/dotnet/src/Connectors/Connectors.UnitTests/Oobabooga/OobaboogaWebSocketTestServer.cs new file mode 100644 index 000000000000..d9210603a8fd --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/Oobabooga/OobaboogaWebSocketTestServer.cs @@ -0,0 +1,62 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.Text; +using System.Text.Json; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Connectors.AI.Oobabooga.TextCompletion; + +namespace SemanticKernel.Connectors.UnitTests.Oobabooga; + +/// +/// Represents a WebSocket test server specifically designed for the Oobabooga text completion service. +/// It inherits from the base WebSocketTestServer class and handles Oobabooga-specific request and response classes. +/// The server accepts WebSocket connections, receives requests, and generates responses based on the Oobabooga text completion logic. +/// The OobaboogaWebSocketTestServer class uses a delegate to handle the request and response logic, allowing customization of the behavior. +/// +internal sealed class OobaboogaWebSocketTestServer : WebSocketTestServer +{ + public OobaboogaWebSocketTestServer(string url, Func> stringHandler, ILogger? logger = null) + : base(url, bytes => HandleRequest(bytes, stringHandler), logger: logger) + { + } + + private static List> HandleRequest(ArraySegment request, Func> stringHandler) + { + var requestString = Encoding.UTF8.GetString(request.ToArray()); + var requestObj = JsonSerializer.Deserialize(requestString); + + var responseList = stringHandler(requestObj?.Prompt ?? 
string.Empty); + + var responseSegments = new List>(); + int messageNum = 0; + foreach (var responseChunk in responseList) + { + var responseObj = new TextCompletionStreamingResponse + { + Event = "text_stream", + MessageNum = messageNum, + Text = responseChunk + }; + + var responseJson = JsonSerializer.Serialize(responseObj); + var responseBytes = Encoding.UTF8.GetBytes(responseJson); + responseSegments.Add(new ArraySegment(responseBytes)); + + messageNum++; + } + + var streamEndObj = new TextCompletionStreamingResponse + { + Event = "stream_end", + MessageNum = messageNum + }; + + var streamEndJson = JsonSerializer.Serialize(streamEndObj); + var streamEndBytes = Encoding.UTF8.GetBytes(streamEndJson); + responseSegments.Add(new ArraySegment(streamEndBytes)); + + return responseSegments; + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Oobabooga/TestData/completion_test_response.json b/dotnet/src/Connectors/Connectors.UnitTests/Oobabooga/TestData/completion_test_response.json new file mode 100644 index 000000000000..397ee62436d5 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/Oobabooga/TestData/completion_test_response.json @@ -0,0 +1,9 @@ +{ + "results": [ + { + "text": "This is test completion response" + + } + ] + +} \ No newline at end of file diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Oobabooga/TestData/completion_test_streaming_response.json b/dotnet/src/Connectors/Connectors.UnitTests/Oobabooga/TestData/completion_test_streaming_response.json new file mode 100644 index 000000000000..bf731d314094 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/Oobabooga/TestData/completion_test_streaming_response.json @@ -0,0 +1,5 @@ +{ + "event": "text_stream", + "message_num": 0, + "text": "This is test completion response" +} \ No newline at end of file diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Oobabooga/TextCompletion/OobaboogaTextCompletionTests.cs 
b/dotnet/src/Connectors/Connectors.UnitTests/Oobabooga/TextCompletion/OobaboogaTextCompletionTests.cs new file mode 100644 index 000000000000..65810789802d --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/Oobabooga/TextCompletion/OobaboogaTextCompletionTests.cs @@ -0,0 +1,405 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Globalization; +using System.Linq; +using System.Net.Http; +using System.Net.WebSockets; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.AI.TextCompletion; +using Microsoft.SemanticKernel.Connectors.AI.Oobabooga.TextCompletion; +using Xunit; +using Xunit.Abstractions; + +namespace SemanticKernel.Connectors.UnitTests.Oobabooga.TextCompletion; + +/// +/// Unit tests for class. +/// +public sealed class OobaboogaTextCompletionTests : IDisposable +{ + private readonly XunitLogger _logger; + private const string EndPoint = "https://fake-random-test-host"; + private const int BlockingPort = 1234; + private const int StreamingPort = 2345; + private const string CompletionText = "fake-test"; + private const string CompletionMultiText = "Hello, my name is"; + + private HttpMessageHandlerStub _messageHandlerStub; + private HttpClient _httpClient; + private Uri _endPointUri; + private string _streamCompletionResponseStub; + + public OobaboogaTextCompletionTests(ITestOutputHelper output) + { + this._logger = new XunitLogger(output); + this._messageHandlerStub = new HttpMessageHandlerStub(); + this._messageHandlerStub.ResponseToReturn.Content = new StringContent(OobaboogaTestHelper.GetTestResponse("completion_test_response.json")); + this._streamCompletionResponseStub = OobaboogaTestHelper.GetTestResponse("completion_test_streaming_response.json"); + + this._httpClient = new HttpClient(this._messageHandlerStub, false); + 
this._endPointUri = new Uri(EndPoint); + } + + [Fact] + public async Task UserAgentHeaderShouldBeUsedAsync() + { + //Arrange + var sut = new OobaboogaTextCompletion(endpoint: this._endPointUri, + blockingPort: BlockingPort, + httpClient: this._httpClient, + logger: this._logger); + + //Act + await sut.GetCompletionsAsync(CompletionText, new CompleteRequestSettings()); + + //Assert + Assert.True(this._messageHandlerStub.RequestHeaders?.Contains("User-Agent")); + + var values = this._messageHandlerStub.RequestHeaders!.GetValues("User-Agent"); + + var value = values.SingleOrDefault(); + Assert.Equal(OobaboogaTextCompletion.HttpUserAgent, value); + } + + [Fact] + public async Task ProvidedEndpointShouldBeUsedAsync() + { + //Arrange + var sut = new OobaboogaTextCompletion(endpoint: this._endPointUri, + blockingPort: BlockingPort, + httpClient: this._httpClient, + logger: this._logger); + + //Act + await sut.GetCompletionsAsync(CompletionText, new CompleteRequestSettings()); + + //Assert + Assert.StartsWith(EndPoint, this._messageHandlerStub.RequestUri?.AbsoluteUri, StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task BlockingUrlShouldBeBuiltSuccessfullyAsync() + { + //Arrange + var sut = new OobaboogaTextCompletion(endpoint: this._endPointUri, + blockingPort: BlockingPort, + httpClient: this._httpClient, + logger: this._logger); + + //Act + await sut.GetCompletionsAsync(CompletionText, new CompleteRequestSettings()); + var expectedUri = new UriBuilder(this._endPointUri) + { + Path = OobaboogaTextCompletion.BlockingUriPath, + Port = BlockingPort + }; + + //Assert + Assert.Equal(expectedUri.Uri, this._messageHandlerStub.RequestUri); + } + + [Fact] + public async Task ShouldSendPromptToServiceAsync() + { + //Arrange + var sut = new OobaboogaTextCompletion(endpoint: this._endPointUri, + blockingPort: BlockingPort, + httpClient: this._httpClient, + logger: this._logger); + + //Act + await sut.GetCompletionsAsync(CompletionText, new 
CompleteRequestSettings()); + + //Assert + var requestPayload = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent); + Assert.NotNull(requestPayload); + + Assert.Equal(CompletionText, requestPayload.Prompt); + } + + [Fact] + public async Task ShouldHandleServiceResponseAsync() + { + //Arrange + var sut = new OobaboogaTextCompletion(endpoint: this._endPointUri, + blockingPort: BlockingPort, + httpClient: this._httpClient, + logger: this._logger); + + //Act + var result = await sut.GetCompletionsAsync(CompletionText, new CompleteRequestSettings()); + + //Assert + Assert.NotNull(result); + + var completions = result.SingleOrDefault(); + Assert.NotNull(completions); + + var completion = await completions.GetCompletionAsync(); + Assert.Equal("This is test completion response", completion); + } + + [Fact] + public async Task ShouldHandleStreamingServicePersistentWebSocketResponseAsync() + { + var requestMessage = CompletionText; + var expectedResponse = new List { this._streamCompletionResponseStub }; + await this.RunWebSocketMultiPacketStreamingTestAsync( + requestMessage: requestMessage, + expectedResponse: expectedResponse, + isPersistent: true).ConfigureAwait(false); + } + + [Fact] + public async Task ShouldHandleStreamingServiceTransientWebSocketResponseAsync() + { + var requestMessage = CompletionText; + var expectedResponse = new List { this._streamCompletionResponseStub }; + await this.RunWebSocketMultiPacketStreamingTestAsync( + requestMessage: requestMessage, + expectedResponse: expectedResponse).ConfigureAwait(false); + } + + [Fact] + public async Task ShouldHandleConcurrentWebSocketConnectionsAsync() + { + var serverUrl = $"http://localhost:{StreamingPort}/"; + var clientUrl = $"ws://localhost:{StreamingPort}/"; + var expectedResponses = new List + { + "Response 1", + "Response 2", + "Response 3", + "Response 4", + "Response 5" + }; + + await using var server = new WebSocketTestServer(serverUrl, request => + { + // Simulate different responses 
for each request + var responseIndex = int.Parse(Encoding.UTF8.GetString(request.ToArray()), CultureInfo.InvariantCulture); + byte[] bytes = Encoding.UTF8.GetBytes(expectedResponses[responseIndex]); + var toReturn = new List> { new ArraySegment(bytes) }; + return toReturn; + }); + + var tasks = new List>(); + + // Simulate multiple concurrent WebSocket connections + for (int i = 0; i < expectedResponses.Count; i++) + { + var currentIndex = i; + tasks.Add(Task.Run(async () => + { + using var client = new ClientWebSocket(); + await client.ConnectAsync(new Uri(clientUrl), CancellationToken.None); + + // Send a request to the server + var requestBytes = Encoding.UTF8.GetBytes(currentIndex.ToString(CultureInfo.InvariantCulture)); + await client.SendAsync(new ArraySegment(requestBytes), WebSocketMessageType.Text, true, CancellationToken.None); + + // Receive the response from the server + var responseBytes = new byte[1024]; + var responseResult = await client.ReceiveAsync(new ArraySegment(responseBytes), CancellationToken.None); + await client.CloseAsync(WebSocketCloseStatus.NormalClosure, "Close connection after message received", CancellationToken.None).ConfigureAwait(false); + + var response = Encoding.UTF8.GetString(responseBytes, 0, responseResult.Count); + + return response; + })); + } + + // Assert + for (int i = 0; i < expectedResponses.Count; i++) + { + var response = await tasks[i].ConfigureAwait(false); + Assert.Equal(expectedResponses[i], response); + } + } + + [Fact] + public async Task ShouldHandleMultiPacketStreamingServiceTransientWebSocketResponseAsync() + { + await this.RunWebSocketMultiPacketStreamingTestAsync().ConfigureAwait(false); + } + + [Fact] + public async Task ShouldHandleMultiPacketStreamingServicePersistentWebSocketResponseBroadcastBlockAsync() + { + await this.RunWebSocketMultiPacketStreamingTestAsync(isPersistent: true).ConfigureAwait(false); + } + + [Fact] + public async Task 
ShouldHandleConcurrentMultiPacketStreamingServiceTransientWebSocketResponseAsync() + { + await this.RunWebSocketMultiPacketStreamingTestAsync(nbConcurrentCalls: 10).ConfigureAwait(false); + } + + [Fact] + public async Task ShouldHandleConcurrentMultiPacketStreamingServicePersistentWebSocketResponseAsync() + { + await this.RunWebSocketMultiPacketStreamingTestAsync(nbConcurrentCalls: 10, isPersistent: true).ConfigureAwait(false); + } + + /// + /// This test will assess concurrent enumeration of the same long multi message (500 websocket messages) streaming result. + /// + [Fact] + public async Task ShouldHandleConcurrentEnumerationOfLongStreamingServiceResponseAsync() + { + var expectedResponse = Enumerable.Range(0, 500).Select(i => i.ToString(CultureInfo.InvariantCulture)).ToList(); + using SemaphoreSlim enforcedConcurrentCallSemaphore = new(20); + await this.RunWebSocketMultiPacketStreamingTestAsync( + expectedResponse: expectedResponse, + nbConcurrentCalls: 1, + nbConcurrentEnumeration: 100, + isPersistent: true, + keepAliveWebSocketsDuration: 100, + concurrentCallsTicksDelay: 0, + enforcedConcurrentCallSemaphore: enforcedConcurrentCallSemaphore, + maxExpectedNbClients: 20).ConfigureAwait(false); + } + + private async Task RunWebSocketMultiPacketStreamingTestAsync( + string requestMessage = CompletionMultiText, + List? expectedResponse = null, + int nbConcurrentCalls = 1, + int nbConcurrentEnumeration = 1, + bool isPersistent = false, + int requestProcessingDuration = 0, + int segmentMessageDelay = 0, + int keepAliveWebSocketsDuration = 100, + int concurrentCallsTicksDelay = 0, + SemaphoreSlim? enforcedConcurrentCallSemaphore = null, + int maxExpectedNbClients = 0, + int maxTestDuration = 0) + { + if (expectedResponse == null) + { + expectedResponse = new List { " John", ". I", "'m a", " writer" }; + } + + Func? 
webSocketFactory = null; + // Counter to track the number of WebSocket clients created + int clientCount = 0; + var delayTimeSpan = new TimeSpan(concurrentCallsTicksDelay); + if (isPersistent) + { + ClientWebSocket ExternalWebSocketFactory() + { + this._logger?.LogInformation(message: "Creating new client web socket"); + var toReturn = new ClientWebSocket(); + return toReturn; + } + + if (maxExpectedNbClients > 0) + { + ClientWebSocket IncrementFactory() + { + var toReturn = ExternalWebSocketFactory(); + Interlocked.Increment(ref clientCount); + return toReturn; + } + + webSocketFactory = IncrementFactory; + } + else + { + webSocketFactory = ExternalWebSocketFactory; + } + } + + using var cleanupToken = new CancellationTokenSource(); + + var sut = new OobaboogaTextCompletion( + endpoint: new Uri("http://localhost/"), + streamingPort: StreamingPort, + httpClient: this._httpClient, + webSocketsCleanUpCancellationToken: cleanupToken.Token, + webSocketFactory: webSocketFactory, + keepAliveWebSocketsDuration: keepAliveWebSocketsDuration, + concurrentSemaphore: enforcedConcurrentCallSemaphore, + logger: this._logger); + + await using var server = new OobaboogaWebSocketTestServer($"http://localhost:{StreamingPort}/", request => expectedResponse, logger: this._logger) + { + RequestProcessingDelay = TimeSpan.FromMilliseconds(requestProcessingDuration), + SegmentMessageDelay = TimeSpan.FromMilliseconds(segmentMessageDelay) + }; + + var sw = Stopwatch.StartNew(); + var tasks = new List>>(); + + for (int i = 0; i < nbConcurrentCalls; i++) + { + tasks.Add(Task.Run(() => + { + var localResponse = sut.CompleteStreamAsync(requestMessage, new CompleteRequestSettings() + { + Temperature = 0.01, + MaxTokens = 7, + TopP = 0.1, + }, cancellationToken: cleanupToken.Token); + return localResponse; + })); + } + + var callEnumerationTasks = new List>>(); + await Task.WhenAll(tasks).ConfigureAwait(false); + + foreach (var callTask in tasks) + { + 
callEnumerationTasks.AddRange(Enumerable.Range(0, nbConcurrentEnumeration).Select(_ => Task.Run(async () => + { + var completion = await callTask.ConfigureAwait(false); + var result = new List(); + await foreach (var chunk in completion) + { + result.Add(chunk); + } + + return result; + }))); + + // Introduce a delay between creating each WebSocket client + await Task.Delay(delayTimeSpan).ConfigureAwait(false); + } + + var allResults = await Task.WhenAll(callEnumerationTasks).ConfigureAwait(false); + + var elapsed = sw.ElapsedMilliseconds; + if (maxExpectedNbClients > 0) + { + Assert.InRange(clientCount, 1, maxExpectedNbClients); + } + + // Validate all results + foreach (var result in allResults) + { + Assert.Equal(expectedResponse.Count, result.Count); + for (int i = 0; i < expectedResponse.Count; i++) + { + Assert.Equal(expectedResponse[i], result[i]); + } + } + + if (maxTestDuration > 0) + { + Assert.InRange(elapsed, 0, maxTestDuration); + } + } + + public void Dispose() + { + this._httpClient.Dispose(); + this._messageHandlerStub.Dispose(); + this._logger.Dispose(); + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/KernelConfigOpenAIExtensionsTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/KernelConfigOpenAIExtensionsTests.cs deleted file mode 100644 index ee3fae0471aa..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/KernelConfigOpenAIExtensionsTests.cs +++ /dev/null @@ -1,130 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.SemanticKernel; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI; - -/// -/// Unit tests of . 
-/// -[System.Obsolete("All the methods of this class are deprecated and it will be removed in one of the next SK SDK versions.")] -public class KernelConfigOpenAIExtensionsTests -{ - [Fact] - public void ItSucceedsWhenAddingDifferentServiceTypeWithSameId() - { - var target = new KernelConfig(); - target.AddAzureTextCompletionService("depl", "https://url", "key", serviceId: "azure"); - target.AddAzureTextEmbeddingGenerationService("depl2", "https://url", "key", serviceId: "azure"); - - Assert.True(target.TextCompletionServices.ContainsKey("azure")); - Assert.True(target.TextEmbeddingGenerationServices.ContainsKey("azure")); - } - - [Fact] - public void ItTellsIfAServiceIsAvailable() - { - // Arrange - var target = new KernelConfig(); - target.AddAzureTextCompletionService("deployment1", "https://url", "key", serviceId: "azure"); - target.AddOpenAITextCompletionService("model", "apikey", serviceId: "oai"); - target.AddAzureTextEmbeddingGenerationService("deployment2", "https://url2", "key", serviceId: "azure"); - target.AddOpenAITextEmbeddingGenerationService("model2", "apikey2", serviceId: "oai2"); - - // Assert - Assert.True(target.TextCompletionServices.ContainsKey("azure")); - Assert.True(target.TextCompletionServices.ContainsKey("oai")); - Assert.True(target.TextEmbeddingGenerationServices.ContainsKey("azure")); - Assert.True(target.TextEmbeddingGenerationServices.ContainsKey("oai2")); - - Assert.False(target.TextCompletionServices.ContainsKey("azure2")); - Assert.False(target.TextCompletionServices.ContainsKey("oai2")); - Assert.False(target.TextEmbeddingGenerationServices.ContainsKey("azure1")); - Assert.False(target.TextEmbeddingGenerationServices.ContainsKey("oai")); - } - - [Fact] - public void ItCanOverwriteServices() - { - // Arrange - var target = new KernelConfig(); - - // Act - Assert no exception occurs - target.AddAzureTextCompletionService("dep", "https://localhost", "key", serviceId: "one"); - target.AddAzureTextCompletionService("dep", 
"https://localhost", "key", serviceId: "one"); - target.AddOpenAITextCompletionService("model", "key", serviceId: "one"); - target.AddOpenAITextCompletionService("model", "key", serviceId: "one"); - target.AddAzureTextEmbeddingGenerationService("dep", "https://localhost", "key", serviceId: "one"); - target.AddAzureTextEmbeddingGenerationService("dep", "https://localhost", "key", serviceId: "one"); - target.AddOpenAITextEmbeddingGenerationService("model", "key", serviceId: "one"); - target.AddOpenAITextEmbeddingGenerationService("model", "key", serviceId: "one"); - } - - [Fact] - public void ItCanRemoveAllServices() - { - // Arrange - var target = new KernelConfig(); - target.AddAzureTextCompletionService("dep", "https://localhost", "key", serviceId: "one"); - target.AddAzureTextCompletionService("dep", "https://localhost", "key", serviceId: "2"); - target.AddOpenAITextCompletionService("model", "key", serviceId: "3"); - target.AddOpenAITextCompletionService("model", "key", serviceId: "4"); - target.AddAzureTextEmbeddingGenerationService("dep", "https://localhost", "key", serviceId: "5"); - target.AddAzureTextEmbeddingGenerationService("dep", "https://localhost", "key", serviceId: "6"); - target.AddOpenAITextEmbeddingGenerationService("model", "key", serviceId: "7"); - target.AddOpenAITextEmbeddingGenerationService("model", "key", serviceId: "8"); - - // Act - target.RemoveAllTextCompletionServices(); - target.RemoveAllTextEmbeddingGenerationServices(); - - // Assert - Assert.Empty(target.TextEmbeddingGenerationServices); - Assert.Empty(target.TextCompletionServices); - } - - [Fact] - public void ItCanRemoveAllTextCompletionServices() - { - // Arrange - var target = new KernelConfig(); - target.AddAzureTextCompletionService("dep", "https://localhost", "key", serviceId: "one"); - target.AddAzureTextCompletionService("dep", "https://localhost", "key", serviceId: "2"); - target.AddOpenAITextCompletionService("model", "key", serviceId: "3"); - 
target.AddOpenAITextCompletionService("model", "key", serviceId: "4"); - - target.AddAzureTextEmbeddingGenerationService("dep", "https://localhost", "key", serviceId: "5"); - target.AddAzureTextEmbeddingGenerationService("dep", "https://localhost", "key", serviceId: "6"); - target.AddOpenAITextEmbeddingGenerationService("model", "key", serviceId: "7"); - target.AddOpenAITextEmbeddingGenerationService("model", "key", serviceId: "8"); - - // Act - target.RemoveAllTextCompletionServices(); - - // Assert (+1 for the default) - Assert.Equal(4 + 1, target.TextEmbeddingGenerationServices.Count); - } - - [Fact] - public void ItCanRemoveAllTextEmbeddingGenerationServices() - { - // Arrange - var target = new KernelConfig(); - target.AddAzureTextCompletionService("dep", "https://localhost", "key", serviceId: "one"); - target.AddAzureTextCompletionService("dep", "https://localhost", "key", serviceId: "2"); - target.AddOpenAITextCompletionService("model", "key", serviceId: "3"); - target.AddOpenAITextCompletionService("model", "key", serviceId: "4"); - target.AddAzureTextEmbeddingGenerationService("dep", "https://localhost", "key", serviceId: "5"); - target.AddAzureTextEmbeddingGenerationService("dep", "https://localhost", "key", serviceId: "6"); - target.AddOpenAITextEmbeddingGenerationService("model", "key", serviceId: "7"); - target.AddOpenAITextEmbeddingGenerationService("model", "key", serviceId: "8"); - - // Act - target.RemoveAllTextEmbeddingGenerationServices(); - - // Assert (+1 for the default) - Assert.Equal(4 + 1, target.TextCompletionServices.Count); - Assert.Empty(target.TextEmbeddingGenerationServices); - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/WebSocketTestServer.cs b/dotnet/src/Connectors/Connectors.UnitTests/WebSocketTestServer.cs new file mode 100644 index 000000000000..11eafcb24ef2 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/WebSocketTestServer.cs @@ -0,0 +1,223 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Linq; +using System.Net; +using System.Net.WebSockets; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; + +namespace SemanticKernel.Connectors.UnitTests; + +internal class WebSocketTestServer : IDisposable +{ + private readonly ILogger? _logger; + + private readonly HttpListener _httpListener; + private readonly CancellationTokenSource _mainCancellationTokenSource; + private readonly CancellationTokenSource _socketCancellationTokenSource; + private bool _serverIsRunning; + + private Func, List>> _arraySegmentHandler; + private readonly ConcurrentDictionary> _requestContentQueues; + private readonly ConcurrentBag _runningTasks = new(); + + private readonly ConcurrentDictionary _clients = new(); + + public TimeSpan RequestProcessingDelay { get; set; } = TimeSpan.Zero; + public TimeSpan SegmentMessageDelay { get; set; } = TimeSpan.Zero; + + public ConcurrentDictionary RequestContents + { + get + { + return new ConcurrentDictionary( + this._requestContentQueues + .ToDictionary(kvp => kvp.Key, kvp => kvp.Value.ToList().SelectMany(bytes => bytes).ToArray())); + } + } + + public WebSocketTestServer(string url, Func, List>> arraySegmentHandler, ILogger? 
logger = null) + { + this._logger = logger; + + this._arraySegmentHandler = arraySegmentHandler; + this._requestContentQueues = new ConcurrentDictionary>(); + + this._mainCancellationTokenSource = new(); + this._socketCancellationTokenSource = new(); + + this._httpListener = new HttpListener(); + this._httpListener.Prefixes.Add(url); + this._httpListener.Start(); + this._serverIsRunning = true; + + Task.Run((Func)this.HandleRequestsAsync, this._mainCancellationTokenSource.Token); + } + + private async Task HandleRequestsAsync() + { + while (!this._mainCancellationTokenSource.IsCancellationRequested) + { + var context = await this._httpListener.GetContextAsync().ConfigureAwait(false); + + if (this._serverIsRunning) + { + if (context.Request.IsWebSocketRequest) + { + var connectedClient = new ConnectedClient(Guid.NewGuid(), context); + this._clients[connectedClient.Id] = connectedClient; + try + { + var socketContext = await context.AcceptWebSocketAsync(subProtocol: null); + connectedClient.SetSocket(socketContext.WebSocket); + this._runningTasks.Add(this.HandleSingleWebSocketRequestAsync(connectedClient)); + } + catch + { + // server error if upgrade from HTTP to WebSocket fails + context.Response.StatusCode = 500; + context.Response.StatusDescription = "WebSocket upgrade failed"; + context.Response.Close(); + throw; + } + } + } + else + { + // HTTP 409 Conflict (with server's current state) + context.Response.StatusCode = 409; + context.Response.StatusDescription = "Server is shutting down"; + context.Response.Close(); + return; + } + } + + await Task.WhenAll(this._runningTasks).ConfigureAwait(false); + } + + private async Task HandleSingleWebSocketRequestAsync(ConnectedClient connectedClient) + { + var buffer = WebSocket.CreateServerBuffer(4096); + + Guid requestId = connectedClient.Id; + this._requestContentQueues[requestId] = new ConcurrentQueue(); + + try + { + while (!this._socketCancellationTokenSource.IsCancellationRequested && connectedClient.Socket != null 
&& connectedClient.Socket.State != WebSocketState.Closed && connectedClient.Socket.State != WebSocketState.Aborted) + { + WebSocketReceiveResult result = await connectedClient.Socket.ReceiveAsync(buffer, this._socketCancellationTokenSource.Token).ConfigureAwait(false); + if (!this._socketCancellationTokenSource.IsCancellationRequested && connectedClient.Socket.State != WebSocketState.Closed && connectedClient.Socket.State != WebSocketState.Aborted) + { + if (connectedClient.Socket.State == WebSocketState.CloseReceived && result.MessageType == WebSocketMessageType.Close) + { + await connectedClient.Socket.CloseOutputAsync(WebSocketCloseStatus.NormalClosure, "Acknowledge Close frame", CancellationToken.None); + + break; + } + + var receivedBytes = buffer.Slice(0, result.Count); + this._requestContentQueues[requestId].Enqueue(receivedBytes.ToArray()); + + if (result.EndOfMessage) + { + var responseSegments = this._arraySegmentHandler(receivedBytes); + + if (this.RequestProcessingDelay.Ticks > 0) + { + await Task.Delay(this.RequestProcessingDelay).ConfigureAwait(false); + } + + foreach (var responseSegment in responseSegments) + { + if (connectedClient.Socket.State != WebSocketState.Open) + { + break; + } + + if (this.SegmentMessageDelay.Ticks > 0) + { + await Task.Delay(this.SegmentMessageDelay).ConfigureAwait(false); + } + + await connectedClient.Socket.SendAsync(responseSegment, WebSocketMessageType.Text, true, this._socketCancellationTokenSource.Token).ConfigureAwait(false); + } + } + } + } + + if (connectedClient.Socket?.State == WebSocketState.Open) + { + await connectedClient.Socket.CloseAsync(WebSocketCloseStatus.NormalClosure, "Closing waiting for acknowledgement", CancellationToken.None).ConfigureAwait(false); + } + else if (connectedClient.Socket?.State == WebSocketState.CloseReceived) + { + await connectedClient.Socket.CloseOutputAsync(WebSocketCloseStatus.NormalClosure, "Closing without waiting for acknowledgment", 
CancellationToken.None).ConfigureAwait(false); + } + } + catch (OperationCanceledException exception) + { + this._logger?.LogTrace(message: "Closing server web socket before disposal was cancelled", exception: exception); + } + catch (WebSocketException exception) + { + this._logger?.LogTrace(message: "Closing server web socket before disposal raised web socket exception", exception: exception); + } + finally + { + if (connectedClient.Socket?.State != WebSocketState.Closed) + { + connectedClient.Socket?.Abort(); + } + + connectedClient.Socket?.Dispose(); + + // Remove client from dictionary when done + this._clients.TryRemove(requestId, out _); + } + } + + private async Task CloseAllSocketsAsync() + { + // Close all active sockets before disposing + foreach (var client in this._clients.Values) + { + if (client.Socket?.State == WebSocketState.Open) + { + await client.Socket.CloseAsync(WebSocketCloseStatus.NormalClosure, "Closing", this._mainCancellationTokenSource.Token); + } + } + } + + public async ValueTask DisposeAsync() + { + try + { + this._serverIsRunning = false; + await this.CloseAllSocketsAsync(); // Close all sockets before finishing the tasks + await Task.WhenAll(this._runningTasks).ConfigureAwait(false); + this._socketCancellationTokenSource.Cancel(); + this._mainCancellationTokenSource.Cancel(); + } + catch (OperationCanceledException exception) + { + this._logger?.LogTrace(message: "\"Disposing web socket test server raised operation cancel exception", exception: exception); + } + finally + { + this._httpListener.Stop(); + this._httpListener.Close(); + this._socketCancellationTokenSource.Dispose(); + this._mainCancellationTokenSource.Dispose(); + } + } + + public void Dispose() + { + this.DisposeAsync().AsTask().GetAwaiter().GetResult(); + } +} diff --git a/dotnet/src/Skills/Skills.UnitTests/XunitHelpers/XunitLogger.cs b/dotnet/src/Connectors/Connectors.UnitTests/XunitLogger.cs similarity index 94% rename from 
dotnet/src/Skills/Skills.UnitTests/XunitHelpers/XunitLogger.cs rename to dotnet/src/Connectors/Connectors.UnitTests/XunitLogger.cs index f2c7e2848c87..1521dac75bed 100644 --- a/dotnet/src/Skills/Skills.UnitTests/XunitHelpers/XunitLogger.cs +++ b/dotnet/src/Connectors/Connectors.UnitTests/XunitLogger.cs @@ -4,7 +4,7 @@ using Microsoft.Extensions.Logging; using Xunit.Abstractions; -namespace SemanticKernel.Skills.UnitTests.XunitHelpers; +namespace SemanticKernel.Connectors.UnitTests; /// /// A logger that writes to the Xunit test output diff --git a/dotnet/src/Extensions/Planning.ActionPlanner/ActionPlanner.cs b/dotnet/src/Extensions/Planning.ActionPlanner/ActionPlanner.cs index bba382bd2c96..3fed5f5c8008 100644 --- a/dotnet/src/Extensions/Planning.ActionPlanner/ActionPlanner.cs +++ b/dotnet/src/Extensions/Planning.ActionPlanner/ActionPlanner.cs @@ -34,6 +34,11 @@ public sealed class ActionPlanner private const string StopSequence = "#END-OF-PLAN"; private const string SkillName = "this"; + /// + /// The regular expression for extracting serialized plan. + /// + private static readonly Regex PlanRegex = new("^[^{}]*(((?'Open'{)[^{}]*)+((?'Close-Open'})[^{}]*)+)*(?(Open)(?!))", RegexOptions.Singleline | RegexOptions.Compiled); + // Planner semantic function private readonly ISKFunction _plannerFunction; @@ -221,8 +226,7 @@ No parameters. /// Instance of object deserialized from extracted JSON. private ActionPlanResponse? 
ParsePlannerResult(SKContext plannerResult) { - Regex planRegex = new("^[^{}]*(((?'Open'{)[^{}]*)+((?'Close-Open'})[^{}]*)+)*(?(Open)(?!))", RegexOptions.Singleline); - Match match = planRegex.Match(plannerResult.ToString()); + Match match = PlanRegex.Match(plannerResult.ToString()); if (match.Success && match.Groups["Close"].Length > 0) { diff --git a/dotnet/src/Extensions/Planning.SequentialPlanner/ISequentialPlanner.cs b/dotnet/src/Extensions/Planning.SequentialPlanner/ISequentialPlanner.cs new file mode 100644 index 000000000000..23ee168f55f1 --- /dev/null +++ b/dotnet/src/Extensions/Planning.SequentialPlanner/ISequentialPlanner.cs @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Threading.Tasks; + +namespace Microsoft.SemanticKernel.Planning.Sequential; + +/// +/// Interface for planner that uses semantic function to create a sequential plan. +/// +public interface ISequentialPlanner +{ + /// + /// Create a plan for a goal. + /// + /// The goal to create a plan for. + /// The plan. + /// Thrown when the plan cannot be created. + Task CreatePlanAsync(string goal); +} diff --git a/dotnet/src/Extensions/Planning.SequentialPlanner/InstrumentedSequentialPlanner.cs b/dotnet/src/Extensions/Planning.SequentialPlanner/InstrumentedSequentialPlanner.cs new file mode 100644 index 000000000000..7a0326241a7e --- /dev/null +++ b/dotnet/src/Extensions/Planning.SequentialPlanner/InstrumentedSequentialPlanner.cs @@ -0,0 +1,100 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics; +using System.Diagnostics.Metrics; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; + +namespace Microsoft.SemanticKernel.Planning.Sequential; + +/// +/// Instrumented planner that uses semantic function to create a sequential plan. +/// Captures planner-related logs and metrics. 
+/// +public sealed class InstrumentedSequentialPlanner : ISequentialPlanner +{ + /// + /// Initialize a new instance of the class. + /// + /// Instance of to decorate. + /// Optional logger. + public InstrumentedSequentialPlanner( + ISequentialPlanner planner, + ILogger? logger = null) + { + this._planner = planner; + this._logger = logger ?? NullLogger.Instance; + } + + /// + public async Task CreatePlanAsync(string goal) + { + using var activity = s_activitySource.StartActivity("SequentialPlanner.CreatePlan"); + + this._logger.LogInformation("Plan creation started."); + + // Sensitive data, logging as trace, disabled by default + this._logger.LogTrace("Plan Goal: {Goal}", goal); + + var stopwatch = new Stopwatch(); + + try + { + stopwatch.Start(); + + var plan = await this._planner.CreatePlanAsync(goal).ConfigureAwait(false); + + stopwatch.Stop(); + + this._logger.LogInformation("Plan creation status: {Status}", "Success"); + + this._logger.LogInformation("Created plan: \n {Plan}", plan.ToSafePlanString()); + + // Sensitive data, logging as trace, disabled by default + this._logger.LogTrace("Created plan with details: \n {Plan}", plan.ToPlanString()); + + return plan; + } + catch (Exception ex) + { + this._logger.LogInformation("Plan creation status: {Status}", "Failed"); + this._logger.LogError(ex, "Plan creation exception details: {Message}", ex.Message); + + throw; + } + finally + { + this._logger.LogInformation("Plan creation finished in {ExecutionTime}ms.", stopwatch.ElapsedMilliseconds); + + s_createPlanExecutionTime.Record(stopwatch.ElapsedMilliseconds); + } + } + + #region private ================================================================================ + + private readonly ISequentialPlanner _planner; + private readonly ILogger _logger; + + /// + /// Instance of for planner-related activities. 
+ /// + private static ActivitySource s_activitySource = new(typeof(InstrumentedSequentialPlanner).FullName); + + /// + /// Instance of for planner-related metrics. + /// + private static Meter s_meter = new(typeof(InstrumentedSequentialPlanner).FullName); + + /// + /// Instance of to record plan creation execution time. + /// + private static Histogram s_createPlanExecutionTime = + s_meter.CreateHistogram( + name: "SK.SequentialPlanner.CreatePlan.ExecutionTime", + unit: "ms", + description: "Execution time of plan creation"); + + #endregion +} diff --git a/dotnet/src/Extensions/Planning.SequentialPlanner/SequentialPlanner.cs b/dotnet/src/Extensions/Planning.SequentialPlanner/SequentialPlanner.cs index bf272d6f6219..5c9c8179f319 100644 --- a/dotnet/src/Extensions/Planning.SequentialPlanner/SequentialPlanner.cs +++ b/dotnet/src/Extensions/Planning.SequentialPlanner/SequentialPlanner.cs @@ -15,7 +15,7 @@ namespace Microsoft.SemanticKernel.Planning; /// /// A planner that uses semantic function to create a sequential plan. /// -public sealed class SequentialPlanner +public sealed class SequentialPlanner : ISequentialPlanner { private const string StopSequence = ""; @@ -49,12 +49,7 @@ public SequentialPlanner( this._context = kernel.CreateNewContext(); } - /// - /// Create a plan for a goal. - /// - /// The goal to create a plan for. - /// The plan. - /// Thrown when the plan cannot be created. + /// public async Task CreatePlanAsync(string goal) { if (string.IsNullOrEmpty(goal)) diff --git a/dotnet/src/Extensions/Planning.SequentialPlanner/SequentialPlannerExtensions.cs b/dotnet/src/Extensions/Planning.SequentialPlanner/SequentialPlannerExtensions.cs new file mode 100644 index 000000000000..4b212c618c8a --- /dev/null +++ b/dotnet/src/Extensions/Planning.SequentialPlanner/SequentialPlannerExtensions.cs @@ -0,0 +1,21 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using Microsoft.Extensions.Logging; + +namespace Microsoft.SemanticKernel.Planning.Sequential; + +/// +/// Extension methods for class. +/// +public static class SequentialPlannerExtensions +{ + /// + /// Returns decorated instance of with enabled instrumentation. + /// + /// Instance of to decorate. + /// Optional logger. + public static ISequentialPlanner WithInstrumentation(this ISequentialPlanner planner, ILogger? logger = null) + { + return new InstrumentedSequentialPlanner(planner, logger); + } +} diff --git a/dotnet/src/Extensions/Planning.StepwisePlanner/StepwisePlanner.cs b/dotnet/src/Extensions/Planning.StepwisePlanner/StepwisePlanner.cs index b108b88b366f..068db8de3e16 100644 --- a/dotnet/src/Extensions/Planning.StepwisePlanner/StepwisePlanner.cs +++ b/dotnet/src/Extensions/Planning.StepwisePlanner/StepwisePlanner.cs @@ -34,10 +34,12 @@ public class StepwisePlanner /// The semantic kernel instance. /// Optional configuration object /// Optional prompt override + /// Optional prompt config override public StepwisePlanner( IKernel kernel, StepwisePlannerConfig? config = null, - string? prompt = null) + string? prompt = null, + PromptTemplateConfig? promptUserConfig = null) { Verify.NotNull(kernel); this._kernel = kernel; @@ -45,12 +47,16 @@ public StepwisePlanner( this.Config = config ?? new(); this.Config.ExcludedSkills.Add(RestrictedSkillName); - var promptConfig = new PromptTemplateConfig(); + var promptConfig = promptUserConfig ?? new PromptTemplateConfig(); var promptTemplate = prompt ?? 
EmbeddedResource.Read("Skills.StepwiseStep.skprompt.txt"); - string promptConfigString = EmbeddedResource.Read("Skills.StepwiseStep.config.json"); - if (!string.IsNullOrEmpty(promptConfigString)) + + if (promptUserConfig == null) { - promptConfig = PromptTemplateConfig.FromJson(promptConfigString); + string promptConfigString = EmbeddedResource.Read("Skills.StepwiseStep.config.json"); + if (!string.IsNullOrEmpty(promptConfigString)) + { + promptConfig = PromptTemplateConfig.FromJson(promptConfigString); + } } promptConfig.Completion.MaxTokens = this.Config.MaxTokens; diff --git a/dotnet/src/IntegrationTests/Connectors/HuggingFace/TextCompletion/HuggingFaceTextCompletionTests.cs b/dotnet/src/IntegrationTests/Connectors/HuggingFace/TextCompletion/HuggingFaceTextCompletionTests.cs index 08d62f90b22d..0a09b2a055c1 100644 --- a/dotnet/src/IntegrationTests/Connectors/HuggingFace/TextCompletion/HuggingFaceTextCompletionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/HuggingFace/TextCompletion/HuggingFaceTextCompletionTests.cs @@ -36,8 +36,8 @@ public async Task HuggingFaceLocalAndRemoteTextCompletionAsync() // Arrange const string Input = "This is test"; - using var huggingFaceLocal = new HuggingFaceTextCompletion(Model, endpoint: Endpoint); - using var huggingFaceRemote = new HuggingFaceTextCompletion(Model, apiKey: this.GetApiKey()); + var huggingFaceLocal = new HuggingFaceTextCompletion(Model, endpoint: Endpoint); + var huggingFaceRemote = new HuggingFaceTextCompletion(Model, apiKey: this.GetApiKey()); // Act var localResponse = await huggingFaceLocal.CompleteAsync(Input, new CompleteRequestSettings()); @@ -60,7 +60,7 @@ public async Task RemoteHuggingFaceTextCompletionWithCustomHttpClientAsync() using var httpClient = new HttpClient(); httpClient.BaseAddress = new Uri("https://api-inference.huggingface.co/models"); - using var huggingFaceRemote = new HuggingFaceTextCompletion(Model, apiKey: this.GetApiKey(), httpClient: httpClient); + var huggingFaceRemote = new 
HuggingFaceTextCompletion(Model, apiKey: this.GetApiKey(), httpClient: httpClient); // Act var remoteResponse = await huggingFaceRemote.CompleteAsync(Input, new CompleteRequestSettings()); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Chroma/ChromaMemoryStoreTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Chroma/ChromaMemoryStoreTests.cs index 6ba48d4953f8..62b43eb1a183 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Chroma/ChromaMemoryStoreTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Chroma/ChromaMemoryStoreTests.cs @@ -379,6 +379,28 @@ public async Task ItCanUpsertDifferentMemoryRecordsWithSameKeyMultipleTimesAsync this.AssertMemoryRecordEqual(expectedRecord2, actualRecord2); } + [Theory(Skip = SkipReason)] + [InlineData(true)] + [InlineData(false)] + public async Task ItProcessesBooleanValuesCorrectlyAsync(bool isReference) + { + // Arrange + var collectionName = this.GetRandomCollectionName(); + var metadata = this.GetRandomMemoryRecordMetadata(isReference: isReference); + var expectedRecord = this.GetRandomMemoryRecord(metadata: metadata); + + await this._chromaMemoryStore.CreateCollectionAsync(collectionName); + + // Act + var createdRecordKey = await this._chromaMemoryStore.UpsertAsync(collectionName, expectedRecord); + var actualRecord = await this._chromaMemoryStore.GetAsync(collectionName, createdRecordKey, true); + + // Assert + Assert.NotNull(actualRecord); + + Assert.Equal(expectedRecord.Metadata.IsReference, actualRecord.Metadata.IsReference); + } + public void Dispose() { this.Dispose(true); @@ -429,5 +451,28 @@ private MemoryRecord GetRandomMemoryRecord(string? key = null, Embedding? key: recordKey); } + private MemoryRecord GetRandomMemoryRecord(MemoryRecordMetadata metadata, Embedding? embedding = null) + { + var recordEmbedding = embedding ?? 
new Embedding(new[] { 1f, 3f, 5f }); + + return MemoryRecord.FromMetadata( + metadata: metadata, + embedding: recordEmbedding, + key: metadata.Id); + } + + private MemoryRecordMetadata GetRandomMemoryRecordMetadata(bool isReference = false, string? key = null) + { + var recordKey = key ?? Guid.NewGuid().ToString(); + + return new MemoryRecordMetadata( + isReference: isReference, + id: recordKey, + text: "text-" + Guid.NewGuid().ToString(), + description: "description-" + Guid.NewGuid().ToString(), + externalSourceName: "source-name-" + Guid.NewGuid().ToString(), + additionalMetadata: "metadata-" + Guid.NewGuid().ToString()); + } + #endregion } diff --git a/dotnet/src/IntegrationTests/Connectors/Oobabooga/OobaboogaTextCompletionTests.cs b/dotnet/src/IntegrationTests/Connectors/Oobabooga/OobaboogaTextCompletionTests.cs new file mode 100644 index 000000000000..78d98dafc1ba --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Oobabooga/OobaboogaTextCompletionTests.cs @@ -0,0 +1,110 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Net.WebSockets; +using System.Text; +using System.Text.RegularExpressions; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.SemanticKernel.AI.TextCompletion; +using Microsoft.SemanticKernel.Connectors.AI.Oobabooga.TextCompletion; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.Oobabooga; + +/// +/// Integration tests for . 
+/// +public sealed class OobaboogaTextCompletionTests : IDisposable +{ + private const string Endpoint = "http://localhost"; + private const int BlockingPort = 5000; + private const int StreamingPort = 5005; + + private readonly IConfigurationRoot _configuration; + private List _webSockets = new(); + private Func _webSocketFactory; + + public OobaboogaTextCompletionTests() + { + // Load configuration + this._configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .Build(); + this._webSocketFactory = () => + { + var toReturn = new ClientWebSocket(); + this._webSockets.Add(toReturn); + return toReturn; + }; + } + + private const string Input = " My name is"; + + [Fact(Skip = "This test is for manual verification.")] + public async Task OobaboogaLocalTextCompletionAsync() + { + var oobaboogaLocal = new OobaboogaTextCompletion( + endpoint: new Uri(Endpoint), + blockingPort: BlockingPort); + + // Act + var localResponse = await oobaboogaLocal.CompleteAsync(Input, new CompleteRequestSettings() + { + Temperature = 0.01, + MaxTokens = 7, + TopP = 0.1, + }); + + AssertAcceptableResponse(localResponse); + } + + [Fact(Skip = "This test is for manual verification.")] + public async Task OobaboogaLocalTextCompletionStreamingAsync() + { + var oobaboogaLocal = new OobaboogaTextCompletion( + endpoint: new Uri(Endpoint), + streamingPort: StreamingPort, + webSocketFactory: this._webSocketFactory); + + // Act + var localResponse = oobaboogaLocal.CompleteStreamAsync(Input, new CompleteRequestSettings() + { + Temperature = 0.01, + MaxTokens = 7, + TopP = 0.1, + }); + + StringBuilder stringBuilder = new(); + await foreach (var result in localResponse) + { + stringBuilder.Append(result); + } + + var resultsMerged = stringBuilder.ToString(); + AssertAcceptableResponse(resultsMerged); + } + + private 
static void AssertAcceptableResponse(string localResponse) + { + // Assert + Assert.NotNull(localResponse); + // Depends on the target LLM obviously, but most LLMs should propose an arbitrary surname preceded by a white space, including the start prompt or not + // ie " My name is" => " John (...)" or " My name is" => " My name is John (...)". + // Here are a couple LLMs that were tested successfully: gpt2, aisquared_dlite-v1-355m, bigscience_bloomz-560m, eachadea_vicuna-7b-1.1, TheBloke_WizardLM-30B-GPTQ etc. + // A few will return an empty string, but well those shouldn't be used for integration tests. + var expectedRegex = new Regex(@"\s\w+.*"); + Assert.Matches(expectedRegex, localResponse); + } + + public void Dispose() + { + foreach (ClientWebSocket clientWebSocket in this._webSockets) + { + clientWebSocket.Dispose(); + } + } +} diff --git a/dotnet/src/IntegrationTests/Connectors/Weaviate/WeaviateMemoryStoreTests.cs b/dotnet/src/IntegrationTests/Connectors/Weaviate/WeaviateMemoryStoreTests.cs index 787cb28fd2db..cac97cedeb00 100644 --- a/dotnet/src/IntegrationTests/Connectors/Weaviate/WeaviateMemoryStoreTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Weaviate/WeaviateMemoryStoreTests.cs @@ -291,8 +291,5 @@ private async Task DeleteAllClassesAsync() public void Dispose() { this.httpClient.Dispose(); -#pragma warning disable CS0618 // Type or member is obsolete - this.weaviateMemoryStore.Dispose(); -#pragma warning restore CS0618 // Type or member is obsolete } } diff --git a/dotnet/src/IntegrationTests/Fakes/EmailSkillFake.cs b/dotnet/src/IntegrationTests/Fakes/EmailSkillFake.cs index 695f265dd87d..5b54f167b9cd 100644 --- a/dotnet/src/IntegrationTests/Fakes/EmailSkillFake.cs +++ b/dotnet/src/IntegrationTests/Fakes/EmailSkillFake.cs @@ -25,11 +25,11 @@ public Task GetEmailAddressAsync( { if (string.IsNullOrEmpty(input)) { - logger.LogDebug("Returning hard coded email for {0}", input); + logger.LogTrace("Returning hard coded email for {0}", input); return 
Task.FromResult("johndoe1234@example.com"); } - logger.LogDebug("Returning dynamic email for {0}", input); + logger.LogTrace("Returning dynamic email for {0}", input); return Task.FromResult($"{input}@example.com"); } diff --git a/dotnet/src/IntegrationTests/IntegrationTests.csproj b/dotnet/src/IntegrationTests/IntegrationTests.csproj index 28efab76da42..7443e4100df9 100644 --- a/dotnet/src/IntegrationTests/IntegrationTests.csproj +++ b/dotnet/src/IntegrationTests/IntegrationTests.csproj @@ -34,6 +34,7 @@ + diff --git a/dotnet/src/IntegrationTests/Planning/StepwisePlanner/StepwisePlannerTests.cs b/dotnet/src/IntegrationTests/Planning/StepwisePlanner/StepwisePlannerTests.cs index a47c026a1086..766ff8c5239b 100644 --- a/dotnet/src/IntegrationTests/Planning/StepwisePlanner/StepwisePlannerTests.cs +++ b/dotnet/src/IntegrationTests/Planning/StepwisePlanner/StepwisePlannerTests.cs @@ -49,7 +49,7 @@ public void CanCreateStepwisePlan(bool useChatModel, string prompt, string expec // Arrange bool useEmbeddings = false; IKernel kernel = this.InitializeKernel(useEmbeddings, useChatModel); - using var bingConnector = new BingConnector(this._bingApiKey); + var bingConnector = new BingConnector(this._bingApiKey); var webSearchEngineSkill = new WebSearchEngineSkill(bingConnector); kernel.ImportSkill(webSearchEngineSkill, "WebSearch"); kernel.ImportSkill(new TimeSkill(), "time"); @@ -75,7 +75,7 @@ public async void CanExecuteStepwisePlan(bool useChatModel, string prompt) // Arrange bool useEmbeddings = false; IKernel kernel = this.InitializeKernel(useEmbeddings, useChatModel); - using var bingConnector = new BingConnector(this._bingApiKey); + var bingConnector = new BingConnector(this._bingApiKey); var webSearchEngineSkill = new WebSearchEngineSkill(bingConnector); kernel.ImportSkill(webSearchEngineSkill, "WebSearch"); kernel.ImportSkill(new TimeSkill(), "time"); diff --git a/dotnet/src/IntegrationTests/README.md b/dotnet/src/IntegrationTests/README.md index 
00186f6309f6..9edb16e85896 100644 --- a/dotnet/src/IntegrationTests/README.md +++ b/dotnet/src/IntegrationTests/README.md @@ -8,6 +8,7 @@ 3. **HuggingFace API key**: see https://huggingface.co/docs/huggingface_hub/guides/inference for details. 4. **Azure Bing Web Search API**: go to [Bing Web Search API](https://www.microsoft.com/en-us/bing/apis/bing-web-search-api) and select `Try Now` to get started. +5. **Oobabooga Text generation web UI**: Follow the [installation instructions](https://github.com/oobabooga/text-generation-webui#installation) to get a local Oobabooga instance running. Follow the [download instructions](https://github.com/oobabooga/text-generation-webui#downloading-models) to install a test model e.g. `python download-model.py gpt2`. Follow the [starting instructions](https://github.com/oobabooga/text-generation-webui#starting-the-web-ui) to start your local instance, enabling API, e.g. `python server.py --model gpt2 --listen --api --api-blocking-port "5000" --api-streaming-port "5005"`. Note that `--model` parameter is optional and models can be downloaded and hot swapped using exclusively the web UI, making it easy to test various models. -5. **Postgres**: start a postgres with the [pgvector](https://github.com/pgvector/pgvector) extension installed. You can easily do it using the docker image [ankane/pgvector](https://hub.docker.com/r/ankane/pgvector). +6. **Postgres**: start a postgres with the [pgvector](https://github.com/pgvector/pgvector) extension installed. You can easily do it using the docker image [ankane/pgvector](https://hub.docker.com/r/ankane/pgvector). 
## Setup diff --git a/dotnet/src/IntegrationTests/WebSkill/WebSkillTests.cs b/dotnet/src/IntegrationTests/WebSkill/WebSkillTests.cs index 86cbdfa106fa..99e7094f16d9 100644 --- a/dotnet/src/IntegrationTests/WebSkill/WebSkillTests.cs +++ b/dotnet/src/IntegrationTests/WebSkill/WebSkillTests.cs @@ -46,7 +46,7 @@ public async Task BingSkillTestAsync(string prompt, string expectedAnswerContain IKernel kernel = Kernel.Builder.WithLogger(this._logger).Build(); using XunitLogger connectorLogger = new(this._output); - using BingConnector connector = new(this._bingApiKey, connectorLogger); + BingConnector connector = new(this._bingApiKey, connectorLogger); Assert.NotEmpty(this._bingApiKey); WebSearchEngineSkill skill = new(connector); @@ -68,7 +68,7 @@ public async Task WebFileDownloadSkillFileTestAsync() // Arrange IKernel kernel = Kernel.Builder.WithLogger(this._logger).Build(); using XunitLogger skillLogger = new(this._output); - using var skill = new WebFileDownloadSkill(skillLogger); + var skill = new WebFileDownloadSkill(skillLogger); var download = kernel.ImportSkill(skill, "WebFileDownload"); string fileWhereToSaveWebPage = Path.GetTempFileName(); var contextVariables = new ContextVariables("https://www.microsoft.com"); diff --git a/dotnet/src/InternalUtilities/src/Linq/AsyncEnumerable.cs b/dotnet/src/InternalUtilities/src/Linq/AsyncEnumerable.cs index 24bfdeffaa32..fc14a9e7d0be 100644 --- a/dotnet/src/InternalUtilities/src/Linq/AsyncEnumerable.cs +++ b/dotnet/src/InternalUtilities/src/Linq/AsyncEnumerable.cs @@ -5,8 +5,10 @@ using System.Threading.Tasks; #pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously +#pragma warning disable CA1510 // Use 'ArgumentNullException.ThrowIfNull' (.NET 8) -namespace System.Linq; // for compatibility with System.Linq.Async.nupkg +// Used for compatibility with System.Linq.Async Nuget pkg +namespace System.Linq; internal static class AsyncEnumerable { @@ -92,9 +94,48 @@ public static async 
ValueTask CountAsync(this IAsyncEnumerable source { checked { count++; } } + return count; } + /// + /// Determines whether any element of an async-enumerable sequence satisfies a condition. + /// + /// The type of the elements in the source sequence. + /// An async-enumerable sequence whose elements to apply the predicate to. + /// A function to test each element for a condition. + /// The optional cancellation token to be used for cancelling the sequence at any time. + /// An async-enumerable sequence containing a single element determining whether any elements in the source sequence pass the test in the specified predicate. + /// or is null. + /// The return type of this operator differs from the corresponding operator on IEnumerable in order to retain asynchronous behavior. + public static ValueTask AnyAsync(this IAsyncEnumerable source, Func predicate, CancellationToken cancellationToken = default) + { + if (source == null) + { + throw new ArgumentNullException(nameof(source)); + } + + if (predicate == null) + { + throw new ArgumentNullException(nameof(predicate)); + } + + return Core(source, predicate, cancellationToken); + + static async ValueTask Core(IAsyncEnumerable source, Func predicate, CancellationToken cancellationToken) + { + await foreach (var item in source.WithCancellation(cancellationToken).ConfigureAwait(false)) + { + if (predicate(item)) + { + return true; + } + } + + return false; + } + } + private sealed class EmptyAsyncEnumerable : IAsyncEnumerable, IAsyncEnumerator { public static readonly EmptyAsyncEnumerable Instance = new(); diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatHistory.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatHistory.cs index a46ff96a04c1..bb020a279288 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatHistory.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatHistory.cs @@ -1,6 +1,5 @@ // Copyright (c) Microsoft. All rights reserved. 
-using System; using System.Collections.Generic; #pragma warning disable CA1710 @@ -12,15 +11,6 @@ namespace Microsoft.SemanticKernel.AI.ChatCompletion; /// public class ChatHistory : List { - [Obsolete("This enumeration is deprecated, use AuthorRole struct instead")] - public enum AuthorRoles - { - Unknown = -1, - System = 0, - User = 1, - Assistant = 2, - } - private sealed class ChatMessage : ChatMessageBase { public ChatMessage(AuthorRole authorRole, string content) : base(authorRole, content) @@ -28,44 +18,11 @@ public ChatMessage(AuthorRole authorRole, string content) : base(authorRole, con } } - /// - /// Chat message representation - /// - [Obsolete("This class is deprecated, using instances of this class will not be supported")] - public class Message : ChatMessageBase - { - /// - /// Role of the message author, e.g. user/assistant/system - /// - public AuthorRoles AuthorRole { get; set; } - - /// - /// Create a new instance - /// - /// Role of message author - /// Message content - public Message(AuthorRoles authorRole, string content) : base(new AuthorRole(authorRole.ToString()), content) - { - this.AuthorRole = authorRole; - } - } - /// /// List of messages in the chat /// public List Messages => this; - /// - /// Add a message to the chat history - /// - /// Role of the message author - /// Message content - [Obsolete("This method with AuthorRoles enumeration is deprecated, use AddMessage(AuthorRole authorRole, string content) instead")] - public void AddMessage(AuthorRoles authorRole, string content) - { - this.Add(new Message(authorRole, content)); - } - /// /// Add a message to the chat history /// diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/TextCompletion/ITextCompletionResult.cs b/dotnet/src/SemanticKernel.Abstractions/AI/TextCompletion/ITextCompletionResult.cs deleted file mode 100644 index 51d456666553..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/AI/TextCompletion/ITextCompletionResult.cs +++ /dev/null @@ -1,13 +0,0 @@ 
-// Copyright (c) Microsoft. All rights reserved. - -using System; - -namespace Microsoft.SemanticKernel.AI.TextCompletion; - -/// -/// Interface for text completion results -/// -[Obsolete("This interface is deprecated and will be removed in one of the next SK SDK versions. Use the ITextResult interface instead.")] -public interface ITextCompletionResult : ITextResult -{ -} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/TextCompletion/ITextCompletionStreamingResult.cs b/dotnet/src/SemanticKernel.Abstractions/AI/TextCompletion/ITextCompletionStreamingResult.cs deleted file mode 100644 index af0a429f1474..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/AI/TextCompletion/ITextCompletionStreamingResult.cs +++ /dev/null @@ -1,13 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; - -namespace Microsoft.SemanticKernel.AI.TextCompletion; - -/// -/// Interface for text completion streaming results -/// -[Obsolete("This interface is deprecated and will be removed in one of the next SK SDK versions. 
Use the ITextStreamingResult interface instead.")] -public interface ITextCompletionStreamingResult : ITextStreamingResult -{ -} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/TextCompletion/TextCompletionExtensions.cs b/dotnet/src/SemanticKernel.Abstractions/AI/TextCompletion/TextCompletionExtensions.cs index 3172ee86fd38..31d468bfe647 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/TextCompletion/TextCompletionExtensions.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/TextCompletion/TextCompletionExtensions.cs @@ -54,6 +54,7 @@ public static async IAsyncEnumerable CompleteStreamAsync(this ITextCompl { yield return word; } + yield break; } } diff --git a/dotnet/src/SemanticKernel.Abstractions/Diagnostics/ITelemetryService.cs b/dotnet/src/SemanticKernel.Abstractions/Diagnostics/ITelemetryService.cs deleted file mode 100644 index f7c592ace343..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Diagnostics/ITelemetryService.cs +++ /dev/null @@ -1,17 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -namespace Microsoft.SemanticKernel.Diagnostics; - -/// -/// Interface for common telemetry events to track actions across the semantic kernel. -/// -public interface ITelemetryService -{ - /// - /// Creates a telemetry event when a skill function is executed. - /// - /// Name of the skill - /// Skill function name - /// If the skill executed successfully - void TrackSkillFunction(string skillName, string functionName, bool success); -} diff --git a/dotnet/src/SemanticKernel.Abstractions/KernelConfig.cs b/dotnet/src/SemanticKernel.Abstractions/KernelConfig.cs index 44ad00c1a89c..1594fe926113 100644 --- a/dotnet/src/SemanticKernel.Abstractions/KernelConfig.cs +++ b/dotnet/src/SemanticKernel.Abstractions/KernelConfig.cs @@ -1,11 +1,5 @@ // Copyright (c) Microsoft. All rights reserved. 
-using System; -using System.Collections.Generic; -using Microsoft.SemanticKernel.AI.ChatCompletion; -using Microsoft.SemanticKernel.AI.Embeddings; -using Microsoft.SemanticKernel.AI.ImageGeneration; -using Microsoft.SemanticKernel.AI.TextCompletion; using Microsoft.SemanticKernel.Reliability; namespace Microsoft.SemanticKernel; @@ -26,157 +20,6 @@ public sealed class KernelConfig /// public HttpRetryConfig DefaultHttpRetryConfig { get; private set; } = new(); - /// - /// Text completion service factories - /// - [Obsolete("This property is deprecated and will be removed in one of the next SK SDK versions.")] - public Dictionary> TextCompletionServices { get; } = new(); - - /// - /// Chat completion service factories - /// - [Obsolete("This property is deprecated and will be removed in one of the next SK SDK versions.")] - public Dictionary> ChatCompletionServices { get; } = new(); - - /// - /// Text embedding generation service factories - /// - [Obsolete("This property is deprecated and will be removed in one of the next SK SDK versions.")] - public Dictionary>> TextEmbeddingGenerationServices { get; } = new(); - - /// - /// Image generation service factories - /// - [Obsolete("This property is deprecated and will be removed in one of the next SK SDK versions.")] - public Dictionary> ImageGenerationServices { get; } = new(); - - /// - /// Default name used when binding services if the user doesn't provide a custom value - /// - internal string DefaultServiceId => "__SK_DEFAULT"; - - /// - /// Add to the list a service for text completion, e.g. Azure OpenAI Text Completion. - /// - /// Function used to instantiate the service object - /// Id used to identify the service - /// Current object instance - /// Failure if a service with the same id already exists - [Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions. 
Please use one of the WithAIService extension methods in the KernelBuilder class instead.")] - public KernelConfig AddTextCompletionService( - Func serviceFactory, - string? serviceId = null) - { - if (serviceId != null && serviceId.Equals(this.DefaultServiceId, StringComparison.OrdinalIgnoreCase)) - { - throw new KernelException( - KernelException.ErrorCodes.InvalidServiceConfiguration, - $"The service id '{serviceId}' is reserved, please use a different name"); - } - - serviceId ??= this.DefaultServiceId; - - this.TextCompletionServices[serviceId] = serviceFactory; - if (this.TextCompletionServices.Count == 1) - { - this.TextCompletionServices[this.DefaultServiceId] = serviceFactory; - } - - return this; - } - - /// - /// Add to the list a service for chat completion, e.g. OpenAI ChatGPT. - /// - /// Function used to instantiate the service object - /// Id used to identify the service - /// Current object instance - /// Failure if a service with the same id already exists - [Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions. Please use one of the WithAIService extension methods in the KernelBuilder class instead.")] - public KernelConfig AddChatCompletionService( - Func serviceFactory, - string? serviceId = null) - { - if (serviceId != null && serviceId.Equals(this.DefaultServiceId, StringComparison.OrdinalIgnoreCase)) - { - throw new KernelException( - KernelException.ErrorCodes.InvalidServiceConfiguration, - $"The service id '{serviceId}' is reserved, please use a different name"); - } - - serviceId ??= this.DefaultServiceId; - - this.ChatCompletionServices[serviceId] = serviceFactory; - if (this.ChatCompletionServices.Count == 1) - { - this.ChatCompletionServices[this.DefaultServiceId] = serviceFactory; - } - - return this; - } - - /// - /// Add to the list a service for text embedding generation, e.g. Azure OpenAI Text Embedding. 
- /// - /// Function used to instantiate the service object - /// Id used to identify the service - /// Current object instance - /// Failure if a service with the same id already exists - [Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions. Please use one of the WithAIService extension methods in the KernelBuilder class instead.")] - public KernelConfig AddTextEmbeddingGenerationService( - Func> serviceFactory, - string? serviceId = null) - { - if (serviceId != null && serviceId.Equals(this.DefaultServiceId, StringComparison.OrdinalIgnoreCase)) - { - throw new KernelException( - KernelException.ErrorCodes.InvalidServiceConfiguration, - $"The service id '{serviceId}' is reserved, please use a different name"); - } - - serviceId ??= this.DefaultServiceId; - - this.TextEmbeddingGenerationServices[serviceId] = serviceFactory; - if (this.TextEmbeddingGenerationServices.Count == 1) - { - this.TextEmbeddingGenerationServices[this.DefaultServiceId] = serviceFactory; - } - - return this; - } - - /// - /// Add to the list a service for image generation, e.g. OpenAI DallE. - /// - /// Function used to instantiate the service object - /// Id used to identify the service - /// Current object instance - /// Failure if a service with the same id already exists - [Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions. Please use one of the WithAIService extension methods in the KernelBuilder class instead.")] - public KernelConfig AddImageGenerationService( - Func serviceFactory, - string? 
serviceId = null) - { - if (serviceId != null && serviceId.Equals(this.DefaultServiceId, StringComparison.OrdinalIgnoreCase)) - { - throw new KernelException( - KernelException.ErrorCodes.InvalidServiceConfiguration, - $"The service id '{serviceId}' is reserved, please use a different name"); - } - - serviceId ??= this.DefaultServiceId; - - this.ImageGenerationServices[serviceId] = serviceFactory; - if (this.ImageGenerationServices.Count == 1) - { - this.ImageGenerationServices[this.DefaultServiceId] = serviceFactory; - } - - return this; - } - - #region Set - /// /// Set the http retry handler factory to use for the kernel. /// @@ -202,83 +45,4 @@ public KernelConfig SetDefaultHttpRetryConfig(HttpRetryConfig? httpRetryConfig) return this; } - - /// - /// Set the default completion service to use for the kernel. - /// - /// Identifier of completion service to use. - /// The updated kernel configuration. - /// Thrown if the requested service doesn't exist. - [Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions. Please use one of the WithDefaultAIService extension methods in the KernelBuilder class instead.")] - public KernelConfig SetDefaultTextCompletionService(string serviceId) - { - if (!this.TextCompletionServices.ContainsKey(serviceId)) - { - throw new KernelException( - KernelException.ErrorCodes.ServiceNotFound, - $"A text completion service id '{serviceId}' doesn't exist"); - } - - this.TextCompletionServices[this.DefaultServiceId] = this.TextCompletionServices[serviceId]; - return this; - } - - /// - /// Set the default embedding service to use for the kernel. - /// - /// Identifier of text embedding service to use. - /// The updated kernel configuration. - /// Thrown if the requested service doesn't exist. - [Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions. 
Please use one of the WithDefaultAIService extension methods in the KernelBuilder class instead.")] - public KernelConfig SetDefaultTextEmbeddingGenerationService(string serviceId) - { - if (!this.TextEmbeddingGenerationServices.ContainsKey(serviceId)) - { - throw new KernelException( - KernelException.ErrorCodes.ServiceNotFound, - $"A text embedding generation service id '{serviceId}' doesn't exist"); - } - - this.TextEmbeddingGenerationServices[this.DefaultServiceId] = this.TextEmbeddingGenerationServices[serviceId]; - return this; - } - - #endregion - - #region Remove - - /// - /// Remove all text completion services. - /// - /// The updated kernel configuration. - [Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions.")] - public KernelConfig RemoveAllTextCompletionServices() - { - this.TextCompletionServices.Clear(); - return this; - } - - /// - /// Remove all chat completion services. - /// - /// The updated kernel configuration. - [Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions.")] - public KernelConfig RemoveAllChatCompletionServices() - { - this.ChatCompletionServices.Clear(); - return this; - } - - /// - /// Remove all text embedding generation services. - /// - /// The updated kernel configuration. - [Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions.")] - public KernelConfig RemoveAllTextEmbeddingGenerationServices() - { - this.TextEmbeddingGenerationServices.Clear(); - return this; - } - - #endregion } diff --git a/dotnet/src/SemanticKernel.Abstractions/SkillDefinition/ObsoleteAttributes.cs b/dotnet/src/SemanticKernel.Abstractions/SkillDefinition/ObsoleteAttributes.cs deleted file mode 100644 index 94569040914e..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/SkillDefinition/ObsoleteAttributes.cs +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.ComponentModel; -using Microsoft.SemanticKernel.Diagnostics; - -namespace Microsoft.SemanticKernel.SkillDefinition; - -// TODO: Delete these attributes. - -[Obsolete("This attribute is deprecated and will be removed in one of the next SK SDK versions. Name a parameter \"input\" or use `[SKName(\"input\")]` on the parameter.")] -[EditorBrowsable(EditorBrowsableState.Never)] -[AttributeUsage(AttributeTargets.Method, AllowMultiple = false)] -public sealed class SKFunctionInputAttribute : Attribute -{ - public string Description { get; set; } = string.Empty; - - public string DefaultValue { get; set; } = string.Empty; - - public ParameterView ToParameterView() => - new() - { - Name = "input", - Description = this.Description, - DefaultValue = this.DefaultValue - }; -} - -[Obsolete("This attribute is deprecated and will be removed in one of the next SK SDK versions. Use `[SKName(\"FunctionName\")]`.")] -[EditorBrowsable(EditorBrowsableState.Never)] -[AttributeUsage(AttributeTargets.Method, AllowMultiple = false)] -public sealed class SKFunctionNameAttribute : Attribute -{ - public SKFunctionNameAttribute(string name) - { - Verify.ValidFunctionName(name); - this.Name = name; - } - - public string Name { get; } -} - -[Obsolete("This attribute is deprecated and will be removed in one of the next SK SDK versions. 
Use the DescriptionAttribute, DefaultValueAttribute, and SKNameAttribute instead.")] -[EditorBrowsable(EditorBrowsableState.Never)] -[AttributeUsage(AttributeTargets.Method, AllowMultiple = true)] -public sealed class SKFunctionContextParameterAttribute : Attribute -{ - private string _name = ""; - - public string Name - { - get => this._name; - set - { - Verify.ValidFunctionParamName(value); - this._name = value; - } - } - - public string Description { get; set; } = string.Empty; - - public string DefaultValue { get; set; } = string.Empty; - - public ParameterView ToParameterView() - { - if (string.IsNullOrWhiteSpace(this.Name)) - { - throw new InvalidOperationException($"The {nameof(SKFunctionContextParameterAttribute)}'s Name must be non-null and not composed entirely of whitespace."); - } - - return new ParameterView - { - Name = this.Name, - Description = this.Description, - DefaultValue = this.DefaultValue - }; - } -} diff --git a/dotnet/src/SemanticKernel.Abstractions/SkillDefinition/SKFunctionAttribute.cs b/dotnet/src/SemanticKernel.Abstractions/SkillDefinition/SKFunctionAttribute.cs index c2bfc0f0cdbd..94fca1020f36 100644 --- a/dotnet/src/SemanticKernel.Abstractions/SkillDefinition/SKFunctionAttribute.cs +++ b/dotnet/src/SemanticKernel.Abstractions/SkillDefinition/SKFunctionAttribute.cs @@ -55,19 +55,4 @@ public sealed class SKFunctionAttribute : Attribute public SKFunctionAttribute() { } - - /// - /// Initializes the attribute with the specified description. - /// - /// Description of the function to be used by a planner to auto-discover functions. 
- [Obsolete("This constructor is deprecated and will be removed in one of the next SK SDK versions.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public SKFunctionAttribute(string description) - { - this.Description = description; - } - - [Obsolete("This property is deprecated and will be removed in one of the next SK SDK versions.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public string Description { get; } = null!; } diff --git a/dotnet/src/SemanticKernel.UnitTests/KernelConfigTests.cs b/dotnet/src/SemanticKernel.UnitTests/KernelConfigTests.cs index 150791140641..bafd04ca17bd 100644 --- a/dotnet/src/SemanticKernel.UnitTests/KernelConfigTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/KernelConfigTests.cs @@ -74,28 +74,4 @@ public void HttpRetryHandlerFactoryIsSetToDefaultHttpRetryHandlerFactoryIfNotSet // Assert Assert.IsType(config.HttpHandlerFactory); } - - [Fact] - [System.Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions.")] - public void ItFailsWhenSetNonExistentTextCompletionService() - { - var target = new KernelConfig(); - var exception = Assert.Throws(() => - { - target.SetDefaultTextCompletionService("azure"); - }); - Assert.Equal(KernelException.ErrorCodes.ServiceNotFound, exception.ErrorCode); - } - - [Fact] - [System.Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions.")] - public void ItFailsWhenSetNonExistentEmbeddingService() - { - var target = new KernelConfig(); - var exception = Assert.Throws(() => - { - target.SetDefaultTextEmbeddingGenerationService("azure"); - }); - Assert.Equal(KernelException.ErrorCodes.ServiceNotFound, exception.ErrorCode); - } } diff --git a/dotnet/src/SemanticKernel.UnitTests/KernelTests.cs b/dotnet/src/SemanticKernel.UnitTests/KernelTests.cs index 9877838222a9..9f73071acf8e 100644 --- a/dotnet/src/SemanticKernel.UnitTests/KernelTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/KernelTests.cs @@ -19,35 +19,6 @@ namespace 
SemanticKernel.UnitTests; public class KernelTests { - [Fact] - [System.Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions.")] - public void ItProvidesAccessToFunctionsViaSkillCollectionObsolete() - { - // Arrange - var kernel = KernelBuilder.Create(); - var factory = new Mock>(); - kernel.Config.AddTextCompletionService(factory.Object); - - var nativeSkill = new MySkill(); - kernel.CreateSemanticFunction(promptTemplate: "Tell me a joke", functionName: "joker", skillName: "jk", description: "Nice fun"); - kernel.ImportSkill(nativeSkill, "mySk"); - - // Act - FunctionsView data = kernel.Skills.GetFunctionsView(); - - // Assert - 3 functions, var name is not case sensitive - Assert.True(data.IsSemantic("jk", "joker")); - Assert.True(data.IsSemantic("JK", "JOKER")); - Assert.False(data.IsNative("jk", "joker")); - Assert.False(data.IsNative("JK", "JOKER")); - Assert.True(data.IsNative("mySk", "sayhello")); - Assert.True(data.IsNative("MYSK", "SayHello")); - Assert.True(data.IsNative("mySk", "ReadSkillCollectionAsync")); - Assert.True(data.IsNative("MYSK", "readskillcollectionasync")); - Assert.Single(data.SemanticFunctions["Jk"]); - Assert.Equal(3, data.NativeFunctions["mySk"].Count); - } - [Fact] public void ItProvidesAccessToFunctionsViaSkillCollection() { diff --git a/dotnet/src/SemanticKernel.UnitTests/SkillDefinition/SKFunctionTests2.cs b/dotnet/src/SemanticKernel.UnitTests/SkillDefinition/SKFunctionTests2.cs index 0f1840ea2b87..db791d814507 100644 --- a/dotnet/src/SemanticKernel.UnitTests/SkillDefinition/SKFunctionTests2.cs +++ b/dotnet/src/SemanticKernel.UnitTests/SkillDefinition/SKFunctionTests2.cs @@ -6,7 +6,6 @@ using System.Reflection; using System.Threading.Tasks; using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Orchestration; using Microsoft.SemanticKernel.SkillDefinition; using Moq; @@ -900,40 +899,6 @@ public async Task ItThrowsWhenItFailsToConvertAnArgument() 
AssertExtensions.AssertIsArgumentOutOfRange(result.LastException, "g", context.Variables["g"]); } - [Obsolete("This test tests obsolete functionality and should be removed when that functionality is removed.")] - [Fact] - public async Task ItStillSupportsObsoleteSKFunctionAttributes() - { - [SKFunction("Something something")] - [SKFunctionInput(Description = "Awesome input")] - [SKFunctionName("NotTheAddMethodYouAreLookingFor")] - [SKFunctionContextParameter(Name = "y", Description = "Awesome additional input", DefaultValue = "42")] - static string Add(string x, SKContext context) => - (int.Parse(x, CultureInfo.InvariantCulture) + - int.Parse(context["y"], CultureInfo.InvariantCulture)).ToString(CultureInfo.InvariantCulture); - - // Arrange - var context = Kernel.Builder.Build().CreateNewContext(); - context.Variables.Set("input", "1"); - context.Variables.Set("y", "2"); - - // Act/Assert - var func = SKFunction.FromNativeMethod(Method(Add)); - Assert.NotNull(func); - var parameters = func.Describe().Parameters; - context = await func.InvokeAsync(context); - - // Assert - Assert.Equal("NotTheAddMethodYouAreLookingFor", func.Name); - Assert.Equal("Something something", func.Description); - Assert.Equal("input", parameters[0].Name); - Assert.Equal("Awesome input", parameters[0].Description); - Assert.Equal("y", parameters[1].Name); - Assert.Equal("Awesome additional input", parameters[1].Description); - Assert.Equal("42", parameters[1].DefaultValue); - Assert.Equal("3", context.Variables.Input); - } - private static MethodInfo Method(Delegate method) { return method.Method; diff --git a/dotnet/src/SemanticKernel/Kernel.cs b/dotnet/src/SemanticKernel/Kernel.cs index 18536a6dd898..c0b9ae60069f 100644 --- a/dotnet/src/SemanticKernel/Kernel.cs +++ b/dotnet/src/SemanticKernel/Kernel.cs @@ -7,9 +7,6 @@ using System.Threading.Tasks; using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.AI.ChatCompletion; -using 
Microsoft.SemanticKernel.AI.Embeddings; -using Microsoft.SemanticKernel.AI.ImageGeneration; using Microsoft.SemanticKernel.AI.TextCompletion; using Microsoft.SemanticKernel.Diagnostics; using Microsoft.SemanticKernel.Memory; @@ -238,60 +235,6 @@ public T GetService(string? name = null) where T : IAIService return service; } - if (typeof(T) == typeof(ITextCompletion)) - { - name ??= this.Config.DefaultServiceId; - -#pragma warning disable CS0618 // Type or member is obsolete - if (!this.Config.TextCompletionServices.TryGetValue(name, out Func factory)) - { - throw new KernelException(KernelException.ErrorCodes.ServiceNotFound, $"'{name}' text completion service not available"); - } - - var serv = factory.Invoke(this); - return (T)serv; - } - - if (typeof(T) == typeof(IEmbeddingGeneration)) - { - name ??= this.Config.DefaultServiceId; - - if (!this.Config.TextEmbeddingGenerationServices.TryGetValue(name, out Func> factory)) - { - throw new KernelException(KernelException.ErrorCodes.ServiceNotFound, $"'{name}' text embedding service not available"); - } - - var serv = factory.Invoke(this); - return (T)serv; - } - - if (typeof(T) == typeof(IChatCompletion)) - { - name ??= this.Config.DefaultServiceId; - - if (!this.Config.ChatCompletionServices.TryGetValue(name, out Func factory)) - { - throw new KernelException(KernelException.ErrorCodes.ServiceNotFound, $"'{name}' chat completion service not available"); - } - - var serv = factory.Invoke(this); - return (T)serv; - } - - if (typeof(T) == typeof(IImageGeneration)) - { - name ??= this.Config.DefaultServiceId; - - if (!this.Config.ImageGenerationServices.TryGetValue(name, out Func factory)) - { - throw new KernelException(KernelException.ErrorCodes.ServiceNotFound, $"'{name}' image generation service not available"); - } - - var serv = factory.Invoke(this); - return (T)serv; - } -#pragma warning restore CS0618 // Type or member is obsolete - throw new KernelException(KernelException.ErrorCodes.ServiceNotFound, $"Service 
of type {typeof(T)} and name {name ?? ""} not registered."); } diff --git a/dotnet/src/SemanticKernel/Planning/Plan.cs b/dotnet/src/SemanticKernel/Planning/Plan.cs index 541fc2a38b49..684dc88bee7a 100644 --- a/dotnet/src/SemanticKernel/Planning/Plan.cs +++ b/dotnet/src/SemanticKernel/Planning/Plan.cs @@ -3,6 +3,8 @@ using System; using System.Collections.Generic; using System.Diagnostics; +using System.Diagnostics.Metrics; +using System.Globalization; using System.Linq; using System.Text.Json; using System.Text.Json.Serialization; @@ -231,6 +233,7 @@ public Task RunNextStepAsync(IKernel kernel, ContextVariables variables, C kernel.Skills, kernel.Log, cancellationToken); + return this.InvokeNextStepAsync(context); } @@ -250,7 +253,13 @@ public async Task InvokeNextStepAsync(SKContext context) var functionVariables = this.GetNextStepVariables(context.Variables, step); // Execute the step - var functionContext = new SKContext(functionVariables, context.Memory, context.Skills, context.Log, context.CancellationToken); + var functionContext = new SKContext( + functionVariables, + context.Memory, + context.Skills, + context.Log, + context.CancellationToken); + var result = await step.InvokeAsync(functionContext).ConfigureAwait(false); var resultValue = result.Result.Trim(); @@ -328,13 +337,10 @@ public async Task InvokeAsync( { if (this.Function is not null) { - var result = await this.Function.InvokeAsync(context, settings).ConfigureAwait(false); + var result = await this.InstrumentedInvokeAsync(this.Function, context, settings).ConfigureAwait(false); if (result.ErrorOccurred) { - result.Log.LogError( - result.LastException, - "Something went wrong in plan step {0}.{1}:'{2}'", this.SkillName, this.Name, context.LastErrorDescription); return result; } @@ -587,6 +593,57 @@ private ContextVariables GetNextStepVariables(ContextVariables variables, Plan s return stepVariables; } + private async Task InstrumentedInvokeAsync( + ISKFunction function, + SKContext context, + 
CompleteRequestSettings? settings = null) + { + using var activity = s_activitySource.StartActivity($"{this.SkillName}.{this.Name}"); + + context.Log.LogInformation("{SkillName}.{StepName}: Step execution started.", this.SkillName, this.Name); + + var stopwatch = new Stopwatch(); + + stopwatch.Start(); + + var result = await function.InvokeAsync(context, settings).ConfigureAwait(false); + + stopwatch.Stop(); + + if (!result.ErrorOccurred) + { + context.Log.LogInformation( + "{SkillName}.{StepName}: Step execution status: {Status}.", + this.SkillName, this.Name, "Success"); + } + else + { + context.Log.LogInformation( + "{SkillName}.{StepName}: Step execution status: {Status}.", + this.SkillName, this.Name, "Failed"); + + context.Log.LogError( + result.LastException, + "Something went wrong in plan step {SkillName}.{StepName}:'{ErrorDescription}'", + this.SkillName, this.Name, context.LastErrorDescription); + } + + context.Log.LogInformation( + "{SkillName}.{StepName}: Step execution finished in {ExecutionTime}ms.", + this.SkillName, this.Name, stopwatch.ElapsedMilliseconds); + + var stepExecutionTimeMetricName = string.Format(CultureInfo.InvariantCulture, StepExecutionTimeMetricFormat, this.SkillName, this.Name); + + var stepExecutionTimeHistogram = s_meter.CreateHistogram( + name: stepExecutionTimeMetricName, + unit: "ms", + description: "Plan step execution time"); + + stepExecutionTimeHistogram.Record(stopwatch.ElapsedMilliseconds); + + return result; + } + private void SetFunction(ISKFunction function) { this.Function = function; @@ -625,4 +682,20 @@ private string DebuggerDisplay return display; } } + + #region Instrumentation + + private const string StepExecutionTimeMetricFormat = "SK.{0}.{1}.ExecutionTime"; + + /// + /// Instance of for plan-related activities. + /// + private static ActivitySource s_activitySource = new(typeof(Plan).FullName); + + /// + /// Instance of for planner-related metrics. 
+ /// + private static Meter s_meter = new(typeof(Plan).FullName); + + #endregion } diff --git a/dotnet/src/SemanticKernel/Planning/PlanExtensions.cs b/dotnet/src/SemanticKernel/Planning/PlanExtensions.cs new file mode 100644 index 000000000000..c2b134577ae5 --- /dev/null +++ b/dotnet/src/SemanticKernel/Planning/PlanExtensions.cs @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Linq; + +namespace Microsoft.SemanticKernel.Planning; + +/// +/// Extension methods for type. +/// +public static class PlanExtensions +{ + /// + /// Constructs string representation of without sensitive data. + /// + /// Instance of for string construction. + /// Optional indentation. + public static string ToSafePlanString(this Plan plan, string indent = " ") + { + string planString = string.Join("\n", plan.Steps.Select(step => + { + if (step.Steps.Count == 0) + { + string skillName = step.SkillName; + string stepName = step.Name; + + return $"{indent}{indent}- {string.Join(".", skillName, stepName)}"; + } + + return step.ToSafePlanString(indent + indent); + })); + + return planString; + } + + /// + /// Constructs string representation of . + /// + /// Instance of for string construction. + /// Optional indentation. + public static string ToPlanString(this Plan plan, string indent = " ") + { + string planString = string.Join("\n", plan.Steps.Select(step => + { + if (step.Steps.Count == 0) + { + string skillName = step.SkillName; + string stepName = step.Name; + + string parameters = string.Join(" ", step.Parameters.Select(param => $"{param.Key}='{param.Value}'")); + if (!string.IsNullOrEmpty(parameters)) + { + parameters = $" {parameters}"; + } + + string? 
outputs = step.Outputs.FirstOrDefault(); + if (!string.IsNullOrEmpty(outputs)) + { + outputs = $" => {outputs}"; + } + + return $"{indent}{indent}- {string.Join(".", skillName, stepName)}{parameters}{outputs}"; + } + + return step.ToPlanString(indent + indent); + })); + + return planString; + } +} diff --git a/dotnet/src/SemanticKernel/SemanticKernel.csproj b/dotnet/src/SemanticKernel/SemanticKernel.csproj index 65428d996429..32764f5e4829 100644 --- a/dotnet/src/SemanticKernel/SemanticKernel.csproj +++ b/dotnet/src/SemanticKernel/SemanticKernel.csproj @@ -25,6 +25,10 @@ + + + + diff --git a/dotnet/src/SemanticKernel/SkillDefinition/SKFunction.cs b/dotnet/src/SemanticKernel/SkillDefinition/SKFunction.cs index f522577cf186..4c26ff23c443 100644 --- a/dotnet/src/SemanticKernel/SkillDefinition/SKFunction.cs +++ b/dotnet/src/SemanticKernel/SkillDefinition/SKFunction.cs @@ -24,7 +24,6 @@ namespace Microsoft.SemanticKernel.SkillDefinition; -#pragma warning disable CS0618 // Temporarily suppressing Obsoletion warnings until obsolete attributes for compatibility are removed #pragma warning disable format /// @@ -393,7 +392,6 @@ private static MethodDetails GetMethodDetails( // We don't apply any heuristics to the value supplied by SKName so that it can always be used // as a definitive override. string? functionName = method.GetCustomAttribute(inherit: true)?.Name?.Trim(); - functionName ??= method.GetCustomAttribute(inherit: true)?.Name?.Trim(); // TODO: SKFunctionName is deprecated. Remove. if (string.IsNullOrEmpty(functionName)) { functionName = SanitizeMetadataName(method.Name!); @@ -410,7 +408,6 @@ private static MethodDetails GetMethodDetails( SKFunctionAttribute? functionAttribute = method.GetCustomAttribute(inherit: true); string? description = method.GetCustomAttribute(inherit: true)?.Description; - description ??= functionAttribute?.Description; // TODO: SKFunctionAttribute.Description is deprecated. Remove. 
var result = new MethodDetails { @@ -493,9 +490,6 @@ private static (Func(inherit: true) .Select(x => new ParameterView(x.Name ?? string.Empty, x.Description ?? string.Empty, x.DefaultValue ?? string.Empty))); - stringParameterViews.AddRange(method - .GetCustomAttributes(inherit: true) - .Select(x => x.ToParameterView())); // TODO: SKFunctionContextParameterAttribute is deprecated. Remove. // Check for param names conflict Verify.ParametersUniqueness(stringParameterViews); @@ -558,14 +552,6 @@ private static (Func, ParameterView?) GetParameterMarshalerD ThrowForInvalidSignatureIf(name.Length == 0, method, $"Parameter {parameter.Name}'s context attribute defines an invalid name."); ThrowForInvalidSignatureIf(sawFirstParameter && nameIsInput, method, "Only the first parameter may be named 'input'"); - // TODO: Remove this if block for SKFunctionInputAttribute. It's deprecated. - if (!sawFirstParameter && - method.GetCustomAttribute(inherit: true) is SKFunctionInputAttribute inputAttr) - { - sawFirstParameter = true; - return (static (SKContext ctx) => ctx.Variables.Input, inputAttr.ToParameterView()); - } - // Use either the parameter's optional default value as contained in parameter metadata (e.g. `string s = "hello"`) // or an override from an applied SKParameter attribute. Note that a default value may be null. 
DefaultValueAttribute defaultValueAttribute = parameter.GetCustomAttribute(inherit: true); diff --git a/dotnet/src/SemanticKernel/TemplateEngine/Blocks/CodeBlock.cs b/dotnet/src/SemanticKernel/TemplateEngine/Blocks/CodeBlock.cs index 76e7d16ad9e0..7f99ff12e2fe 100644 --- a/dotnet/src/SemanticKernel/TemplateEngine/Blocks/CodeBlock.cs +++ b/dotnet/src/SemanticKernel/TemplateEngine/Blocks/CodeBlock.cs @@ -119,8 +119,9 @@ private async Task RenderFunctionCallAsync(FunctionIdBlock fBlock, SKCon // If the code syntax is {{functionName 'value'}} use "value" instead of $input if (this._tokens.Count > 1) { - // TODO: PII + // Sensitive data, logging as trace, disabled by default this.Log.LogTrace("Passing variable/value: `{0}`", this._tokens[1].Content); + string input = ((ITextRendering)this._tokens[1]).Render(contextClone.Variables); // Keep previous trust information when updating the input contextClone.Variables.Update(input); diff --git a/dotnet/src/SemanticKernel/TemplateEngine/PromptTemplateEngine.cs b/dotnet/src/SemanticKernel/TemplateEngine/PromptTemplateEngine.cs index e78acd69ae6c..ac990cf4c407 100644 --- a/dotnet/src/SemanticKernel/TemplateEngine/PromptTemplateEngine.cs +++ b/dotnet/src/SemanticKernel/TemplateEngine/PromptTemplateEngine.cs @@ -91,8 +91,9 @@ public async Task RenderAsync(IList blocks, SKContext context) result.Append(await t.ConfigureAwait(false)); } - // TODO: remove PII, allow tracing prompts differently - this._log.LogDebug("Rendered prompt: {0}", result); + // Sensitive data, logging as trace, disabled by default + this._log.LogTrace("Rendered prompt: {0}", result); + return result.ToString(); } diff --git a/dotnet/src/Skills/Skills.Core/HttpSkill.cs b/dotnet/src/Skills/Skills.Core/HttpSkill.cs index f64c31cd50f2..a5f5c1ed72b0 100644 --- a/dotnet/src/Skills/Skills.Core/HttpSkill.cs +++ b/dotnet/src/Skills/Skills.Core/HttpSkill.cs @@ -1,6 +1,5 @@ // Copyright (c) Microsoft. All rights reserved. 
-using System; using System.ComponentModel; using System.Net.Http; using System.Threading; @@ -23,7 +22,7 @@ namespace Microsoft.SemanticKernel.Skills.Core; /// [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1054:URI-like parameters should not be strings", Justification = "Semantic Kernel operates on strings")] -public sealed class HttpSkill : IDisposable +public sealed class HttpSkill { private readonly HttpClient _client; @@ -107,12 +106,4 @@ private async Task SendRequestAsync(string uri, HttpMethod method, HttpC using var response = await this._client.SendAsync(request, cancellationToken).ConfigureAwait(false); return await response.Content.ReadAsStringAsync().ConfigureAwait(false); } - - /// - /// Disposes resources - /// - [Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions. There is no longer a need to invoke this method, and its call can be safely omitted.")] - public void Dispose() - { - } } diff --git a/dotnet/src/Skills/Skills.Core/TextMemorySkill.cs b/dotnet/src/Skills/Skills.Core/TextMemorySkill.cs index 89d4322f6249..2b08d0cb5dde 100644 --- a/dotnet/src/Skills/Skills.Core/TextMemorySkill.cs +++ b/dotnet/src/Skills/Skills.Core/TextMemorySkill.cs @@ -74,7 +74,7 @@ public async Task RetrieveAsync( Verify.NotNullOrWhiteSpace(collection, $"{nameof(context)}.{nameof(context.Variables)}[{CollectionParam}]"); Verify.NotNullOrWhiteSpace(key, $"{nameof(context)}.{nameof(context.Variables)}[{KeyParam}]"); - context.Log.LogTrace("Recalling memory with key '{0}' from collection '{1}'", key, collection); + context.Log.LogDebug("Recalling memory with key '{0}' from collection '{1}'", key, collection); var memory = await context.Memory.GetAsync(collection, key).ConfigureAwait(false); @@ -105,7 +105,7 @@ public async Task RecallAsync( relevance ??= DefaultRelevance; limit ??= DefaultLimit; - context.Log.LogTrace("Searching memories in collection '{0}', relevance '{1}'", collection, relevance); + 
context.Log.LogDebug("Searching memories in collection '{0}', relevance '{1}'", collection, relevance); // Search memory List memories = await context.Memory @@ -119,7 +119,7 @@ public async Task RecallAsync( return string.Empty; } - context.Log.LogTrace("Done looking for memories in collection '{0}')", collection); + context.Log.LogDebug("Done looking for memories in collection '{0}')", collection); return limit == 1 ? memories[0].Metadata.Text : JsonSerializer.Serialize(memories.Select(x => x.Metadata.Text)); } @@ -145,7 +145,7 @@ public async Task SaveAsync( Verify.NotNullOrWhiteSpace(collection, $"{nameof(context)}.{nameof(context.Variables)}[{CollectionParam}]"); Verify.NotNullOrWhiteSpace(key, $"{nameof(context)}.{nameof(context.Variables)}[{KeyParam}]"); - context.Log.LogTrace("Saving memory to collection '{0}'", collection); + context.Log.LogDebug("Saving memory to collection '{0}'", collection); await context.Memory.SaveInformationAsync(collection, text: text, id: key).ConfigureAwait(false); } @@ -169,7 +169,7 @@ public async Task RemoveAsync( Verify.NotNullOrWhiteSpace(collection, $"{nameof(context)}.{nameof(context.Variables)}[{CollectionParam}]"); Verify.NotNullOrWhiteSpace(key, $"{nameof(context)}.{nameof(context.Variables)}[{KeyParam}]"); - context.Log.LogTrace("Removing memory from collection '{0}'", collection); + context.Log.LogDebug("Removing memory from collection '{0}'", collection); await context.Memory.RemoveAsync(collection, key).ConfigureAwait(false); } diff --git a/dotnet/src/Skills/Skills.Document/DocumentSkill.cs b/dotnet/src/Skills/Skills.Document/DocumentSkill.cs index 21429d7d3ba1..b88e68cac813 100644 --- a/dotnet/src/Skills/Skills.Document/DocumentSkill.cs +++ b/dotnet/src/Skills/Skills.Document/DocumentSkill.cs @@ -75,7 +75,7 @@ public async Task ReadTextAsync( [Description("Path to the file to read")] string filePath, CancellationToken cancellationToken = default) { - this._logger.LogInformation("Reading text from {0}", filePath); + 
this._logger.LogDebug("Reading text from {0}", filePath); using var stream = await this._fileSystemConnector.GetFileContentStreamAsync(filePath, cancellationToken).ConfigureAwait(false); return this._documentConnector.ReadText(stream); } @@ -97,17 +97,17 @@ public async Task AppendTextAsync( // If the document already exists, open it. If not, create it. if (await this._fileSystemConnector.FileExistsAsync(filePath, cancellationToken).ConfigureAwait(false)) { - this._logger.LogInformation("Writing text to file {0}", filePath); + this._logger.LogDebug("Writing text to file {0}", filePath); using Stream stream = await this._fileSystemConnector.GetWriteableFileStreamAsync(filePath, cancellationToken).ConfigureAwait(false); this._documentConnector.AppendText(stream, text); } else { - this._logger.LogInformation("File does not exist. Creating file at {0}", filePath); + this._logger.LogDebug("File does not exist. Creating file at {0}", filePath); using Stream stream = await this._fileSystemConnector.CreateFileAsync(filePath, cancellationToken).ConfigureAwait(false); this._documentConnector.Initialize(stream); - this._logger.LogInformation("Writing text to {0}", filePath); + this._logger.LogDebug("Writing text to {0}", filePath); this._documentConnector.AppendText(stream, text); } } diff --git a/dotnet/src/Skills/Skills.MsGraph/CalendarSkill.cs b/dotnet/src/Skills/Skills.MsGraph/CalendarSkill.cs index cca4ba9ccc00..63f34d2510ac 100644 --- a/dotnet/src/Skills/Skills.MsGraph/CalendarSkill.cs +++ b/dotnet/src/Skills/Skills.MsGraph/CalendarSkill.cs @@ -111,7 +111,8 @@ public async Task AddEventAsync( Attendees = attendees is not null ? 
attendees.Split(new[] { ',', ';' }, StringSplitOptions.RemoveEmptyEntries) : Enumerable.Empty(), }; - this._logger.LogInformation("Adding calendar event '{0}'", calendarEvent.Subject); + // Sensitive data, logging as trace, disabled by default + this._logger.LogTrace("Adding calendar event '{0}'", calendarEvent.Subject); await this._connector.AddEventAsync(calendarEvent).ConfigureAwait(false); } @@ -124,7 +125,7 @@ public async Task GetCalendarEventsAsync( [Description("Optional number of events to skip before retrieving results.")] int? skip = 0, CancellationToken cancellationToken = default) { - this._logger.LogInformation("Getting calendar events with query options top: '{0}', skip:'{1}'.", maxResults, skip); + this._logger.LogDebug("Getting calendar events with query options top: '{0}', skip:'{1}'.", maxResults, skip); const string SelectString = "start,subject,organizer,location"; diff --git a/dotnet/src/Skills/Skills.MsGraph/CloudDriveSkill.cs b/dotnet/src/Skills/Skills.MsGraph/CloudDriveSkill.cs index 6ad6070c3f8e..572d8317f2b1 100644 --- a/dotnet/src/Skills/Skills.MsGraph/CloudDriveSkill.cs +++ b/dotnet/src/Skills/Skills.MsGraph/CloudDriveSkill.cs @@ -53,7 +53,7 @@ public async Task GetFileContentAsync( using StreamReader sr = new(fileContentStream); string content = await sr.ReadToEndAsync().ConfigureAwait(false); - this._logger.LogDebug("File content: {0}", content); + return content; } diff --git a/dotnet/src/Skills/Skills.MsGraph/EmailSkill.cs b/dotnet/src/Skills/Skills.MsGraph/EmailSkill.cs index 45cd5298377f..f6f7222d686b 100644 --- a/dotnet/src/Skills/Skills.MsGraph/EmailSkill.cs +++ b/dotnet/src/Skills/Skills.MsGraph/EmailSkill.cs @@ -95,7 +95,8 @@ public async Task SendEmailAsync( throw new ArgumentException("Variable was null or whitespace", nameof(subject)); } - this._logger.LogInformation("Sending email to '{0}' with subject '{1}'", recipients, subject); + // Sensitive data, logging as trace, disabled by default + this._logger.LogTrace("Sending 
email to '{0}' with subject '{1}'", recipients, subject); string[] recipientList = recipients.Split(new[] { ',', ';' }, StringSplitOptions.RemoveEmptyEntries); await this._connector.SendEmailAsync(subject, content, recipientList, cancellationToken).ConfigureAwait(false); } @@ -109,7 +110,7 @@ public async Task GetEmailMessagesAsync( [Description("Optional number of message to skip before retrieving results.")] int? skip = 0, CancellationToken cancellationToken = default) { - this._logger.LogInformation("Getting email messages with query options top: '{0}', skip:'{1}'.", maxResults, skip); + this._logger.LogDebug("Getting email messages with query options top: '{0}', skip:'{1}'.", maxResults, skip); const string SelectString = "subject,receivedDateTime,bodyPreview"; diff --git a/dotnet/src/Skills/Skills.MsGraph/TaskListSkill.cs b/dotnet/src/Skills/Skills.MsGraph/TaskListSkill.cs index 76781c304a8a..550f1d6dadd7 100644 --- a/dotnet/src/Skills/Skills.MsGraph/TaskListSkill.cs +++ b/dotnet/src/Skills/Skills.MsGraph/TaskListSkill.cs @@ -90,7 +90,9 @@ public async Task AddTaskAsync( title: title, reminder: reminder); - this._logger.LogInformation("Adding task '{0}' to task list '{1}'", task.Title, defaultTaskList.Name); + // Sensitive data, logging as trace, disabled by default + this._logger.LogTrace("Adding task '{0}' to task list '{1}'", task.Title, defaultTaskList.Name); + await this._connector.AddTaskAsync(defaultTaskList.Id, task, cancellationToken).ConfigureAwait(false); } diff --git a/dotnet/src/Skills/Skills.OpenAPI/Extensions/KernelOpenApiExtensions.cs b/dotnet/src/Skills/Skills.OpenAPI/Extensions/KernelOpenApiExtensions.cs index ed63cdbb14de..e18515c10458 100644 --- a/dotnet/src/Skills/Skills.OpenAPI/Extensions/KernelOpenApiExtensions.cs +++ b/dotnet/src/Skills/Skills.OpenAPI/Extensions/KernelOpenApiExtensions.cs @@ -335,7 +335,7 @@ private static string ConvertOperationIdToValidFunctionName(string operationId, result += 
CultureInfo.CurrentCulture.TextInfo.ToTitleCase(formattedToken.ToLower(CultureInfo.CurrentCulture)); } - logger.LogInformation("Operation name \"{0}\" converted to \"{1}\" to comply with SK Function name requirements. Use \"{2}\" when invoking function.", operationId, result, result); + logger.LogDebug("Operation name \"{0}\" converted to \"{1}\" to comply with SK Function name requirements. Use \"{2}\" when invoking function.", operationId, result, result); return result; } diff --git a/dotnet/src/Skills/Skills.UnitTests/Core/HttpSkillTests.cs b/dotnet/src/Skills/Skills.UnitTests/Core/HttpSkillTests.cs index 8c743a395f11..fb99493fcd73 100644 --- a/dotnet/src/Skills/Skills.UnitTests/Core/HttpSkillTests.cs +++ b/dotnet/src/Skills/Skills.UnitTests/Core/HttpSkillTests.cs @@ -28,7 +28,7 @@ public class HttpSkillTests : IDisposable public void ItCanBeInstantiated() { // Act - Assert no exception occurs - using var skill = new HttpSkill(); + var skill = new HttpSkill(); } [Fact] @@ -36,7 +36,7 @@ public void ItCanBeImported() { // Arrange var kernel = KernelBuilder.Create(); - using var skill = new HttpSkill(); + var skill = new HttpSkill(); // Act - Assert no exception occurs e.g. 
due to reflection kernel.ImportSkill(skill, "http"); @@ -48,7 +48,7 @@ public async Task ItCanGetAsync() // Arrange var mockHandler = this.CreateMock(); using var client = new HttpClient(mockHandler.Object); - using var skill = new HttpSkill(client); + var skill = new HttpSkill(client); // Act var result = await skill.GetAsync(this._uriString); @@ -64,7 +64,7 @@ public async Task ItCanPostAsync() // Arrange var mockHandler = this.CreateMock(); using var client = new HttpClient(mockHandler.Object); - using var skill = new HttpSkill(client); + var skill = new HttpSkill(client); // Act var result = await skill.PostAsync(this._uriString, this._content); @@ -80,7 +80,7 @@ public async Task ItCanPutAsync() // Arrange var mockHandler = this.CreateMock(); using var client = new HttpClient(mockHandler.Object); - using var skill = new HttpSkill(client); + var skill = new HttpSkill(client); // Act var result = await skill.PutAsync(this._uriString, this._content); @@ -96,7 +96,7 @@ public async Task ItCanDeleteAsync() // Arrange var mockHandler = this.CreateMock(); using var client = new HttpClient(mockHandler.Object); - using var skill = new HttpSkill(client); + var skill = new HttpSkill(client); // Act var result = await skill.DeleteAsync(this._uriString); diff --git a/dotnet/src/Skills/Skills.UnitTests/Document/DocumentSkillTests.cs b/dotnet/src/Skills/Skills.UnitTests/Document/DocumentSkillTests.cs index 9251a1f61e31..cd6cb8c11698 100644 --- a/dotnet/src/Skills/Skills.UnitTests/Document/DocumentSkillTests.cs +++ b/dotnet/src/Skills/Skills.UnitTests/Document/DocumentSkillTests.cs @@ -4,9 +4,6 @@ using System.IO; using System.Threading; using System.Threading.Tasks; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Memory; -using Microsoft.SemanticKernel.Orchestration; using Microsoft.SemanticKernel.Skills.Document; using Microsoft.SemanticKernel.Skills.Document.FileSystem; using Moq; @@ -16,8 +13,6 @@ namespace 
SemanticKernel.Skills.UnitTests.Document; public class DocumentSkillTests { - private readonly SKContext _context = new(new ContextVariables(), NullMemory.Instance, null, NullLogger.Instance); - [Fact] public async Task ReadTextAsyncSucceedsAsync() { @@ -43,7 +38,6 @@ public async Task ReadTextAsyncSucceedsAsync() // Assert Assert.Equal(expectedText, actual); - Assert.False(this._context.ErrorOccurred); fileSystemConnectorMock.VerifyAll(); documentConnectorMock.VerifyAll(); } @@ -75,7 +69,6 @@ public async Task AppendTextAsyncFileExistsSucceedsAsync() await target.AppendTextAsync(anyText, anyFilePath); // Assert - Assert.False(this._context.ErrorOccurred); fileSystemConnectorMock.VerifyAll(); documentConnectorMock.VerifyAll(); } @@ -109,7 +102,6 @@ public async Task AppendTextAsyncFileDoesNotExistSucceedsAsync() await target.AppendTextAsync(anyText, anyFilePath); // Assert - Assert.False(this._context.ErrorOccurred); fileSystemConnectorMock.VerifyAll(); documentConnectorMock.VerifyAll(); } diff --git a/dotnet/src/Skills/Skills.UnitTests/MsGraph/CalendarSkillTests.cs b/dotnet/src/Skills/Skills.UnitTests/MsGraph/CalendarSkillTests.cs index eaaa5be61c67..f9447c499243 100644 --- a/dotnet/src/Skills/Skills.UnitTests/MsGraph/CalendarSkillTests.cs +++ b/dotnet/src/Skills/Skills.UnitTests/MsGraph/CalendarSkillTests.cs @@ -5,26 +5,16 @@ using System.Threading; using System.Threading.Tasks; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Orchestration; using Microsoft.SemanticKernel.Skills.MsGraph; using Microsoft.SemanticKernel.Skills.MsGraph.Models; using Moq; -using SemanticKernel.Skills.UnitTests.XunitHelpers; using SemanticKernel.UnitTests; using Xunit; -using Xunit.Abstractions; namespace SemanticKernel.Skills.UnitTests.MsGraph; -public class CalendarSkillTests : IDisposable +public class CalendarSkillTests { - private readonly XunitLogger _logger; - - public CalendarSkillTests(ITestOutputHelper output) - { - this._logger = new XunitLogger(output); - } 
- [Fact] public async Task AddEventAsyncSucceedsAsync() { @@ -260,19 +250,4 @@ public async Task AddEventAsyncWithoutSubjectFailsAsync() ArgumentException e = Assert.IsType(context.LastException); Assert.Equal("subject", e.ParamName); } - - protected virtual void Dispose(bool disposing) - { - if (disposing) - { - this._logger.Dispose(); - } - } - - public void Dispose() - { - // Do not change this code. Put cleanup code in 'Dispose(bool disposing)' method - this.Dispose(disposing: true); - GC.SuppressFinalize(this); - } } diff --git a/dotnet/src/Skills/Skills.UnitTests/MsGraph/CloudDriveSkillTests.cs b/dotnet/src/Skills/Skills.UnitTests/MsGraph/CloudDriveSkillTests.cs index 2cb682ac2cb2..811266e49183 100644 --- a/dotnet/src/Skills/Skills.UnitTests/MsGraph/CloudDriveSkillTests.cs +++ b/dotnet/src/Skills/Skills.UnitTests/MsGraph/CloudDriveSkillTests.cs @@ -5,28 +5,14 @@ using System.Text; using System.Threading; using System.Threading.Tasks; -using Microsoft.SemanticKernel.Memory; -using Microsoft.SemanticKernel.Orchestration; using Microsoft.SemanticKernel.Skills.MsGraph; using Moq; -using SemanticKernel.Skills.UnitTests.XunitHelpers; using Xunit; -using Xunit.Abstractions; namespace SemanticKernel.Skills.UnitTests.MsGraph; -public class CloudDriveSkillTests : IDisposable +public class CloudDriveSkillTests { - private readonly XunitLogger _logger; - private readonly SKContext _context; - private bool _disposedValue = false; - - public CloudDriveSkillTests(ITestOutputHelper output) - { - this._logger = new XunitLogger(output); - this._context = new SKContext(new ContextVariables(), NullMemory.Instance, null, this._logger, CancellationToken.None); - } - [Fact] public async Task UploadSmallFileAsyncSucceedsAsync() { @@ -88,24 +74,4 @@ public async Task GetFileContentAsyncSucceedsAsync() Assert.Equal(expectedContent, actual); connectorMock.VerifyAll(); } - - protected virtual void Dispose(bool disposing) - { - if (!this._disposedValue) - { - if (disposing) - { - 
this._logger.Dispose(); - } - - this._disposedValue = true; - } - } - - public void Dispose() - { - // Do not change this code. Put cleanup code in 'Dispose(bool disposing)' method - this.Dispose(disposing: true); - GC.SuppressFinalize(this); - } } diff --git a/dotnet/src/Skills/Skills.UnitTests/MsGraph/EmailSkillTests.cs b/dotnet/src/Skills/Skills.UnitTests/MsGraph/EmailSkillTests.cs index 5ed6b313c399..6b0c064d93d3 100644 --- a/dotnet/src/Skills/Skills.UnitTests/MsGraph/EmailSkillTests.cs +++ b/dotnet/src/Skills/Skills.UnitTests/MsGraph/EmailSkillTests.cs @@ -3,18 +3,14 @@ using System; using System.Threading; using System.Threading.Tasks; -using Microsoft.SemanticKernel.Orchestration; using Microsoft.SemanticKernel.Skills.MsGraph; using Moq; using Xunit; -using static Microsoft.SemanticKernel.Skills.MsGraph.EmailSkill; namespace SemanticKernel.Skills.UnitTests.MsGraph; public class EmailSkillTests { - private readonly SKContext _context = new(); - [Fact] public async Task SendEmailAsyncSucceedsAsync() { @@ -29,14 +25,10 @@ public async Task SendEmailAsyncSucceedsAsync() string anySubject = Guid.NewGuid().ToString(); string anyRecipient = Guid.NewGuid().ToString(); - this._context.Variables.Set(Parameters.Recipients, anyRecipient); - this._context.Variables.Set(Parameters.Subject, anySubject); - // Act await target.SendEmailAsync(anyContent, anyRecipient, anySubject); // Assert - Assert.False(this._context.ErrorOccurred); connectorMock.VerifyAll(); } @@ -92,7 +84,6 @@ public async Task GetMyEmailAddressAsyncSucceedsAsync() // Assert Assert.Equal(anyEmailAddress, actual); - Assert.False(this._context.ErrorOccurred); connectorMock.VerifyAll(); } } diff --git a/dotnet/src/Skills/Skills.UnitTests/MsGraph/OrganizationHierarchySkillTests.cs b/dotnet/src/Skills/Skills.UnitTests/MsGraph/OrganizationHierarchySkillTests.cs index 062415472516..74ed40c4f1ea 100644 --- a/dotnet/src/Skills/Skills.UnitTests/MsGraph/OrganizationHierarchySkillTests.cs +++ 
b/dotnet/src/Skills/Skills.UnitTests/MsGraph/OrganizationHierarchySkillTests.cs @@ -5,27 +5,14 @@ using System.Text.Json; using System.Threading; using System.Threading.Tasks; -using Microsoft.SemanticKernel.Orchestration; using Microsoft.SemanticKernel.Skills.MsGraph; using Moq; -using SemanticKernel.Skills.UnitTests.XunitHelpers; using Xunit; -using Xunit.Abstractions; namespace SemanticKernel.Skills.UnitTests.MsGraph; -public class OrganizationHierarchySkillTests : IDisposable +public class OrganizationHierarchySkillTests { - private readonly XunitLogger _logger; - private readonly SKContext _context; - private bool _disposedValue = false; - - public OrganizationHierarchySkillTests(ITestOutputHelper output) - { - this._logger = new XunitLogger(output); - this._context = new SKContext(logger: this._logger, cancellationToken: CancellationToken.None); - } - [Fact] public async Task GetMyDirectReportsEmailAsyncSucceedsAsync() { @@ -82,24 +69,4 @@ public async Task GetMyManagerNameAsyncSucceedsAsync() Assert.Equal(anyManagerName, actual); connectorMock.VerifyAll(); } - - protected virtual void Dispose(bool disposing) - { - if (!this._disposedValue) - { - if (disposing) - { - this._logger.Dispose(); - } - - this._disposedValue = true; - } - } - - public void Dispose() - { - // Do not change this code. 
Put cleanup code in 'Dispose(bool disposing)' method - this.Dispose(disposing: true); - GC.SuppressFinalize(this); - } } diff --git a/dotnet/src/Skills/Skills.UnitTests/MsGraph/TaskListSkillTests.cs b/dotnet/src/Skills/Skills.UnitTests/MsGraph/TaskListSkillTests.cs index 29040b502540..6988f053f83b 100644 --- a/dotnet/src/Skills/Skills.UnitTests/MsGraph/TaskListSkillTests.cs +++ b/dotnet/src/Skills/Skills.UnitTests/MsGraph/TaskListSkillTests.cs @@ -3,7 +3,6 @@ using System; using System.Threading; using System.Threading.Tasks; -using Microsoft.SemanticKernel.Orchestration; using Microsoft.SemanticKernel.Skills.MsGraph; using Microsoft.SemanticKernel.Skills.MsGraph.Models; using Moq; @@ -14,8 +13,6 @@ namespace SemanticKernel.Skills.UnitTests.MsGraph; public class TaskListSkillTests { - private readonly SKContext _context = new(); - private readonly TaskManagementTaskList _anyTaskList = new( id: Guid.NewGuid().ToString(), name: Guid.NewGuid().ToString()); @@ -46,7 +43,6 @@ public async Task AddTaskAsyncNoReminderSucceedsAsync() await target.AddTaskAsync(anyTitle); // Assert - Assert.False(this._context.ErrorOccurred); connectorMock.VerifyAll(); } @@ -71,7 +67,6 @@ public async Task AddTaskAsyncWithReminderSucceedsAsync() await target.AddTaskAsync(anyTitle, anyReminder); // Assert - Assert.False(this._context.ErrorOccurred); connectorMock.VerifyAll(); } diff --git a/dotnet/src/Skills/Skills.UnitTests/OpenAPI/JsonPathSkillTests.cs b/dotnet/src/Skills/Skills.UnitTests/OpenAPI/JsonPathSkillTests.cs index f35f39b1992a..9e0ab3a9d416 100644 --- a/dotnet/src/Skills/Skills.UnitTests/OpenAPI/JsonPathSkillTests.cs +++ b/dotnet/src/Skills/Skills.UnitTests/OpenAPI/JsonPathSkillTests.cs @@ -1,7 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. 
using System; -using Microsoft.SemanticKernel.Orchestration; using Microsoft.SemanticKernel.Skills.OpenAPI; using Xunit; @@ -47,9 +46,6 @@ public void GetJsonElementValueSucceeds(string jsonPath, string expected) { var target = new JsonPathSkill(); - ContextVariables variables = new(Json); - SKContext context = new(variables); - string actual = target.GetJsonElementValue(Json, jsonPath); Assert.Equal(expected, actual, StringComparer.OrdinalIgnoreCase); @@ -63,9 +59,6 @@ public void GetJsonPropertyValueSucceeds(string jsonPath, string expected) { var target = new JsonPathSkill(); - ContextVariables variables = new(Json); - SKContext context = new(variables); - string actual = target.GetJsonElements(Json, jsonPath); Assert.Equal(expected, actual, StringComparer.OrdinalIgnoreCase); diff --git a/dotnet/src/Skills/Skills.UnitTests/Web/WebSearchEngineSkillTests.cs b/dotnet/src/Skills/Skills.UnitTests/Web/WebSearchEngineSkillTests.cs index 2aa7d439a0cf..22ebb25d08b6 100644 --- a/dotnet/src/Skills/Skills.UnitTests/Web/WebSearchEngineSkillTests.cs +++ b/dotnet/src/Skills/Skills.UnitTests/Web/WebSearchEngineSkillTests.cs @@ -4,26 +4,14 @@ using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; -using Microsoft.SemanticKernel.Orchestration; using Microsoft.SemanticKernel.Skills.Web; using Moq; -using SemanticKernel.Skills.UnitTests.XunitHelpers; using Xunit; -using Xunit.Abstractions; namespace SemanticKernel.Skills.UnitTests.Web; -public sealed class WebSearchEngineSkillTests : IDisposable +public sealed class WebSearchEngineSkillTests { - private readonly SKContext _context; - private readonly XunitLogger _logger; - - public WebSearchEngineSkillTests(ITestOutputHelper output) - { - this._logger = new XunitLogger(output); - this._context = new SKContext(logger: this._logger); - } - [Fact] public async Task SearchAsyncSucceedsAsync() { @@ -42,12 +30,6 @@ public async Task SearchAsyncSucceedsAsync() await target.SearchAsync(anyQuery); // Assert 
- Assert.False(this._context.ErrorOccurred); connectorMock.VerifyAll(); } - - public void Dispose() - { - this._logger.Dispose(); - } } diff --git a/dotnet/src/Skills/Skills.UnitTests/XunitHelpers/TestConsoleLogger.cs b/dotnet/src/Skills/Skills.UnitTests/XunitHelpers/TestConsoleLogger.cs deleted file mode 100644 index d37252cbee87..000000000000 --- a/dotnet/src/Skills/Skills.UnitTests/XunitHelpers/TestConsoleLogger.cs +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using Microsoft.Extensions.Logging; - -namespace SemanticKernel.Skills.UnitTests.XunitHelpers; - -/// -/// Basic logger printing to console -/// -internal static class TestConsoleLogger -{ - internal static ILogger Log => LogFactory.CreateLogger(); - - private static ILoggerFactory LogFactory => s_loggerFactory.Value; - private static readonly Lazy s_loggerFactory = new(LogBuilder); - - private static ILoggerFactory LogBuilder() - { - return LoggerFactory.Create(builder => - { - builder.SetMinimumLevel(LogLevel.Trace); - // builder.AddFilter("Microsoft", LogLevel.Trace); - // builder.AddFilter("Microsoft", LogLevel.Debug); - // builder.AddFilter("Microsoft", LogLevel.Information); - // builder.AddFilter("Microsoft", LogLevel.Warning); - // builder.AddFilter("Microsoft", LogLevel.Error); - builder.AddConsole(); - }); - } -} diff --git a/dotnet/src/Skills/Skills.Web/Bing/BingConnector.cs b/dotnet/src/Skills/Skills.Web/Bing/BingConnector.cs index ee90ef276620..a9e52a55e5d9 100644 --- a/dotnet/src/Skills/Skills.Web/Bing/BingConnector.cs +++ b/dotnet/src/Skills/Skills.Web/Bing/BingConnector.cs @@ -18,7 +18,7 @@ namespace Microsoft.SemanticKernel.Skills.Web.Bing; /// /// Bing API connector. 
/// -public sealed class BingConnector : IWebSearchEngineConnector, IDisposable +public sealed class BingConnector : IWebSearchEngineConnector { private readonly ILogger _logger; private readonly HttpClient _httpClient; @@ -69,6 +69,8 @@ public async Task> SearchAsync(string query, int count = 1, this._logger.LogDebug("Response received: {0}", response.StatusCode); string json = await response.Content.ReadAsStringAsync().ConfigureAwait(false); + + // Sensitive data, logging as trace, disabled by default this._logger.LogTrace("Response content received: {0}", json); BingSearchResponse? data = JsonSerializer.Deserialize(json); @@ -96,19 +98,6 @@ private async Task SendGetRequest(Uri uri, CancellationToke return await this._httpClient.SendAsync(httpRequestMessage, cancellationToken).ConfigureAwait(false); } - [Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions. There is no longer a need to invoke this method, and its call can be safely omitted.")] - private void Dispose(bool disposing) - { - } - - [Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions. There is no longer a need to invoke this method, and its call can be safely omitted.")] - public void Dispose() - { - // Do not change this code. Put cleanup code in 'Dispose(bool disposing)' method - this.Dispose(disposing: true); - GC.SuppressFinalize(this); - } - [SuppressMessage("Performance", "CA1812:Internal class that is apparently never instantiated", Justification = "Class is instantiated through deserialization.")] private sealed class BingSearchResponse diff --git a/dotnet/src/Skills/Skills.Web/WebFileDownloadSkill.cs b/dotnet/src/Skills/Skills.Web/WebFileDownloadSkill.cs index bd8c34ed038e..9b8a7c5bd60e 100644 --- a/dotnet/src/Skills/Skills.Web/WebFileDownloadSkill.cs +++ b/dotnet/src/Skills/Skills.Web/WebFileDownloadSkill.cs @@ -16,7 +16,7 @@ namespace Microsoft.SemanticKernel.Skills.Web; /// /// Skill to download web files. 
/// -public sealed class WebFileDownloadSkill : IDisposable +public sealed class WebFileDownloadSkill { /// /// Skill parameter: where to save file. @@ -72,12 +72,4 @@ public async Task DownloadToFileAsync( await webStream.CopyToAsync(outputFileStream, 81920 /*same value used by default*/, cancellationToken).ConfigureAwait(false); } - - /// - /// Implementation of IDisposable. - /// - [Obsolete("This method is deprecated and will be removed in one of the next SK SDK versions. There is no longer a need to invoke this method, and its call can be safely omitted.")] - public void Dispose() - { - } } diff --git a/python/.env.example b/python/.env.example index 44f9ebbe8e54..a1989cae275f 100644 --- a/python/.env.example +++ b/python/.env.example @@ -3,6 +3,8 @@ OPENAI_ORG_ID="" AZURE_OPENAI_DEPLOYMENT_NAME="" AZURE_OPENAI_ENDPOINT="" AZURE_OPENAI_API_KEY="" +AZURE_COGNITIVE_SEARCH_ENDPOINT="" +AZURE_COGNITIVE_SEARCH_ADMIN_KEY="" PINECONE_API_KEY="" PINECONE_ENVIRONMENT="" POSTGRES_CONNECTION_STRING="" diff --git a/python/README.md b/python/README.md index f06aa204e291..da5be102bb73 100644 --- a/python/README.md +++ b/python/README.md @@ -99,6 +99,11 @@ Python notebooks: * [Using Context Variables to Build a Chat Experience](../samples/notebooks/python/04-context-variables-chat.ipynb) * [Introduction to planners](../samples/notebooks/python/05-using-the-planner.ipynb) * [Building Memory with Embeddings](../samples/notebooks/python/06-memory-and-embeddings.ipynb) +* [Using Hugging Face for Skills](../samples/notebooks/python/07-hugging-face-for-skills.ipynb) +* [Combining native functions and semantic functions](../samples/notebooks/python/08-native-function-inline.ipynb) +* [Groundedness Checking with Semantic Kernel](../samples/notebooks/python/09-groundedness-checking.ipynb) +* [Returning multiple results per prompt](../samples/notebooks/python/10-multiple-results-per-prompt.ipynb) +* [Streaming completions with Semantic 
Kernel](../samples/notebooks/python/11-streaming-completions.ipynb) # SK Frequently Asked Questions diff --git a/python/poetry.lock b/python/poetry.lock index 43ce0c91203f..e421e694e77b 100644 --- a/python/poetry.lock +++ b/python/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. [[package]] name = "aiofiles" version = "23.1.0" description = "File support for asyncio." -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -14,99 +13,98 @@ files = [ [[package]] name = "aiohttp" -version = "3.8.4" +version = "3.8.5" description = "Async http client/server framework (asyncio)" -category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "aiohttp-3.8.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5ce45967538fb747370308d3145aa68a074bdecb4f3a300869590f725ced69c1"}, - {file = "aiohttp-3.8.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b744c33b6f14ca26b7544e8d8aadff6b765a80ad6164fb1a430bbadd593dfb1a"}, - {file = "aiohttp-3.8.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a45865451439eb320784918617ba54b7a377e3501fb70402ab84d38c2cd891b"}, - {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a86d42d7cba1cec432d47ab13b6637bee393a10f664c425ea7b305d1301ca1a3"}, - {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee3c36df21b5714d49fc4580247947aa64bcbe2939d1b77b4c8dcb8f6c9faecc"}, - {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:176a64b24c0935869d5bbc4c96e82f89f643bcdf08ec947701b9dbb3c956b7dd"}, - {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c844fd628851c0bc309f3c801b3a3d58ce430b2ce5b359cd918a5a76d0b20cb5"}, - {file = 
"aiohttp-3.8.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5393fb786a9e23e4799fec788e7e735de18052f83682ce2dfcabaf1c00c2c08e"}, - {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e4b09863aae0dc965c3ef36500d891a3ff495a2ea9ae9171e4519963c12ceefd"}, - {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:adfbc22e87365a6e564c804c58fc44ff7727deea782d175c33602737b7feadb6"}, - {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:147ae376f14b55f4f3c2b118b95be50a369b89b38a971e80a17c3fd623f280c9"}, - {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:eafb3e874816ebe2a92f5e155f17260034c8c341dad1df25672fb710627c6949"}, - {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c6cc15d58053c76eacac5fa9152d7d84b8d67b3fde92709195cb984cfb3475ea"}, - {file = "aiohttp-3.8.4-cp310-cp310-win32.whl", hash = "sha256:59f029a5f6e2d679296db7bee982bb3d20c088e52a2977e3175faf31d6fb75d1"}, - {file = "aiohttp-3.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:fe7ba4a51f33ab275515f66b0a236bcde4fb5561498fe8f898d4e549b2e4509f"}, - {file = "aiohttp-3.8.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3d8ef1a630519a26d6760bc695842579cb09e373c5f227a21b67dc3eb16cfea4"}, - {file = "aiohttp-3.8.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b3f2e06a512e94722886c0827bee9807c86a9f698fac6b3aee841fab49bbfb4"}, - {file = "aiohttp-3.8.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a80464982d41b1fbfe3154e440ba4904b71c1a53e9cd584098cd41efdb188ef"}, - {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b631e26df63e52f7cce0cce6507b7a7f1bc9b0c501fcde69742130b32e8782f"}, - {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f43255086fe25e36fd5ed8f2ee47477408a73ef00e804cb2b5cba4bf2ac7f5e"}, - {file = 
"aiohttp-3.8.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4d347a172f866cd1d93126d9b239fcbe682acb39b48ee0873c73c933dd23bd0f"}, - {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3fec6a4cb5551721cdd70473eb009d90935b4063acc5f40905d40ecfea23e05"}, - {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80a37fe8f7c1e6ce8f2d9c411676e4bc633a8462844e38f46156d07a7d401654"}, - {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d1e6a862b76f34395a985b3cd39a0d949ca80a70b6ebdea37d3ab39ceea6698a"}, - {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cd468460eefef601ece4428d3cf4562459157c0f6523db89365202c31b6daebb"}, - {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:618c901dd3aad4ace71dfa0f5e82e88b46ef57e3239fc7027773cb6d4ed53531"}, - {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:652b1bff4f15f6287550b4670546a2947f2a4575b6c6dff7760eafb22eacbf0b"}, - {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80575ba9377c5171407a06d0196b2310b679dc752d02a1fcaa2bc20b235dbf24"}, - {file = "aiohttp-3.8.4-cp311-cp311-win32.whl", hash = "sha256:bbcf1a76cf6f6dacf2c7f4d2ebd411438c275faa1dc0c68e46eb84eebd05dd7d"}, - {file = "aiohttp-3.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:6e74dd54f7239fcffe07913ff8b964e28b712f09846e20de78676ce2a3dc0bfc"}, - {file = "aiohttp-3.8.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:880e15bb6dad90549b43f796b391cfffd7af373f4646784795e20d92606b7a51"}, - {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb96fa6b56bb536c42d6a4a87dfca570ff8e52de2d63cabebfd6fb67049c34b6"}, - {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:4a6cadebe132e90cefa77e45f2d2f1a4b2ce5c6b1bfc1656c1ddafcfe4ba8131"}, - {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f352b62b45dff37b55ddd7b9c0c8672c4dd2eb9c0f9c11d395075a84e2c40f75"}, - {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ab43061a0c81198d88f39aaf90dae9a7744620978f7ef3e3708339b8ed2ef01"}, - {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9cb1565a7ad52e096a6988e2ee0397f72fe056dadf75d17fa6b5aebaea05622"}, - {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:1b3ea7edd2d24538959c1c1abf97c744d879d4e541d38305f9bd7d9b10c9ec41"}, - {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:7c7837fe8037e96b6dd5cfcf47263c1620a9d332a87ec06a6ca4564e56bd0f36"}, - {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:3b90467ebc3d9fa5b0f9b6489dfb2c304a1db7b9946fa92aa76a831b9d587e99"}, - {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:cab9401de3ea52b4b4c6971db5fb5c999bd4260898af972bf23de1c6b5dd9d71"}, - {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d1f9282c5f2b5e241034a009779e7b2a1aa045f667ff521e7948ea9b56e0c5ff"}, - {file = "aiohttp-3.8.4-cp36-cp36m-win32.whl", hash = "sha256:5e14f25765a578a0a634d5f0cd1e2c3f53964553a00347998dfdf96b8137f777"}, - {file = "aiohttp-3.8.4-cp36-cp36m-win_amd64.whl", hash = "sha256:4c745b109057e7e5f1848c689ee4fb3a016c8d4d92da52b312f8a509f83aa05e"}, - {file = "aiohttp-3.8.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:aede4df4eeb926c8fa70de46c340a1bc2c6079e1c40ccf7b0eae1313ffd33519"}, - {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ddaae3f3d32fc2cb4c53fab020b69a05c8ab1f02e0e59665c6f7a0d3a5be54f"}, - {file = 
"aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4eb3b82ca349cf6fadcdc7abcc8b3a50ab74a62e9113ab7a8ebc268aad35bb9"}, - {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bcb89336efa095ea21b30f9e686763f2be4478f1b0a616969551982c4ee4c3b"}, - {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c08e8ed6fa3d477e501ec9db169bfac8140e830aa372d77e4a43084d8dd91ab"}, - {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c6cd05ea06daca6ad6a4ca3ba7fe7dc5b5de063ff4daec6170ec0f9979f6c332"}, - {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7a00a9ed8d6e725b55ef98b1b35c88013245f35f68b1b12c5cd4100dddac333"}, - {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:de04b491d0e5007ee1b63a309956eaed959a49f5bb4e84b26c8f5d49de140fa9"}, - {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:40653609b3bf50611356e6b6554e3a331f6879fa7116f3959b20e3528783e699"}, - {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dbf3a08a06b3f433013c143ebd72c15cac33d2914b8ea4bea7ac2c23578815d6"}, - {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854f422ac44af92bfe172d8e73229c270dc09b96535e8a548f99c84f82dde241"}, - {file = "aiohttp-3.8.4-cp37-cp37m-win32.whl", hash = "sha256:aeb29c84bb53a84b1a81c6c09d24cf33bb8432cc5c39979021cc0f98c1292a1a"}, - {file = "aiohttp-3.8.4-cp37-cp37m-win_amd64.whl", hash = "sha256:db3fc6120bce9f446d13b1b834ea5b15341ca9ff3f335e4a951a6ead31105480"}, - {file = "aiohttp-3.8.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fabb87dd8850ef0f7fe2b366d44b77d7e6fa2ea87861ab3844da99291e81e60f"}, - {file = "aiohttp-3.8.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:91f6d540163f90bbaef9387e65f18f73ffd7c79f5225ac3d3f61df7b0d01ad15"}, - {file = 
"aiohttp-3.8.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d265f09a75a79a788237d7f9054f929ced2e69eb0bb79de3798c468d8a90f945"}, - {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d89efa095ca7d442a6d0cbc755f9e08190ba40069b235c9886a8763b03785da"}, - {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4dac314662f4e2aa5009977b652d9b8db7121b46c38f2073bfeed9f4049732cd"}, - {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe11310ae1e4cd560035598c3f29d86cef39a83d244c7466f95c27ae04850f10"}, - {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ddb2a2026c3f6a68c3998a6c47ab6795e4127315d2e35a09997da21865757f8"}, - {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e75b89ac3bd27d2d043b234aa7b734c38ba1b0e43f07787130a0ecac1e12228a"}, - {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6e601588f2b502c93c30cd5a45bfc665faaf37bbe835b7cfd461753068232074"}, - {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a5d794d1ae64e7753e405ba58e08fcfa73e3fad93ef9b7e31112ef3c9a0efb52"}, - {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a1f4689c9a1462f3df0a1f7e797791cd6b124ddbee2b570d34e7f38ade0e2c71"}, - {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:3032dcb1c35bc330134a5b8a5d4f68c1a87252dfc6e1262c65a7e30e62298275"}, - {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8189c56eb0ddbb95bfadb8f60ea1b22fcfa659396ea36f6adcc521213cd7b44d"}, - {file = "aiohttp-3.8.4-cp38-cp38-win32.whl", hash = "sha256:33587f26dcee66efb2fff3c177547bd0449ab7edf1b73a7f5dea1e38609a0c54"}, - {file = "aiohttp-3.8.4-cp38-cp38-win_amd64.whl", hash = "sha256:e595432ac259af2d4630008bf638873d69346372d38255774c0e286951e8b79f"}, - {file = 
"aiohttp-3.8.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5a7bdf9e57126dc345b683c3632e8ba317c31d2a41acd5800c10640387d193ed"}, - {file = "aiohttp-3.8.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:22f6eab15b6db242499a16de87939a342f5a950ad0abaf1532038e2ce7d31567"}, - {file = "aiohttp-3.8.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7235604476a76ef249bd64cb8274ed24ccf6995c4a8b51a237005ee7a57e8643"}, - {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea9eb976ffdd79d0e893869cfe179a8f60f152d42cb64622fca418cd9b18dc2a"}, - {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92c0cea74a2a81c4c76b62ea1cac163ecb20fb3ba3a75c909b9fa71b4ad493cf"}, - {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:493f5bc2f8307286b7799c6d899d388bbaa7dfa6c4caf4f97ef7521b9cb13719"}, - {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a63f03189a6fa7c900226e3ef5ba4d3bd047e18f445e69adbd65af433add5a2"}, - {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10c8cefcff98fd9168cdd86c4da8b84baaa90bf2da2269c6161984e6737bf23e"}, - {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bca5f24726e2919de94f047739d0a4fc01372801a3672708260546aa2601bf57"}, - {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:03baa76b730e4e15a45f81dfe29a8d910314143414e528737f8589ec60cf7391"}, - {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8c29c77cc57e40f84acef9bfb904373a4e89a4e8b74e71aa8075c021ec9078c2"}, - {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:03543dcf98a6619254b409be2d22b51f21ec66272be4ebda7b04e6412e4b2e14"}, - {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:17b79c2963db82086229012cff93ea55196ed31f6493bb1ccd2c62f1724324e4"}, - {file = "aiohttp-3.8.4-cp39-cp39-win32.whl", hash = "sha256:34ce9f93a4a68d1272d26030655dd1b58ff727b3ed2a33d80ec433561b03d67a"}, - {file = "aiohttp-3.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:41a86a69bb63bb2fc3dc9ad5ea9f10f1c9c8e282b471931be0268ddd09430b04"}, - {file = "aiohttp-3.8.4.tar.gz", hash = "sha256:bf2e1a9162c1e441bf805a1fd166e249d574ca04e03b34f97e2928769e91ab5c"}, + {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a94159871304770da4dd371f4291b20cac04e8c94f11bdea1c3478e557fbe0d8"}, + {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:13bf85afc99ce6f9ee3567b04501f18f9f8dbbb2ea11ed1a2e079670403a7c84"}, + {file = "aiohttp-3.8.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ce2ac5708501afc4847221a521f7e4b245abf5178cf5ddae9d5b3856ddb2f3a"}, + {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96943e5dcc37a6529d18766597c491798b7eb7a61d48878611298afc1fca946c"}, + {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ad5c3c4590bb3cc28b4382f031f3783f25ec223557124c68754a2231d989e2b"}, + {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c413c633d0512df4dc7fd2373ec06cc6a815b7b6d6c2f208ada7e9e93a5061d"}, + {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df72ac063b97837a80d80dec8d54c241af059cc9bb42c4de68bd5b61ceb37caa"}, + {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c48c5c0271149cfe467c0ff8eb941279fd6e3f65c9a388c984e0e6cf57538e14"}, + {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:368a42363c4d70ab52c2c6420a57f190ed3dfaca6a1b19afda8165ee16416a82"}, + {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:7607ec3ce4993464368505888af5beb446845a014bc676d349efec0e05085905"}, + {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0d21c684808288a98914e5aaf2a7c6a3179d4df11d249799c32d1808e79503b5"}, + {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:312fcfbacc7880a8da0ae8b6abc6cc7d752e9caa0051a53d217a650b25e9a691"}, + {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ad093e823df03bb3fd37e7dec9d4670c34f9e24aeace76808fc20a507cace825"}, + {file = "aiohttp-3.8.5-cp310-cp310-win32.whl", hash = "sha256:33279701c04351a2914e1100b62b2a7fdb9a25995c4a104259f9a5ead7ed4802"}, + {file = "aiohttp-3.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:6e4a280e4b975a2e7745573e3fc9c9ba0d1194a3738ce1cbaa80626cc9b4f4df"}, + {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae871a964e1987a943d83d6709d20ec6103ca1eaf52f7e0d36ee1b5bebb8b9b9"}, + {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:461908b2578955045efde733719d62f2b649c404189a09a632d245b445c9c975"}, + {file = "aiohttp-3.8.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:72a860c215e26192379f57cae5ab12b168b75db8271f111019509a1196dfc780"}, + {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc14be025665dba6202b6a71cfcdb53210cc498e50068bc088076624471f8bb9"}, + {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8af740fc2711ad85f1a5c034a435782fbd5b5f8314c9a3ef071424a8158d7f6b"}, + {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:841cd8233cbd2111a0ef0a522ce016357c5e3aff8a8ce92bcfa14cef890d698f"}, + {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed1c46fb119f1b59304b5ec89f834f07124cd23ae5b74288e364477641060ff"}, + {file = 
"aiohttp-3.8.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84f8ae3e09a34f35c18fa57f015cc394bd1389bce02503fb30c394d04ee6b938"}, + {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62360cb771707cb70a6fd114b9871d20d7dd2163a0feafe43fd115cfe4fe845e"}, + {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:23fb25a9f0a1ca1f24c0a371523546366bb642397c94ab45ad3aedf2941cec6a"}, + {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0ba0d15164eae3d878260d4c4df859bbdc6466e9e6689c344a13334f988bb53"}, + {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5d20003b635fc6ae3f96d7260281dfaf1894fc3aa24d1888a9b2628e97c241e5"}, + {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0175d745d9e85c40dcc51c8f88c74bfbaef9e7afeeeb9d03c37977270303064c"}, + {file = "aiohttp-3.8.5-cp311-cp311-win32.whl", hash = "sha256:2e1b1e51b0774408f091d268648e3d57f7260c1682e7d3a63cb00d22d71bb945"}, + {file = "aiohttp-3.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:043d2299f6dfdc92f0ac5e995dfc56668e1587cea7f9aa9d8a78a1b6554e5755"}, + {file = "aiohttp-3.8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cae533195e8122584ec87531d6df000ad07737eaa3c81209e85c928854d2195c"}, + {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f21e83f355643c345177a5d1d8079f9f28b5133bcd154193b799d380331d5d3"}, + {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a75ef35f2df54ad55dbf4b73fe1da96f370e51b10c91f08b19603c64004acc"}, + {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e2e9839e14dd5308ee773c97115f1e0a1cb1d75cbeeee9f33824fa5144c7634"}, + {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c44e65da1de4403d0576473e2344828ef9c4c6244d65cf4b75549bb46d40b8dd"}, + {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78d847e4cde6ecc19125ccbc9bfac4a7ab37c234dd88fbb3c5c524e8e14da543"}, + {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:c7a815258e5895d8900aec4454f38dca9aed71085f227537208057853f9d13f2"}, + {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:8b929b9bd7cd7c3939f8bcfffa92fae7480bd1aa425279d51a89327d600c704d"}, + {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:5db3a5b833764280ed7618393832e0853e40f3d3e9aa128ac0ba0f8278d08649"}, + {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:a0215ce6041d501f3155dc219712bc41252d0ab76474615b9700d63d4d9292af"}, + {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:fd1ed388ea7fbed22c4968dd64bab0198de60750a25fe8c0c9d4bef5abe13824"}, + {file = "aiohttp-3.8.5-cp36-cp36m-win32.whl", hash = "sha256:6e6783bcc45f397fdebc118d772103d751b54cddf5b60fbcc958382d7dd64f3e"}, + {file = "aiohttp-3.8.5-cp36-cp36m-win_amd64.whl", hash = "sha256:b5411d82cddd212644cf9360879eb5080f0d5f7d809d03262c50dad02f01421a"}, + {file = "aiohttp-3.8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:01d4c0c874aa4ddfb8098e85d10b5e875a70adc63db91f1ae65a4b04d3344cda"}, + {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5980a746d547a6ba173fd5ee85ce9077e72d118758db05d229044b469d9029a"}, + {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a482e6da906d5e6e653be079b29bc173a48e381600161c9932d89dfae5942ef"}, + {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80bd372b8d0715c66c974cf57fe363621a02f359f1ec81cba97366948c7fc873"}, + {file = 
"aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1161b345c0a444ebcf46bf0a740ba5dcf50612fd3d0528883fdc0eff578006a"}, + {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd56db019015b6acfaaf92e1ac40eb8434847d9bf88b4be4efe5bfd260aee692"}, + {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:153c2549f6c004d2754cc60603d4668899c9895b8a89397444a9c4efa282aaf4"}, + {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4a01951fabc4ce26ab791da5f3f24dca6d9a6f24121746eb19756416ff2d881b"}, + {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bfb9162dcf01f615462b995a516ba03e769de0789de1cadc0f916265c257e5d8"}, + {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:7dde0009408969a43b04c16cbbe252c4f5ef4574ac226bc8815cd7342d2028b6"}, + {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4149d34c32f9638f38f544b3977a4c24052042affa895352d3636fa8bffd030a"}, + {file = "aiohttp-3.8.5-cp37-cp37m-win32.whl", hash = "sha256:68c5a82c8779bdfc6367c967a4a1b2aa52cd3595388bf5961a62158ee8a59e22"}, + {file = "aiohttp-3.8.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2cf57fb50be5f52bda004b8893e63b48530ed9f0d6c96c84620dc92fe3cd9b9d"}, + {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:eca4bf3734c541dc4f374ad6010a68ff6c6748f00451707f39857f429ca36ced"}, + {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1274477e4c71ce8cfe6c1ec2f806d57c015ebf84d83373676036e256bc55d690"}, + {file = "aiohttp-3.8.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:28c543e54710d6158fc6f439296c7865b29e0b616629767e685a7185fab4a6b9"}, + {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:910bec0c49637d213f5d9877105d26e0c4a4de2f8b1b29405ff37e9fc0ad52b8"}, + {file = 
"aiohttp-3.8.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5443910d662db951b2e58eb70b0fbe6b6e2ae613477129a5805d0b66c54b6cb7"}, + {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e460be6978fc24e3df83193dc0cc4de46c9909ed92dd47d349a452ef49325b7"}, + {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb1558def481d84f03b45888473fc5a1f35747b5f334ef4e7a571bc0dfcb11f8"}, + {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34dd0c107799dcbbf7d48b53be761a013c0adf5571bf50c4ecad5643fe9cfcd0"}, + {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aa1990247f02a54185dc0dff92a6904521172a22664c863a03ff64c42f9b5410"}, + {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0e584a10f204a617d71d359fe383406305a4b595b333721fa50b867b4a0a1548"}, + {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a3cf433f127efa43fee6b90ea4c6edf6c4a17109d1d037d1a52abec84d8f2e42"}, + {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c11f5b099adafb18e65c2c997d57108b5bbeaa9eeee64a84302c0978b1ec948b"}, + {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:84de26ddf621d7ac4c975dbea4c945860e08cccde492269db4e1538a6a6f3c35"}, + {file = "aiohttp-3.8.5-cp38-cp38-win32.whl", hash = "sha256:ab88bafedc57dd0aab55fa728ea10c1911f7e4d8b43e1d838a1739f33712921c"}, + {file = "aiohttp-3.8.5-cp38-cp38-win_amd64.whl", hash = "sha256:5798a9aad1879f626589f3df0f8b79b3608a92e9beab10e5fda02c8a2c60db2e"}, + {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a6ce61195c6a19c785df04e71a4537e29eaa2c50fe745b732aa937c0c77169f3"}, + {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:773dd01706d4db536335fcfae6ea2440a70ceb03dd3e7378f3e815b03c97ab51"}, + {file = 
"aiohttp-3.8.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f83a552443a526ea38d064588613aca983d0ee0038801bc93c0c916428310c28"}, + {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f7372f7341fcc16f57b2caded43e81ddd18df53320b6f9f042acad41f8e049a"}, + {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea353162f249c8097ea63c2169dd1aa55de1e8fecbe63412a9bc50816e87b761"}, + {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d47ae48db0b2dcf70bc8a3bc72b3de86e2a590fc299fdbbb15af320d2659de"}, + {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d827176898a2b0b09694fbd1088c7a31836d1a505c243811c87ae53a3f6273c1"}, + {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3562b06567c06439d8b447037bb655ef69786c590b1de86c7ab81efe1c9c15d8"}, + {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4e874cbf8caf8959d2adf572a78bba17cb0e9d7e51bb83d86a3697b686a0ab4d"}, + {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6809a00deaf3810e38c628e9a33271892f815b853605a936e2e9e5129762356c"}, + {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:33776e945d89b29251b33a7e7d006ce86447b2cfd66db5e5ded4e5cd0340585c"}, + {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eaeed7abfb5d64c539e2db173f63631455f1196c37d9d8d873fc316470dfbacd"}, + {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e91d635961bec2d8f19dfeb41a539eb94bd073f075ca6dae6c8dc0ee89ad6f91"}, + {file = "aiohttp-3.8.5-cp39-cp39-win32.whl", hash = "sha256:00ad4b6f185ec67f3e6562e8a1d2b69660be43070bd0ef6fcec5211154c7df67"}, + {file = "aiohttp-3.8.5-cp39-cp39-win_amd64.whl", hash = "sha256:c0a9034379a37ae42dea7ac1e048352d96286626251862e448933c0f59cbd79c"}, + {file = 
"aiohttp-3.8.5.tar.gz", hash = "sha256:b9552ec52cc147dbf1944ac7ac98af7602e51ea2dcd076ed194ca3c0d1c7d0bc"}, ] [package.dependencies] @@ -125,7 +123,6 @@ speedups = ["Brotli", "aiodns", "cchardet"] name = "aiosignal" version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -140,7 +137,6 @@ frozenlist = ">=1.1.0" name = "anyio" version = "3.7.1" description = "High level compatibility layer for multiple asynchronous event loop implementations" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -162,7 +158,6 @@ trio = ["trio (<0.22)"] name = "appnope" version = "0.1.3" description = "Disable App Nap on macOS >= 10.9" -category = "dev" optional = false python-versions = "*" files = [ @@ -174,7 +169,6 @@ files = [ name = "asttokens" version = "2.2.1" description = "Annotate AST trees with source code positions" -category = "dev" optional = false python-versions = "*" files = [ @@ -192,7 +186,6 @@ test = ["astroid", "pytest"] name = "async-timeout" version = "4.0.2" description = "Timeout context manager for asyncio programs" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -204,7 +197,6 @@ files = [ name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -223,7 +215,6 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "authlib" version = "1.2.1" description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." 
-category = "dev" optional = false python-versions = "*" files = [ @@ -234,11 +225,74 @@ files = [ [package.dependencies] cryptography = ">=3.2" +[[package]] +name = "azure-common" +version = "1.1.28" +description = "Microsoft Azure Client Library for Python (Common)" +optional = false +python-versions = "*" +files = [ + {file = "azure-common-1.1.28.zip", hash = "sha256:4ac0cd3214e36b6a1b6a442686722a5d8cc449603aa833f3f0f40bda836704a3"}, + {file = "azure_common-1.1.28-py2.py3-none-any.whl", hash = "sha256:5c12d3dcf4ec20599ca6b0d3e09e86e146353d443e7fcc050c9a19c1f9df20ad"}, +] + +[[package]] +name = "azure-core" +version = "1.28.0" +description = "Microsoft Azure Core Library for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "azure-core-1.28.0.zip", hash = "sha256:e9eefc66fc1fde56dab6f04d4e5d12c60754d5a9fa49bdcfd8534fc96ed936bd"}, + {file = "azure_core-1.28.0-py3-none-any.whl", hash = "sha256:dec36dfc8eb0b052a853f30c07437effec2f9e3e1fc8f703d9bdaa5cfc0043d9"}, +] + +[package.dependencies] +requests = ">=2.18.4" +six = ">=1.11.0" +typing-extensions = ">=4.3.0" + +[package.extras] +aio = ["aiohttp (>=3.0)"] + +[[package]] +name = "azure-identity" +version = "1.13.0" +description = "Microsoft Azure Identity Library for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "azure-identity-1.13.0.zip", hash = "sha256:c931c27301ffa86b07b4dcf574e29da73e3deba9ab5d1fe4f445bb6a3117e260"}, + {file = "azure_identity-1.13.0-py3-none-any.whl", hash = "sha256:bd700cebb80cd9862098587c29d8677e819beca33c62568ced6d5a8e5e332b82"}, +] + +[package.dependencies] +azure-core = ">=1.11.0,<2.0.0" +cryptography = ">=2.5" +msal = ">=1.20.0,<2.0.0" +msal-extensions = ">=0.3.0,<2.0.0" +six = ">=1.12.0" + +[[package]] +name = "azure-search-documents" +version = "11.4.0b6" +description = "Microsoft Azure Cognitive Search Client Library for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "azure-search-documents-11.4.0b6.zip", 
hash = "sha256:c9ebd7d99d3c7b879f48acad66141e1f50eae4468cfb8389a4b25d4c620e8df1"}, + {file = "azure_search_documents-11.4.0b6-py3-none-any.whl", hash = "sha256:24ff85bf2680c36b38d8092bcbbe2d90699aac7c4a228b0839c0ce595a41628c"}, +] + +[package.dependencies] +azure-common = ">=1.1,<2.0" +azure-core = ">=1.24.0,<2.0.0" +isodate = ">=0.6.0" + [[package]] name = "backcall" version = "0.2.0" description = "Specifications for callback functions passed in to an API" -category = "dev" optional = false python-versions = "*" files = [ @@ -250,7 +304,6 @@ files = [ name = "backoff" version = "2.2.1" description = "Function decoration for backoff and retry" -category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -262,7 +315,6 @@ files = [ name = "backports-zoneinfo" version = "0.2.1" description = "Backport of the standard library zoneinfo module" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -291,7 +343,6 @@ tzdata = ["tzdata"] name = "black" version = "23.3.0" description = "The uncompromising code formatter." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -341,7 +392,6 @@ uvloop = ["uvloop (>=0.15.2)"] name = "certifi" version = "2023.5.7" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -353,7 +403,6 @@ files = [ name = "cffi" version = "1.15.1" description = "Foreign Function Interface for Python calling C code." -category = "dev" optional = false python-versions = "*" files = [ @@ -430,7 +479,6 @@ pycparser = "*" name = "cfgv" version = "3.3.1" description = "Validate configuration and produce human readable error messages." -category = "dev" optional = false python-versions = ">=3.6.1" files = [ @@ -442,7 +490,6 @@ files = [ name = "charset-normalizer" version = "3.2.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -523,31 +570,44 @@ files = [ {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, ] +[[package]] +name = "chroma-hnswlib" +version = "0.7.1" +description = "Chromas fork of hnswlib" +optional = false +python-versions = "*" +files = [ + {file = "chroma-hnswlib-0.7.1.tar.gz", hash = "sha256:f72592dc7d0522c25cc1f8864db7a3781f179ba989f209cc3ea01694c0d76493"}, + {file = "chroma_hnswlib-0.7.1-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:38f51585d81a5072db70b17207afd1f57670c209836d0fbbf2a1aa7e8bece6b7"}, +] + +[package.dependencies] +numpy = "*" + [[package]] name = "chromadb" -version = "0.3.27" +version = "0.4.2" description = "Chroma." -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "chromadb-0.3.27-py3-none-any.whl", hash = "sha256:f3c1bdd135d6689312c27836650130ca35c443251aa1ef29fab78d5d8fde007a"}, - {file = "chromadb-0.3.27.tar.gz", hash = "sha256:ef76be756551168c05e137704270e4c7b78a6e9faa6f6b96e94bae42ee32caea"}, + {file = "chromadb-0.4.2-py3-none-any.whl", hash = "sha256:e5bef53a14485f5d4e1f8332c503d026ce4efde382db59a5f128897b2969fe91"}, + {file = "chromadb-0.4.2.tar.gz", hash = "sha256:38292ab647a86ea97a697b063cbab23edad0b28afd6a85049b1d8c7ccd0b3534"}, ] [package.dependencies] -clickhouse-connect = ">=0.5.7" -duckdb = ">=0.7.1" -fastapi = "0.85.1" +chroma-hnswlib = "0.7.1" +fastapi = ">=0.95.2,<0.100.0" graphlib-backport = {version = ">=1.0.3", markers = "python_version < \"3.9\""} -hnswlib = ">=0.7" +importlib-resources = "*" numpy = ">=1.21.6" onnxruntime = ">=1.14.1" overrides = ">=7.3.1" pandas = ">=1.3" posthog = ">=2.4.0" pulsar-client = ">=3.1.0" -pydantic = "1.9" +pydantic = ">=1.9,<2.0" +pypika = ">=0.48.9" requests = ">=2.28" tokenizers = ">=0.13.2" tqdm = ">=4.65.0" @@ -556,134 +616,42 @@ uvicorn = {version = ">=0.18.3", extras = ["standard"]} [[package]] 
name = "click" -version = "8.1.4" +version = "8.1.6" description = "Composable command line interface toolkit" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.4-py3-none-any.whl", hash = "sha256:2739815aaa5d2c986a88f1e9230c55e17f0caad3d958a5e13ad0797c166db9e3"}, - {file = "click-8.1.4.tar.gz", hash = "sha256:b97d0c74955da062a7d4ef92fadb583806a585b2ea81958a81bd72726cbb8e37"}, + {file = "click-8.1.6-py3-none-any.whl", hash = "sha256:fa244bb30b3b5ee2cae3da8f55c9e5e0c0e86093306301fb418eb9dc40fbded5"}, + {file = "click-8.1.6.tar.gz", hash = "sha256:48ee849951919527a045bfe3bf7baa8a959c423134e1a5b98c05c20ba75a1cbd"}, ] [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} -[[package]] -name = "clickhouse-connect" -version = "0.6.6" -description = "ClickHouse Database Core Driver for Python, Pandas, and Superset" -category = "dev" -optional = false -python-versions = "~=3.7" -files = [ - {file = "clickhouse-connect-0.6.6.tar.gz", hash = "sha256:28d261b95fe9818f4d8bc4ad48087cbff3c9f0b6574ff04d234ed5bca6619474"}, - {file = "clickhouse_connect-0.6.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:31187a9947f5771c9e2a4c5d5c33d8c42f1c0f83b1223277c8faf47da0fcd1dc"}, - {file = "clickhouse_connect-0.6.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1e1713d1f9f294c0cf05ded6f7eff227dde2b19f0d19423fbbeb05fbf5d7c484"}, - {file = "clickhouse_connect-0.6.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:961c463de6f0de93fc11f1c1f81efc1ec5b5895481cfdf79b3f832e0e242e7e1"}, - {file = "clickhouse_connect-0.6.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18900f1a13b3b120252fc3583ca1e0fc4d3a33ea98fcf63d33d168a469561056"}, - {file = "clickhouse_connect-0.6.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4cbbea1a943e742ea649c82f85109b9a9928e61b038923de2813977966acd76"}, - {file = 
"clickhouse_connect-0.6.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2714ab61f063a65419278b97f8785ce2440fdb1ef46d9a6703cef9cd38517521"}, - {file = "clickhouse_connect-0.6.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:823756569f6bea58ff9286cf494abaca5db8652e33ee4a6e7ecb40efbf945088"}, - {file = "clickhouse_connect-0.6.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11aff145aacfae92b941b95ec5943fb62ea241ec2225b8ecefc4cadadf699893"}, - {file = "clickhouse_connect-0.6.6-cp310-cp310-win32.whl", hash = "sha256:4f5f9e3dcece211dc711088a5b264e66e8198b878bdf99619a3a7c54976c118d"}, - {file = "clickhouse_connect-0.6.6-cp310-cp310-win_amd64.whl", hash = "sha256:8268927ef8d476ef4c81d9562d049f38bc534c4d1d441e072cf8428f08ff6eaa"}, - {file = "clickhouse_connect-0.6.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5f9cb2ebe0deaa78c942888aad32fa42beb4e75c2377e8784baf3d737c23e5f1"}, - {file = "clickhouse_connect-0.6.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d502b7f35008facf2774f411eed6b35010923acaac254a8c5683fdf8a11abd62"}, - {file = "clickhouse_connect-0.6.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87e0f2afe464be0947947d98482eb12b25be8857ae1a31c1aaa17a67f616174d"}, - {file = "clickhouse_connect-0.6.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69f2c517943eeb7663a9d42bd9b737b8ec5513ddcf58f2372f8b2074a315bae2"}, - {file = "clickhouse_connect-0.6.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa6c2b488cf9558c2b71a2599d812fe4368d5199edaa011731a8bc7bfe019751"}, - {file = "clickhouse_connect-0.6.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:df9e80d0b3f5614d38026e7e2e7e7412dec942df8d765c082177879b37e678e2"}, - {file = "clickhouse_connect-0.6.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a20351fb2ae47aac1ae9b1de0585949616baedd6dbdee5272f466a2aea6ec4dd"}, - {file = 
"clickhouse_connect-0.6.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:af40eaa20998d96198563748a6fd9796843b6f22e9e95b2136aabd917db33fff"}, - {file = "clickhouse_connect-0.6.6-cp311-cp311-win32.whl", hash = "sha256:9591a9bfa58ace467544227f83226b22a1554e2db4cfcf658f25f43c9d94e960"}, - {file = "clickhouse_connect-0.6.6-cp311-cp311-win_amd64.whl", hash = "sha256:3b6f6159f8eddb0cad4d7e0cbad5944e97e0146ee9f416fc663f7bd3d4e9ea46"}, - {file = "clickhouse_connect-0.6.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8b941c85fe9ddd5e5edf6fc7458563d9e51ad900d95fe0b87b0458be166693a1"}, - {file = "clickhouse_connect-0.6.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c642696a758fa726c86ca624dd40acded100d79a9f4bd9f5b56ba0ea4dc44099"}, - {file = "clickhouse_connect-0.6.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57b6b36b316451c1bdc4450f9418c017af84af57d52d03cd4deb85480819a934"}, - {file = "clickhouse_connect-0.6.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:17cfb1d103b47350c3ba824641fb5ba730e6e29274077a6f8975a3394a1abadb"}, - {file = "clickhouse_connect-0.6.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d52c7e7560666b93c078bf082e4ed87689fd283e6295a6d8d1dd491d4d7b6072"}, - {file = "clickhouse_connect-0.6.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:0a6d498b689aa09e9d1b0051480a04ecc3509002f54bfb82998d030b4675bb24"}, - {file = "clickhouse_connect-0.6.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:28c876f7a4713662af2ded7350a0262756ec4da9262bb76cc85cfe2e88015b74"}, - {file = "clickhouse_connect-0.6.6-cp37-cp37m-win32.whl", hash = "sha256:74bf0a95c7c5644948be0ba9c0abcad7615b806fd2545501862526dbe684db71"}, - {file = "clickhouse_connect-0.6.6-cp37-cp37m-win_amd64.whl", hash = "sha256:0aaa4194d11cb7513de69b791911ff60b3ad8b86f125446a37347208e9b9ae6d"}, - {file = "clickhouse_connect-0.6.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:3b873d138dfedbe761f2d66ad1257ea253394c4f8dcffd6ff34dfb990f13a18b"}, - {file = "clickhouse_connect-0.6.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7140705d05a05ac39eecf86727ab55985e5dba9d1734df8921cc417853a18b7f"}, - {file = "clickhouse_connect-0.6.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69085fa0f4e5da5cef4ae5249e19f10d91e57ae78628e49e8853b71b6003dbae"}, - {file = "clickhouse_connect-0.6.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e6ec081d87cc37be3ecf60b88002c58add76a72b4124525cb5cd28539e7d488"}, - {file = "clickhouse_connect-0.6.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1afe04eb239b72bc9fa4f1999cd292f82af507cbe1f07546f26a3332c50a294b"}, - {file = "clickhouse_connect-0.6.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:244bbf7ad92f1f030378412358c47cd377aa6d469b548dba2406a7894c8da2ab"}, - {file = "clickhouse_connect-0.6.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:69e91bdb25166b6fa4eb55601d86fa57dee82070bce9b97a858c8973615ab8b8"}, - {file = "clickhouse_connect-0.6.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d2627c8a9625e1c9058cfb5b231a0d0180ed9215d901b601d367de598f27a90d"}, - {file = "clickhouse_connect-0.6.6-cp38-cp38-win32.whl", hash = "sha256:87fb937b34b561703eaba5781404736120bab691f4525096d5dfb4b99d4890a6"}, - {file = "clickhouse_connect-0.6.6-cp38-cp38-win_amd64.whl", hash = "sha256:366c5765e6b7863b3a8d565d5a3b27f9f8731f6f4b016048fa172c6ad6485594"}, - {file = "clickhouse_connect-0.6.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0c1b0d8bee6399f5b68bb0832fae51fd0f5e4bcb539bae2df36d8433b6e38a0b"}, - {file = "clickhouse_connect-0.6.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3f7e3ead1429ec82b9cd0cf7b807bacf69d895042f75276f63d732378344376"}, - {file = "clickhouse_connect-0.6.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:36df02ebfbfa4dbe3667bf5b3402ff0193d0f682b9aa09d71469c15745473d8e"}, - {file = "clickhouse_connect-0.6.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa636b0cbbff52c9fafe287d1d818fc9947feaa840c951b8bfd8f8d4d1ee45a0"}, - {file = "clickhouse_connect-0.6.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af4968b6b48baae43d62c241bee9e1c8f680ee3d054254e3959c2d2fb7d370ee"}, - {file = "clickhouse_connect-0.6.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a4156de52fe1f9b19f8c3a820d57c012a55644c56a87c8d31ecff89115959d60"}, - {file = "clickhouse_connect-0.6.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:fccbe34878e6202ff5715284cbe57e748d36f4c8ad6217f9c80f84a086013fb9"}, - {file = "clickhouse_connect-0.6.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:70bfe48c0e4340ccf234b691fbd52f32db74649cb84ca28b98a211cc3e30b30c"}, - {file = "clickhouse_connect-0.6.6-cp39-cp39-win32.whl", hash = "sha256:9f80b64e2268293a918721e1c122c54e2a1592bb74824fdd70e9add9fbcea31a"}, - {file = "clickhouse_connect-0.6.6-cp39-cp39-win_amd64.whl", hash = "sha256:04a5030b76ee930b18eb3aeb7847146c2fa29da0feb0ec7dd3a0564a3de944f1"}, - {file = "clickhouse_connect-0.6.6-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:75e84c827c8180d5dc66b0e99dba422a3ffd2c7d8ee5ba80e00b9c942dff8a36"}, - {file = "clickhouse_connect-0.6.6-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e627061336142d02e9c900a96bcd87372e88f05755bf19b158e68472b99a921"}, - {file = "clickhouse_connect-0.6.6-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:194f72e8f3f24c207aa87113b8d11674dab12b35232fd8b7b19b97257796be45"}, - {file = "clickhouse_connect-0.6.6-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf755b46089ee6a7f1ab3e24fc6fbacefc54cfefceb0ed81ebf198abf6937dac"}, - {file = 
"clickhouse_connect-0.6.6-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:39e58756a13872a24304b1987fafb7d5112ea88469eb55303b1183ebdd7a0be5"}, - {file = "clickhouse_connect-0.6.6-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1e29de1264ffa26eb822e57c5715974c9818ae8e16bb114e54352d66947cdf7f"}, - {file = "clickhouse_connect-0.6.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a74ed74427aaf10d2e8f7697b8ec53479f6068287ea695a5f3d3927db40be3c3"}, - {file = "clickhouse_connect-0.6.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abc910b0f6c93d0d703809fd92cf19b71dcaf8c6d5f328deddae1709061a0aa2"}, - {file = "clickhouse_connect-0.6.6-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23b17236e08da8b5d737ccd983db56a2d2222955a49c4b312b12e4a2b4a06c9b"}, - {file = "clickhouse_connect-0.6.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d4d76560d0ce84d0ba550918433dd1f8da6983edabe2685cd84679cd7a90c179"}, - {file = "clickhouse_connect-0.6.6-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:815bd0d5f40174716ffdf1adab066cd0e36c82c81b227224fb7281bdf8734eb6"}, - {file = "clickhouse_connect-0.6.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82abd319ba51e0c5c2d123e2cf30b1604b0d46f4de694096aa911ddd63701f60"}, - {file = "clickhouse_connect-0.6.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa3eea5dac3a7cd52523b556ecd05940c4710c96b6e39ec5a05ed7859bddc7f6"}, - {file = "clickhouse_connect-0.6.6-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0bbc28cdf903b4b2805199ce7d4580814a8b9bb4766ddd835cab46a81e6fcd63"}, - {file = "clickhouse_connect-0.6.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:5fc4deda5a97e672135b4330d81109b443266aa948b09a24a02db58c0fc96bc1"}, -] - -[package.dependencies] -certifi = "*" 
-importlib-metadata = "*" -lz4 = "*" -pytz = "*" -urllib3 = ">=1.26" -zstandard = "*" - -[package.extras] -arrow = ["pyarrow"] -numpy = ["numpy"] -orjson = ["orjson"] -pandas = ["pandas"] -sqlalchemy = ["sqlalchemy (>1.3.21,<2.0)"] - [[package]] name = "cmake" -version = "3.26.4" +version = "3.27.0" description = "CMake is an open-source, cross-platform family of tools designed to build, test and package software" -category = "dev" optional = false python-versions = "*" files = [ - {file = "cmake-3.26.4-py2.py3-none-macosx_10_10_universal2.macosx_10_10_x86_64.macosx_11_0_arm64.macosx_11_0_universal2.whl", hash = "sha256:230227bf99f36614de84cdc92ffce3a50eb2803020e946f8da945a08fcf766bf"}, - {file = "cmake-3.26.4-py2.py3-none-manylinux2010_i686.manylinux_2_12_i686.whl", hash = "sha256:248a90816abfc10ff6e1109b54b8235c3e62f0ac92da16541753deb3b5ae063d"}, - {file = "cmake-3.26.4-py2.py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:1b92f9f59f48c803106dbdd6750b0f571a0500e25d3a62c42ba84bb7a9240d10"}, - {file = "cmake-3.26.4-py2.py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3175442985558d5415b97f264a6a1bb0af5ecfe10e3f7510257b1ea66bd33848"}, - {file = "cmake-3.26.4-py2.py3-none-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:1d887be5f1a3f17559a78707a6bc0560f4f8cb93cebb9d823d90a63e68bae09b"}, - {file = "cmake-3.26.4-py2.py3-none-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:235d8eac93a28dcce5a1cd7130412885a2aa53d5735cb2230e0f26f589347b65"}, - {file = "cmake-3.26.4-py2.py3-none-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:05cfd76c637eb22058c95e2dc383cadd4e0615e2643e637bb498a6cc24825790"}, - {file = "cmake-3.26.4-py2.py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:93015da6f1c0e1e5f2debf752f1803ea52d742d915ad674043d36e471f937507"}, - {file = "cmake-3.26.4-py2.py3-none-musllinux_1_1_aarch64.whl", hash = 
"sha256:d726671ae7ae4aa6989e73d26b9f8f8e6af45163a26ea243949d72246566fdd8"}, - {file = "cmake-3.26.4-py2.py3-none-musllinux_1_1_i686.whl", hash = "sha256:432837364aa6cab2826a72e8a4cdd3586f5ac9ce495217ccd59aa70f2bba8120"}, - {file = "cmake-3.26.4-py2.py3-none-musllinux_1_1_ppc64le.whl", hash = "sha256:24110035aff586a04a6a6fcf4609270642e4f503c0620c962dff75b653f81414"}, - {file = "cmake-3.26.4-py2.py3-none-musllinux_1_1_s390x.whl", hash = "sha256:3e280e81713408987b7053f5b922c9f94e45668ca6efff1f02846309ca0b5b0f"}, - {file = "cmake-3.26.4-py2.py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:c3b0e72750c0f6c0373242c1299bc4ffdbebdd5004966ae6df0b2e9845aa6990"}, - {file = "cmake-3.26.4-py2.py3-none-win32.whl", hash = "sha256:e058e59154a1e490fb9425b420f87e28144292397607638d73e323509f7efae6"}, - {file = "cmake-3.26.4-py2.py3-none-win_amd64.whl", hash = "sha256:b7a6946c345497c14064e0c9585b30f5aaebbefdfc0b245b6bb5a978eb4fc85f"}, - {file = "cmake-3.26.4-py2.py3-none-win_arm64.whl", hash = "sha256:93a03bad17b9741acaff4a8651f8596496506602fa123e70fe67142f1b21ee2e"}, - {file = "cmake-3.26.4.tar.gz", hash = "sha256:d45b30b9ce7280829888c78650177ab525df2b6785e1a5b3d82b4c147d828c0e"}, + {file = "cmake-3.27.0-py2.py3-none-macosx_10_10_universal2.macosx_10_10_x86_64.macosx_11_0_arm64.macosx_11_0_universal2.whl", hash = "sha256:9ccab4cd93578d3c2df32e66b44b313b75a7484032645040431dc06a583ca4aa"}, + {file = "cmake-3.27.0-py2.py3-none-manylinux2010_i686.manylinux_2_12_i686.whl", hash = "sha256:199bfaefb752e82d8067aeee5d6a6e0414fe0d60e9a3fd08e95d537a97e0db16"}, + {file = "cmake-3.27.0-py2.py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:8745eff805f36762d3e8e904698b853cb4a9da8b4b07d1c12bcd1e1a6c4a1709"}, + {file = "cmake-3.27.0-py2.py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:58a3f39d3d1bc897f05e531bfa676246a2b25d424c6a47e4b6bbc193fb560db7"}, + {file = "cmake-3.27.0-py2.py3-none-manylinux2014_i686.manylinux_2_17_i686.whl", hash = 
"sha256:b470ccd3f86cf19a63f6b221c9cceebcc58e32d3787d0d5f9f43d1d91a095090"}, + {file = "cmake-3.27.0-py2.py3-none-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:35a8d397ce883e93b5e6561e2803ce9470df52283862264093c1078530f98189"}, + {file = "cmake-3.27.0-py2.py3-none-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:1f38d87b2c65763a0113f4a6c652e6f4b5adf90b384c1e1d69e4f8a3104a57d6"}, + {file = "cmake-3.27.0-py2.py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b9d5811954dcedcaa6c915c4a9bb6d64b55ac189e9cbc74be726307d9d084804"}, + {file = "cmake-3.27.0-py2.py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:073e4f196d0888216e6794c08cd984ddabc108c0e4e66f48fbd7610d1e6d726d"}, + {file = "cmake-3.27.0-py2.py3-none-musllinux_1_1_i686.whl", hash = "sha256:e58e48643903e6fad76274337f9a4d3c575b8e21cd05c6214780b2c98bb0c706"}, + {file = "cmake-3.27.0-py2.py3-none-musllinux_1_1_ppc64le.whl", hash = "sha256:9740ed9f61a3bd8708a41cadd5c057c04f38e5b89bd773e369df2e210a1c55a3"}, + {file = "cmake-3.27.0-py2.py3-none-musllinux_1_1_s390x.whl", hash = "sha256:1b3189171665f5c8d748ae7fe10a29fff1ebeedeaef57b16f1ea54b1ec0fe514"}, + {file = "cmake-3.27.0-py2.py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:c4c968c188e7518deb463a14e64f3a19f242c9dcf7f24e1dbcc1419690cd54e0"}, + {file = "cmake-3.27.0-py2.py3-none-win32.whl", hash = "sha256:5561aca62b65aac844f3931e74cfeb696e4534de145e3307bf942e735736541e"}, + {file = "cmake-3.27.0-py2.py3-none-win_amd64.whl", hash = "sha256:48be3afe62c9513a49be007896a4058fafec512cb1f269a50126da30aacad97f"}, + {file = "cmake-3.27.0-py2.py3-none-win_arm64.whl", hash = "sha256:6f46a170b0c9c552d52da4346534570f818195dfc4f1d0c03264e24cc348fc60"}, + {file = "cmake-3.27.0.tar.gz", hash = "sha256:d03f0a76a2b96805044ad1178b92aeeb5f695caa6776a32522bb5c430a55b4e8"}, ] [package.extras] @@ -693,7 +661,6 @@ test = ["coverage (>=4.2)", "flake8 (>=3.0.4)", "path.py (>=11.5.0)", "pytest (> name = "colorama" version = "0.4.6" 
description = "Cross-platform colored terminal text." -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -705,7 +672,6 @@ files = [ name = "coloredlogs" version = "15.0.1" description = "Colored terminal output for Python's logging module" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -723,7 +689,6 @@ cron = ["capturer (>=2.4)"] name = "comm" version = "0.1.3" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -743,7 +708,6 @@ typing = ["mypy (>=0.990)"] name = "cryptography" version = "41.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -789,7 +753,6 @@ test-randomorder = ["pytest-randomly"] name = "debugpy" version = "1.6.7" description = "An implementation of the Debug Adapter Protocol for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -817,7 +780,6 @@ files = [ name = "decorator" version = "5.1.1" description = "Decorators for Humans" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -827,104 +789,63 @@ files = [ [[package]] name = "distlib" -version = "0.3.6" +version = "0.3.7" description = "Distribution utilities" -category = "dev" optional = false python-versions = "*" files = [ - {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"}, - {file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"}, + {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"}, + {file = "distlib-0.3.7.tar.gz", hash = 
"sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, ] [[package]] name = "dnspython" -version = "2.3.0" +version = "2.4.0" description = "DNS toolkit" -category = "dev" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8,<4.0" files = [ - {file = "dnspython-2.3.0-py3-none-any.whl", hash = "sha256:89141536394f909066cabd112e3e1a37e4e654db00a25308b0f130bc3152eb46"}, - {file = "dnspython-2.3.0.tar.gz", hash = "sha256:224e32b03eb46be70e12ef6d64e0be123a64e621ab4c0822ff6d450d52a540b9"}, + {file = "dnspython-2.4.0-py3-none-any.whl", hash = "sha256:46b4052a55b56beea3a3bdd7b30295c292bd6827dd442348bc116f2d35b17f0a"}, + {file = "dnspython-2.4.0.tar.gz", hash = "sha256:758e691dbb454d5ccf4e1b154a19e52847f79e21a42fef17b969144af29a4e6c"}, ] +[package.dependencies] +httpcore = {version = ">=0.17.3", markers = "python_version >= \"3.8\""} +sniffio = ">=1.1,<2.0" + [package.extras] -curio = ["curio (>=1.2,<2.0)", "sniffio (>=1.1,<2.0)"] -dnssec = ["cryptography (>=2.6,<40.0)"] -doh = ["h2 (>=4.1.0)", "httpx (>=0.21.1)", "requests (>=2.23.0,<3.0.0)", "requests-toolbelt (>=0.9.1,<0.11.0)"] +dnssec = ["cryptography (>=2.6,<42.0)"] +doh = ["h2 (>=4.1.0)", "httpx (>=0.24.1)"] doq = ["aioquic (>=0.9.20)"] idna = ["idna (>=2.1,<4.0)"] trio = ["trio (>=0.14,<0.23)"] wmi = ["wmi (>=1.5.1,<2.0.0)"] [[package]] -name = "duckdb" -version = "0.8.1" -description = "DuckDB embedded database" -category = "dev" +name = "environs" +version = "9.5.0" +description = "simplified environment variable parsing" optional = false -python-versions = "*" +python-versions = ">=3.6" files = [ - {file = "duckdb-0.8.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:14781d21580ee72aba1f5dcae7734674c9b6c078dd60470a08b2b420d15b996d"}, - {file = "duckdb-0.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f13bf7ab0e56ddd2014ef762ae4ee5ea4df5a69545ce1191b8d7df8118ba3167"}, - {file = "duckdb-0.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:e4032042d8363e55365bbca3faafc6dc336ed2aad088f10ae1a534ebc5bcc181"}, - {file = "duckdb-0.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31a71bd8f0b0ca77c27fa89b99349ef22599ffefe1e7684ae2e1aa2904a08684"}, - {file = "duckdb-0.8.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24568d6e48f3dbbf4a933109e323507a46b9399ed24c5d4388c4987ddc694fd0"}, - {file = "duckdb-0.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:297226c0dadaa07f7c5ae7cbdb9adba9567db7b16693dbd1b406b739ce0d7924"}, - {file = "duckdb-0.8.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5792cf777ece2c0591194006b4d3e531f720186102492872cb32ddb9363919cf"}, - {file = "duckdb-0.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:12803f9f41582b68921d6b21f95ba7a51e1d8f36832b7d8006186f58c3d1b344"}, - {file = "duckdb-0.8.1-cp310-cp310-win32.whl", hash = "sha256:d0953d5a2355ddc49095e7aef1392b7f59c5be5cec8cdc98b9d9dc1f01e7ce2b"}, - {file = "duckdb-0.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:6e6583c98a7d6637e83bcadfbd86e1f183917ea539f23b6b41178f32f813a5eb"}, - {file = "duckdb-0.8.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fad7ed0d4415f633d955ac24717fa13a500012b600751d4edb050b75fb940c25"}, - {file = "duckdb-0.8.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:81ae602f34d38d9c48dd60f94b89f28df3ef346830978441b83c5b4eae131d08"}, - {file = "duckdb-0.8.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7d75cfe563aaa058d3b4ccaaa371c6271e00e3070df5de72361fd161b2fe6780"}, - {file = "duckdb-0.8.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dbb55e7a3336f2462e5e916fc128c47fe1c03b6208d6bd413ac11ed95132aa0"}, - {file = "duckdb-0.8.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6df53efd63b6fdf04657385a791a4e3c4fb94bfd5db181c4843e2c46b04fef5"}, - {file = "duckdb-0.8.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1b188b80b70d1159b17c9baaf541c1799c1ce8b2af4add179a9eed8e2616be96"}, - {file = "duckdb-0.8.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5ad481ee353f31250b45d64b4a104e53b21415577943aa8f84d0af266dc9af85"}, - {file = "duckdb-0.8.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1d1b1729993611b1892509d21c21628917625cdbe824a61ce891baadf684b32"}, - {file = "duckdb-0.8.1-cp311-cp311-win32.whl", hash = "sha256:2d8f9cc301e8455a4f89aa1088b8a2d628f0c1f158d4cf9bc78971ed88d82eea"}, - {file = "duckdb-0.8.1-cp311-cp311-win_amd64.whl", hash = "sha256:07457a43605223f62d93d2a5a66b3f97731f79bbbe81fdd5b79954306122f612"}, - {file = "duckdb-0.8.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d2c8062c3e978dbcd80d712ca3e307de8a06bd4f343aa457d7dd7294692a3842"}, - {file = "duckdb-0.8.1-cp36-cp36m-win32.whl", hash = "sha256:fad486c65ae944eae2de0d590a0a4fb91a9893df98411d66cab03359f9cba39b"}, - {file = "duckdb-0.8.1-cp36-cp36m-win_amd64.whl", hash = "sha256:86fa4506622c52d2df93089c8e7075f1c4d0ba56f4bf27faebde8725355edf32"}, - {file = "duckdb-0.8.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:60e07a62782f88420046e30cc0e3de842d0901c4fd5b8e4d28b73826ec0c3f5e"}, - {file = "duckdb-0.8.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f18563675977f8cbf03748efee0165b4c8ef64e0cbe48366f78e2914d82138bb"}, - {file = "duckdb-0.8.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16e179443832bea8439ae4dff93cf1e42c545144ead7a4ef5f473e373eea925a"}, - {file = "duckdb-0.8.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a413d5267cb41a1afe69d30dd6d4842c588256a6fed7554c7e07dad251ede095"}, - {file = "duckdb-0.8.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3784680df59eadd683b0a4c2375d451a64470ca54bd171c01e36951962b1d332"}, - {file = "duckdb-0.8.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:67a1725c2b01f9b53571ecf3f92959b652f60156c1c48fb35798302e39b3c1a2"}, - {file = 
"duckdb-0.8.1-cp37-cp37m-win32.whl", hash = "sha256:197d37e2588c5ad063e79819054eedb7550d43bf1a557d03ba8f8f67f71acc42"}, - {file = "duckdb-0.8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:3843feb79edf100800f5037c32d5d5a5474fb94b32ace66c707b96605e7c16b2"}, - {file = "duckdb-0.8.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:624c889b0f2d656794757b3cc4fc58030d5e285f5ad2ef9fba1ea34a01dab7fb"}, - {file = "duckdb-0.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fcbe3742d77eb5add2d617d487266d825e663270ef90253366137a47eaab9448"}, - {file = "duckdb-0.8.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47516c9299d09e9dbba097b9fb339b389313c4941da5c54109df01df0f05e78c"}, - {file = "duckdb-0.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf1ba718b7522d34399446ebd5d4b9fcac0b56b6ac07bfebf618fd190ec37c1d"}, - {file = "duckdb-0.8.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e36e35d38a9ae798fe8cf6a839e81494d5b634af89f4ec9483f4d0a313fc6bdb"}, - {file = "duckdb-0.8.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23493313f88ce6e708a512daacad13e83e6d1ea0be204b175df1348f7fc78671"}, - {file = "duckdb-0.8.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1fb9bf0b6f63616c8a4b9a6a32789045e98c108df100e6bac783dc1e36073737"}, - {file = "duckdb-0.8.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:12fc13ecd5eddd28b203b9e3999040d3a7374a8f4b833b04bd26b8c5685c2635"}, - {file = "duckdb-0.8.1-cp38-cp38-win32.whl", hash = "sha256:a12bf4b18306c9cb2c9ba50520317e6cf2de861f121d6f0678505fa83468c627"}, - {file = "duckdb-0.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:e4e809358b9559c00caac4233e0e2014f3f55cd753a31c4bcbbd1b55ad0d35e4"}, - {file = "duckdb-0.8.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7acedfc00d97fbdb8c3d120418c41ef3cb86ef59367f3a9a30dff24470d38680"}, - {file = "duckdb-0.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:99bfe264059cdc1e318769103f656f98e819cd4e231cd76c1d1a0327f3e5cef8"}, - {file = "duckdb-0.8.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:538b225f361066231bc6cd66c04a5561de3eea56115a5dd773e99e5d47eb1b89"}, - {file = "duckdb-0.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae0be3f71a18cd8492d05d0fc1bc67d01d5a9457b04822d025b0fc8ee6efe32e"}, - {file = "duckdb-0.8.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd82ba63b58672e46c8ec60bc9946aa4dd7b77f21c1ba09633d8847ad9eb0d7b"}, - {file = "duckdb-0.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:780a34559aaec8354e83aa4b7b31b3555f1b2cf75728bf5ce11b89a950f5cdd9"}, - {file = "duckdb-0.8.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:01f0d4e9f7103523672bda8d3f77f440b3e0155dd3b2f24997bc0c77f8deb460"}, - {file = "duckdb-0.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:31f692decb98c2d57891da27180201d9e93bb470a3051fcf413e8da65bca37a5"}, - {file = "duckdb-0.8.1-cp39-cp39-win32.whl", hash = "sha256:e7fe93449cd309bbc67d1bf6f6392a6118e94a9a4479ab8a80518742e855370a"}, - {file = "duckdb-0.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:81d670bc6807672f038332d9bf587037aabdd741b0810de191984325ed307abd"}, - {file = "duckdb-0.8.1.tar.gz", hash = "sha256:a54d37f4abc2afc4f92314aaa56ecf215a411f40af4bffe1e86bd25e62aceee9"}, + {file = "environs-9.5.0-py2.py3-none-any.whl", hash = "sha256:1e549569a3de49c05f856f40bce86979e7d5ffbbc4398e7f338574c220189124"}, + {file = "environs-9.5.0.tar.gz", hash = "sha256:a76307b36fbe856bdca7ee9161e6c466fd7fcffc297109a118c59b54e27e30c9"}, ] +[package.dependencies] +marshmallow = ">=3.0.0" +python-dotenv = "*" + +[package.extras] +dev = ["dj-database-url", "dj-email-url", "django-cache-url", "flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)", "pytest", "tox"] +django = ["dj-database-url", "dj-email-url", "django-cache-url"] +lint = ["flake8 (==4.0.1)", 
"flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)"] +tests = ["dj-database-url", "dj-email-url", "django-cache-url", "pytest"] + [[package]] name = "exceptiongroup" version = "1.1.2" description = "Backport of PEP 654 (exception groups)" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -939,7 +860,6 @@ test = ["pytest (>=6)"] name = "executing" version = "1.2.0" description = "Get the currently executing AST node of a frame, and other information" -category = "dev" optional = false python-versions = "*" files = [ @@ -952,31 +872,27 @@ tests = ["asttokens", "littleutils", "pytest", "rich"] [[package]] name = "fastapi" -version = "0.85.1" +version = "0.99.1" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "fastapi-0.85.1-py3-none-any.whl", hash = "sha256:de3166b6b1163dc22da4dc4ebdc3192fcbac7700dd1870a1afa44de636a636b5"}, - {file = "fastapi-0.85.1.tar.gz", hash = "sha256:1facd097189682a4ff11cbd01334a992e51b56be663b2bd50c2c09523624f144"}, + {file = "fastapi-0.99.1-py3-none-any.whl", hash = "sha256:976df7bab51ac7beda9f68c4513b8c4490b5c1135c72aafd0a5ee4023ec5282e"}, + {file = "fastapi-0.99.1.tar.gz", hash = "sha256:ac78f717cd80d657bd183f94d33b9bda84aa376a46a9dab513586b8eef1dc6fc"}, ] [package.dependencies] -pydantic = ">=1.6.2,<1.7 || >1.7,<1.7.1 || >1.7.1,<1.7.2 || >1.7.2,<1.7.3 || >1.7.3,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0" -starlette = "0.20.4" +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0" +starlette = ">=0.27.0,<0.28.0" +typing-extensions = ">=4.5.0" [package.extras] -all = ["email-validator (>=1.1.1,<2.0.0)", "itsdangerous (>=1.1.0,<3.0.0)", "jinja2 (>=2.11.2,<4.0.0)", "orjson (>=3.2.1,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "pyyaml (>=5.3.1,<7.0.0)", "requests (>=2.24.0,<3.0.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)", 
"uvicorn[standard] (>=0.12.0,<0.19.0)"] -dev = ["autoflake (>=1.4.0,<2.0.0)", "flake8 (>=3.8.3,<6.0.0)", "pre-commit (>=2.17.0,<3.0.0)", "uvicorn[standard] (>=0.12.0,<0.19.0)"] -doc = ["mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.3.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pyyaml (>=5.3.1,<7.0.0)", "typer (>=0.4.1,<0.7.0)"] -test = ["anyio[trio] (>=3.2.1,<4.0.0)", "black (==22.8.0)", "databases[sqlite] (>=0.3.2,<0.7.0)", "email-validator (>=1.1.1,<2.0.0)", "flake8 (>=3.8.3,<6.0.0)", "flask (>=1.1.2,<3.0.0)", "httpx (>=0.23.0,<0.24.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.971)", "orjson (>=3.2.1,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "peewee (>=3.13.3,<4.0.0)", "pytest (>=7.1.3,<8.0.0)", "pytest-cov (>=2.12.0,<4.0.0)", "python-jose[cryptography] (>=3.3.0,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "pyyaml (>=5.3.1,<7.0.0)", "requests (>=2.24.0,<3.0.0)", "sqlalchemy (>=1.3.18,<1.5.0)", "types-orjson (==3.6.2)", "types-ujson (==5.4.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)"] +all = ["email-validator (>=1.1.1)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "python-multipart (>=0.0.5)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] [[package]] name = "filelock" version = "3.12.2" description = "A platform independent file lock." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -992,7 +908,6 @@ testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "p name = "flatbuffers" version = "23.5.26" description = "The FlatBuffers serialization format for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -1002,93 +917,78 @@ files = [ [[package]] name = "frozenlist" -version = "1.3.3" +version = "1.4.0" description = "A list-like structure which implements collections.abc.MutableSequence" -category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff8bf625fe85e119553b5383ba0fb6aa3d0ec2ae980295aaefa552374926b3f4"}, - {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dfbac4c2dfcc082fcf8d942d1e49b6aa0766c19d3358bd86e2000bf0fa4a9cf0"}, - {file = "frozenlist-1.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b1c63e8d377d039ac769cd0926558bb7068a1f7abb0f003e3717ee003ad85530"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fdfc24dcfce5b48109867c13b4cb15e4660e7bd7661741a391f821f23dfdca7"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c926450857408e42f0bbc295e84395722ce74bae69a3b2aa2a65fe22cb14b99"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1841e200fdafc3d51f974d9d377c079a0694a8f06de2e67b48150328d66d5483"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f470c92737afa7d4c3aacc001e335062d582053d4dbe73cda126f2d7031068dd"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:783263a4eaad7c49983fe4b2e7b53fa9770c136c270d2d4bbb6d2192bf4d9caf"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:924620eef691990dfb56dc4709f280f40baee568c794b5c1885800c3ecc69816"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ae4dc05c465a08a866b7a1baf360747078b362e6a6dbeb0c57f234db0ef88ae0"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:bed331fe18f58d844d39ceb398b77d6ac0b010d571cba8267c2e7165806b00ce"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:02c9ac843e3390826a265e331105efeab489ffaf4dd86384595ee8ce6d35ae7f"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9545a33965d0d377b0bc823dcabf26980e77f1b6a7caa368a365a9497fb09420"}, - {file = "frozenlist-1.3.3-cp310-cp310-win32.whl", hash = "sha256:d5cd3ab21acbdb414bb6c31958d7b06b85eeb40f66463c264a9b343a4e238642"}, - {file = "frozenlist-1.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:b756072364347cb6aa5b60f9bc18e94b2f79632de3b0190253ad770c5df17db1"}, - {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b4395e2f8d83fbe0c627b2b696acce67868793d7d9750e90e39592b3626691b7"}, - {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14143ae966a6229350021384870458e4777d1eae4c28d1a7aa47f24d030e6678"}, - {file = "frozenlist-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5d8860749e813a6f65bad8285a0520607c9500caa23fea6ee407e63debcdbef6"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23d16d9f477bb55b6154654e0e74557040575d9d19fe78a161bd33d7d76808e8"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb82dbba47a8318e75f679690190c10a5e1f447fbf9df41cbc4c3afd726d88cb"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:9309869032abb23d196cb4e4db574232abe8b8be1339026f489eeb34a4acfd91"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a97b4fe50b5890d36300820abd305694cb865ddb7885049587a5678215782a6b"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c188512b43542b1e91cadc3c6c915a82a5eb95929134faf7fd109f14f9892ce4"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:303e04d422e9b911a09ad499b0368dc551e8c3cd15293c99160c7f1f07b59a48"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0771aed7f596c7d73444c847a1c16288937ef988dc04fb9f7be4b2aa91db609d"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:66080ec69883597e4d026f2f71a231a1ee9887835902dbe6b6467d5a89216cf6"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:41fe21dc74ad3a779c3d73a2786bdf622ea81234bdd4faf90b8b03cad0c2c0b4"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f20380df709d91525e4bee04746ba612a4df0972c1b8f8e1e8af997e678c7b81"}, - {file = "frozenlist-1.3.3-cp311-cp311-win32.whl", hash = "sha256:f30f1928162e189091cf4d9da2eac617bfe78ef907a761614ff577ef4edfb3c8"}, - {file = "frozenlist-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:a6394d7dadd3cfe3f4b3b186e54d5d8504d44f2d58dcc89d693698e8b7132b32"}, - {file = "frozenlist-1.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8df3de3a9ab8325f94f646609a66cbeeede263910c5c0de0101079ad541af332"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0693c609e9742c66ba4870bcee1ad5ff35462d5ffec18710b4ac89337ff16e27"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:cd4210baef299717db0a600d7a3cac81d46ef0e007f88c9335db79f8979c0d3d"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:394c9c242113bfb4b9aa36e2b80a05ffa163a30691c7b5a29eba82e937895d5e"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6327eb8e419f7d9c38f333cde41b9ae348bec26d840927332f17e887a8dcb70d"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e24900aa13212e75e5b366cb9065e78bbf3893d4baab6052d1aca10d46d944c"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3843f84a6c465a36559161e6c59dce2f2ac10943040c2fd021cfb70d58c4ad56"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:84610c1502b2461255b4c9b7d5e9c48052601a8957cd0aea6ec7a7a1e1fb9420"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c21b9aa40e08e4f63a2f92ff3748e6b6c84d717d033c7b3438dd3123ee18f70e"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:efce6ae830831ab6a22b9b4091d411698145cb9b8fc869e1397ccf4b4b6455cb"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:40de71985e9042ca00b7953c4f41eabc3dc514a2d1ff534027f091bc74416401"}, - {file = "frozenlist-1.3.3-cp37-cp37m-win32.whl", hash = "sha256:180c00c66bde6146a860cbb81b54ee0df350d2daf13ca85b275123bbf85de18a"}, - {file = "frozenlist-1.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9bbbcedd75acdfecf2159663b87f1bb5cfc80e7cd99f7ddd9d66eb98b14a8411"}, - {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:034a5c08d36649591be1cbb10e09da9f531034acfe29275fc5454a3b101ce41a"}, - {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba64dc2b3b7b158c6660d49cdb1d872d1d0bf4e42043ad8d5006099479a194e5"}, - {file = 
"frozenlist-1.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47df36a9fe24054b950bbc2db630d508cca3aa27ed0566c0baf661225e52c18e"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:008a054b75d77c995ea26629ab3a0c0d7281341f2fa7e1e85fa6153ae29ae99c"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:841ea19b43d438a80b4de62ac6ab21cfe6827bb8a9dc62b896acc88eaf9cecba"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e235688f42b36be2b6b06fc37ac2126a73b75fb8d6bc66dd632aa35286238703"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca713d4af15bae6e5d79b15c10c8522859a9a89d3b361a50b817c98c2fb402a2"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ac5995f2b408017b0be26d4a1d7c61bce106ff3d9e3324374d66b5964325448"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4ae8135b11652b08a8baf07631d3ebfe65a4c87909dbef5fa0cdde440444ee4"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ea42116ceb6bb16dbb7d526e242cb6747b08b7710d9782aa3d6732bd8d27649"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:810860bb4bdce7557bc0febb84bbd88198b9dbc2022d8eebe5b3590b2ad6c842"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ee78feb9d293c323b59a6f2dd441b63339a30edf35abcb51187d2fc26e696d13"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0af2e7c87d35b38732e810befb9d797a99279cbb85374d42ea61c1e9d23094b3"}, - {file = "frozenlist-1.3.3-cp38-cp38-win32.whl", hash = "sha256:899c5e1928eec13fd6f6d8dc51be23f0d09c5281e40d9cf4273d188d9feeaf9b"}, - {file = "frozenlist-1.3.3-cp38-cp38-win_amd64.whl", hash = 
"sha256:7f44e24fa70f6fbc74aeec3e971f60a14dde85da364aa87f15d1be94ae75aeef"}, - {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2b07ae0c1edaa0a36339ec6cce700f51b14a3fc6545fdd32930d2c83917332cf"}, - {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ebb86518203e12e96af765ee89034a1dbb0c3c65052d1b0c19bbbd6af8a145e1"}, - {file = "frozenlist-1.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5cf820485f1b4c91e0417ea0afd41ce5cf5965011b3c22c400f6d144296ccbc0"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c11e43016b9024240212d2a65043b70ed8dfd3b52678a1271972702d990ac6d"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8fa3c6e3305aa1146b59a09b32b2e04074945ffcfb2f0931836d103a2c38f936"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:352bd4c8c72d508778cf05ab491f6ef36149f4d0cb3c56b1b4302852255d05d5"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65a5e4d3aa679610ac6e3569e865425b23b372277f89b5ef06cf2cdaf1ebf22b"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e2c1185858d7e10ff045c496bbf90ae752c28b365fef2c09cf0fa309291669"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f163d2fd041c630fed01bc48d28c3ed4a3b003c00acd396900e11ee5316b56bb"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:05cdb16d09a0832eedf770cb7bd1fe57d8cf4eaf5aced29c4e41e3f20b30a784"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8bae29d60768bfa8fb92244b74502b18fae55a80eac13c88eb0b496d4268fd2d"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:eedab4c310c0299961ac285591acd53dc6723a1ebd90a57207c71f6e0c2153ab"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3bbdf44855ed8f0fbcd102ef05ec3012d6a4fd7c7562403f76ce6a52aeffb2b1"}, - {file = "frozenlist-1.3.3-cp39-cp39-win32.whl", hash = "sha256:efa568b885bca461f7c7b9e032655c0c143d305bf01c30caf6db2854a4532b38"}, - {file = "frozenlist-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfe33efc9cb900a4c46f91a5ceba26d6df370ffddd9ca386eb1d4f0ad97b9ea9"}, - {file = "frozenlist-1.3.3.tar.gz", hash = "sha256:58bcc55721e8a90b88332d6cd441261ebb22342e238296bb330968952fbb3a6a"}, + {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:764226ceef3125e53ea2cb275000e309c0aa5464d43bd72abd661e27fffc26ab"}, + {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6484756b12f40003c6128bfcc3fa9f0d49a687e171186c2d85ec82e3758c559"}, + {file = "frozenlist-1.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9ac08e601308e41eb533f232dbf6b7e4cea762f9f84f6357136eed926c15d12c"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d081f13b095d74b67d550de04df1c756831f3b83dc9881c38985834387487f1b"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71932b597f9895f011f47f17d6428252fc728ba2ae6024e13c3398a087c2cdea"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:981b9ab5a0a3178ff413bca62526bb784249421c24ad7381e39d67981be2c326"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e41f3de4df3e80de75845d3e743b3f1c4c8613c3997a912dbf0229fc61a8b963"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6918d49b1f90821e93069682c06ffde41829c346c66b721e65a5c62b4bab0300"}, + 
{file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e5c8764c7829343d919cc2dfc587a8db01c4f70a4ebbc49abde5d4b158b007b"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8d0edd6b1c7fb94922bf569c9b092ee187a83f03fb1a63076e7774b60f9481a8"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e29cda763f752553fa14c68fb2195150bfab22b352572cb36c43c47bedba70eb"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:0c7c1b47859ee2cac3846fde1c1dc0f15da6cec5a0e5c72d101e0f83dcb67ff9"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:901289d524fdd571be1c7be054f48b1f88ce8dddcbdf1ec698b27d4b8b9e5d62"}, + {file = "frozenlist-1.4.0-cp310-cp310-win32.whl", hash = "sha256:1a0848b52815006ea6596c395f87449f693dc419061cc21e970f139d466dc0a0"}, + {file = "frozenlist-1.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:b206646d176a007466358aa21d85cd8600a415c67c9bd15403336c331a10d956"}, + {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:de343e75f40e972bae1ef6090267f8260c1446a1695e77096db6cfa25e759a95"}, + {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad2a9eb6d9839ae241701d0918f54c51365a51407fd80f6b8289e2dfca977cc3"}, + {file = "frozenlist-1.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7bd3b3830247580de99c99ea2a01416dfc3c34471ca1298bccabf86d0ff4dc"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdf1847068c362f16b353163391210269e4f0569a3c166bc6a9f74ccbfc7e839"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38461d02d66de17455072c9ba981d35f1d2a73024bee7790ac2f9e361ef1cd0c"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5a32087d720c608f42caed0ef36d2b3ea61a9d09ee59a5142d6070da9041b8f"}, + {file = 
"frozenlist-1.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd65632acaf0d47608190a71bfe46b209719bf2beb59507db08ccdbe712f969b"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261b9f5d17cac914531331ff1b1d452125bf5daa05faf73b71d935485b0c510b"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b89ac9768b82205936771f8d2eb3ce88503b1556324c9f903e7156669f521472"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:008eb8b31b3ea6896da16c38c1b136cb9fec9e249e77f6211d479db79a4eaf01"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e74b0506fa5aa5598ac6a975a12aa8928cbb58e1f5ac8360792ef15de1aa848f"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:490132667476f6781b4c9458298b0c1cddf237488abd228b0b3650e5ecba7467"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:76d4711f6f6d08551a7e9ef28c722f4a50dd0fc204c56b4bcd95c6cc05ce6fbb"}, + {file = "frozenlist-1.4.0-cp311-cp311-win32.whl", hash = "sha256:a02eb8ab2b8f200179b5f62b59757685ae9987996ae549ccf30f983f40602431"}, + {file = "frozenlist-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:515e1abc578dd3b275d6a5114030b1330ba044ffba03f94091842852f806f1c1"}, + {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0ed05f5079c708fe74bf9027e95125334b6978bf07fd5ab923e9e55e5fbb9d3"}, + {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca265542ca427bf97aed183c1676e2a9c66942e822b14dc6e5f42e038f92a503"}, + {file = "frozenlist-1.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:491e014f5c43656da08958808588cc6c016847b4360e327a62cb308c791bd2d9"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:17ae5cd0f333f94f2e03aaf140bb762c64783935cc764ff9c82dff626089bebf"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e78fb68cf9c1a6aa4a9a12e960a5c9dfbdb89b3695197aa7064705662515de2"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5655a942f5f5d2c9ed93d72148226d75369b4f6952680211972a33e59b1dfdc"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11b0746f5d946fecf750428a95f3e9ebe792c1ee3b1e96eeba145dc631a9672"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e66d2a64d44d50d2543405fb183a21f76b3b5fd16f130f5c99187c3fb4e64919"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:88f7bc0fcca81f985f78dd0fa68d2c75abf8272b1f5c323ea4a01a4d7a614efc"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5833593c25ac59ede40ed4de6d67eb42928cca97f26feea219f21d0ed0959b79"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fec520865f42e5c7f050c2a79038897b1c7d1595e907a9e08e3353293ffc948e"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:b826d97e4276750beca7c8f0f1a4938892697a6bcd8ec8217b3312dad6982781"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ceb6ec0a10c65540421e20ebd29083c50e6d1143278746a4ef6bcf6153171eb8"}, + {file = "frozenlist-1.4.0-cp38-cp38-win32.whl", hash = "sha256:2b8bcf994563466db019fab287ff390fffbfdb4f905fc77bc1c1d604b1c689cc"}, + {file = "frozenlist-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:a6c8097e01886188e5be3e6b14e94ab365f384736aa1fca6a0b9e35bd4a30bc7"}, + {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6c38721585f285203e4b4132a352eb3daa19121a035f3182e08e437cface44bf"}, + {file = 
"frozenlist-1.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0c6da9aee33ff0b1a451e867da0c1f47408112b3391dd43133838339e410963"}, + {file = "frozenlist-1.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93ea75c050c5bb3d98016b4ba2497851eadf0ac154d88a67d7a6816206f6fa7f"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f61e2dc5ad442c52b4887f1fdc112f97caeff4d9e6ebe78879364ac59f1663e1"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa384489fefeb62321b238e64c07ef48398fe80f9e1e6afeff22e140e0850eef"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10ff5faaa22786315ef57097a279b833ecab1a0bfb07d604c9cbb1c4cdc2ed87"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:007df07a6e3eb3e33e9a1fe6a9db7af152bbd8a185f9aaa6ece10a3529e3e1c6"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4f399d28478d1f604c2ff9119907af9726aed73680e5ed1ca634d377abb087"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5374b80521d3d3f2ec5572e05adc94601985cc526fb276d0c8574a6d749f1b3"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ce31ae3e19f3c902de379cf1323d90c649425b86de7bbdf82871b8a2a0615f3d"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7211ef110a9194b6042449431e08c4d80c0481e5891e58d429df5899690511c2"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:556de4430ce324c836789fa4560ca62d1591d2538b8ceb0b4f68fb7b2384a27a"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7645a8e814a3ee34a89c4a372011dcd817964ce8cb273c8ed6119d706e9613e3"}, + {file = "frozenlist-1.4.0-cp39-cp39-win32.whl", hash = 
"sha256:19488c57c12d4e8095a922f328df3f179c820c212940a498623ed39160bc3c2f"}, + {file = "frozenlist-1.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:6221d84d463fb110bdd7619b69cb43878a11d51cbb9394ae3105d082d5199167"}, + {file = "frozenlist-1.4.0.tar.gz", hash = "sha256:09163bdf0b2907454042edb19f887c6d33806adc71fbd54afc14908bfdc22251"}, ] [[package]] name = "fsspec" version = "2023.6.0" description = "File-system specification" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1124,7 +1024,6 @@ tqdm = ["tqdm"] name = "graphlib-backport" version = "1.0.3" description = "Backport of the Python 3.9 graphlib module for Python 3.6+" -category = "dev" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -1132,11 +1031,67 @@ files = [ {file = "graphlib_backport-1.0.3.tar.gz", hash = "sha256:7bb8fc7757b8ae4e6d8000a26cd49e9232aaa9a3aa57edb478474b8424bfaae2"}, ] +[[package]] +name = "grpcio" +version = "1.56.0" +description = "HTTP/2-based RPC framework" +optional = false +python-versions = ">=3.7" +files = [ + {file = "grpcio-1.56.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:fb34ace11419f1ae321c36ccaa18d81cd3f20728cd191250be42949d6845bb2d"}, + {file = "grpcio-1.56.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:008767c0aed4899e657b50f2e0beacbabccab51359eba547f860e7c55f2be6ba"}, + {file = "grpcio-1.56.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:17f47aeb9be0da5337f9ff33ebb8795899021e6c0741ee68bd69774a7804ca86"}, + {file = "grpcio-1.56.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43c50d810cc26349b093bf2cfe86756ab3e9aba3e7e681d360930c1268e1399a"}, + {file = "grpcio-1.56.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:187b8f71bad7d41eea15e0c9812aaa2b87adfb343895fffb704fb040ca731863"}, + {file = "grpcio-1.56.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:881575f240eb5db72ddca4dc5602898c29bc082e0d94599bf20588fb7d1ee6a0"}, + {file = 
"grpcio-1.56.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c243b158dd7585021d16c50498c4b2ec0a64a6119967440c5ff2d8c89e72330e"}, + {file = "grpcio-1.56.0-cp310-cp310-win32.whl", hash = "sha256:8b3b2c7b5feef90bc9a5fa1c7f97637e55ec3e76460c6d16c3013952ee479cd9"}, + {file = "grpcio-1.56.0-cp310-cp310-win_amd64.whl", hash = "sha256:03a80451530fd3b8b155e0c4480434f6be669daf7ecba56f73ef98f94222ee01"}, + {file = "grpcio-1.56.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:64bd3abcf9fb4a9fa4ede8d0d34686314a7075f62a1502217b227991d9ca4245"}, + {file = "grpcio-1.56.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:fdc3a895791af4addbb826808d4c9c35917c59bb5c430d729f44224e51c92d61"}, + {file = "grpcio-1.56.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:4f84a6fd4482e5fe73b297d4874b62a535bc75dc6aec8e9fe0dc88106cd40397"}, + {file = "grpcio-1.56.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14e70b4dda3183abea94c72d41d5930c333b21f8561c1904a372d80370592ef3"}, + {file = "grpcio-1.56.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b5ce42a5ebe3e04796246ba50357f1813c44a6efe17a37f8dc7a5c470377312"}, + {file = "grpcio-1.56.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8219f17baf069fe8e42bd8ca0b312b875595e43a70cabf397be4fda488e2f27d"}, + {file = "grpcio-1.56.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:defdd14b518e6e468466f799aaa69db0355bca8d3a5ea75fb912d28ba6f8af31"}, + {file = "grpcio-1.56.0-cp311-cp311-win32.whl", hash = "sha256:50f4daa698835accbbcc60e61e0bc29636c0156ddcafb3891c987e533a0031ba"}, + {file = "grpcio-1.56.0-cp311-cp311-win_amd64.whl", hash = "sha256:59c4e606993a47146fbeaf304b9e78c447f5b9ee5641cae013028c4cca784617"}, + {file = "grpcio-1.56.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:b1f4b6f25a87d80b28dd6d02e87d63fe1577fe6d04a60a17454e3f8077a38279"}, + {file = "grpcio-1.56.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = 
"sha256:c2148170e01d464d41011a878088444c13413264418b557f0bdcd1bf1b674a0e"}, + {file = "grpcio-1.56.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:0409de787ebbf08c9d2bca2bcc7762c1efe72eada164af78b50567a8dfc7253c"}, + {file = "grpcio-1.56.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66f0369d27f4c105cd21059d635860bb2ea81bd593061c45fb64875103f40e4a"}, + {file = "grpcio-1.56.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38fdf5bd0a1c754ce6bf9311a3c2c7ebe56e88b8763593316b69e0e9a56af1de"}, + {file = "grpcio-1.56.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:79d4c5911d12a7aa671e5eb40cbb50a830396525014d2d6f254ea2ba180ce637"}, + {file = "grpcio-1.56.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5d2fc471668a7222e213f86ef76933b18cdda6a51ea1322034478df8c6519959"}, + {file = "grpcio-1.56.0-cp37-cp37m-win_amd64.whl", hash = "sha256:991224fd485e088d3cb5e34366053691a4848a6b7112b8f5625a411305c26691"}, + {file = "grpcio-1.56.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:c6f36621aabecbaff3e70c4d1d924c76c8e6a7ffec60c331893640a4af0a8037"}, + {file = "grpcio-1.56.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:1eadd6de258901929223f422ffed7f8b310c0323324caf59227f9899ea1b1674"}, + {file = "grpcio-1.56.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:72836b5a1d4f508ffbcfe35033d027859cc737972f9dddbe33fb75d687421e2e"}, + {file = "grpcio-1.56.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f92a99ab0c7772fb6859bf2e4f44ad30088d18f7c67b83205297bfb229e0d2cf"}, + {file = "grpcio-1.56.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa08affbf672d051cd3da62303901aeb7042a2c188c03b2c2a2d346fc5e81c14"}, + {file = "grpcio-1.56.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2db108b4c8e29c145e95b0226973a66d73ae3e3e7fae00329294af4e27f1c42"}, + {file = "grpcio-1.56.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:8674fdbd28266d8efbcddacf4ec3643f76fe6376f73283fd63a8374c14b0ef7c"}, + {file = "grpcio-1.56.0-cp38-cp38-win32.whl", hash = "sha256:bd55f743e654fb050c665968d7ec2c33f03578a4bbb163cfce38024775ff54cc"}, + {file = "grpcio-1.56.0-cp38-cp38-win_amd64.whl", hash = "sha256:c63bc5ac6c7e646c296fed9139097ae0f0e63f36f0864d7ce431cce61fe0118a"}, + {file = "grpcio-1.56.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:c0bc9dda550785d23f4f025be614b7faa8d0293e10811f0f8536cf50435b7a30"}, + {file = "grpcio-1.56.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:d596408bab632ec7b947761e83ce6b3e7632e26b76d64c239ba66b554b7ee286"}, + {file = "grpcio-1.56.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:76b6e6e1ee9bda32e6e933efd61c512e9a9f377d7c580977f090d1a9c78cca44"}, + {file = "grpcio-1.56.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7beb84ebd0a3f732625124b73969d12b7350c5d9d64ddf81ae739bbc63d5b1ed"}, + {file = "grpcio-1.56.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83ec714bbbe9b9502177c842417fde39f7a267031e01fa3cd83f1ca49688f537"}, + {file = "grpcio-1.56.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4feee75565d1b5ab09cb3a5da672b84ca7f6dd80ee07a50f5537207a9af543a4"}, + {file = "grpcio-1.56.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b4638a796778329cc8e142e4f57c705adb286b3ba64e00b0fa91eeb919611be8"}, + {file = "grpcio-1.56.0-cp39-cp39-win32.whl", hash = "sha256:437af5a7673bca89c4bc0a993382200592d104dd7bf55eddcd141cef91f40bab"}, + {file = "grpcio-1.56.0-cp39-cp39-win_amd64.whl", hash = "sha256:4241a1c2c76e748023c834995cd916570e7180ee478969c2d79a60ce007bc837"}, + {file = "grpcio-1.56.0.tar.gz", hash = "sha256:4c08ee21b3d10315b8dc26f6c13917b20ed574cdbed2d2d80c53d5508fdcc0f2"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.56.0)"] + [[package]] name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -category = "dev" optional = false 
python-versions = ">=3.7" files = [ @@ -1145,24 +1100,30 @@ files = [ ] [[package]] -name = "hnswlib" -version = "0.7.0" -description = "hnswlib" -category = "dev" +name = "httpcore" +version = "0.17.3" +description = "A minimal low-level HTTP client." optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "hnswlib-0.7.0.tar.gz", hash = "sha256:bc459668e7e44bb7454b256b90c98c5af750653919d9a91698dafcf416cf64c4"}, + {file = "httpcore-0.17.3-py3-none-any.whl", hash = "sha256:c2789b767ddddfa2a5782e3199b2b7f6894540b17b16ec26b2c4d8e103510b87"}, + {file = "httpcore-0.17.3.tar.gz", hash = "sha256:a6f30213335e34c1ade7be6ec7c47f19f50c56db36abef1a9dfa3815b1cb3888"}, ] [package.dependencies] -numpy = "*" +anyio = ">=3.0,<5.0" +certifi = "*" +h11 = ">=0.13,<0.15" +sniffio = "==1.*" + +[package.extras] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] [[package]] name = "httptools" version = "0.6.0" description = "A collection of framework independent HTTP protocol utils." 
-category = "dev" optional = false python-versions = ">=3.5.0" files = [ @@ -1210,7 +1171,6 @@ test = ["Cython (>=0.29.24,<0.30.0)"] name = "huggingface-hub" version = "0.16.4" description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" -category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -1243,7 +1203,6 @@ typing = ["pydantic", "types-PyYAML", "types-requests", "types-simplejson", "typ name = "humanfriendly" version = "10.0" description = "Human friendly output for text interfaces using Python" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -1256,14 +1215,13 @@ pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_ve [[package]] name = "identify" -version = "2.5.24" +version = "2.5.25" description = "File identification library for Python" -category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "identify-2.5.24-py2.py3-none-any.whl", hash = "sha256:986dbfb38b1140e763e413e6feb44cd731faf72d1909543178aa79b0e258265d"}, - {file = "identify-2.5.24.tar.gz", hash = "sha256:0aac67d5b4812498056d28a9a512a483f5085cc28640b02b258a59dac34301d4"}, + {file = "identify-2.5.25-py2.py3-none-any.whl", hash = "sha256:9df2489842707d431b38ce3410ef8df40da5b10a3e28a3fcac1a42523e956409"}, + {file = "identify-2.5.25.tar.gz", hash = "sha256:db4de0e758c0db8f81996816cd2f3f2f8c5c8d49a7fd02f3b4109aac6fd80e29"}, ] [package.extras] @@ -1273,7 +1231,6 @@ license = ["ukkonen"] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1285,7 +1242,6 @@ files = [ name = "importlib-metadata" version = "6.8.0" description = "Read metadata from Python packages" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1301,11 +1257,28 @@ docs = 
["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker perf = ["ipython"] testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +[[package]] +name = "importlib-resources" +version = "6.0.0" +description = "Read resources from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_resources-6.0.0-py3-none-any.whl", hash = "sha256:d952faee11004c045f785bb5636e8f885bed30dc3c940d5d42798a2a4541c185"}, + {file = "importlib_resources-6.0.0.tar.gz", hash = "sha256:4cf94875a8368bd89531a756df9a9ebe1f150e0f885030b461237bc7f2d905f2"}, +] + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] + [[package]] name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1317,7 +1290,6 @@ files = [ name = "ipykernel" version = "6.24.0" description = "IPython Kernel for Jupyter" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1331,7 +1303,7 @@ comm = ">=0.1.1" debugpy = ">=1.6.5" ipython = ">=7.23.1" jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" matplotlib-inline = ">=0.1" nest-asyncio = "*" packaging = "*" @@ -1351,7 +1323,6 @@ test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio" name = "ipython" version = "8.12.2" description = "IPython: Productive 
Interactive Computing" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1387,11 +1358,24 @@ qtconsole = ["qtconsole"] test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + [[package]] name = "jedi" version = "0.18.2" description = "An autocompletion tool for Python that can be used for text editors." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1411,7 +1395,6 @@ testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1429,7 +1412,6 @@ i18n = ["Babel (>=2.7)"] name = "joblib" version = "1.3.1" description = "Lightweight pipelining with Python functions" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1441,7 +1423,6 @@ files = [ name = "jupyter-client" version = "8.3.0" description = "Jupyter protocol implementation and client libraries" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1451,7 +1432,7 @@ files = [ [package.dependencies] importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" python-dateutil = ">=2.8.2" pyzmq = ">=23.0" tornado = ">=6.2" @@ -1465,7 +1446,6 @@ test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pyt name = "jupyter-core" version = "5.3.1" description = "Jupyter core package. A base package on which Jupyter projects rely." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1486,7 +1466,6 @@ test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] name = "lit" version = "16.0.6" description = "A Software Testing Tool" -category = "dev" optional = false python-versions = "*" files = [ @@ -1497,7 +1476,6 @@ files = [ name = "loguru" version = "0.7.0" description = "Python logging made (stupidly) simple" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1512,61 +1490,10 @@ win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} [package.extras] dev = ["Sphinx (==5.3.0)", "colorama (==0.4.5)", "colorama (==0.4.6)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v0.990)", "pre-commit (==3.2.1)", "pytest (==6.1.2)", "pytest (==7.2.1)", "pytest-cov (==2.12.1)", "pytest-cov (==4.0.0)", "pytest-mypy-plugins (==1.10.1)", "pytest-mypy-plugins (==1.9.3)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.2.0)", "tox (==3.27.1)", "tox (==4.4.6)"] -[[package]] -name = "lz4" -version = "4.3.2" -description = "LZ4 Bindings for Python" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "lz4-4.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1c4c100d99eed7c08d4e8852dd11e7d1ec47a3340f49e3a96f8dfbba17ffb300"}, - {file = "lz4-4.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:edd8987d8415b5dad25e797043936d91535017237f72fa456601be1479386c92"}, - {file = "lz4-4.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7c50542b4ddceb74ab4f8b3435327a0861f06257ca501d59067a6a482535a77"}, - {file = "lz4-4.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f5614d8229b33d4a97cb527db2a1ac81308c6e796e7bdb5d1309127289f69d5"}, - {file = "lz4-4.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:8f00a9ba98f6364cadda366ae6469b7b3568c0cced27e16a47ddf6b774169270"}, - {file = "lz4-4.3.2-cp310-cp310-win32.whl", hash = "sha256:b10b77dc2e6b1daa2f11e241141ab8285c42b4ed13a8642495620416279cc5b2"}, - {file = "lz4-4.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:86480f14a188c37cb1416cdabacfb4e42f7a5eab20a737dac9c4b1c227f3b822"}, - {file = "lz4-4.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7c2df117def1589fba1327dceee51c5c2176a2b5a7040b45e84185ce0c08b6a3"}, - {file = "lz4-4.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1f25eb322eeb24068bb7647cae2b0732b71e5c639e4e4026db57618dcd8279f0"}, - {file = "lz4-4.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8df16c9a2377bdc01e01e6de5a6e4bbc66ddf007a6b045688e285d7d9d61d1c9"}, - {file = "lz4-4.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f571eab7fec554d3b1db0d666bdc2ad85c81f4b8cb08906c4c59a8cad75e6e22"}, - {file = "lz4-4.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7211dc8f636ca625abc3d4fb9ab74e5444b92df4f8d58ec83c8868a2b0ff643d"}, - {file = "lz4-4.3.2-cp311-cp311-win32.whl", hash = "sha256:867664d9ca9bdfce840ac96d46cd8838c9ae891e859eb98ce82fcdf0e103a947"}, - {file = "lz4-4.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:a6a46889325fd60b8a6b62ffc61588ec500a1883db32cddee9903edfba0b7584"}, - {file = "lz4-4.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3a85b430138882f82f354135b98c320dafb96fc8fe4656573d95ab05de9eb092"}, - {file = "lz4-4.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65d5c93f8badacfa0456b660285e394e65023ef8071142e0dcbd4762166e1be0"}, - {file = "lz4-4.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b50f096a6a25f3b2edca05aa626ce39979d63c3b160687c8c6d50ac3943d0ba"}, - {file = "lz4-4.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:200d05777d61ba1ff8d29cb51c534a162ea0b4fe6d3c28be3571a0a48ff36080"}, - {file = "lz4-4.3.2-cp37-cp37m-win32.whl", hash = "sha256:edc2fb3463d5d9338ccf13eb512aab61937be50aa70734bcf873f2f493801d3b"}, - {file = "lz4-4.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:83acfacab3a1a7ab9694333bcb7950fbeb0be21660d236fd09c8337a50817897"}, - {file = "lz4-4.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7a9eec24ec7d8c99aab54de91b4a5a149559ed5b3097cf30249b665689b3d402"}, - {file = "lz4-4.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:31d72731c4ac6ebdce57cd9a5cabe0aecba229c4f31ba3e2c64ae52eee3fdb1c"}, - {file = "lz4-4.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83903fe6db92db0be101acedc677aa41a490b561567fe1b3fe68695b2110326c"}, - {file = "lz4-4.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:926b26db87ec8822cf1870efc3d04d06062730ec3279bbbd33ba47a6c0a5c673"}, - {file = "lz4-4.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e05afefc4529e97c08e65ef92432e5f5225c0bb21ad89dee1e06a882f91d7f5e"}, - {file = "lz4-4.3.2-cp38-cp38-win32.whl", hash = "sha256:ad38dc6a7eea6f6b8b642aaa0683253288b0460b70cab3216838747163fb774d"}, - {file = "lz4-4.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:7e2dc1bd88b60fa09b9b37f08553f45dc2b770c52a5996ea52b2b40f25445676"}, - {file = "lz4-4.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:edda4fb109439b7f3f58ed6bede59694bc631c4b69c041112b1b7dc727fffb23"}, - {file = "lz4-4.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ca83a623c449295bafad745dcd399cea4c55b16b13ed8cfea30963b004016c9"}, - {file = "lz4-4.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5ea0e788dc7e2311989b78cae7accf75a580827b4d96bbaf06c7e5a03989bd5"}, - {file = "lz4-4.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a98b61e504fb69f99117b188e60b71e3c94469295571492a6468c1acd63c37ba"}, - 
{file = "lz4-4.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4931ab28a0d1c133104613e74eec1b8bb1f52403faabe4f47f93008785c0b929"}, - {file = "lz4-4.3.2-cp39-cp39-win32.whl", hash = "sha256:ec6755cacf83f0c5588d28abb40a1ac1643f2ff2115481089264c7630236618a"}, - {file = "lz4-4.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:4caedeb19e3ede6c7a178968b800f910db6503cb4cb1e9cc9221157572139b49"}, - {file = "lz4-4.3.2.tar.gz", hash = "sha256:e1431d84a9cfb23e6773e72078ce8e65cad6745816d4cbf9ae67da5ea419acda"}, -] - -[package.extras] -docs = ["sphinx (>=1.6.0)", "sphinx-bootstrap-theme"] -flake8 = ["flake8"] -tests = ["psutil", "pytest (!=3.3.0)", "pytest-cov"] - [[package]] name = "markupsafe" version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1622,11 +1549,30 @@ files = [ {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, ] +[[package]] +name = "marshmallow" +version = "3.19.0" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "marshmallow-3.19.0-py3-none-any.whl", hash = "sha256:93f0958568da045b0021ec6aeb7ac37c81bfcccbb9a0e7ed8559885070b3a19b"}, + {file = "marshmallow-3.19.0.tar.gz", hash = "sha256:90032c0fd650ce94b6ec6dc8dfeb0e3ff50c144586462c389b81a07205bedb78"}, +] + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "mypy (==0.990)", "pre-commit (>=2.4,<3.0)", "pytest", "pytz", "simplejson", "tox"] +docs = ["alabaster (==0.7.12)", "autodocsumm (==0.2.9)", "sphinx (==5.3.0)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"] +lint = ["flake8 (==5.0.4)", "flake8-bugbear (==22.10.25)", "mypy (==0.990)", "pre-commit (>=2.4,<3.0)"] +tests = ["pytest", "pytz", "simplejson"] + [[package]] name = "matplotlib-inline" version = "0.1.6" description = "Inline Matplotlib backend for Jupyter" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1637,11 +1583,26 @@ files = [ [package.dependencies] traitlets = "*" +[[package]] +name = "milvus" +version = "2.2.11" +description = "Embeded Milvus" +optional = false +python-versions = ">=3.6" +files = [ + {file = "milvus-2.2.11-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:64fa0fcbce1cb763d3aac0749cc17e04761e832297eae12ba5c97938f1acd243"}, + {file = "milvus-2.2.11-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c439d4231019e8cb78b13572dcd78a388cb63a5c271a2ab059bb54f019b1eb1c"}, + {file = "milvus-2.2.11-py3-none-manylinux2014_x86_64.whl", hash = "sha256:d124cf7d6f914177ba14fb38c6a4ea305e3b6a8a09a86e7fc80f44270c0f6ede"}, + {file = "milvus-2.2.11-py3-none-win_amd64.whl", hash = "sha256:118569f56584670f8b1b7b4c89c0050b4678884b4719b8659edb1d47f12bd177"}, +] + +[package.extras] +client = ["pymilvus (>=2.2.0,<2.3.0)"] + [[package]] name = "monotonic" version = "1.6" description = "An implementation of time.monotonic() for Python 2 & < 3.3" -category = "dev" optional = false 
python-versions = "*" files = [ @@ -1653,7 +1614,6 @@ files = [ name = "mpmath" version = "1.3.0" description = "Python library for arbitrary-precision floating-point arithmetic" -category = "dev" optional = false python-versions = "*" files = [ @@ -1667,11 +1627,47 @@ docs = ["sphinx"] gmpy = ["gmpy2 (>=2.1.0a4)"] tests = ["pytest (>=4.6)"] +[[package]] +name = "msal" +version = "1.22.0" +description = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect." +optional = false +python-versions = "*" +files = [ + {file = "msal-1.22.0-py2.py3-none-any.whl", hash = "sha256:9120b7eafdf061c92f7b3d744e5f325fca35873445fa8ffebb40b1086a13dd58"}, + {file = "msal-1.22.0.tar.gz", hash = "sha256:8a82f5375642c1625c89058018430294c109440dce42ea667d466c2cab520acd"}, +] + +[package.dependencies] +cryptography = ">=0.6,<43" +PyJWT = {version = ">=1.0.0,<3", extras = ["crypto"]} +requests = ">=2.0.0,<3" + +[package.extras] +broker = ["pymsalruntime (>=0.13.2,<0.14)"] + +[[package]] +name = "msal-extensions" +version = "1.0.0" +description = "Microsoft Authentication Library extensions (MSAL EX) provides a persistence API that can save your data on disk, encrypted on Windows, macOS and Linux. Concurrent data access will be coordinated by a file lock mechanism." 
+optional = false +python-versions = "*" +files = [ + {file = "msal-extensions-1.0.0.tar.gz", hash = "sha256:c676aba56b0cce3783de1b5c5ecfe828db998167875126ca4b47dc6436451354"}, + {file = "msal_extensions-1.0.0-py2.py3-none-any.whl", hash = "sha256:91e3db9620b822d0ed2b4d1850056a0f133cba04455e62f11612e40f5502f2ee"}, +] + +[package.dependencies] +msal = ">=0.4.1,<2.0.0" +portalocker = [ + {version = ">=1.0,<3", markers = "python_version >= \"3.5\" and platform_system != \"Windows\""}, + {version = ">=1.6,<3", markers = "python_version >= \"3.5\" and platform_system == \"Windows\""}, +] + [[package]] name = "multidict" version = "6.0.4" description = "multidict implementation" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1755,7 +1751,6 @@ files = [ name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1767,7 +1762,6 @@ files = [ name = "nest-asyncio" version = "1.5.6" description = "Patch asyncio to allow nested event loops" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1779,7 +1773,6 @@ files = [ name = "networkx" version = "3.1" description = "Python package for creating and manipulating graphs and networks" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1798,7 +1791,6 @@ test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] name = "nltk" version = "3.8.1" description = "Natural Language Toolkit" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1824,7 +1816,6 @@ twitter = ["twython"] name = "nodeenv" version = "1.8.0" description = "Node.js virtual environment builder" -category = "dev" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" files = [ @@ -1839,7 +1830,6 @@ setuptools = "*" name = "numpy" version = "1.24.4" description = "Fundamental 
package for array computing in Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1877,7 +1867,6 @@ files = [ name = "nvidia-cublas-cu11" version = "11.10.3.66" description = "CUBLAS native runtime libraries" -category = "dev" optional = false python-versions = ">=3" files = [ @@ -1893,7 +1882,6 @@ wheel = "*" name = "nvidia-cuda-cupti-cu11" version = "11.7.101" description = "CUDA profiling tools runtime libs." -category = "dev" optional = false python-versions = ">=3" files = [ @@ -1909,7 +1897,6 @@ wheel = "*" name = "nvidia-cuda-nvrtc-cu11" version = "11.7.99" description = "NVRTC native runtime libraries" -category = "dev" optional = false python-versions = ">=3" files = [ @@ -1926,7 +1913,6 @@ wheel = "*" name = "nvidia-cuda-runtime-cu11" version = "11.7.99" description = "CUDA Runtime native Libraries" -category = "dev" optional = false python-versions = ">=3" files = [ @@ -1942,7 +1928,6 @@ wheel = "*" name = "nvidia-cudnn-cu11" version = "8.5.0.96" description = "cuDNN runtime libraries" -category = "dev" optional = false python-versions = ">=3" files = [ @@ -1958,7 +1943,6 @@ wheel = "*" name = "nvidia-cufft-cu11" version = "10.9.0.58" description = "CUFFT native runtime libraries" -category = "dev" optional = false python-versions = ">=3" files = [ @@ -1970,7 +1954,6 @@ files = [ name = "nvidia-curand-cu11" version = "10.2.10.91" description = "CURAND native runtime libraries" -category = "dev" optional = false python-versions = ">=3" files = [ @@ -1986,7 +1969,6 @@ wheel = "*" name = "nvidia-cusolver-cu11" version = "11.4.0.1" description = "CUDA solver native runtime libraries" -category = "dev" optional = false python-versions = ">=3" files = [ @@ -2003,7 +1985,6 @@ wheel = "*" name = "nvidia-cusparse-cu11" version = "11.7.4.91" description = "CUSPARSE native runtime libraries" -category = "dev" optional = false python-versions = ">=3" files = [ @@ -2019,7 +2000,6 @@ wheel = "*" name = "nvidia-nccl-cu11" version = 
"2.14.3" description = "NVIDIA Collective Communication Library (NCCL) Runtime" -category = "dev" optional = false python-versions = ">=3" files = [ @@ -2030,7 +2010,6 @@ files = [ name = "nvidia-nvtx-cu11" version = "11.7.91" description = "NVIDIA Tools Extension" -category = "dev" optional = false python-versions = ">=3" files = [ @@ -2046,7 +2025,6 @@ wheel = "*" name = "onnxruntime" version = "1.15.1" description = "ONNX Runtime is a runtime accelerator for Machine Learning models" -category = "dev" optional = false python-versions = "*" files = [ @@ -2088,7 +2066,6 @@ sympy = "*" name = "openai" version = "0.27.8" description = "Python client library for the OpenAI API" -category = "main" optional = false python-versions = ">=3.7.1" files = [ @@ -2103,7 +2080,7 @@ tqdm = "*" [package.extras] datalib = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] -dev = ["black (>=21.6b0,<22.0)", "pytest (>=6.0.0,<7.0.0)", "pytest-asyncio", "pytest-mock"] +dev = ["black (>=21.6b0,<22.0)", "pytest (==6.*)", "pytest-asyncio", "pytest-mock"] embeddings = ["matplotlib", "numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "plotly", "scikit-learn (>=1.0.2)", "scipy", "tenacity (>=8.0.1)"] wandb = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "wandb"] @@ -2111,7 +2088,6 @@ wandb = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1 name = "overrides" version = "7.3.1" description = "A decorator to automatically detect mismatch when overriding a method." 
-category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2123,7 +2099,6 @@ files = [ name = "packaging" version = "23.1" description = "Core utilities for Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2135,7 +2110,6 @@ files = [ name = "pandas" version = "2.0.3" description = "Powerful data structures for data analysis, time series, and statistics" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2203,7 +2177,6 @@ xml = ["lxml (>=4.6.3)"] name = "parso" version = "0.8.3" description = "A Python Parser" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2219,7 +2192,6 @@ testing = ["docopt", "pytest (<6.0.0)"] name = "pathspec" version = "0.11.1" description = "Utility library for gitignore style pattern matching of file paths." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2231,7 +2203,6 @@ files = [ name = "pexpect" version = "4.8.0" description = "Pexpect allows easy control of interactive console applications." 
-category = "dev" optional = false python-versions = "*" files = [ @@ -2246,7 +2217,6 @@ ptyprocess = ">=0.5" name = "pickleshare" version = "0.7.5" description = "Tiny 'shelve'-like database with concurrency support" -category = "dev" optional = false python-versions = "*" files = [ @@ -2258,7 +2228,6 @@ files = [ name = "pillow" version = "10.0.0" description = "Python Imaging Library (Fork)" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2280,6 +2249,7 @@ files = [ {file = "Pillow-10.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3b08d4cc24f471b2c8ca24ec060abf4bebc6b144cb89cba638c720546b1cf538"}, {file = "Pillow-10.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d737a602fbd82afd892ca746392401b634e278cb65d55c4b7a8f48e9ef8d008d"}, {file = "Pillow-10.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:3a82c40d706d9aa9734289740ce26460a11aeec2d9c79b7af87bb35f0073c12f"}, + {file = "Pillow-10.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:bc2ec7c7b5d66b8ec9ce9f720dbb5fa4bace0f545acd34870eff4a369b44bf37"}, {file = "Pillow-10.0.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:d80cf684b541685fccdd84c485b31ce73fc5c9b5d7523bf1394ce134a60c6883"}, {file = "Pillow-10.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76de421f9c326da8f43d690110f0e79fe3ad1e54be811545d7d91898b4c8493e"}, {file = "Pillow-10.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81ff539a12457809666fef6624684c008e00ff6bf455b4b89fd00a140eecd640"}, @@ -2289,6 +2259,7 @@ files = [ {file = "Pillow-10.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d50b6aec14bc737742ca96e85d6d0a5f9bfbded018264b3b70ff9d8c33485551"}, {file = "Pillow-10.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:00e65f5e822decd501e374b0650146063fbb30a7264b4d2744bdd7b913e0cab5"}, {file = "Pillow-10.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:f31f9fdbfecb042d046f9d91270a0ba28368a723302786c0009ee9b9f1f60199"}, + {file = 
"Pillow-10.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:1ce91b6ec08d866b14413d3f0bbdea7e24dfdc8e59f562bb77bc3fe60b6144ca"}, {file = "Pillow-10.0.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:349930d6e9c685c089284b013478d6f76e3a534e36ddfa912cde493f235372f3"}, {file = "Pillow-10.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3a684105f7c32488f7153905a4e3015a3b6c7182e106fe3c37fbb5ef3e6994c3"}, {file = "Pillow-10.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4f69b3700201b80bb82c3a97d5e9254084f6dd5fb5b16fc1a7b974260f89f43"}, @@ -2326,7 +2297,6 @@ tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "pa name = "pinecone-client" version = "2.2.2" description = "Pinecone client and SDK" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2350,14 +2320,13 @@ grpc = ["googleapis-common-protos (>=1.53.0)", "grpc-gateway-protoc-gen-openapiv [[package]] name = "platformdirs" -version = "3.8.1" +version = "3.9.1" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.8.1-py3-none-any.whl", hash = "sha256:cec7b889196b9144d088e4c57d9ceef7374f6c39694ad1577a0aab50d27ea28c"}, - {file = "platformdirs-3.8.1.tar.gz", hash = "sha256:f87ca4fcff7d2b0f81c6a748a77973d7af0f4d526f98f308477c3c436c74d528"}, + {file = "platformdirs-3.9.1-py3-none-any.whl", hash = "sha256:ad8291ae0ae5072f66c16945166cb11c63394c7a3ad1b1bc9828ca3162da8c2f"}, + {file = "platformdirs-3.9.1.tar.gz", hash = "sha256:1b42b450ad933e981d56e59f1b97495428c9bd60698baab9f3eb3d00d5822421"}, ] [package.extras] @@ -2368,7 +2337,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest- name = "pluggy" version = "1.2.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2380,11 +2348,29 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "portalocker" +version = "2.7.0" +description = "Wraps the portalocker recipe for easy usage" +optional = false +python-versions = ">=3.5" +files = [ + {file = "portalocker-2.7.0-py2.py3-none-any.whl", hash = "sha256:a07c5b4f3985c3cf4798369631fb7011adb498e2a46d8440efc75a8f29a0f983"}, + {file = "portalocker-2.7.0.tar.gz", hash = "sha256:032e81d534a88ec1736d03f780ba073f047a06c478b06e2937486f334e955c51"}, +] + +[package.dependencies] +pywin32 = {version = ">=226", markers = "platform_system == \"Windows\""} + +[package.extras] +docs = ["sphinx (>=1.7.1)"] +redis = ["redis"] +tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)"] + [[package]] name = "posthog" version = "3.0.1" description = "Integrate PostHog into any python application." 
-category = "dev" optional = false python-versions = "*" files = [ @@ -2408,7 +2394,6 @@ test = ["coverage", "flake8", "freezegun (==0.3.15)", "mock (>=2.0.0)", "pylint" name = "pre-commit" version = "3.3.3" description = "A framework for managing and maintaining multi-language pre-commit hooks." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2427,7 +2412,6 @@ virtualenv = ">=20.10.0" name = "prompt-toolkit" version = "3.0.39" description = "Library for building powerful interactive command lines in Python" -category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -2442,7 +2426,6 @@ wcwidth = "*" name = "protobuf" version = "4.23.4" description = "" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2465,7 +2448,6 @@ files = [ name = "psutil" version = "5.9.5" description = "Cross-platform lib for process and system monitoring in Python." -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2492,7 +2474,6 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] name = "psycopg" version = "3.1.9" description = "PostgreSQL database adapter for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2513,11 +2494,73 @@ docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)" pool = ["psycopg-pool"] test = ["anyio (>=3.6.2)", "mypy (>=1.2)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] +[[package]] +name = "psycopg-binary" +version = "3.1.9" +description = "PostgreSQL database adapter for Python -- C optimisation distribution" +optional = false +python-versions = ">=3.7" +files = [ + {file = "psycopg_binary-3.1.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:284038cbe3f5a0f3de417af9b5eaa2a9524a3a06211523cf245111c71b566506"}, + {file = "psycopg_binary-3.1.9-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:d2cea4bb0b19245c83486868d7c66f73238c4caa266b5b3c3d664d10dab2ab56"}, + {file = "psycopg_binary-3.1.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfe5c5c31f59ccb1d1f473466baa93d800138186286e80e251f930e49c80d208"}, + {file = "psycopg_binary-3.1.9-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82704a899d57c29beba5399d41eab5ef5c238b810d7e25e2d1916d2b34c4b1a3"}, + {file = "psycopg_binary-3.1.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eab449e39db1c429cac79b7aa27e6827aad4995f32137e922db7254f43fed7b5"}, + {file = "psycopg_binary-3.1.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87e0c97733b11eeca3d24e56df70f3f9d792b2abd46f48be2fb2348ffc3e7e39"}, + {file = "psycopg_binary-3.1.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:81e34d6df54329424944d5ca91b1cc77df6b8a9130cb5480680d56f53d4e485c"}, + {file = "psycopg_binary-3.1.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e2f463079d99568a343ed0b766150b30627e9ed41de99fd82e945e7e2bec764a"}, + {file = "psycopg_binary-3.1.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f2cbdef6568da21c39dfd45c2074e85eabbd00e1b721832ba94980f01f582dd4"}, + {file = "psycopg_binary-3.1.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:53afb0cc2ebe74651f339e22d05ec082a0f44939715d9138d357852f074fcf55"}, + {file = "psycopg_binary-3.1.9-cp310-cp310-win_amd64.whl", hash = "sha256:09167f106e7685591b4cdf58eff0191fb7435d586f384133a0dd30df646cf409"}, + {file = "psycopg_binary-3.1.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8aaa47c1791fc05c0229ec1003dd49e13238fba9434e1fc3b879632f749c3c4"}, + {file = "psycopg_binary-3.1.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3d91ee0d33ac7b42d0488a9be2516efa2ec00901b81d69566ff34a7a94b66c0b"}, + {file = "psycopg_binary-3.1.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f5e36504373e5bcdc954b1da1c6fe66379007fe1e329790e8fb72b879a01e097"}, + {file = "psycopg_binary-3.1.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c1def6c2d28e257325b3b208cf1966343b498282a0f4d390fda7b7e0577da64"}, + {file = "psycopg_binary-3.1.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:055537a9c20efe9bf17cb72bd879602eda71de6f737ebafa1953e017c6a37fbe"}, + {file = "psycopg_binary-3.1.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b164355d023a91b23dcc4bb3112bc7d6e9b9c938fb5abcb6e54457d2da1f317"}, + {file = "psycopg_binary-3.1.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03b08545ce1c627f4d5e6384eda2946660c4ba6ceb0a09ae47de07419f725669"}, + {file = "psycopg_binary-3.1.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1e31bac3d2d41e6446b20b591f638943328c958f4d1ce13d6f1c5db97c3a8dee"}, + {file = "psycopg_binary-3.1.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:a274c63c8fb9d419509bed2ef72befc1fd04243972e17e7f5afc5725cb13a560"}, + {file = "psycopg_binary-3.1.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:98d9d156b9ada08c271a79662fc5fcc1731b4d7c1f651ef5843d818d35f15ba0"}, + {file = "psycopg_binary-3.1.9-cp311-cp311-win_amd64.whl", hash = "sha256:c3a13aa022853891cadbc7256a9804e5989def760115c82334bddf0d19783b0b"}, + {file = "psycopg_binary-3.1.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1a321ef3579a8de0545ade6ff1edfde0c88b8847d58c5615c03751c76054796"}, + {file = "psycopg_binary-3.1.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5833bda4c14f24c6a8ac08d3c5712acaa4f35aab31f9ccd2265e9e9a7d0151c8"}, + {file = "psycopg_binary-3.1.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a207d5a7f4212443b7452851c9ccd88df9c6d4d58fa2cea2ead4dd9cb328e578"}, + {file = "psycopg_binary-3.1.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:07414daa86662f7657e9fabe49af85a32a975e92e6568337887d9c9ffedc224f"}, + {file = "psycopg_binary-3.1.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17c5d4936c746f5125c6ef9eb43655e27d4d0c9ffe34c3073878b43c3192511d"}, + {file = "psycopg_binary-3.1.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5cdc13c8ec1437240801e43d07e27ff6479ac9dd8583ecf647345bfd2e8390e4"}, + {file = "psycopg_binary-3.1.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3836bdaf030a5648bd5f5b452e4b068b265e28f9199060c5b70dbf4a218cde6e"}, + {file = "psycopg_binary-3.1.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:96725d9691a84a21eb3e81c884a2e043054e33e176801a57a05e9ac38d142c6e"}, + {file = "psycopg_binary-3.1.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dade344aa90bb0b57d1cfc13304ed83ab9a36614b8ddd671381b2de72fe1483d"}, + {file = "psycopg_binary-3.1.9-cp37-cp37m-win_amd64.whl", hash = "sha256:db866cc557d9761036771d666d17fa4176c537af7e6098f42a6bf8f64217935f"}, + {file = "psycopg_binary-3.1.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3b62545cc64dd69ea0ae5ffe18d7c97e03660ab8244aa8c5172668a21c41daa0"}, + {file = "psycopg_binary-3.1.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:058ab0d79be0b229338f0e61fec6f475077518cba63c22c593645a69f01c3e23"}, + {file = "psycopg_binary-3.1.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2340ca2531f69e5ebd9d18987362ba57ed6ab6a271511d8026814a46a2a87b59"}, + {file = "psycopg_binary-3.1.9-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b816ce0e27a2a8786d34b61d3e36e01029245025879d64b88554326b794a4f0"}, + {file = "psycopg_binary-3.1.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7b36fe4314a784fbe45c9fd71c902b9bf57341aff9b97c0cbd22f8409a271e2f"}, + {file = "psycopg_binary-3.1.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b246fed629482b06f938b23e9281c4af592329daa3ec2cd4a6841ccbfdeb4d68"}, 
+ {file = "psycopg_binary-3.1.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:90787ac05b932c0fc678cbf470ccea9c385b8077583f0490136b4569ed3fb652"}, + {file = "psycopg_binary-3.1.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9c114f678e8f4a96530fa79cfd84f65f26358ecfc6cca70cfa2d5e3ae5ef217a"}, + {file = "psycopg_binary-3.1.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3a82e77400d1ef6c5bbcf3e600e8bdfacf1a554512f96c090c43ceca3d1ce3b6"}, + {file = "psycopg_binary-3.1.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c7d990f14a37345ca05a5192cd5ac938c9cbedca9c929872af6ae311158feb0e"}, + {file = "psycopg_binary-3.1.9-cp38-cp38-win_amd64.whl", hash = "sha256:e0ca74fd85718723bb9f08e0c6898e901a0c365aef20b3c3a4ef8709125d6210"}, + {file = "psycopg_binary-3.1.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ce8f4dea5934aa6c4933e559c74bef4beb3413f51fbcf17f306ce890216ac33a"}, + {file = "psycopg_binary-3.1.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f41a9e0de4db194c053bcc7c00c35422a4d19d92a8187e8065b1c560626efe35"}, + {file = "psycopg_binary-3.1.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f94a7985135e084e122b143956c6f589d17aef743ecd0a434a3d3a222631d5a"}, + {file = "psycopg_binary-3.1.9-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bb86d58b90faefdc0bbedf08fdea4cc2afcb1cfa4340f027d458bfd01d8b812"}, + {file = "psycopg_binary-3.1.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c696dc84f9ff155761df15779181d8e4af7746b98908e130add8259912e4bb7"}, + {file = "psycopg_binary-3.1.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4213953da44324850c8f789301cf665f46fb94301ba403301e7af58546c3a428"}, + {file = "psycopg_binary-3.1.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:25e3ce947aaaa1bd9f1920fca76d7281660646304f9ea5bc036b201dd8790655"}, + {file = "psycopg_binary-3.1.9-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:9c75be2a9b986139e3ff6bc0a2852081ac00811040f9b82d3aa539821311122e"}, + {file = "psycopg_binary-3.1.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:63e8d1dbe253657c70dbfa9c59423f4654d82698fc5ed6868b8dc0765abe20b6"}, + {file = "psycopg_binary-3.1.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f4da4ca9b2365fc1d3fc741c3bbd3efccd892ce813444b884c8911a1acf1c932"}, + {file = "psycopg_binary-3.1.9-cp39-cp39-win_amd64.whl", hash = "sha256:c0b8d6bbeff1dba760a208d8bc205a05b745e6cee02b839f969f72cf56a8b80d"}, +] + [[package]] name = "psycopg-pool" version = "3.1.7" description = "Connection Pool for Psycopg" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2532,7 +2575,6 @@ typing-extensions = ">=3.10" name = "ptyprocess" version = "0.7.0" description = "Run a subprocess in a pseudo terminal" -category = "dev" optional = false python-versions = "*" files = [ @@ -2544,7 +2586,6 @@ files = [ name = "pulsar-client" version = "3.2.0" description = "Apache Pulsar Python client library" -category = "dev" optional = false python-versions = "*" files = [ @@ -2592,7 +2633,6 @@ functions = ["apache-bookkeeper-client (>=4.16.1)", "grpcio (>=1.8.2)", "prometh name = "pure-eval" version = "0.2.2" description = "Safely evaluate AST nodes without side effects" -category = "dev" optional = false python-versions = "*" files = [ @@ -2607,7 +2647,6 @@ tests = ["pytest"] name = "pycparser" version = "2.21" description = "C parser in Python" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2617,51 +2656,51 @@ files = [ [[package]] name = "pydantic" -version = "1.9.0" -description = "Data validation and settings management using python 3.6 type hinting" -category = "dev" +version = "1.10.11" +description = "Data validation and settings management using python type hints" optional = false -python-versions = ">=3.6.1" +python-versions = ">=3.7" files = [ - {file = 
"pydantic-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cb23bcc093697cdea2708baae4f9ba0e972960a835af22560f6ae4e7e47d33f5"}, - {file = "pydantic-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1d5278bd9f0eee04a44c712982343103bba63507480bfd2fc2790fa70cd64cf4"}, - {file = "pydantic-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab624700dc145aa809e6f3ec93fb8e7d0f99d9023b713f6a953637429b437d37"}, - {file = "pydantic-1.9.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c8d7da6f1c1049eefb718d43d99ad73100c958a5367d30b9321b092771e96c25"}, - {file = "pydantic-1.9.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3c3b035103bd4e2e4a28da9da7ef2fa47b00ee4a9cf4f1a735214c1bcd05e0f6"}, - {file = "pydantic-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3011b975c973819883842c5ab925a4e4298dffccf7782c55ec3580ed17dc464c"}, - {file = "pydantic-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:086254884d10d3ba16da0588604ffdc5aab3f7f09557b998373e885c690dd398"}, - {file = "pydantic-1.9.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0fe476769acaa7fcddd17cadd172b156b53546ec3614a4d880e5d29ea5fbce65"}, - {file = "pydantic-1.9.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8e9dcf1ac499679aceedac7e7ca6d8641f0193c591a2d090282aaf8e9445a46"}, - {file = "pydantic-1.9.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1e4c28f30e767fd07f2ddc6f74f41f034d1dd6bc526cd59e63a82fe8bb9ef4c"}, - {file = "pydantic-1.9.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:c86229333cabaaa8c51cf971496f10318c4734cf7b641f08af0a6fbf17ca3054"}, - {file = "pydantic-1.9.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:c0727bda6e38144d464daec31dff936a82917f431d9c39c39c60a26567eae3ed"}, - {file = "pydantic-1.9.0-cp36-cp36m-win_amd64.whl", hash = 
"sha256:dee5ef83a76ac31ab0c78c10bd7d5437bfdb6358c95b91f1ba7ff7b76f9996a1"}, - {file = "pydantic-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9c9bdb3af48e242838f9f6e6127de9be7063aad17b32215ccc36a09c5cf1070"}, - {file = "pydantic-1.9.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ee7e3209db1e468341ef41fe263eb655f67f5c5a76c924044314e139a1103a2"}, - {file = "pydantic-1.9.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b6037175234850ffd094ca77bf60fb54b08b5b22bc85865331dd3bda7a02fa1"}, - {file = "pydantic-1.9.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b2571db88c636d862b35090ccf92bf24004393f85c8870a37f42d9f23d13e032"}, - {file = "pydantic-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8b5ac0f1c83d31b324e57a273da59197c83d1bb18171e512908fe5dc7278a1d6"}, - {file = "pydantic-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:bbbc94d0c94dd80b3340fc4f04fd4d701f4b038ebad72c39693c794fd3bc2d9d"}, - {file = "pydantic-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e0896200b6a40197405af18828da49f067c2fa1f821491bc8f5bde241ef3f7d7"}, - {file = "pydantic-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bdfdadb5994b44bd5579cfa7c9b0e1b0e540c952d56f627eb227851cda9db77"}, - {file = "pydantic-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:574936363cd4b9eed8acdd6b80d0143162f2eb654d96cb3a8ee91d3e64bf4cf9"}, - {file = "pydantic-1.9.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c556695b699f648c58373b542534308922c46a1cda06ea47bc9ca45ef5b39ae6"}, - {file = "pydantic-1.9.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f947352c3434e8b937e3aa8f96f47bdfe6d92779e44bb3f41e4c213ba6a32145"}, - {file = "pydantic-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5e48ef4a8b8c066c4a31409d91d7ca372a774d0212da2787c0d32f8045b1e034"}, - {file = 
"pydantic-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:96f240bce182ca7fe045c76bcebfa0b0534a1bf402ed05914a6f1dadff91877f"}, - {file = "pydantic-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:815ddebb2792efd4bba5488bc8fde09c29e8ca3227d27cf1c6990fc830fd292b"}, - {file = "pydantic-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6c5b77947b9e85a54848343928b597b4f74fc364b70926b3c4441ff52620640c"}, - {file = "pydantic-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c68c3bc88dbda2a6805e9a142ce84782d3930f8fdd9655430d8576315ad97ce"}, - {file = "pydantic-1.9.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a79330f8571faf71bf93667d3ee054609816f10a259a109a0738dac983b23c3"}, - {file = "pydantic-1.9.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f5a64b64ddf4c99fe201ac2724daada8595ada0d102ab96d019c1555c2d6441d"}, - {file = "pydantic-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a733965f1a2b4090a5238d40d983dcd78f3ecea221c7af1497b845a9709c1721"}, - {file = "pydantic-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cc6a4cb8a118ffec2ca5fcb47afbacb4f16d0ab8b7350ddea5e8ef7bcc53a16"}, - {file = "pydantic-1.9.0-py3-none-any.whl", hash = "sha256:085ca1de245782e9b46cefcf99deecc67d418737a1fd3f6a4f511344b613a5b3"}, - {file = "pydantic-1.9.0.tar.gz", hash = "sha256:742645059757a56ecd886faf4ed2441b9c0cd406079c2b4bee51bcc3fbcd510a"}, + {file = "pydantic-1.10.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ff44c5e89315b15ff1f7fdaf9853770b810936d6b01a7bcecaa227d2f8fe444f"}, + {file = "pydantic-1.10.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a6c098d4ab5e2d5b3984d3cb2527e2d6099d3de85630c8934efcfdc348a9760e"}, + {file = "pydantic-1.10.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16928fdc9cb273c6af00d9d5045434c39afba5f42325fb990add2c241402d151"}, + {file = 
"pydantic-1.10.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0588788a9a85f3e5e9ebca14211a496409cb3deca5b6971ff37c556d581854e7"}, + {file = "pydantic-1.10.11-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e9baf78b31da2dc3d3f346ef18e58ec5f12f5aaa17ac517e2ffd026a92a87588"}, + {file = "pydantic-1.10.11-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:373c0840f5c2b5b1ccadd9286782852b901055998136287828731868027a724f"}, + {file = "pydantic-1.10.11-cp310-cp310-win_amd64.whl", hash = "sha256:c3339a46bbe6013ef7bdd2844679bfe500347ac5742cd4019a88312aa58a9847"}, + {file = "pydantic-1.10.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:08a6c32e1c3809fbc49debb96bf833164f3438b3696abf0fbeceb417d123e6eb"}, + {file = "pydantic-1.10.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a451ccab49971af043ec4e0d207cbc8cbe53dbf148ef9f19599024076fe9c25b"}, + {file = "pydantic-1.10.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b02d24f7b2b365fed586ed73582c20f353a4c50e4be9ba2c57ab96f8091ddae"}, + {file = "pydantic-1.10.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f34739a89260dfa420aa3cbd069fbcc794b25bbe5c0a214f8fb29e363484b66"}, + {file = "pydantic-1.10.11-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e297897eb4bebde985f72a46a7552a7556a3dd11e7f76acda0c1093e3dbcf216"}, + {file = "pydantic-1.10.11-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d185819a7a059550ecb85d5134e7d40f2565f3dd94cfd870132c5f91a89cf58c"}, + {file = "pydantic-1.10.11-cp311-cp311-win_amd64.whl", hash = "sha256:4400015f15c9b464c9db2d5d951b6a780102cfa5870f2c036d37c23b56f7fc1b"}, + {file = "pydantic-1.10.11-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2417de68290434461a266271fc57274a138510dca19982336639484c73a07af6"}, + {file = "pydantic-1.10.11-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:331c031ba1554b974c98679bd0780d89670d6fd6f53f5d70b10bdc9addee1713"}, + {file = "pydantic-1.10.11-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8268a735a14c308923e8958363e3a3404f6834bb98c11f5ab43251a4e410170c"}, + {file = "pydantic-1.10.11-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:44e51ba599c3ef227e168424e220cd3e544288c57829520dc90ea9cb190c3248"}, + {file = "pydantic-1.10.11-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d7781f1d13b19700b7949c5a639c764a077cbbdd4322ed505b449d3ca8edcb36"}, + {file = "pydantic-1.10.11-cp37-cp37m-win_amd64.whl", hash = "sha256:7522a7666157aa22b812ce14c827574ddccc94f361237ca6ea8bb0d5c38f1629"}, + {file = "pydantic-1.10.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc64eab9b19cd794a380179ac0e6752335e9555d214cfcb755820333c0784cb3"}, + {file = "pydantic-1.10.11-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8dc77064471780262b6a68fe67e013298d130414d5aaf9b562c33987dbd2cf4f"}, + {file = "pydantic-1.10.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe429898f2c9dd209bd0632a606bddc06f8bce081bbd03d1c775a45886e2c1cb"}, + {file = "pydantic-1.10.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:192c608ad002a748e4a0bed2ddbcd98f9b56df50a7c24d9a931a8c5dd053bd3d"}, + {file = "pydantic-1.10.11-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ef55392ec4bb5721f4ded1096241e4b7151ba6d50a50a80a2526c854f42e6a2f"}, + {file = "pydantic-1.10.11-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:41e0bb6efe86281623abbeeb0be64eab740c865388ee934cd3e6a358784aca6e"}, + {file = "pydantic-1.10.11-cp38-cp38-win_amd64.whl", hash = "sha256:265a60da42f9f27e0b1014eab8acd3e53bd0bad5c5b4884e98a55f8f596b2c19"}, + {file = "pydantic-1.10.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:469adf96c8e2c2bbfa655fc7735a2a82f4c543d9fee97bd113a7fb509bf5e622"}, + {file = 
"pydantic-1.10.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e6cbfbd010b14c8a905a7b10f9fe090068d1744d46f9e0c021db28daeb8b6de1"}, + {file = "pydantic-1.10.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abade85268cc92dff86d6effcd917893130f0ff516f3d637f50dadc22ae93999"}, + {file = "pydantic-1.10.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9738b0f2e6c70f44ee0de53f2089d6002b10c33264abee07bdb5c7f03038303"}, + {file = "pydantic-1.10.11-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:787cf23e5a0cde753f2eabac1b2e73ae3844eb873fd1f5bdbff3048d8dbb7604"}, + {file = "pydantic-1.10.11-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:174899023337b9fc685ac8adaa7b047050616136ccd30e9070627c1aaab53a13"}, + {file = "pydantic-1.10.11-cp39-cp39-win_amd64.whl", hash = "sha256:1954f8778489a04b245a1e7b8b22a9d3ea8ef49337285693cf6959e4b757535e"}, + {file = "pydantic-1.10.11-py3-none-any.whl", hash = "sha256:008c5e266c8aada206d0627a011504e14268a62091450210eda7c07fabe6963e"}, + {file = "pydantic-1.10.11.tar.gz", hash = "sha256:f66d479cf7eb331372c470614be6511eae96f1f120344c25f3f9bb59fb1b5528"}, ] [package.dependencies] -typing-extensions = ">=3.7.4.3" +typing-extensions = ">=4.2.0" [package.extras] dotenv = ["python-dotenv (>=0.10.4)"] @@ -2671,7 +2710,6 @@ email = ["email-validator (>=1.0.3)"] name = "pygments" version = "2.15.1" description = "Pygments is a syntax highlighting package written in Python." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2682,11 +2720,59 @@ files = [ [package.extras] plugins = ["importlib-metadata"] +[[package]] +name = "pyjwt" +version = "2.8.0" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, + {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, +] + +[package.dependencies] +cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""} + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + +[[package]] +name = "pymilvus" +version = "2.2.13" +description = "Python Sdk for Milvus" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pymilvus-2.2.13-py3-none-any.whl", hash = "sha256:ac991863bd63e860c1210d096695297175c6ed09f4de762cf42394cb5aecd1f6"}, + {file = "pymilvus-2.2.13.tar.gz", hash = "sha256:72da36cb5f4f84d7a8307202fcaa9a7fc4497d28d2d2235045ba93a430691ef1"}, +] + +[package.dependencies] +environs = "<=9.5.0" +grpcio = ">=1.49.1,<=1.56.0" +numpy = {version = "<1.25.0", markers = "python_version <= \"3.8\""} +pandas = ">=1.2.4" +protobuf = ">=3.20.0" +ujson = ">=2.0.0" + +[[package]] +name = "pypika" +version = "0.48.9" +description = "A SQL query builder API for Python" +optional = false +python-versions = "*" +files = [ + {file = "PyPika-0.48.9.tar.gz", hash = "sha256:838836a61747e7c8380cd1b7ff638694b7a7335345d0f559b04b2cd832ad5378"}, +] + [[package]] name = "pyreadline3" version = "3.4.1" description = "A python 
implementation of GNU readline." -category = "dev" optional = false python-versions = "*" files = [ @@ -2698,7 +2784,6 @@ files = [ name = "pytest" version = "7.4.0" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2721,7 +2806,6 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-asyncio" version = "0.21.0" description = "Pytest support for asyncio" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2740,7 +2824,6 @@ testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -2755,7 +2838,6 @@ six = ">=1.5" name = "python-dotenv" version = "1.0.0" description = "Read key-value pairs from a .env file and set them as environment variables" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2770,7 +2852,6 @@ cli = ["click (>=5.0)"] name = "pytz" version = "2023.3" description = "World timezone definitions, modern and historical" -category = "dev" optional = false python-versions = "*" files = [ @@ -2782,7 +2863,6 @@ files = [ name = "pywin32" version = "306" description = "Python for Window Extensions" -category = "dev" optional = false python-versions = "*" files = [ @@ -2804,59 +2884,57 @@ files = [ [[package]] name = "pyyaml" -version = "6.0" +version = "6.0.1" description = "YAML parser and emitter for Python" -category = "dev" optional = false python-versions = ">=3.6" files = [ - {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, - {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, - 
{file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, - {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, - {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, - {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, - {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, - {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, - {file = 
"PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, - {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, - {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, - {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, - {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, - {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, - {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, - {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, - {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, - {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, - {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, - {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, ] 
[[package]] name = "pyzmq" version = "25.1.0" description = "Python bindings for 0MQ" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2946,7 +3024,6 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} name = "regex" version = "2023.6.3" description = "Alternative regular expression module, to replace re." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3044,7 +3121,6 @@ files = [ name = "requests" version = "2.31.0" description = "Python HTTP for Humans." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3064,36 +3140,34 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "ruff" -version = "0.0.277" +version = "0.0.278" description = "An extremely fast Python linter, written in Rust." -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.0.277-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:3250b24333ef419b7a232080d9724ccc4d2da1dbbe4ce85c4caa2290d83200f8"}, - {file = "ruff-0.0.277-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:3e60605e07482183ba1c1b7237eca827bd6cbd3535fe8a4ede28cbe2a323cb97"}, - {file = "ruff-0.0.277-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7baa97c3d7186e5ed4d5d4f6834d759a27e56cf7d5874b98c507335f0ad5aadb"}, - {file = "ruff-0.0.277-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:74e4b206cb24f2e98a615f87dbe0bde18105217cbcc8eb785bb05a644855ba50"}, - {file = "ruff-0.0.277-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:479864a3ccd8a6a20a37a6e7577bdc2406868ee80b1e65605478ad3b8eb2ba0b"}, - {file = "ruff-0.0.277-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:468bfb0a7567443cec3d03cf408d6f562b52f30c3c29df19927f1e0e13a40cd7"}, - {file = "ruff-0.0.277-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:f32ec416c24542ca2f9cc8c8b65b84560530d338aaf247a4a78e74b99cd476b4"}, - {file = "ruff-0.0.277-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:14a7b2f00f149c5a295f188a643ac25226ff8a4d08f7a62b1d4b0a1dc9f9b85c"}, - {file = "ruff-0.0.277-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9879f59f763cc5628aa01c31ad256a0f4dc61a29355c7315b83c2a5aac932b5"}, - {file = "ruff-0.0.277-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f612e0a14b3d145d90eb6ead990064e22f6f27281d847237560b4e10bf2251f3"}, - {file = "ruff-0.0.277-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:323b674c98078be9aaded5b8b51c0d9c424486566fb6ec18439b496ce79e5998"}, - {file = "ruff-0.0.277-py3-none-musllinux_1_2_i686.whl", hash = "sha256:3a43fbe026ca1a2a8c45aa0d600a0116bec4dfa6f8bf0c3b871ecda51ef2b5dd"}, - {file = "ruff-0.0.277-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:734165ea8feb81b0d53e3bf523adc2413fdb76f1264cde99555161dd5a725522"}, - {file = "ruff-0.0.277-py3-none-win32.whl", hash = "sha256:88d0f2afb2e0c26ac1120e7061ddda2a566196ec4007bd66d558f13b374b9efc"}, - {file = "ruff-0.0.277-py3-none-win_amd64.whl", hash = "sha256:6fe81732f788894a00f6ade1fe69e996cc9e485b7c35b0f53fb00284397284b2"}, - {file = "ruff-0.0.277-py3-none-win_arm64.whl", hash = "sha256:2d4444c60f2e705c14cd802b55cd2b561d25bf4311702c463a002392d3116b22"}, - {file = "ruff-0.0.277.tar.gz", hash = "sha256:2dab13cdedbf3af6d4427c07f47143746b6b95d9e4a254ac369a0edb9280a0d2"}, + {file = "ruff-0.0.278-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:1a90ebd8f2a554db1ee8d12b2f3aa575acbd310a02cd1a9295b3511a4874cf98"}, + {file = "ruff-0.0.278-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:38ca1c0c8c1221fe64c0a66784c91501d09a8ed02a4dbfdc117c0ce32a81eefc"}, + {file = "ruff-0.0.278-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c62a0bde4d20d087cabce2fa8b012d74c2e985da86d00fb3359880469b90e31"}, + {file = 
"ruff-0.0.278-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7545bb037823cd63dca19280f75a523a68bd3e78e003de74609320d6822b5a52"}, + {file = "ruff-0.0.278-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb380d2d6fdb60656a0b5fa78305535db513fc72ce11f4532cc1641204ef380"}, + {file = "ruff-0.0.278-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:d11149c7b186f224f2055e437a030cd83b164a43cc0211314c33ad1553ed9c4c"}, + {file = "ruff-0.0.278-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:666e739fb2685277b879d493848afe6933e3be30d40f41fe0e571ad479d57d77"}, + {file = "ruff-0.0.278-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ec8b0469b54315803aaf1fbf9a37162a3849424cab6182496f972ad56e0ea702"}, + {file = "ruff-0.0.278-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c25b96602695a147d62a572865b753ef56aff1524abab13b9436724df30f9bd7"}, + {file = "ruff-0.0.278-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a48621f5f372d5019662db5b3dbfc5f1450f927683d75f1153fe0ebf20eb9698"}, + {file = "ruff-0.0.278-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1078125123a3c68e92463afacedb7e41b15ccafc09e510c6c755a23087afc8de"}, + {file = "ruff-0.0.278-py3-none-musllinux_1_2_i686.whl", hash = "sha256:3ce0d620e257b4cad16e2f0c103b2f43a07981668a3763380542e8a131d11537"}, + {file = "ruff-0.0.278-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1cae4c07d334eb588f171f1363fa89a8911047eb93184276be11a24dbbc996c7"}, + {file = "ruff-0.0.278-py3-none-win32.whl", hash = "sha256:70d39f5599d8449082ab8ce542fa98e16413145eb411dd1dc16575b44565d52d"}, + {file = "ruff-0.0.278-py3-none-win_amd64.whl", hash = "sha256:e131595ab7f4ce61a1650463bd2fe304b49e7d0deb0dfa664b92817c97cdba5f"}, + {file = "ruff-0.0.278-py3-none-win_arm64.whl", hash = "sha256:737a0cfb6c36aaa92d97a46957dfd5e55329299074ad06ed12663b98e0c6fc82"}, + {file = "ruff-0.0.278.tar.gz", hash = 
"sha256:1a9f1d925204cfba81b18368b7ac943befcfccc3a41e170c91353b674c6b7a66"}, ] [[package]] name = "safetensors" version = "0.3.1" description = "Fast and Safe Tensor serialization" -category = "dev" optional = false python-versions = "*" files = [ @@ -3154,7 +3228,6 @@ torch = ["torch (>=1.10)"] name = "scikit-learn" version = "1.3.0" description = "A set of python modules for machine learning and data mining" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3197,7 +3270,6 @@ tests = ["black (>=23.3.0)", "matplotlib (>=3.1.3)", "mypy (>=1.3)", "numpydoc ( name = "scipy" version = "1.9.3" description = "Fundamental algorithms for scientific computing in Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3236,7 +3308,6 @@ test = ["asv", "gmpy2", "mpmath", "pytest", "pytest-cov", "pytest-xdist", "sciki name = "sentence-transformers" version = "2.2.2" description = "Multilingual text embeddings" -category = "dev" optional = false python-versions = ">=3.6.0" files = [ @@ -3259,7 +3330,6 @@ transformers = ">=4.6.0,<5.0.0" name = "sentencepiece" version = "0.1.99" description = "SentencePiece python wrapper" -category = "dev" optional = false python-versions = "*" files = [ @@ -3314,7 +3384,6 @@ files = [ name = "setuptools" version = "68.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3331,7 +3400,6 @@ testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs ( name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -3343,7 +3411,6 @@ files = [ name = "sniffio" version = "1.3.0" description = "Sniff out which async library your code is running under" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3355,7 +3422,6 @@ files 
= [ name = "stack-data" version = "0.6.2" description = "Extract data from python stack frames and tracebacks for informative displays" -category = "dev" optional = false python-versions = "*" files = [ @@ -3373,14 +3439,13 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] [[package]] name = "starlette" -version = "0.20.4" +version = "0.27.0" description = "The little ASGI library that shines." -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "starlette-0.20.4-py3-none-any.whl", hash = "sha256:c0414d5a56297d37f3db96a84034d61ce29889b9eaccf65eb98a0b39441fcaa3"}, - {file = "starlette-0.20.4.tar.gz", hash = "sha256:42fcf3122f998fefce3e2c5ad7e5edbf0f02cf685d646a83a08d404726af5084"}, + {file = "starlette-0.27.0-py3-none-any.whl", hash = "sha256:918416370e846586541235ccd38a474c08b80443ed31c578a418e2209b3eef91"}, + {file = "starlette-0.27.0.tar.gz", hash = "sha256:6a6b0d042acb8d469a01eba54e9cda6cbd24ac602c4cd016723117d6a7e73b75"}, ] [package.dependencies] @@ -3388,13 +3453,12 @@ anyio = ">=3.4.0,<5" typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} [package.extras] -full = ["itsdangerous", "jinja2", "python-multipart", "pyyaml", "requests"] +full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyaml"] [[package]] name = "sympy" version = "1.12" description = "Computer algebra system (CAS) in Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3407,21 +3471,19 @@ mpmath = ">=0.19" [[package]] name = "threadpoolctl" -version = "3.1.0" +version = "3.2.0" description = "threadpoolctl" -category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "threadpoolctl-3.1.0-py3-none-any.whl", hash = "sha256:8b99adda265feb6773280df41eece7b2e6561b772d21ffd52e372f999024907b"}, - {file = "threadpoolctl-3.1.0.tar.gz", hash = "sha256:a335baacfaa4400ae1f0d8e3a58d6674d2f8828e3716bb2802c44955ad391380"}, + 
{file = "threadpoolctl-3.2.0-py3-none-any.whl", hash = "sha256:2b7818516e423bdaebb97c723f86a7c6b0a83d3f3b0970328d66f4d9104dc032"}, + {file = "threadpoolctl-3.2.0.tar.gz", hash = "sha256:c96a0ba3bdddeaca37dc4cc7344aafad41cdb8c313f74fdfe387a867bba93355"}, ] [[package]] name = "tokenizers" version = "0.13.3" description = "Fast and Customizable Tokenizers" -category = "dev" optional = false python-versions = "*" files = [ @@ -3476,7 +3538,6 @@ testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3488,7 +3549,6 @@ files = [ name = "torch" version = "2.0.0" description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" -category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -3544,7 +3604,6 @@ opt-einsum = ["opt-einsum (>=3.3)"] name = "torchvision" version = "0.15.1" description = "image and video datasets and models for torch deep learning" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3572,7 +3631,7 @@ files = [ [package.dependencies] numpy = "*" -pillow = ">=5.3.0,<8.3.0 || >=8.4.0" +pillow = ">=5.3.0,<8.3.dev0 || >=8.4.dev0" requests = "*" torch = "2.0.0" @@ -3583,7 +3642,6 @@ scipy = ["scipy"] name = "tornado" version = "6.3.2" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
-category = "dev" optional = false python-versions = ">= 3.8" files = [ @@ -3604,7 +3662,6 @@ files = [ name = "tqdm" version = "4.65.0" description = "Fast, Extensible Progress Meter" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3625,7 +3682,6 @@ telegram = ["requests"] name = "traitlets" version = "5.9.0" description = "Traitlets Python configuration system" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3639,14 +3695,13 @@ test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] [[package]] name = "transformers" -version = "4.30.2" +version = "4.31.0" description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" -category = "dev" optional = false -python-versions = ">=3.7.0" +python-versions = ">=3.8.0" files = [ - {file = "transformers-4.30.2-py3-none-any.whl", hash = "sha256:c332e3a3097f9ed89ce556b403251235931c00237b8bc2d7adaa19d226c13f1d"}, - {file = "transformers-4.30.2.tar.gz", hash = "sha256:f4a8aac4e1baffab4033f4a345b0d7dc7957d12a4f1ba969afea08205a513045"}, + {file = "transformers-4.31.0-py3-none-any.whl", hash = "sha256:8487aab0195ce1c2a5ae189305118b9720daddbc7b688edb09ccd79e3b149f6b"}, + {file = "transformers-4.31.0.tar.gz", hash = "sha256:4302fba920a1c24d3a429a29efff6a63eac03f3f3cf55b55927fc795d01cb273"}, ] [package.dependencies] @@ -3662,20 +3717,20 @@ tokenizers = ">=0.11.1,<0.11.3 || >0.11.3,<0.14" tqdm = ">=4.27" [package.extras] -accelerate = ["accelerate (>=0.20.2)"] -agents = ["Pillow", "accelerate (>=0.20.2)", "datasets (!=2.5.0)", "diffusers", "opencv-python", "sentencepiece (>=0.1.91,!=0.1.92)", "torch (>=1.9,!=1.12.0)"] -all = ["Pillow", "accelerate (>=0.20.2)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.6.9)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf (<=3.20.3)", 
"pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision"] +accelerate = ["accelerate (>=0.20.3)"] +agents = ["Pillow (<10.0.0)", "accelerate (>=0.20.3)", "datasets (!=2.5.0)", "diffusers", "opencv-python", "sentencepiece (>=0.1.91,!=0.1.92)", "torch (>=1.9,!=1.12.0)"] +all = ["Pillow (<10.0.0)", "accelerate (>=0.20.3)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.2.8,!=0.3.2,<=0.4.13)", "jaxlib (>=0.1.65,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.14)", "tensorflow-text (<2.14)", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision"] audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] codecarbon = ["codecarbon (==1.2.0)"] -deepspeed = ["accelerate (>=0.20.2)", "deepspeed (>=0.8.3)"] -deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.20.2)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.8.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "optuna", "parameterized", "protobuf (<=3.20.3)", "psutil", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "timeout-decorator"] -dev = ["GitPython (<3.1.19)", "Pillow", "accelerate (>=0.20.2)", "av (==9.2.0)", "beautifulsoup4", "black (>=23.1,<24.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", 
"evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.6.9)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf (<=3.20.3)", "psutil", "pyctcdecode (>=0.4.0)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "ray[tune]", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorflow (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] -dev-tensorflow = ["GitPython (<3.1.19)", "Pillow", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf (<=3.20.3)", "psutil", "pyctcdecode (>=0.4.0)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorflow (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "urllib3 (<2.0.0)"] -dev-torch = ["GitPython (<3.1.19)", "Pillow", "accelerate (>=0.20.2)", "beautifulsoup4", "black 
(>=23.1,<24.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf (<=3.20.3)", "psutil", "pyctcdecode (>=0.4.0)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "ray[tune]", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] -docs = ["Pillow", "accelerate (>=0.20.2)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.6.9)", "hf-doc-builder", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf (<=3.20.3)", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision"] +deepspeed = ["accelerate (>=0.20.3)", "deepspeed (>=0.9.3)"] +deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.20.3)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "optuna", "parameterized", "protobuf", "psutil", "pytest (>=7.2.0)", "pytest-timeout", 
"pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "timeout-decorator"] +dev = ["GitPython (<3.1.19)", "Pillow (<10.0.0)", "accelerate (>=0.20.3)", "av (==9.2.0)", "beautifulsoup4", "black (>=23.1,<24.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.2.8,!=0.3.2,<=0.4.13)", "jaxlib (>=0.1.65,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "ray[tune]", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorflow (>=2.6,<2.14)", "tensorflow-text (<2.14)", "tf2onnx", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] +dev-tensorflow = ["GitPython (<3.1.19)", "Pillow (<10.0.0)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score 
(!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorflow (>=2.6,<2.14)", "tensorflow-text (<2.14)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "urllib3 (<2.0.0)"] +dev-torch = ["GitPython (<3.1.19)", "Pillow (<10.0.0)", "accelerate (>=0.20.3)", "beautifulsoup4", "black (>=23.1,<24.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "ray[tune]", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] +docs = ["Pillow (<10.0.0)", "accelerate (>=0.20.3)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "hf-doc-builder", "jax (>=0.2.8,!=0.3.2,<=0.4.13)", "jaxlib (>=0.1.65,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.14)", "tensorflow-text (<2.14)", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision"] docs-specific = 
["hf-doc-builder"] fairscale = ["fairscale (>0.3)"] -flax = ["flax (>=0.4.1,<=0.6.9)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "optax (>=0.0.8,<=0.1.4)"] +flax = ["flax (>=0.4.1,<=0.7.0)", "jax (>=0.2.8,!=0.3.2,<=0.4.13)", "jaxlib (>=0.1.65,<=0.4.13)", "optax (>=0.0.8,<=0.1.4)"] flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] ftfy = ["ftfy"] integrations = ["optuna", "ray[tune]", "sigopt"] @@ -3689,29 +3744,28 @@ quality = ["GitPython (<3.1.19)", "black (>=23.1,<24.0)", "datasets (!=2.5.0)", ray = ["ray[tune]"] retrieval = ["datasets (!=2.5.0)", "faiss-cpu"] sagemaker = ["sagemaker (>=2.31.0)"] -sentencepiece = ["protobuf (<=3.20.3)", "sentencepiece (>=0.1.91,!=0.1.92)"] -serving = ["fastapi", "pydantic", "starlette", "uvicorn"] +sentencepiece = ["protobuf", "sentencepiece (>=0.1.91,!=0.1.92)"] +serving = ["fastapi", "pydantic (<2)", "starlette", "uvicorn"] sigopt = ["sigopt"] sklearn = ["scikit-learn"] speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -testing = ["GitPython (<3.1.19)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "parameterized", "protobuf (<=3.20.3)", "psutil", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "timeout-decorator"] -tf = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx"] -tf-cpu = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow-cpu (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx"] +testing = ["GitPython (<3.1.19)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "parameterized", "protobuf", "psutil", "pytest 
(>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "timeout-decorator"] +tf = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow (>=2.6,<2.14)", "tensorflow-text (<2.14)", "tf2onnx"] +tf-cpu = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow-cpu (>=2.6,<2.14)", "tensorflow-text (<2.14)", "tf2onnx"] tf-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] timm = ["timm"] tokenizers = ["tokenizers (>=0.11.1,!=0.11.3,<0.14)"] -torch = ["accelerate (>=0.20.2)", "torch (>=1.9,!=1.12.0)"] +torch = ["accelerate (>=0.20.3)", "torch (>=1.9,!=1.12.0)"] torch-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -torch-vision = ["Pillow", "torchvision"] -torchhub = ["filelock", "huggingface-hub (>=0.14.1,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf (<=3.20.3)", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "tqdm (>=4.27)"] +torch-vision = ["Pillow (<10.0.0)", "torchvision"] +torchhub = ["filelock", "huggingface-hub (>=0.14.1,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "tqdm (>=4.27)"] video = ["av (==9.2.0)", "decord (==0.6.0)"] -vision = ["Pillow"] +vision = ["Pillow (<10.0.0)"] [[package]] name = "triton" version = "2.0.0" description = "A language and compiler for custom Deep Learning operations" -category = "dev" optional = false python-versions = "*" files = [ @@ -3749,7 +3803,6 @@ tutorials = ["matplotlib", "pandas", "tabulate"] name = "typing-extensions" version = "4.7.1" description = "Backported and Experimental Type Hints for Python 3.7+" -category = "dev" optional = false python-versions = ">=3.7" files = [ 
@@ -3761,7 +3814,6 @@ files = [ name = "tzdata" version = "2023.3" description = "Provider of IANA time zone data" -category = "dev" optional = false python-versions = ">=2" files = [ @@ -3769,16 +3821,85 @@ files = [ {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, ] +[[package]] +name = "ujson" +version = "5.8.0" +description = "Ultra fast JSON encoder and decoder for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "ujson-5.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f4511560d75b15ecb367eef561554959b9d49b6ec3b8d5634212f9fed74a6df1"}, + {file = "ujson-5.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9399eaa5d1931a0ead49dce3ffacbea63f3177978588b956036bfe53cdf6af75"}, + {file = "ujson-5.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4e7bb7eba0e1963f8b768f9c458ecb193e5bf6977090182e2b4f4408f35ac76"}, + {file = "ujson-5.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40931d7c08c4ce99adc4b409ddb1bbb01635a950e81239c2382cfe24251b127a"}, + {file = "ujson-5.8.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d53039d39de65360e924b511c7ca1a67b0975c34c015dd468fca492b11caa8f7"}, + {file = "ujson-5.8.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bdf04c6af3852161be9613e458a1fb67327910391de8ffedb8332e60800147a2"}, + {file = "ujson-5.8.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a70f776bda2e5072a086c02792c7863ba5833d565189e09fabbd04c8b4c3abba"}, + {file = "ujson-5.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f26629ac531d712f93192c233a74888bc8b8212558bd7d04c349125f10199fcf"}, + {file = "ujson-5.8.0-cp310-cp310-win32.whl", hash = "sha256:7ecc33b107ae88405aebdb8d82c13d6944be2331ebb04399134c03171509371a"}, + {file = "ujson-5.8.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:3b27a8da7a080add559a3b73ec9ebd52e82cc4419f7c6fb7266e62439a055ed0"}, + {file = "ujson-5.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:193349a998cd821483a25f5df30b44e8f495423840ee11b3b28df092ddfd0f7f"}, + {file = "ujson-5.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4ddeabbc78b2aed531f167d1e70387b151900bc856d61e9325fcdfefb2a51ad8"}, + {file = "ujson-5.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ce24909a9c25062e60653073dd6d5e6ec9d6ad7ed6e0069450d5b673c854405"}, + {file = "ujson-5.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27a2a3c7620ebe43641e926a1062bc04e92dbe90d3501687957d71b4bdddaec4"}, + {file = "ujson-5.8.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b852bdf920fe9f84e2a2c210cc45f1b64f763b4f7d01468b33f7791698e455e"}, + {file = "ujson-5.8.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:20768961a6a706170497129960762ded9c89fb1c10db2989c56956b162e2a8a3"}, + {file = "ujson-5.8.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e0147d41e9fb5cd174207c4a2895c5e24813204499fd0839951d4c8784a23bf5"}, + {file = "ujson-5.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e3673053b036fd161ae7a5a33358ccae6793ee89fd499000204676baafd7b3aa"}, + {file = "ujson-5.8.0-cp311-cp311-win32.whl", hash = "sha256:a89cf3cd8bf33a37600431b7024a7ccf499db25f9f0b332947fbc79043aad879"}, + {file = "ujson-5.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:3659deec9ab9eb19e8646932bfe6fe22730757c4addbe9d7d5544e879dc1b721"}, + {file = "ujson-5.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:102bf31c56f59538cccdfec45649780ae00657e86247c07edac434cb14d5388c"}, + {file = "ujson-5.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:299a312c3e85edee1178cb6453645217ba23b4e3186412677fa48e9a7f986de6"}, + {file = "ujson-5.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f2e385a7679b9088d7bc43a64811a7713cc7c33d032d020f757c54e7d41931ae"}, + {file = "ujson-5.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad24ec130855d4430a682c7a60ca0bc158f8253ec81feed4073801f6b6cb681b"}, + {file = "ujson-5.8.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16fde596d5e45bdf0d7de615346a102510ac8c405098e5595625015b0d4b5296"}, + {file = "ujson-5.8.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6d230d870d1ce03df915e694dcfa3f4e8714369cce2346686dbe0bc8e3f135e7"}, + {file = "ujson-5.8.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9571de0c53db5cbc265945e08f093f093af2c5a11e14772c72d8e37fceeedd08"}, + {file = "ujson-5.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7cba16b26efe774c096a5e822e4f27097b7c81ed6fb5264a2b3f5fd8784bab30"}, + {file = "ujson-5.8.0-cp312-cp312-win32.whl", hash = "sha256:48c7d373ff22366eecfa36a52b9b55b0ee5bd44c2b50e16084aa88b9de038916"}, + {file = "ujson-5.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:5ac97b1e182d81cf395ded620528c59f4177eee024b4b39a50cdd7b720fdeec6"}, + {file = "ujson-5.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2a64cc32bb4a436e5813b83f5aab0889927e5ea1788bf99b930fad853c5625cb"}, + {file = "ujson-5.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e54578fa8838ddc722539a752adfce9372474114f8c127bb316db5392d942f8b"}, + {file = "ujson-5.8.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9721cd112b5e4687cb4ade12a7b8af8b048d4991227ae8066d9c4b3a6642a582"}, + {file = "ujson-5.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d9707e5aacf63fb919f6237d6490c4e0244c7f8d3dc2a0f84d7dec5db7cb54c"}, + {file = "ujson-5.8.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0be81bae295f65a6896b0c9030b55a106fb2dec69ef877253a87bc7c9c5308f7"}, + {file = 
"ujson-5.8.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae7f4725c344bf437e9b881019c558416fe84ad9c6b67426416c131ad577df67"}, + {file = "ujson-5.8.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9ab282d67ef3097105552bf151438b551cc4bedb3f24d80fada830f2e132aeb9"}, + {file = "ujson-5.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:94c7bd9880fa33fcf7f6d7f4cc032e2371adee3c5dba2922b918987141d1bf07"}, + {file = "ujson-5.8.0-cp38-cp38-win32.whl", hash = "sha256:bf5737dbcfe0fa0ac8fa599eceafae86b376492c8f1e4b84e3adf765f03fb564"}, + {file = "ujson-5.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:11da6bed916f9bfacf13f4fc6a9594abd62b2bb115acfb17a77b0f03bee4cfd5"}, + {file = "ujson-5.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:69b3104a2603bab510497ceabc186ba40fef38ec731c0ccaa662e01ff94a985c"}, + {file = "ujson-5.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9249fdefeb021e00b46025e77feed89cd91ffe9b3a49415239103fc1d5d9c29a"}, + {file = "ujson-5.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2873d196725a8193f56dde527b322c4bc79ed97cd60f1d087826ac3290cf9207"}, + {file = "ujson-5.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a4dafa9010c366589f55afb0fd67084acd8added1a51251008f9ff2c3e44042"}, + {file = "ujson-5.8.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a42baa647a50fa8bed53d4e242be61023bd37b93577f27f90ffe521ac9dc7a3"}, + {file = "ujson-5.8.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f3554eaadffe416c6f543af442066afa6549edbc34fe6a7719818c3e72ebfe95"}, + {file = "ujson-5.8.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:fb87decf38cc82bcdea1d7511e73629e651bdec3a43ab40985167ab8449b769c"}, + {file = "ujson-5.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:407d60eb942c318482bbfb1e66be093308bb11617d41c613e33b4ce5be789adc"}, + {file = "ujson-5.8.0-cp39-cp39-win32.whl", hash = 
"sha256:0fe1b7edaf560ca6ab023f81cbeaf9946a240876a993b8c5a21a1c539171d903"}, + {file = "ujson-5.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:3f9b63530a5392eb687baff3989d0fb5f45194ae5b1ca8276282fb647f8dcdb3"}, + {file = "ujson-5.8.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:efeddf950fb15a832376c0c01d8d7713479fbeceaed1eaecb2665aa62c305aec"}, + {file = "ujson-5.8.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d8283ac5d03e65f488530c43d6610134309085b71db4f675e9cf5dff96a8282"}, + {file = "ujson-5.8.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb0142f6f10f57598655340a3b2c70ed4646cbe674191da195eb0985a9813b83"}, + {file = "ujson-5.8.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07d459aca895eb17eb463b00441986b021b9312c6c8cc1d06880925c7f51009c"}, + {file = "ujson-5.8.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d524a8c15cfc863705991d70bbec998456a42c405c291d0f84a74ad7f35c5109"}, + {file = "ujson-5.8.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d6f84a7a175c75beecde53a624881ff618e9433045a69fcfb5e154b73cdaa377"}, + {file = "ujson-5.8.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b748797131ac7b29826d1524db1cc366d2722ab7afacc2ce1287cdafccddbf1f"}, + {file = "ujson-5.8.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e72ba76313d48a1a3a42e7dc9d1db32ea93fac782ad8dde6f8b13e35c229130"}, + {file = "ujson-5.8.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f504117a39cb98abba4153bf0b46b4954cc5d62f6351a14660201500ba31fe7f"}, + {file = "ujson-5.8.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a8c91b6f4bf23f274af9002b128d133b735141e867109487d17e344d38b87d94"}, + {file = "ujson-5.8.0.tar.gz", hash = "sha256:78e318def4ade898a461b3d92a79f9441e7e0e4d2ad5419abed4336d702c7425"}, +] + 
[[package]] name = "urllib3" -version = "2.0.3" +version = "2.0.4" description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "urllib3-2.0.3-py3-none-any.whl", hash = "sha256:48e7fafa40319d358848e1bc6809b208340fafe2096f1725d05d67443d0483d1"}, - {file = "urllib3-2.0.3.tar.gz", hash = "sha256:bee28b5e56addb8226c96f7f13ac28cb4c301dd5ea8a6ca179c0b9835e032825"}, + {file = "urllib3-2.0.4-py3-none-any.whl", hash = "sha256:de7df1803967d2c2a98e4b11bb7d6bd9210474c46e8a0401514e3a42a75ebde4"}, + {file = "urllib3-2.0.4.tar.gz", hash = "sha256:8d22f86aae8ef5e410d4f539fde9ce6b2113a001bb4d189e0aed70642d602b11"}, ] [package.extras] @@ -3789,14 +3910,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "uvicorn" -version = "0.22.0" +version = "0.23.1" description = "The lightning-fast ASGI server." -category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "uvicorn-0.22.0-py3-none-any.whl", hash = "sha256:e9434d3bbf05f310e762147f769c9f21235ee118ba2d2bf1155a7196448bd996"}, - {file = "uvicorn-0.22.0.tar.gz", hash = "sha256:79277ae03db57ce7d9aa0567830bbb51d7a612f54d6e1e3e92da3ef24c2c8ed8"}, + {file = "uvicorn-0.23.1-py3-none-any.whl", hash = "sha256:1d55d46b83ee4ce82b4e82f621f2050adb3eb7b5481c13f9af1744951cae2f1f"}, + {file = "uvicorn-0.23.1.tar.gz", hash = "sha256:da9b0c8443b2d7ee9db00a345f1eee6db7317432c9d4400f5049cc8d358383be"}, ] [package.dependencies] @@ -3806,7 +3926,8 @@ h11 = ">=0.8" httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""} python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} -uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and 
platform_python_implementation != \"PyPy\" and extra == \"standard\""} +typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} +uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""} watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} @@ -3817,7 +3938,6 @@ standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", name = "uvloop" version = "0.17.0" description = "Fast implementation of asyncio event loop on top of libuv" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3862,7 +3982,6 @@ test = ["Cython (>=0.29.32,<0.30.0)", "aiohttp", "flake8 (>=3.9.2,<3.10.0)", "my name = "validators" version = "0.20.0" description = "Python Data Validation for Humans™." 
-category = "dev" optional = false python-versions = ">=3.4" files = [ @@ -3877,14 +3996,13 @@ test = ["flake8 (>=2.4.0)", "isort (>=4.2.2)", "pytest (>=2.2.3)"] [[package]] name = "virtualenv" -version = "20.23.1" +version = "20.24.0" description = "Virtual Python Environment builder" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.23.1-py3-none-any.whl", hash = "sha256:34da10f14fea9be20e0fd7f04aba9732f84e593dac291b757ce42e3368a39419"}, - {file = "virtualenv-20.23.1.tar.gz", hash = "sha256:8ff19a38c1021c742148edc4f81cb43d7f8c6816d2ede2ab72af5b84c749ade1"}, + {file = "virtualenv-20.24.0-py3-none-any.whl", hash = "sha256:18d1b37fc75cc2670625702d76849a91ebd383768b4e91382a8d51be3246049e"}, + {file = "virtualenv-20.24.0.tar.gz", hash = "sha256:e2a7cef9da880d693b933db7654367754f14e20650dc60e8ee7385571f8593a3"}, ] [package.dependencies] @@ -3900,7 +4018,6 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess name = "watchfiles" version = "0.19.0" description = "Simple, modern and high performance file watching and code reload in python." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3935,7 +4052,6 @@ anyio = ">=3.0.0" name = "wcwidth" version = "0.2.6" description = "Measures the displayed width of unicode strings in a terminal" -category = "dev" optional = false python-versions = "*" files = [ @@ -3947,7 +4063,6 @@ files = [ name = "weaviate-client" version = "3.22.1" description = "A python native Weaviate client" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3968,7 +4083,6 @@ grpc = ["grpcio", "grpcio-tools"] name = "websockets" version = "11.0.3" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4048,7 +4162,6 @@ files = [ name = "wheel" version = "0.40.0" description = "A built-package format for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4063,7 +4176,6 @@ test = ["pytest (>=6.0.0)"] name = "win32-setctime" version = "1.1.0" description = "A small Python utility to set file creation time on Windows" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -4078,7 +4190,6 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] name = "yarl" version = "1.9.2" description = "Yet another URL library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4164,80 +4275,20 @@ multidict = ">=4.0" [[package]] name = "zipp" -version = "3.16.0" +version = "3.16.2" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.16.0-py3-none-any.whl", hash = "sha256:5dadc3ad0a1f825fe42ce1bce0f2fc5a13af2e6b2d386af5b0ff295bc0a287d3"}, - {file = "zipp-3.16.0.tar.gz", hash = "sha256:1876cb065531855bbe83b6c489dcf69ecc28f1068d8e95959fe8bbc77774c941"}, + {file = "zipp-3.16.2-py3-none-any.whl", hash = "sha256:679e51dd4403591b2d6838a48de3d283f3d188412a9782faadf845f298736ba0"}, + {file 
= "zipp-3.16.2.tar.gz", hash = "sha256:ebc15946aa78bd63458992fc81ec3b6f7b1e92d51c35e6de1c3804e73b799147"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] - -[[package]] -name = "zstandard" -version = "0.21.0" -description = "Zstandard bindings for Python" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "zstandard-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:649a67643257e3b2cff1c0a73130609679a5673bf389564bc6d4b164d822a7ce"}, - {file = "zstandard-0.21.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:144a4fe4be2e747bf9c646deab212666e39048faa4372abb6a250dab0f347a29"}, - {file = "zstandard-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b72060402524ab91e075881f6b6b3f37ab715663313030d0ce983da44960a86f"}, - {file = "zstandard-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8257752b97134477fb4e413529edaa04fc0457361d304c1319573de00ba796b1"}, - {file = "zstandard-0.21.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c053b7c4cbf71cc26808ed67ae955836232f7638444d709bfc302d3e499364fa"}, - {file = "zstandard-0.21.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2769730c13638e08b7a983b32cb67775650024632cd0476bf1ba0e6360f5ac7d"}, - {file = "zstandard-0.21.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7d3bc4de588b987f3934ca79140e226785d7b5e47e31756761e48644a45a6766"}, - {file = "zstandard-0.21.0-cp310-cp310-win32.whl", hash = 
"sha256:67829fdb82e7393ca68e543894cd0581a79243cc4ec74a836c305c70a5943f07"}, - {file = "zstandard-0.21.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6048a287f8d2d6e8bc67f6b42a766c61923641dd4022b7fd3f7439e17ba5a4d"}, - {file = "zstandard-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7f2afab2c727b6a3d466faee6974a7dad0d9991241c498e7317e5ccf53dbc766"}, - {file = "zstandard-0.21.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff0852da2abe86326b20abae912d0367878dd0854b8931897d44cfeb18985472"}, - {file = "zstandard-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d12fa383e315b62630bd407477d750ec96a0f438447d0e6e496ab67b8b451d39"}, - {file = "zstandard-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1b9703fe2e6b6811886c44052647df7c37478af1b4a1a9078585806f42e5b15"}, - {file = "zstandard-0.21.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df28aa5c241f59a7ab524f8ad8bb75d9a23f7ed9d501b0fed6d40ec3064784e8"}, - {file = "zstandard-0.21.0-cp311-cp311-win32.whl", hash = "sha256:0aad6090ac164a9d237d096c8af241b8dcd015524ac6dbec1330092dba151657"}, - {file = "zstandard-0.21.0-cp311-cp311-win_amd64.whl", hash = "sha256:48b6233b5c4cacb7afb0ee6b4f91820afbb6c0e3ae0fa10abbc20000acdf4f11"}, - {file = "zstandard-0.21.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e7d560ce14fd209db6adacce8908244503a009c6c39eee0c10f138996cd66d3e"}, - {file = "zstandard-0.21.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e6e131a4df2eb6f64961cea6f979cdff22d6e0d5516feb0d09492c8fd36f3bc"}, - {file = "zstandard-0.21.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1e0c62a67ff425927898cf43da2cf6b852289ebcc2054514ea9bf121bec10a5"}, - {file = "zstandard-0.21.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:1545fb9cb93e043351d0cb2ee73fa0ab32e61298968667bb924aac166278c3fc"}, - {file = "zstandard-0.21.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe6c821eb6870f81d73bf10e5deed80edcac1e63fbc40610e61f340723fd5f7c"}, - {file = "zstandard-0.21.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ddb086ea3b915e50f6604be93f4f64f168d3fc3cef3585bb9a375d5834392d4f"}, - {file = "zstandard-0.21.0-cp37-cp37m-win32.whl", hash = "sha256:57ac078ad7333c9db7a74804684099c4c77f98971c151cee18d17a12649bc25c"}, - {file = "zstandard-0.21.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1243b01fb7926a5a0417120c57d4c28b25a0200284af0525fddba812d575f605"}, - {file = "zstandard-0.21.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ea68b1ba4f9678ac3d3e370d96442a6332d431e5050223626bdce748692226ea"}, - {file = "zstandard-0.21.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8070c1cdb4587a8aa038638acda3bd97c43c59e1e31705f2766d5576b329e97c"}, - {file = "zstandard-0.21.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4af612c96599b17e4930fe58bffd6514e6c25509d120f4eae6031b7595912f85"}, - {file = "zstandard-0.21.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cff891e37b167bc477f35562cda1248acc115dbafbea4f3af54ec70821090965"}, - {file = "zstandard-0.21.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9fec02ce2b38e8b2e86079ff0b912445495e8ab0b137f9c0505f88ad0d61296"}, - {file = "zstandard-0.21.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0bdbe350691dec3078b187b8304e6a9c4d9db3eb2d50ab5b1d748533e746d099"}, - {file = "zstandard-0.21.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:b69cccd06a4a0a1d9fb3ec9a97600055cf03030ed7048d4bcb88c574f7895773"}, - {file = "zstandard-0.21.0-cp38-cp38-win32.whl", hash = "sha256:9980489f066a391c5572bc7dc471e903fb134e0b0001ea9b1d3eff85af0a6f1b"}, - {file = "zstandard-0.21.0-cp38-cp38-win_amd64.whl", hash = "sha256:0e1e94a9d9e35dc04bf90055e914077c80b1e0c15454cc5419e82529d3e70728"}, - {file = "zstandard-0.21.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d2d61675b2a73edcef5e327e38eb62bdfc89009960f0e3991eae5cc3d54718de"}, - {file = "zstandard-0.21.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:25fbfef672ad798afab12e8fd204d122fca3bc8e2dcb0a2ba73bf0a0ac0f5f07"}, - {file = "zstandard-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62957069a7c2626ae80023998757e27bd28d933b165c487ab6f83ad3337f773d"}, - {file = "zstandard-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14e10ed461e4807471075d4b7a2af51f5234c8f1e2a0c1d37d5ca49aaaad49e8"}, - {file = "zstandard-0.21.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9cff89a036c639a6a9299bf19e16bfb9ac7def9a7634c52c257166db09d950e7"}, - {file = "zstandard-0.21.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52b2b5e3e7670bd25835e0e0730a236f2b0df87672d99d3bf4bf87248aa659fb"}, - {file = "zstandard-0.21.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b1367da0dde8ae5040ef0413fb57b5baeac39d8931c70536d5f013b11d3fc3a5"}, - {file = "zstandard-0.21.0-cp39-cp39-win32.whl", hash = "sha256:db62cbe7a965e68ad2217a056107cc43d41764c66c895be05cf9c8b19578ce9c"}, - {file = "zstandard-0.21.0-cp39-cp39-win_amd64.whl", hash = "sha256:a8d200617d5c876221304b0e3fe43307adde291b4a897e7b0617a61611dfff6a"}, - {file = "zstandard-0.21.0.tar.gz", hash = "sha256:f08e3a10d01a247877e4cb61a82a319ea746c356a3786558bed2481e6c405546"}, -] - 
-[package.dependencies] -cffi = {version = ">=1.11", markers = "platform_python_implementation == \"PyPy\""} - -[package.extras] -cffi = ["cffi (>=1.11)"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "ff9788d0ab06d4466f676af0b7eeb8ec4759ea24d1af6ee5b477fdd36354495a" +content-hash = "f37b3f19b5902d10b7b30a5c5e80cd8283ed68bfc99f38f9295ba02c4a944d77" diff --git a/python/pyproject.toml b/python/pyproject.toml index 43e5ff2b68a4..982348b9adc2 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "semantic-kernel" -version = "0.3.2.dev" +version = "0.3.3.dev" description = "" authors = ["Microsoft "] readme = "pip/README.md" @@ -19,7 +19,7 @@ pre-commit = "3.3.3" black = {version = "23.3.0", allow-prereleases = true} ipykernel = "^6.21.1" pytest = "7.4.0" -ruff = "0.0.277" +ruff = "0.0.278" pytest-asyncio = "0.21.0" [tool.poetry.group.hugging_face.dependencies] @@ -28,8 +28,11 @@ sentence-transformers = "^2.2.2" torch = "2.0.0" [tool.poetry.group.chromadb.dependencies] -chromadb = "^0.3.23" +chromadb = "^0.4.0" +[tool.poetry.group.milvus.dependencies] +pymilvus = "^2.2.11" +milvus = "^2.2.11" [tool.poetry.group.weaviate.dependencies] weaviate-client = "^3.18.0" @@ -40,6 +43,12 @@ pinecone-client = "^2.2.2" [tool.poetry.group.postgres.dependencies] psycopg-pool = "^3.1.7" psycopg = "^3.1.9" +psycopg-binary = "^3.1.9" + +[tool.poetry.group.azure_cognitive_search.dependencies] +azure-search-documents = {version = "11.4.0b6", allow-prereleases = true} +azure-core = "^1.28.0" +azure-identity = "^1.13.0" [tool.isort] profile = "black" diff --git a/python/samples/kernel-syntax-examples/bing_search_skill.py 
b/python/samples/kernel-syntax-examples/bing_search_skill.py new file mode 100644 index 000000000000..f5369c5baa9f --- /dev/null +++ b/python/samples/kernel-syntax-examples/bing_search_skill.py @@ -0,0 +1,60 @@ +import os + +from dotenv import load_dotenv + +import semantic_kernel as sk +from semantic_kernel.connectors.ai.open_ai import OpenAITextCompletion +from semantic_kernel.connectors.search_engine import BingConnector +from semantic_kernel.core_skills import WebSearchEngineSkill + +load_dotenv() + + +async def main(): + kernel = sk.Kernel() + api_key, org_id = sk.openai_settings_from_dot_env() + kernel.add_text_completion_service( + "dv", OpenAITextCompletion("text-davinci-003", api_key, org_id) + ) + connector = BingConnector(api_key=os.getenv("BING_API_KEY")) + web_skill = kernel.import_skill(WebSearchEngineSkill(connector), "WebSearch") + + prompt = "Who is Leonardo DiCaprio's current girlfriend?" + search_async = web_skill["searchAsync"] + result = await search_async.invoke_async(prompt) + print(result) + + """ + Output: + ["Celebrity Celebrity News Everything You Need to Know About Leonardo DiCaprio and Camila Morrone's + Relationship From the beginning of their romance to today, we track their relationship here. By..."] + """ + + prompt = """ + Answer the question using only the data that is provided in the data section. + Do not use any prior knowledge to answer the question. + Data: {{WebSearch.SearchAsync "What is semantic kernel?"}} + Question: What is semantic kernel? + Answer: + """ + + qna = kernel.create_semantic_function(prompt, temperature=0.2) + context = kernel.create_new_context() + context["num_results"] = "10" + context["offset"] = "0" + result = await qna.invoke_async(context=context) + print(result) + + """ + Output: + Semantic Kernel is an open-source SDK that lets you easily combine AI services like OpenAI, + Azure OpenAI, and Hugging Face with conventional programming languages like C# and Python. 
+ By doing so, you can create AI apps that combine the best of both worlds. + Semantic Kernel is at the center of the copilot stack. + """ + + +if __name__ == "__main__": + import asyncio + + asyncio.run(main()) diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_chat_completion.py b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_chat_completion.py index 3d14361a7b19..16b1dc2b339c 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_chat_completion.py +++ b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_chat_completion.py @@ -214,7 +214,7 @@ async def _send_chat_request( request_settings.token_selection_biases if request_settings.token_selection_biases is not None and len(request_settings.token_selection_biases) > 0 - else None + else {} ), ) except Exception as ex: diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_completion.py b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_completion.py index ed1efd2e5506..81e64e51ed61 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_completion.py +++ b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_completion.py @@ -146,7 +146,7 @@ async def _send_completion_request( request_settings.token_selection_biases if request_settings.token_selection_biases is not None and len(request_settings.token_selection_biases) > 0 - else None + else {} ), ) except Exception as ex: diff --git a/python/semantic_kernel/connectors/memory/azure_cognitive_search/__init__.py b/python/semantic_kernel/connectors/memory/azure_cognitive_search/__init__.py new file mode 100644 index 000000000000..8592bc7b7c43 --- /dev/null +++ b/python/semantic_kernel/connectors/memory/azure_cognitive_search/__init__.py @@ -0,0 +1,7 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from semantic_kernel.connectors.memory.azure_cognitive_search.azure_cognitive_search_memory_store import ( + AzureCognitiveSearchMemoryStore, +) + +__all__ = ["AzureCognitiveSearchMemoryStore"] diff --git a/python/semantic_kernel/connectors/memory/azure_cognitive_search/azure_cognitive_search_memory_store.py b/python/semantic_kernel/connectors/memory/azure_cognitive_search/azure_cognitive_search_memory_store.py new file mode 100644 index 000000000000..5ce2787dae70 --- /dev/null +++ b/python/semantic_kernel/connectors/memory/azure_cognitive_search/azure_cognitive_search_memory_store.py @@ -0,0 +1,455 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import uuid +from logging import Logger +from typing import List, Optional, Tuple + +from azure.core.credentials import AzureKeyCredential, TokenCredential +from azure.core.exceptions import ResourceNotFoundError +from azure.search.documents.indexes.aio import SearchIndexClient +from azure.search.documents.indexes.models import ( + SearchIndex, + VectorSearch, + VectorSearchAlgorithmConfiguration, +) +from numpy import ndarray + +from semantic_kernel.connectors.memory.azure_cognitive_search.utils import ( + SEARCH_FIELD_EMBEDDING, + SEARCH_FIELD_ID, + dict_to_memory_record, + encode_id, + get_field_selection, + get_index_schema, + get_search_index_async_client, + memory_record_to_search_record, +) +from semantic_kernel.memory.memory_record import MemoryRecord +from semantic_kernel.memory.memory_store_base import MemoryStoreBase +from semantic_kernel.utils.null_logger import NullLogger + + +class AzureCognitiveSearchMemoryStore(MemoryStoreBase): + _search_index_client: SearchIndexClient = None + _vector_size: int = None + _logger: Logger = None + + def __init__( + self, + vector_size: int, + search_endpoint: Optional[str] = None, + admin_key: Optional[str] = None, + azure_credentials: Optional[AzureKeyCredential] = None, + token_credentials: Optional[TokenCredential] = None, + logger: 
Optional[Logger] = None, + ) -> None: + """Initializes a new instance of the AzureCognitiveSearchMemoryStore class. + + Arguments: + vector_size {int} -- Embedding vector size. + search_endpoint {Optional[str]} -- The endpoint of the Azure Cognitive Search service + (default: {None}). + admin_key {Optional[str]} -- Azure Cognitive Search API key (default: {None}). + azure_credentials {Optional[AzureKeyCredential]} -- Azure Cognitive Search credentials (default: {None}). + token_credentials {Optional[TokenCredential]} -- Azure Cognitive Search token credentials + (default: {None}). + logger {Optional[Logger]} -- The logger to use (default: {None}). + """ + try: + pass + except ImportError: + raise ValueError( + "Error: Unable to import Azure Cognitive Search client python package." + "Please install Azure Cognitive Search client" + ) + + self._logger = logger or NullLogger() + self._vector_size = vector_size + self._search_index_client = get_search_index_async_client( + search_endpoint, admin_key, azure_credentials, token_credentials + ) + + def __del__(self): + try: + loop = asyncio.get_event_loop() + if loop.is_running(): + loop.create_task(self.close()) + else: + loop.run_until_complete(self.close()) + except Exception: + pass + + async def close(self): + if self._search_index_client is not None: + await self._search_index_client.close() + + async def create_collection_async( + self, + collection_name: str, + vector_config: Optional[VectorSearchAlgorithmConfiguration] = None, + ) -> None: + """Creates a new collection if it does not exist. + + Arguments: + collection_name {str} -- The name of the collection to create. + vector_config {VectorSearchAlgorithmConfiguration} -- Optional search algorithm configuration + (default: {None}). + semantic_config {SemanticConfiguration} -- Optional search index configuration (default: {None}). 
+ Returns: + None + """ + + if vector_config: + vector_search = VectorSearch(algorithm_configurations=[vector_config]) + else: + vector_search = VectorSearch( + algorithm_configurations=[ + VectorSearchAlgorithmConfiguration( + name="az-vector-config", + kind="hnsw", + hnsw_parameters={ + # Number of bi-directional links, 4 to 10 + "m": 4, + # Size of nearest neighbors list during indexing, 100 to 1000 + "efConstruction": 400, + # Size of nearest neighbors list during search, 100 to 1000 + "efSearch": 500, + # cosine, dotProduct, euclidean + "metric": "cosine", + }, + ) + ] + ) + + if not self._search_index_client: + raise ValueError("Error: self._search_index_client not set 1.") + + if self._search_index_client is None: + raise ValueError("Error: self._search_index_client not set 2.") + + # Check to see if collection exists + collection_index = None + try: + collection_index = await self._search_index_client.get_index( + collection_name.lower() + ) + except ResourceNotFoundError: + pass + + if not collection_index: + # Create the search index with the semantic settings + index = SearchIndex( + name=collection_name.lower(), + fields=get_index_schema(self._vector_size), + vector_search=vector_search, + ) + + await self._search_index_client.create_index(index) + + async def get_collections_async(self) -> List[str]: + """Gets the list of collections. + + Returns: + List[str] -- The list of collections. + """ + + results_list = [] + items = self._search_index_client.list_index_names() + + async for result in items: + results_list.append(result) + + return results_list + + async def get_collection_async(self, collection_name: str) -> SearchIndex: + """Gets a collection based upon collection name. + + Arguments: + collection_name {str} -- Name of the collection. + + Returns: + SearchIndex -- Collection Information. 
+ """ + + collection_result = await self._search_index_client.get_index( + name=collection_name.lower() + ) + + return collection_result + + async def delete_collection_async(self, collection_name: str) -> None: + """Deletes a collection. + + Arguments: + collection_name {str} -- The name of the collection to delete. + + Returns: + None + """ + await self._search_index_client.delete_index(index=collection_name.lower()) + + async def does_collection_exist_async(self, collection_name: str) -> bool: + """Checks if a collection exists. + + Arguments: + collection_name {str} -- The name of the collection to check. + + Returns: + bool -- True if the collection exists; otherwise, False. + """ + + try: + collection_result = await self._search_index_client.get_index( + name=collection_name.lower() + ) + + if collection_result: + return True + else: + return False + except ResourceNotFoundError: + return False + + async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str: + """Upsert a record. + + Arguments: + collection_name {str} -- The name of the collection to upsert the record into. + record {MemoryRecord} -- The record to upsert. + + Returns: + str -- The unique record id of the record. + """ + + # Look up Search client class to see if exists or create + search_client = self._search_index_client.get_search_client( + collection_name.lower() + ) + + # Note: + # * Document id = user provided value + # * MemoryRecord.id = base64(Document id) + if not record._id: + record._id = str(uuid.uuid4()) + + search_record = memory_record_to_search_record(record) + + result = await search_client.upload_documents(documents=[search_record]) + + # Throw exception if problem + # Clean this up not needed if throwing + if result[0].succeeded: + return record._id + else: + raise ValueError("Error: Unable to upsert record.") + + async def upsert_batch_async( + self, collection_name: str, records: List[MemoryRecord] + ) -> List[str]: + """Upsert a batch of records. 
+ + Arguments: + collection_name {str} -- The name of the collection to upsert the records into. + records {List[MemoryRecord]} -- The records to upsert. + + Returns: + List[str] -- The unique database keys of the records. + """ + + # Initialize search client here + # Look up Search client class to see if exists or create + search_client = self._search_index_client.get_search_client( + collection_name.lower() + ) + + search_records = [] + search_ids = [] + + for record in records: + # Note: + # * Document id = user provided value + # * MemoryRecord.id = base64(Document id) + if not record._id: + record._id = str(uuid.uuid4()) + + search_record = memory_record_to_search_record(record) + search_records.append(search_record) + search_ids.append(record._id) + + result = await search_client.upload_documents(documents=search_records) + + if result[0].succeeded: + return search_ids + else: + return None + + async def get_async( + self, collection_name: str, key: str, with_embedding: bool = False + ) -> MemoryRecord: + """Gets a record. + + Arguments: + collection_name {str} -- The name of the collection to get the record from. + key {str} -- The unique database key of the record. + with_embedding {bool} -- Whether to include the embedding in the result. (default: {False}) + + Returns: + MemoryRecord -- The record. + """ + + # Look up Search client class to see if exists or create + search_client = self._search_index_client.get_search_client( + collection_name.lower() + ) + + try: + search_result = await search_client.get_document( + key=encode_id(key), selected_fields=get_field_selection(with_embedding) + ) + except ResourceNotFoundError: + raise KeyError("Memory record not found") + + # Create Memory record from document + return dict_to_memory_record(search_result, with_embedding) + + async def get_batch_async( + self, collection_name: str, keys: List[str], with_embeddings: bool = False + ) -> List[MemoryRecord]: + """Gets a batch of records. 
+ + Arguments: + collection_name {str} -- The name of the collection to get the records from. + keys {List[str]} -- The unique database keys of the records. + with_embeddings {bool} -- Whether to include the embeddings in the results. (default: {False}) + + Returns: + List[MemoryRecord] -- The records. + """ + + search_results = [] + + for key in keys: + search_result = await self.get_async( + collection_name=collection_name.lower(), + key=key, + with_embedding=with_embeddings, + ) + search_results.append(search_result) + + return search_results + + async def remove_batch_async(self, collection_name: str, keys: List[str]) -> None: + """Removes a batch of records. + + Arguments: + collection_name {str} -- The name of the collection to remove the records from. + keys {List[str]} -- The unique database keys of the records to remove. + + Returns: + None + """ + + for record_id in keys: + await self.remove_async( + collection_name=collection_name.lower(), key=encode_id(record_id) + ) + + async def remove_async(self, collection_name: str, key: str) -> None: + """Removes a record. + + Arguments: + collection_name {str} -- The name of the collection to remove the record from. + key {str} -- The unique database key of the record to remove. + + Returns: + None + """ + + # Look up Search client class to see if exists or create + search_client = self._search_index_client.get_search_client( + collection_name.lower() + ) + docs_to_delete = {SEARCH_FIELD_ID: encode_id(key)} + + await search_client.delete_documents(documents=[docs_to_delete]) + + async def get_nearest_match_async( + self, + collection_name: str, + embedding: ndarray, + min_relevance_score: float = 0.0, + with_embedding: bool = False, + ) -> Tuple[MemoryRecord, float]: + """Gets the nearest match to an embedding using vector configuration parameters. + + Arguments: + collection_name {str} -- The name of the collection to get the nearest match from. 
+ embedding {ndarray} -- The embedding to find the nearest match to. + min_relevance_score {float} -- The minimum relevance score of the match. (default: {0.0}) + with_embedding {bool} -- Whether to include the embedding in the result. (default: {False}) + + Returns: + Tuple[MemoryRecord, float] -- The record and the relevance score. + """ + + memory_records = await self.get_nearest_matches_async( + collection_name=collection_name, + embedding=embedding, + min_relevance_score=min_relevance_score, + with_embeddings=with_embedding, + limit=1, + ) + + if len(memory_records) > 0: + return memory_records[0] + else: + return None + + async def get_nearest_matches_async( + self, + collection_name: str, + embedding: ndarray, + limit: int, + min_relevance_score: float = 0.0, + with_embeddings: bool = False, + ) -> List[Tuple[MemoryRecord, float]]: + """Gets the nearest matches to an embedding using vector configuration. + + Parameters: + collection_name (str) -- The name of the collection to get the nearest matches from. + embedding (ndarray) -- The embedding to find the nearest matches to. + limit {int} -- The maximum number of matches to return. + min_relevance_score {float} -- The minimum relevance score of the matches. (default: {0.0}) + with_embeddings {bool} -- Whether to include the embeddings in the results. (default: {False}) + + Returns: + List[Tuple[MemoryRecord, float]] -- The records and their relevance scores. 
+ """ + + # Look up Search client class to see if exists or create + search_client = self._search_index_client.get_search_client( + collection_name.lower() + ) + + search_results = await search_client.search( + search_text="*", + vector_fields=SEARCH_FIELD_EMBEDDING, + vector=embedding.tolist(), + select=get_field_selection(with_embeddings), + top_k=limit, + ) + + if not search_results or search_results is None: + return [] + + # Convert the results to MemoryRecords + nearest_results = [] + async for search_record in search_results: + if search_record["@search.score"] < min_relevance_score: + continue + + memory_record = dict_to_memory_record(search_record, with_embeddings) + nearest_results.append((memory_record, search_record["@search.score"])) + + return nearest_results diff --git a/python/semantic_kernel/connectors/memory/azure_cognitive_search/utils.py b/python/semantic_kernel/connectors/memory/azure_cognitive_search/utils.py new file mode 100644 index 000000000000..5fe9d88dedba --- /dev/null +++ b/python/semantic_kernel/connectors/memory/azure_cognitive_search/utils.py @@ -0,0 +1,261 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import base64 +import os +from typing import List, Optional + +from azure.core.credentials import AzureKeyCredential, TokenCredential +from azure.search.documents.indexes.models import ( + SearchableField, + SearchField, + SearchFieldDataType, + SimpleField, +) +from dotenv import load_dotenv + +from semantic_kernel.memory.memory_record import MemoryRecord + +SEARCH_FIELD_ID = "Id" +SEARCH_FIELD_TEXT = "Text" +SEARCH_FIELD_EMBEDDING = "Embedding" +SEARCH_FIELD_SRC = "ExternalSourceName" +SEARCH_FIELD_DESC = "Description" +SEARCH_FIELD_METADATA = "AdditionalMetadata" +SEARCH_FIELD_IS_REF = "IsReference" + + +def get_search_index_async_client( + search_endpoint: Optional[str] = None, + admin_key: Optional[str] = None, + azure_credential: Optional[AzureKeyCredential] = None, + token_credential: Optional[TokenCredential] = None, +): + """Return a client for Azure Cognitive Search. + + Arguments: + search_endpoint {str} -- Optional endpoint (default: {None}). + admin_key {str} -- Optional API key (default: {None}). + azure_credential {AzureKeyCredential} -- Optional Azure credentials (default: {None}). + token_credential {TokenCredential} -- Optional Token credential (default: {None}). + """ + + ENV_VAR_ENDPOINT = "AZURE_COGNITIVE_SEARCH_ENDPOINT" + ENV_VAR_API_KEY = "AZURE_COGNITIVE_SEARCH_ADMIN_KEY" + + try: + # Note: there are two client classes available: + # 1. Async: azure.search.documents.indexes.aio.SearchIndexClient + # 2. Sync: azure.search.documents.indexes.SearchIndexClient + from azure.search.documents.indexes.aio import SearchIndexClient + except ImportError: + raise ValueError( + "Error: Unable to import Azure Cognitive Search client python package." 
+ "Please install Azure Cognitive Search client" + ) + + # NOTE: caller-supplied azure_credential/token_credential must not be reset here; + # previously they were reassigned to None, which discarded passed-in credentials. + + # Load environment variables + load_dotenv() + + # Service endpoint + if search_endpoint: + service_endpoint = search_endpoint + elif os.getenv(ENV_VAR_ENDPOINT): + service_endpoint = os.getenv(ENV_VAR_ENDPOINT) + else: + raise ValueError("Error: missing Azure Cognitive Search client endpoint.") + + if service_endpoint is None: + print(service_endpoint) + raise ValueError("Error: Azure Cognitive Search client not set.") + + # Credentials + if admin_key: + azure_credential = AzureKeyCredential(admin_key) + elif azure_credential: + azure_credential = azure_credential + elif token_credential: + token_credential = token_credential + elif os.getenv(ENV_VAR_API_KEY): + azure_credential = AzureKeyCredential(os.getenv(ENV_VAR_API_KEY)) + else: + raise ValueError("Error: missing Azure Cognitive Search client credentials.") + + if azure_credential is None and token_credential is None: + raise ValueError("Error: Azure Cognitive Search credentials not set.") + + sk_headers = {"User-Agent": "Semantic-Kernel"} + + if azure_credential: + return SearchIndexClient( + endpoint=service_endpoint, credential=azure_credential, headers=sk_headers + ) + + if token_credential: + return SearchIndexClient( + endpoint=service_endpoint, credential=token_credential, headers=sk_headers + ) + + raise ValueError("Error: unable to create Azure Cognitive Search client.") + + +def get_index_schema(vector_size: int) -> list: + """Return the schema of search indexes. + + Arguments: + vector_size {int} -- The size of the vectors being stored in collection/index. + + Returns: + list -- The Azure Cognitive Search schema as list type. 
+ """ + + search_fields = [ + SimpleField( + name=SEARCH_FIELD_ID, + type=SearchFieldDataType.String, + searchable=True, + filterable=True, + retrievable=True, + key=True, + ), + SearchableField( + name=SEARCH_FIELD_TEXT, + type=SearchFieldDataType.String, + searchable=True, + filterable=True, + retrievable=True, + ), + SearchField( + name=SEARCH_FIELD_EMBEDDING, + type=SearchFieldDataType.Collection(SearchFieldDataType.Single), + searchable=True, + vector_search_dimensions=vector_size, + vector_search_configuration="az-vector-config", + ), + SimpleField( + name=SEARCH_FIELD_SRC, + type=SearchFieldDataType.String, + searchable=True, + filterable=True, + retrievable=True, + ), + SimpleField( + name=SEARCH_FIELD_DESC, + type=SearchFieldDataType.String, + searchable=True, + filterable=True, + retrievable=True, + ), + SimpleField( + name=SEARCH_FIELD_METADATA, + type=SearchFieldDataType.String, + searchable=True, + filterable=True, + retrievable=True, + ), + SimpleField( + name=SEARCH_FIELD_IS_REF, + type=SearchFieldDataType.Boolean, + searchable=True, + filterable=True, + retrievable=True, + ), + ] + + return search_fields + + +def get_field_selection(with_embeddings: bool) -> List[str]: + """Get the list of fields to search and load. + + Arguments: + with_embedding {bool} -- Whether to include the embedding vector field. + + Returns: + List[str] -- List of fields. + """ + + field_selection = [ + SEARCH_FIELD_ID, + SEARCH_FIELD_TEXT, + SEARCH_FIELD_SRC, + SEARCH_FIELD_DESC, + SEARCH_FIELD_METADATA, + SEARCH_FIELD_IS_REF, + ] + + if with_embeddings: + field_selection.append(SEARCH_FIELD_EMBEDDING) + + return field_selection + + +def dict_to_memory_record(data: dict, with_embeddings: bool) -> MemoryRecord: + """Converts a search result to a MemoryRecord. + + Arguments: + data {dict} -- Azure Cognitive Search result data. + + Returns: + MemoryRecord -- The MemoryRecord from Azure Cognitive Search Data Result. 
+ """ + + sk_result = MemoryRecord( + id=decode_id(data[SEARCH_FIELD_ID]), + key=data[SEARCH_FIELD_ID], + text=data[SEARCH_FIELD_TEXT], + external_source_name=data[SEARCH_FIELD_SRC], + description=data[SEARCH_FIELD_DESC], + additional_metadata=data[SEARCH_FIELD_METADATA], + is_reference=data[SEARCH_FIELD_IS_REF], + embedding=data[SEARCH_FIELD_EMBEDDING] if with_embeddings else None, + timestamp=None, + ) + return sk_result + + +def memory_record_to_search_record(record: MemoryRecord) -> dict: + """Convert a MemoryRecord to a dictionary + + Arguments: + record {MemoryRecord} -- The MemoryRecord from Azure Cognitive Search Data Result. + + Returns: + data {dict} -- Dictionary data. + """ + + return { + SEARCH_FIELD_ID: encode_id(record._id), + SEARCH_FIELD_TEXT: str(record._text), + SEARCH_FIELD_SRC: record._external_source_name or "", + SEARCH_FIELD_DESC: record._description or "", + SEARCH_FIELD_METADATA: record._additional_metadata or "", + SEARCH_FIELD_IS_REF: str(record._is_reference), + SEARCH_FIELD_EMBEDDING: record._embedding.tolist(), + } + + +def encode_id(id: str) -> str: + """Encode a record id to ensure compatibility with Azure Cognitive Search. + + Azure Cognitive Search keys can contain only letters, digits, underscore, dash, + equal sign, recommending to encode values with a URL-safe algorithm. + """ + + id_bytes = id.encode("ascii") + base64_bytes = base64.b64encode(id_bytes) + return base64_bytes.decode("ascii") + + +def decode_id(base64_id: str) -> str: + """Decode a record id to the original value. + + Azure Cognitive Search keys can contain only letters, digits, underscore, dash, + equal sign, recommending to encode values with a URL-safe algorithm. 
+ """ + + base64_bytes = base64_id.encode("ascii") + message_bytes = base64.b64decode(base64_bytes) + return message_bytes.decode("ascii") diff --git a/python/semantic_kernel/connectors/memory/chroma/chroma_memory_store.py b/python/semantic_kernel/connectors/memory/chroma/chroma_memory_store.py index a91cf245a733..430ddaba1d8c 100644 --- a/python/semantic_kernel/connectors/memory/chroma/chroma_memory_store.py +++ b/python/semantic_kernel/connectors/memory/chroma/chroma_memory_store.py @@ -68,7 +68,7 @@ def __init__( self._client_settings = chromadb.config.Settings() if persist_directory is not None: self._client_settings = chromadb.config.Settings( - chroma_db_impl="duckdb+parquet", persist_directory=persist_directory + is_persistent=True, persist_directory=persist_directory ) self._client = chromadb.Client(self._client_settings) self._persist_directory = persist_directory @@ -161,7 +161,7 @@ async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str: record._key = record._id metadata = { "timestamp": record._timestamp or "", - "is_reference": record._is_reference, + "is_reference": str(record._is_reference), "external_source_name": record._external_source_name or "", "description": record._description or "", "additional_metadata": record._additional_metadata or "", diff --git a/python/semantic_kernel/connectors/memory/chroma/utils.py b/python/semantic_kernel/connectors/memory/chroma/utils.py index 04cb509b8451..fa45441569ed 100644 --- a/python/semantic_kernel/connectors/memory/chroma/utils.py +++ b/python/semantic_kernel/connectors/memory/chroma/utils.py @@ -40,7 +40,7 @@ def query_results_to_records( memory_records = [ ( MemoryRecord( - is_reference=metadata["is_reference"], + is_reference=(metadata["is_reference"] == "True"), external_source_name=metadata["external_source_name"], id=metadata["id"], description=metadata["description"], @@ -62,7 +62,7 @@ def query_results_to_records( memory_records = [ ( MemoryRecord( - 
is_reference=metadata["is_reference"], + is_reference=(metadata["is_reference"] == "True"), external_source_name=metadata["external_source_name"], id=metadata["id"], description=metadata["description"], diff --git a/python/semantic_kernel/connectors/memory/milvus/__init__.py b/python/semantic_kernel/connectors/memory/milvus/__init__.py new file mode 100644 index 000000000000..afaeaa606786 --- /dev/null +++ b/python/semantic_kernel/connectors/memory/milvus/__init__.py @@ -0,0 +1,5 @@ +from semantic_kernel.connectors.memory.milvus.milvus_memory_store import ( + MilvusMemoryStore, +) + +__all__ = ["MilvusMemoryStore"] diff --git a/python/semantic_kernel/connectors/memory/milvus/milvus_memory_store.py b/python/semantic_kernel/connectors/memory/milvus/milvus_memory_store.py new file mode 100644 index 000000000000..709fd14cedac --- /dev/null +++ b/python/semantic_kernel/connectors/memory/milvus/milvus_memory_store.py @@ -0,0 +1,470 @@ +# Copyright (c) Microsoft. All rights reserved. + +from logging import Logger +from typing import List, Optional, Tuple + +from numpy import array, expand_dims, ndarray +from pymilvus.milvus_client import milvus_client + +from semantic_kernel.memory.memory_record import MemoryRecord +from semantic_kernel.memory.memory_store_base import MemoryStoreBase +from semantic_kernel.utils.null_logger import NullLogger + + +def memoryrecord_to_milvus_dict(mem: MemoryRecord) -> dict: + """Convert a memoryrecord into a dict. + Args: + mem (MemoryRecord): MemoryRecord to convert. + + Returns: + dict: Dict result. + """ + ret_dict = {} + # Grab all the class vars + for key, val in vars(mem).items(): + if val is not None: + # Remove underscore + ret_dict[key[1:]] = val + return ret_dict + + +def milvus_dict_to_memoryrecord(milvus_dict: dict) -> MemoryRecord: + """Convert Milvus search result dict into MemoryRecord. 
+ + Args: + milvus_dict (dict): Search hit + + Returns: + MemoryRecord + """ + # Embedding needs conversion to numpy array + embedding = milvus_dict.get("embedding", None) + if embedding is not None: + embedding = array(embedding) + return MemoryRecord( + is_reference=milvus_dict.get("is_reference", None), + external_source_name=milvus_dict.get("external_source_name", None), + id=milvus_dict.get("id", None), + description=milvus_dict.get("description", None), + text=milvus_dict.get("text", None), + additional_metadata=milvus_dict.get("additional_metadata", None), + embedding=embedding, + key=milvus_dict.get("key", None), + timestamp=milvus_dict.get("timestamp", None), + ) + + +# Default field values +ID_FIELD = "id" +ID_TYPE = "str" +EMBEDDING_FIELD = "embedding" + + +class MilvusMemoryStore(MemoryStoreBase): + def __init__( + self, + uri: str = "http://localhost:19530", + token: Optional[str] = None, + logger: Optional[Logger] = None, + ) -> None: + """MilvusMemoryStore allows for searching for records using Milvus/Zilliz Cloud. + + For more details on how to get the service started, take a look here: + Milvus: https://milvus.io/docs/get_started.md + Zilliz Cloud: https://docs.zilliz.com/docs/quick-start + + + Args: + uri (str, optional): The uri of the cluster. Defaults to + "http://localhost:19530". + token (Optional[str], optional): The token to connect to the cluster if + authentication is required. Defaults to None. + logger (Optional[Logger], optional): Logger to use. Defaults to None. + """ + self._uri = uri + self._token = token + self._logger = logger or NullLogger() + self._client = milvus_client.MilvusClient( + uri=uri, + token=token, + ) + self._metric_cache = {} + + async def create_collection_async( + self, + collection_name: str, + dimension_num: Optional[int] = 1536, + distance_type: Optional[str] = "IP", + overwrite: bool = False, + consistency: str = "Session", + ) -> None: + """Create a Milvus collection. 
+ + Args: + collection_name (str): The name of the collection. + dimension_num (Optional[int], optional): The size of the embeddings being + stored. Defaults to 1536. + distance_type (Optional[str], optional): Which distance function, at the + moment only "IP" and "L2" are supported. Defaults to "IP". + overwrite (bool, optional): Whether to overwrite any existing collection + with the same name. Defaults to False. + consistency (str, optional): Which consistency level to use: + Strong, Session, Bounded, Eventually. Defaults to "Session". + """ + if collection_name in self._client.list_collections(): + if overwrite: + self._client.drop_collection(collection_name=collection_name) + self._client.create_collection( + collection_name=collection_name, + dimension=dimension_num, + primary_field_name=ID_FIELD, + id_type=ID_TYPE, + auto_id=False, + vector_field_name=EMBEDDING_FIELD, + metric_type=distance_type, + max_length=65_535, + consistency_level=consistency, + ) + else: + self._client.create_collection( + collection_name=collection_name, + dimension=dimension_num, + primary_field_name=ID_FIELD, + id_type=ID_TYPE, + auto_id=False, + vector_field_name=EMBEDDING_FIELD, + metric_type=distance_type, + max_length=65_535, + consistency_level=consistency, + ) + + async def get_collections_async( + self, + ) -> List[str]: + """Return a list of present collections. + + Returns: + List[str]: List of collection names. + """ + return self._client.list_collections() + + async def delete_collection_async( + self, collection_name: str = "", all: bool = False + ) -> None: + """Delete the specified collection. + + If all is True, all collections in the cluster will be removed. + + Args: + collection_name (str, optional): The name of the collection to delete. Defaults to "". + all (bool, optional): Whether to delete all collections. Defaults to False. 
+ """ + cols = self._client.list_collections() + if all: + for x in cols: + self._client.drop_collection(x) + elif collection_name in cols: + self._client.drop_collection(collection_name) + + async def does_collection_exist_async(self, collection_name: str) -> bool: + """Return if the collection exists in the cluster. + + Args: + collection_name (str): The name of the collection. + + Returns: + bool: True if it exists, False otherwise. + """ + return True if collection_name in self._client.list_collections() else False + + async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str: + """Upsert a single MemoryRecord into the collection. + + Args: + collection_name (str): The name of the collection. + record (MemoryRecord): The record to store. + + Returns: + str: The ID of the inserted record. + """ + # Use the batch insert with a total batch + res = await self.upsert_batch_async( + collection_name=collection_name, + records=[record], + batch_size=0, + ) + return res[0] + + async def upsert_batch_async( + self, collection_name: str, records: List[MemoryRecord], batch_size=100 + ) -> List[str]: + """_summary_ + + Args: + collection_name (str): The collection name. + records (List[MemoryRecord]): A list of memory records. + batch_size (int, optional): Batch size of the insert, 0 is a batch + size of total size. Defaults to 100. + + Raises: + Exception: Collection doesnt exist. + e: Failed to upsert a record. + + Returns: + List[str]: A list of inserted ID's. + """ + # Check if the collection exists. + if collection_name not in self._client.list_collections(): + self._logger.debug( + f"Collection {collection_name} does not exist, cannot insert." + ) + raise Exception( + f"Collection {collection_name} does not exist, cannot insert." 
+ ) + # Convert the records to dicts + insert_list = [memoryrecord_to_milvus_dict(record) for record in records] + # The ids to remove + delete_ids = [insert[ID_FIELD] for insert in insert_list] + try: + # First delete then insert to have upsert + self._client.delete(collection_name=collection_name, pks=delete_ids) + return self._client.insert( + collection_name=collection_name, data=insert_list, batch_size=batch_size + ) + except Exception as e: + self._logger.debug(f"Upsert failed due to: {e}") + raise e + + async def get_async( + self, collection_name: str, key: str, with_embedding: bool + ) -> MemoryRecord: + """Get the MemoryRecord corresponding to the key. + + Args: + collection_name (str): The collection to get from. + key (str): The ID to grab. + with_embedding (bool): Whether to include the embedding in the results. + + Returns: + MemoryRecord: The MemoryRecord for the key. + """ + res = await self.get_batch_async( + collection_name=collection_name, keys=[key], with_embeddings=with_embedding + ) + return res[0] + + async def get_batch_async( + self, collection_name: str, keys: List[str], with_embeddings: bool + ) -> List[MemoryRecord]: + """Get the MemoryRecords corresponding to the keys + + Args: + collection_name (str): _description_ + keys (List[str]): _description_ + with_embeddings (bool): _description_ + + Raises: + Exception: _description_ + e: _description_ + + Returns: + List[MemoryRecord]: _description_ + """ + # Check if the collection exists + if collection_name not in self._client.list_collections(): + self._logger.debug( + f"Collection {collection_name} does not exist, cannot get." 
+ ) + raise Exception("Collection {collection_name} does not exist, cannot get.") + try: + gets = self._client.get( + collection_name=collection_name, + ids=keys, + output_fields=["*"] if not with_embeddings else ["*", EMBEDDING_FIELD], + ) + return [milvus_dict_to_memoryrecord(get) for get in gets] + except Exception as e: + self._logger.debug(f"Get failed due to: {e}") + raise e + + async def remove_async(self, collection_name: str, key: str) -> None: + """Remove the specified record based on key. + + Args: + collection_name (str): Collection to remove from. + key (str): The key to remove. + """ + await self.remove_batch_async(collection_name=collection_name, keys=[key]) + + async def remove_batch_async(self, collection_name: str, keys: List[str]) -> None: + """Remove multiple records based on keys. + + Args: + collection_name (str): Collection to remove from + keys (List[str]): The list of keys. + + Raises: + Exception: Collection doesnt exist. + e: Failure to remove key. + """ + if collection_name not in self._client.list_collections(): + self._logger.debug( + f"Collection {collection_name} does not exist, cannot remove." + ) + raise Exception( + f"Collection {collection_name} does not exist, cannot remove." 
+ ) + try: + self._client.delete( + collection_name=collection_name, + pks=keys, + ) + except Exception as e: + self._logger.debug(f"Remove failed due to: {e}") + raise e + + def _search(self, collection_name, data, limit, distance_metric): + """Helper function to search with correct distance metric due to bug""" + # TODO Remove after https://github.com/milvus-io/milvus/issues/25504 + # Simple way to select opposite + distance_pairs = { + "l2": "IP", + "ip": "L2", + } + try: + # Try with passed in metric + results = self._client.search( + collection_name=collection_name, + data=data, + limit=limit, + search_params={"metric_type": distance_metric}, + output_fields=["*"], + )[0] + return results, distance_metric + except Exception as e: + self._logger.debug(f"Search failed with IP, testing L2: {e}") + try: + distance_metric = distance_pairs[distance_metric.lower()] + results = self._client.search( + collection_name=collection_name, + data=data, + limit=limit, + search_params={"metric_type": distance_metric}, + output_fields=["*"], + )[0] + return results, distance_metric + except Exception as e: + self._logger.debug(f"Search failed with L2: {e}") + raise e + + async def get_nearest_matches_async( + self, + collection_name: str, + embedding: ndarray, + limit: int, + min_relevance_score: float = None, + with_embeddings: bool = False, + ) -> List[Tuple[MemoryRecord, float]]: + """Find the nearest `limit` matches for an embedding. + + Args: + collection_name (str): The collection to search. + embedding (ndarray): The embedding to search. + limit (int): The total results to display. + min_relevance_score (float, optional): Minimum distance to include. Defaults to None. + with_embeddings (bool, optional): Whether to include embeddings in result. Defaults to False. + + Raises: + Exception: Missing collection + e: Failure to search + + Returns: + List[Tuple[MemoryRecord, float]]: MemoryRecord and distance tuple. 
+ """ + # Check if collection exists + if collection_name not in self._client.list_collections(): + self._logger.debug( + f"Collection {collection_name} does not exist, cannot search." + ) + raise Exception( + f"Collection {collection_name} does not exist, cannot search." + ) + # Search requests takes a list of requests. + if len(embedding.shape) == 1: + embedding = expand_dims(embedding, axis=0) + + # Search based on the cached metric + results, search_type = self._search( + collection_name=collection_name, + data=embedding, + limit=limit, + distance_metric=self._metric_cache.get(collection_name, None) or "IP", + ) + + # Update cached metric + self._metric_cache[collection_name] = search_type + + cleaned_results = [] + + if with_embeddings: + ids = [] + + # Clean up results, filter and get ids for fetch + for x in results: + if min_relevance_score is not None and x["distance"] < min_relevance_score: + continue + cleaned_results.append(x) + if with_embeddings: + ids.append(x[ID_FIELD]) + + if with_embeddings: + try: + vectors = self._client.get( + collection_name=collection_name, + ids=ids, + output_fields=[EMBEDDING_FIELD], + ) + except Exception as e: + self._logger.debug(f"Get embeddings in search failed due to: {e}.") + raise e + + vectors = {res[ID_FIELD]: res[EMBEDDING_FIELD] for res in vectors} + for res in results: + res["entity"][EMBEDDING_FIELD] = vectors[res[ID_FIELD]] + + results = [ + (milvus_dict_to_memoryrecord(result["entity"]), result["distance"]) + for result in results + ] + + return results + + async def get_nearest_match_async( + self, + collection_name: str, + embedding: ndarray, + min_relevance_score: float = None, + with_embedding: bool = False, + ) -> Tuple[MemoryRecord, float]: + """Find the nearest match for an embedding. + + Args: + collection_name (str): The collection to search. + embedding (ndarray): The embedding to search for. + min_relevance_score (float, optional): T. Defaults to 0.0. 
+ with_embedding (bool, optional): Whether to include embedding in result. Defaults to False. + + Returns: + Tuple[MemoryRecord, float]: A tuple of record and distance. + """ + m = await self.get_nearest_matches_async( + collection_name, + embedding, + 1, + min_relevance_score, + with_embedding, + ) + if len(m) > 0: + return m[0] + else: + return None diff --git a/python/semantic_kernel/connectors/search_engine/__init__.py b/python/semantic_kernel/connectors/search_engine/__init__.py new file mode 100644 index 000000000000..dc09a678650f --- /dev/null +++ b/python/semantic_kernel/connectors/search_engine/__init__.py @@ -0,0 +1,3 @@ +from semantic_kernel.connectors.search_engine.bing_connector import BingConnector + +__all__ = ["BingConnector"] diff --git a/python/semantic_kernel/connectors/search_engine/bing_connector.py b/python/semantic_kernel/connectors/search_engine/bing_connector.py new file mode 100644 index 000000000000..3bc6c8805434 --- /dev/null +++ b/python/semantic_kernel/connectors/search_engine/bing_connector.py @@ -0,0 +1,82 @@ +import urllib +from logging import Logger +from typing import List, Optional + +import aiohttp + +from semantic_kernel.connectors.search_engine.connector import ConnectorBase +from semantic_kernel.utils.null_logger import NullLogger + + +class BingConnector(ConnectorBase): + """ + A search engine connector that uses the Bing Search API to perform a web search + """ + + _api_key: str + + def __init__(self, api_key: str, logger: Optional[Logger] = None) -> None: + self._api_key = api_key + self._logger = logger if logger else NullLogger() + + if not self._api_key: + raise ValueError( + "Bing API key cannot be null. Please set environment variable BING_API_KEY." + ) + + async def search_async( + self, query: str, num_results: str, offset: str + ) -> List[str]: + """ + Returns the search results of the query provided by pinging the Bing web search API. + Returns `num_results` results and ignores the first `offset`. 
+ + :param query: search query + :param num_results: the number of search results to return + :param offset: the number of search results to ignore + :return: list of search results + """ + if not query: + raise ValueError("query cannot be 'None' or empty.") + + if not num_results: + num_results = 1 + if not offset: + offset = 0 + + num_results = int(num_results) + offset = int(offset) + + if num_results <= 0: + raise ValueError("num_results value must be greater than 0.") + if num_results >= 50: + raise ValueError("num_results value must be less than 50.") + + if offset < 0: + raise ValueError("offset must be greater than 0.") + + self._logger.info( + f"Received request for bing web search with \ + params:\nquery: {query}\nnum_results: {num_results}\noffset: {offset}" + ) + + _base_url = "https://api.bing.microsoft.com/v7.0/search" + _request_url = f"{_base_url}?q={urllib.parse.quote_plus(query)}&count={num_results}&offset={offset}" + + self._logger.info(f"Sending GET request to {_request_url}") + + headers = {"Ocp-Apim-Subscription-Key": self._api_key} + + async with aiohttp.ClientSession() as session: + async with session.get( + _request_url, headers=headers, raise_for_status=True + ) as response: + if response.status == 200: + data = await response.json() + pages = data["webPages"]["value"] + self._logger.info(pages) + result = list(map(lambda x: x["snippet"], pages)) + self._logger.info(result) + return result + else: + return [] diff --git a/python/semantic_kernel/connectors/search_engine/connector.py b/python/semantic_kernel/connectors/search_engine/connector.py new file mode 100644 index 000000000000..7b8857b1679b --- /dev/null +++ b/python/semantic_kernel/connectors/search_engine/connector.py @@ -0,0 +1,7 @@ +class ConnectorBase: + """ + Base class for search engine connectors + """ + + def search_async(self, query: str, num_results: str, offset: str) -> str: + pass diff --git a/python/semantic_kernel/core_skills/__init__.py 
b/python/semantic_kernel/core_skills/__init__.py index a7ed165f1164..e9193dddea12 100644 --- a/python/semantic_kernel/core_skills/__init__.py +++ b/python/semantic_kernel/core_skills/__init__.py @@ -9,6 +9,7 @@ from semantic_kernel.core_skills.text_memory_skill import TextMemorySkill from semantic_kernel.core_skills.text_skill import TextSkill from semantic_kernel.core_skills.time_skill import TimeSkill +from semantic_kernel.core_skills.web_search_engine_skill import WebSearchEngineSkill __all__ = [ "TextMemorySkill", @@ -18,4 +19,5 @@ "HttpSkill", "ConversationSummarySkill", "MathSkill", + "WebSearchEngineSkill", ] diff --git a/python/semantic_kernel/core_skills/file_io_skill.py b/python/semantic_kernel/core_skills/file_io_skill.py index ddd45c0c24f1..250af9307864 100644 --- a/python/semantic_kernel/core_skills/file_io_skill.py +++ b/python/semantic_kernel/core_skills/file_io_skill.py @@ -1,3 +1,5 @@ +# Copyright (c) Microsoft. All rights reserved. + import os import aiofiles diff --git a/python/semantic_kernel/core_skills/text_skill.py b/python/semantic_kernel/core_skills/text_skill.py index b80d13ee4736..9e463caaca44 100644 --- a/python/semantic_kernel/core_skills/text_skill.py +++ b/python/semantic_kernel/core_skills/text_skill.py @@ -1,3 +1,5 @@ +# Copyright (c) Microsoft. All rights reserved. + from semantic_kernel.skill_definition import sk_function diff --git a/python/semantic_kernel/core_skills/time_skill.py b/python/semantic_kernel/core_skills/time_skill.py index b903652a127b..227ff5286d95 100644 --- a/python/semantic_kernel/core_skills/time_skill.py +++ b/python/semantic_kernel/core_skills/time_skill.py @@ -1,3 +1,5 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ import datetime from semantic_kernel.skill_definition import sk_function diff --git a/python/semantic_kernel/core_skills/wait_skill.py b/python/semantic_kernel/core_skills/wait_skill.py index a59de9df389b..8f4ca28fbd95 100644 --- a/python/semantic_kernel/core_skills/wait_skill.py +++ b/python/semantic_kernel/core_skills/wait_skill.py @@ -1,3 +1,5 @@ +# Copyright (c) Microsoft. All rights reserved. + import asyncio from semantic_kernel.skill_definition import sk_function diff --git a/python/semantic_kernel/core_skills/web_search_engine_skill.py b/python/semantic_kernel/core_skills/web_search_engine_skill.py new file mode 100644 index 000000000000..8012435648f5 --- /dev/null +++ b/python/semantic_kernel/core_skills/web_search_engine_skill.py @@ -0,0 +1,52 @@ +from semantic_kernel.connectors.search_engine.connector import ConnectorBase +from semantic_kernel.orchestration.sk_context import SKContext +from semantic_kernel.skill_definition import sk_function, sk_function_context_parameter + + +class WebSearchEngineSkill: + """ + Description: A skill that provides web search engine functionality + + Usage: + connector = BingConnector(bing_search_api_key) + kernel.import_skill(WebSearchEngineSkill(connector), skill_name="WebSearch") + + Examples: + {{WebSearch.SearchAsync "What is semantic kernel?"}} + => Returns the first `num_results` number of results for the given search query + and ignores the first `offset` number of results + (num_results and offset are specified in SKContext) + """ + + _connector: "ConnectorBase" + + def __init__(self, connector: "ConnectorBase") -> None: + self._connector = connector + + @sk_function( + description="Performs a web search for a given query", name="searchAsync" + ) + @sk_function_context_parameter( + name="num_results", + description="The number of search results to return", + default_value="1", + ) + @sk_function_context_parameter( + name="offset", + description="The number of search results to skip", + default_value="0", + ) + 
async def search_async(self, query: str, context: SKContext) -> str: + """ + Returns the search results of the query provided. + Returns `num_results` results and ignores the first `offset`. + + :param query: search query + :param context: contains the context of count and offset parameters + :return: stringified list of search results + """ + + _, _num_results = context.variables.get("num_results") + _, _offset = context.variables.get("offset") + result = await self._connector.search_async(query, _num_results, _offset) + return str(result) diff --git a/python/semantic_kernel/kernel.py b/python/semantic_kernel/kernel.py index c00a59525590..236464aa3c79 100644 --- a/python/semantic_kernel/kernel.py +++ b/python/semantic_kernel/kernel.py @@ -5,7 +5,7 @@ import inspect import os from logging import Logger -from typing import Any, Callable, Dict, List, Optional, Type, TypeVar, Union +from typing import Any, Callable, Dict, List, Optional, Type, TypeVar, Union, cast from uuid import uuid4 from semantic_kernel.connectors.ai.ai_exception import AIException @@ -35,6 +35,7 @@ PassThroughWithoutRetry, ) from semantic_kernel.reliability.retry_mechanism_base import RetryMechanismBase +from semantic_kernel.semantic_functions.chat_prompt_template import ChatPromptTemplate from semantic_kernel.semantic_functions.prompt_template import PromptTemplate from semantic_kernel.semantic_functions.prompt_template_config import ( PromptTemplateConfig, @@ -133,6 +134,110 @@ def register_semantic_function( return function + async def run_stream_async( + self, + *functions: Any, + input_context: Optional[SKContext] = None, + input_vars: Optional[ContextVariables] = None, + input_str: Optional[str] = None, + ): + if len(functions) > 1: + pipeline_functions = functions[:-1] + stream_function = functions[-1] + + # run pipeline functions + context = await self.run_async( + *pipeline_functions, + input_context=input_context, + input_vars=input_vars, + input_str=input_str, + ) + + elif len(functions) == 1: + stream_function = functions[0] + 
# if the user passed in a context, prioritize it, but merge with any other inputs + if input_context is not None: + context = input_context + if input_vars is not None: + context._variables = input_vars.merge_or_overwrite( + new_vars=context._variables, overwrite=False + ) + + if input_str is not None: + context._variables = ContextVariables(input_str).merge_or_overwrite( + new_vars=context._variables, overwrite=False + ) + + # if the user did not pass in a context, prioritize an input string, + # and merge that with input context variables + else: + if input_str is not None and input_vars is None: + variables = ContextVariables(input_str) + elif input_str is None and input_vars is not None: + variables = input_vars + elif input_str is not None and input_vars is not None: + variables = ContextVariables(input_str) + variables = variables.merge_or_overwrite( + new_vars=input_vars, overwrite=False + ) + else: + variables = ContextVariables() + context = SKContext( + variables, + self._memory, + self._skill_collection.read_only_skill_collection, + self._log, + ) + else: + raise ValueError("No functions passed to run") + + try: + client: ChatCompletionClientBase | TextCompletionClientBase + client = stream_function._ai_service + + # Get the closure variables from function for finding function_config + closure_vars = stream_function._function.__closure__ + for var in closure_vars: + if isinstance(var.cell_contents, SemanticFunctionConfig): + function_config = var.cell_contents + break + + if function_config.has_chat_prompt: + as_chat_prompt = cast( + ChatPromptTemplate, function_config.prompt_template + ) + + # Similar to non-chat, render prompt (which renders to a + # list of messages) + completion = "" + messages = await as_chat_prompt.render_messages_async(context) + async for steam_message in client.complete_chat_stream_async( + messages, stream_function._chat_request_settings + ): + completion += steam_message + yield steam_message + + # Add the last message from 
the rendered chat prompt + # (which will be the user message) and the response + # from the model (the assistant message) + _, content = messages[-1] + as_chat_prompt.add_user_message(content) + as_chat_prompt.add_assistant_message(completion) + + # Update context + context.variables.update(completion) + + else: + completion = "" + prompt = await function_config.prompt_template.render_async(context) + async for stream_message in client.complete_stream_async( + prompt, stream_function._ai_request_settings + ): + completion += stream_message + yield stream_message + context.variables.update(completion) + + except Exception as e: + # TODO: "critical exceptions" + context.fail(str(e), e) + async def run_async( self, *functions: Any, @@ -153,7 +258,8 @@ async def run_async( new_vars=context._variables, overwrite=False ) - # if the user did not pass in a context, prioritize an input string, and merge that with input context variables + # if the user did not pass in a context, prioritize an input string, + # and merge that with input context variables else: if input_str is not None and input_vars is None: variables = ContextVariables(input_str) diff --git a/python/semantic_kernel/kernel_exception.py b/python/semantic_kernel/kernel_exception.py index 47b4313bcd85..910fce76ac4f 100644 --- a/python/semantic_kernel/kernel_exception.py +++ b/python/semantic_kernel/kernel_exception.py @@ -24,8 +24,10 @@ class ErrorCodes(Enum): ServiceNotFound = 6 # Skill collection not set. SkillCollectionNotSet = 7 + # Represents an error that occurs when invoking a function. + FunctionInvokeError = 8 # Ambiguous implementation. - AmbiguousImplementation = 8 + AmbiguousImplementation = 9 # The error code. 
_error_code: ErrorCodes diff --git a/python/semantic_kernel/planning/plan.py b/python/semantic_kernel/planning/plan.py index 90c0e2f851bd..d20217b4809d 100644 --- a/python/semantic_kernel/planning/plan.py +++ b/python/semantic_kernel/planning/plan.py @@ -1,17 +1,432 @@ -from typing import Any +# Copyright (c) Microsoft. All rights reserved. +import asyncio +import re +import threading +from logging import Logger +from typing import Any, Callable, List, Optional -class Plan: - # The goal that wants to be achieved - goal: str +from semantic_kernel import Kernel +from semantic_kernel.connectors.ai import CompleteRequestSettings +from semantic_kernel.connectors.ai.text_completion_client_base import ( + TextCompletionClientBase, +) +from semantic_kernel.kernel_exception import KernelException +from semantic_kernel.memory.semantic_text_memory_base import SemanticTextMemoryBase +from semantic_kernel.orchestration.context_variables import ContextVariables +from semantic_kernel.orchestration.sk_context import SKContext +from semantic_kernel.orchestration.sk_function_base import SKFunctionBase +from semantic_kernel.skill_definition.function_view import FunctionView +from semantic_kernel.skill_definition.read_only_skill_collection_base import ( + ReadOnlySkillCollectionBase, +) - # The prompt to be used to generate the plan - prompt: str - # The generated plan that consists of a list of steps to complete the goal - generated_plan: Any +class Plan(SKFunctionBase): + _state: ContextVariables + _steps: List["Plan"] + _function: SKFunctionBase + _parameters: ContextVariables + _outputs: List[str] + _has_next_step: bool + _next_step_index: int + _name: str + _skill_name: str + _description: str + _is_semantic: bool + _request_settings: CompleteRequestSettings + DEFAULT_RESULT_KEY = "PLAN.RESULT" - def __init__(self, goal, prompt, plan): - self.goal = goal - self.prompt = prompt - self.generated_plan = plan + @property + def name(self) -> str: + return self._name + + @property + 
def state(self) -> ContextVariables: + return self._state + + @property + def skill_name(self) -> str: + return self._skill_name + + @property + def description(self) -> str: + return self._description + + @property + def function(self) -> Callable[..., Any]: + return self._function + + @property + def parameters(self) -> ContextVariables: + return self._parameters + + @property + def is_semantic(self) -> bool: + return self._is_semantic + + @property + def is_native(self) -> bool: + if self._is_semantic is None: + return None + else: + return not self._is_semantic + + @property + def request_settings(self) -> CompleteRequestSettings: + return self._request_settings + + @property + def has_next_step(self) -> bool: + return self._next_step_index < len(self._steps) + + @property + def next_step_index(self) -> int: + return self._next_step_index + + def __init__( + self, + name: Optional[str] = None, + skill_name: Optional[str] = None, + description: Optional[str] = None, + next_step_index: Optional[int] = None, + state: Optional[ContextVariables] = None, + parameters: Optional[ContextVariables] = None, + outputs: Optional[List[str]] = None, + steps: Optional[List["Plan"]] = None, + function: Optional[SKFunctionBase] = None, + ) -> None: + self._name = "" if name is None else name + self._skill_name = "" if skill_name is None else skill_name + self._description = "" if description is None else description + self._next_step_index = 0 if next_step_index is None else next_step_index + self._state = ContextVariables() if state is None else state + self._parameters = ContextVariables() if parameters is None else parameters + self._outputs = [] if outputs is None else outputs + self._steps = [] if steps is None else steps + self._has_next_step = len(self._steps) > 0 + self._is_semantic = None + self._function = None if function is None else function + self._request_settings = None + + if function is not None: + self.set_function(function) + + async def invoke_async( + self, 
+ input: Optional[str] = None, + context: Optional[SKContext] = None, + settings: Optional[CompleteRequestSettings] = None, + memory: Optional[SemanticTextMemoryBase] = None, + logger: Optional[Logger] = None, + # TODO: cancellation_token: CancellationToken, + ) -> SKContext: + if input is not None: + self._state.update(input) + + if context is None: + context = SKContext( + variables=self._state, + skill_collection=None, + memory=memory, + logger=logger, + ) + + if self._function is not None: + result = await self._function.invoke_async( + context=context, settings=settings + ) + if result.error_occurred: + result.log.error( + msg="Something went wrong in plan step {0}.{1}:'{2}'".format( + self._skill_name, self._name, context.last_error_description + ) + ) + return result + context.variables.update(result.result) + else: + # loop through steps until completion + while self.has_next_step: + function_context = context + self.add_variables_to_context(self._state, function_context) + await self.invoke_next_step(function_context) + self.update_context_with_outputs(context) + + return context + + def invoke( + self, + input: Optional[str] = None, + context: Optional[SKContext] = None, + settings: Optional[CompleteRequestSettings] = None, + memory: Optional[SemanticTextMemoryBase] = None, + logger: Optional[Logger] = None, + ) -> SKContext: + if input is not None: + self._state.update(input) + + if context is None: + context = SKContext( + variables=self._state, + skill_collection=None, + memory=memory, + logger=logger, + ) + + if self._function is not None: + result = self._function.invoke(context=context, settings=settings) + if result.error_occurred: + result.log.error( + result.last_exception, + "Something went wrong in plan step {0}.{1}:'{2}'".format( + self.skill_name, self.name, context.last_error_description + ), + ) + return result + context.variables.update(result.result) + else: + # loop through steps until completion + while self.has_next_step: + # Check if 
there is an event loop + try: + loop = asyncio.get_running_loop() + except RuntimeError: + loop = None + function_context = context + self.add_variables_to_context(self._state, function_context) + + # Handle "asyncio.run() cannot be called from a running event loop" + if loop and loop.is_running(): + self._runThread(self.invoke_next_step(function_context)) + else: + asyncio.run(self.invoke_next_step(function_context)) + self.update_context_with_outputs(context) + return context + + def set_ai_configuration( + self, + settings: CompleteRequestSettings, + ) -> SKFunctionBase: + if self._function is not None: + self._function.set_ai_configuration(settings) + + def set_ai_service( + self, service: Callable[[], TextCompletionClientBase] + ) -> SKFunctionBase: + if self._function is not None: + self._function.set_ai_service(service) + + def set_default_skill_collection( + self, + skills: ReadOnlySkillCollectionBase, + ) -> SKFunctionBase: + if self._function is not None: + self._function.set_default_skill_collection(skills) + + def describe(self) -> FunctionView: + return self._function.describe() + + def set_available_functions(self, plan: "Plan", context: SKContext) -> "Plan": + if len(plan.steps) == 0: + if context.skills is None: + raise KernelException( + KernelException.ErrorCodes.SkillCollectionNotSet, + "Skill collection not found in the context", + ) + try: + skillFunction = context.skills.get_function(plan.skill_name, plan.name) + plan.set_function(skillFunction) + except Exception: + pass + else: + for step in plan.steps: + step = self.set_available_functions(step, context) + + return plan + + def add_steps(self, steps: Optional[List[SKFunctionBase]]) -> None: + for step in steps: + if type(step) is Plan: + self._steps.append(step) + else: + new_step = Plan( + name=step.name, + skill_name=step.skill_name, + description=step.description, + next_step_index=0, + state=ContextVariables(), + parameters=ContextVariables(), + outputs=[], + steps=[], + ) + 
new_step.set_function(step) + self._steps.append(new_step) + + def set_function(self, function: SKFunctionBase) -> None: + self._function = function + self._name = function.name + self._skill_name = function.skill_name + self._description = function.description + self._is_semantic = function.is_semantic + self._request_settings = function.request_settings + + async def run_next_step_async( + self, + kernel: Kernel, + variables: ContextVariables, + ) -> "Plan": + context = kernel.create_new_context(variables) + return await self.invoke_next_step(context) + + async def invoke_next_step(self, context: SKContext) -> "Plan": + if self.has_next_step: + step = self._steps[self._next_step_index] + + # merge the state with the current context variables for step execution + variables = self.get_next_step_variables(context.variables, step) + + # Invoke the step + func_context = SKContext( + variables=variables, + memory=context._memory, + skill_collection=context.skills, + logger=context.log, + ) + result = await step.invoke_async(context=func_context) + result_value = result.result + + if result.error_occurred: + raise KernelException( + KernelException.ErrorCodes.FunctionInvokeError, + "Error occurred while running plan step: " + + result.last_error_description, + result.last_exception, + ) + + # Update state with result + self.state.update(result_value) + + # Update plan result in state with matching outputs (if any) + if set(self._outputs).intersection(set(step._outputs)): + current_plan_result = "" + if Plan.DEFAULT_RESULT_KEY in self._state._variables: + current_plan_result = self._state[Plan.DEFAULT_RESULT_KEY] + self._state.set( + Plan.DEFAULT_RESULT_KEY, current_plan_result.strip() + result_value + ) + + # Update state with outputs (if any) + for output in step._outputs: + if output in result.variables._variables: + self._state.set(output, result.variables[output]) + else: + self._state.set(output, result_value) + + # Increment the step + self._next_step_index += 1 + 
+ + return self + + def add_variables_to_context( + self, variables: ContextVariables, context: SKContext + ) -> None: + for key in variables._variables: + if not context.variables.contains_key(key): + context.variables.set(key, variables[key]) + + def update_context_with_outputs(self, context: SKContext) -> None: + result_string = "" + if Plan.DEFAULT_RESULT_KEY in self._state._variables: + result_string = self._state[Plan.DEFAULT_RESULT_KEY] + else: + result_string = str(self._state) + + context.variables.update(result_string) + + for item in self._steps[self._next_step_index - 1]._outputs: + if item in self._state: + context.variables.set(item, self._state[item]) + else: + context.variables.set(item, result_string) + + return context + + def get_next_step_variables( + self, variables: ContextVariables, step: "Plan" + ) -> ContextVariables: + # Priority for Input + # - Parameters (expand from variables if needed) + # - SKContext.Variables + # - Plan.State + # - Empty if sending to another plan + # - Plan.Description + input_string = "" + if step._parameters["input"] is not None: + input_string = self.expand_from_variables( + variables, step._parameters["input"] + ) + elif variables["input"] is not None: + input_string = variables["input"] + elif self._state["input"] is not None: + input_string = self._state["input"] + elif len(step._steps) > 0: + input_string = "" + elif self._description is not None: + input_string = self._description + + step_variables = ContextVariables(input_string) + + # Priority for remaining stepVariables is: + # - Function Parameters (pull from variables or state by a key value) + # - Step Parameters (pull from variables or state by a key value) + function_params = step.describe() + for param in function_params._parameters: + if param.name.lower() == "input": + continue + if step_variables.contains_key(param.name): + step_variables.set(param.name, variables[param.name]) + elif ( + self._state.contains_key(param.name) + and 
self._state[param.name] is not None + ) + step_variables.set(param.name, self._state[param.name]) + + for param_var in step.parameters._variables: + if step_variables.contains_key(param_var): + continue + + expanded_value = self.expand_from_variables(variables, param_var) + if expanded_value.lower() == param_var.lower(): + step_variables.set(param_var, expanded_value) + elif variables.contains_key(param_var): + step_variables.set(param_var, variables[param_var]) + elif self._state.contains_key(param_var): + step_variables.set(param_var, self._state[param_var]) + else: + step_variables.set(param_var, expanded_value) + + return step_variables + + def expand_from_variables( + self, variables: ContextVariables, input_string: str + ) -> str: + result = input_string + variables_regex = r"\$(?P<var>\w+)" + matches = list(re.finditer(variables_regex, input_string)) + ordered_matches = sorted( + matches, key=lambda m: len(m.group("var")), reverse=True + ) + + for match in ordered_matches: + var_name = match.group("var") + if variables.contains_key(var_name): + result = result.replace(f"${var_name}", variables[var_name]) + + return result + + def _runThread(self, code: Callable): + result = [] + thread = threading.Thread(target=self._runCode, args=(code, result)) + thread.start() + thread.join() + return result[0] diff --git a/python/semantic_kernel/text/text_chunker.py b/python/semantic_kernel/text/text_chunker.py index 2ea30d9e7454..f0197649d4e8 100644 --- a/python/semantic_kernel/text/text_chunker.py +++ b/python/semantic_kernel/text/text_chunker.py @@ -124,7 +124,7 @@ def split_markdown_paragraph( def _split_text_paragraph( - text: List[str], max_tokens: int, token_counter: Callable + text: List[str], max_tokens: int, token_counter: Callable = _token_counter ) -> List[str]: """ Split text into paragraphs. 
@@ -176,7 +176,10 @@ def _split_text_paragraph( def _split_markdown_lines( - text: str, max_token_per_line: int, trim: bool, token_counter: Callable + text: str, + max_token_per_line: int, + trim: bool, + token_counter: Callable = _token_counter, ) -> List[str]: """ Split markdown into lines. @@ -192,7 +195,10 @@ def _split_markdown_lines( def _split_text_lines( - text: str, max_token_per_line: int, trim: bool, token_counter: Callable + text: str, + max_token_per_line: int, + trim: bool, + token_counter: Callable = _token_counter, ) -> List[str]: """ Split text into lines. @@ -212,7 +218,7 @@ def _split_str_lines( max_tokens: int, separators: List[List[str]], trim: bool, - token_counter: Callable, + token_counter: Callable = _token_counter, ) -> List[str]: if not text: return [] @@ -248,7 +254,7 @@ def _split_str( max_tokens: int, separators: List[str], trim: bool, - token_counter: Callable, + token_counter: Callable = _token_counter, ) -> Tuple[List[str], bool]: """ Split text into lines. @@ -309,7 +315,7 @@ def _split_list( max_tokens: int, separators: List[str], trim: bool, - token_counter: Callable, + token_counter: Callable = _token_counter, ) -> Tuple[List[str], bool]: """ Split list of string into lines. 
diff --git a/python/tests/integration/completions/test_azure_oai_chat_service.py b/python/tests/integration/completions/test_azure_oai_chat_service.py index 60dec17faf78..3e93161efdd5 100644 --- a/python/tests/integration/completions/test_azure_oai_chat_service.py +++ b/python/tests/integration/completions/test_azure_oai_chat_service.py @@ -45,3 +45,46 @@ async def test_azure_e2e_chat_completion_with_skill( "human" in output or "Human" in output or "preserve" in output ) assert len(output) < 100 + + +@pytest.mark.asyncio +async def test_oai_chat_stream_service_with_skills( + setup_tldr_function_for_oai_models, get_aoai_config +): + kernel, sk_prompt, text_to_summarize = setup_tldr_function_for_oai_models + + _, api_key, endpoint = get_aoai_config + + if "Python_Integration_Tests" in os.environ: + deployment_name = os.environ["AzureOpenAIChat__DeploymentName"] + else: + deployment_name = "gpt-35-turbo" + + print("* Service: Azure OpenAI Chat Completion") + print(f"* Endpoint: {endpoint}") + print(f"* Deployment: {deployment_name}") + + # Configure LLM service + kernel.add_chat_service( + "chat_completion", + sk_oai.AzureChatCompletion(deployment_name, endpoint, api_key), + ) + + # Create the semantic function + tldr_function = kernel.create_semantic_function( + sk_prompt, max_tokens=200, temperature=0, top_p=0.5 + ) + + result = [] + async for message in kernel.run_stream_async( + tldr_function, input_str=text_to_summarize + ): + result.append(message) + output = "".join(result).strip() + + print(f"TLDR using input string: '{output}'") + assert len(result) > 1 + assert "First Law" not in output and ( + "human" in output or "Human" in output or "preserve" in output + ) + assert len(output) < 100 diff --git a/python/tests/integration/completions/test_azure_oai_text_service.py b/python/tests/integration/completions/test_azure_oai_text_service.py index b41abe6f2a19..ced9bf629df1 100644 --- a/python/tests/integration/completions/test_azure_oai_text_service.py +++ 
b/python/tests/integration/completions/test_azure_oai_text_service.py @@ -45,3 +45,46 @@ async def test_azure_e2e_text_completion_with_skill( "human" in output or "Human" in output or "preserve" in output ) assert len(output) < 100 + + +@pytest.mark.asyncio +async def test_oai_text_stream_completion_with_skills( + setup_tldr_function_for_oai_models, get_aoai_config +): + kernel, sk_prompt, text_to_summarize = setup_tldr_function_for_oai_models + + _, api_key, endpoint = get_aoai_config + + if "Python_Integration_Tests" in os.environ: + deployment_name = os.environ["AzureOpenAI__DeploymentName"] + else: + deployment_name = "text-davinci-003" + + print("* Service: Azure OpenAI Text Completion") + print(f"* Endpoint: {endpoint}") + print(f"* Deployment: {deployment_name}") + + # Configure LLM service + kernel.add_text_completion_service( + "text_completion", + sk_oai.AzureTextCompletion(deployment_name, endpoint, api_key), + ) + + # Create the semantic function + tldr_function = kernel.create_semantic_function( + sk_prompt, max_tokens=200, temperature=0, top_p=0.5 + ) + + result = [] + async for message in kernel.run_stream_async( + tldr_function, input_str=text_to_summarize + ): + result.append(message) + output = "".join(result).strip() + + print(f"TLDR using input string: '{output}'") + assert len(result) > 1 + assert "First Law" not in output and ( + "human" in output or "Human" in output or "preserve" in output + ) + assert len(output) < 100 diff --git a/python/tests/integration/connectors/memory/test_azure_cognitive_search.py b/python/tests/integration/connectors/memory/test_azure_cognitive_search.py new file mode 100644 index 000000000000..cb05dc189fcc --- /dev/null +++ b/python/tests/integration/connectors/memory/test_azure_cognitive_search.py @@ -0,0 +1,109 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import time +from random import randint + +import numpy as np +import pytest + +from semantic_kernel.connectors.memory.azure_cognitive_search.azure_cognitive_search_memory_store import ( + AzureCognitiveSearchMemoryStore, +) +from semantic_kernel.memory.memory_record import MemoryRecord + +try: + azure_cognitive_search_installed = True +except ImportError: + azure_cognitive_search_installed = False + +pytestmark = pytest.mark.skipif( + not azure_cognitive_search_installed, + reason="Azure Cognitive Search is not installed", +) + + +@pytest.fixture +def memory_store(): + yield AzureCognitiveSearchMemoryStore(vector_size=4) + + +def test_constructor(): + test_endpoint = "https://test-endpoint.search.windows.net" + memory = AzureCognitiveSearchMemoryStore(test_endpoint) + assert memory._search_index_client is not None + + +@pytest.mark.asyncio +async def test_collections(memory_store): + n = randint(1000, 9999) + collection = f"int-tests-{n}" + await memory_store.create_collection_async(collection) + time.sleep(1) + try: + assert await memory_store.does_collection_exist_async(collection) + except: + await memory_store.delete_collection_async(collection) + raise + + await memory_store.delete_collection_async(collection) + time.sleep(1) + assert not await memory_store.does_collection_exist_async(collection) + + +@pytest.mark.asyncio +async def test_upsert(memory_store): + n = randint(1000, 9999) + collection = f"int-tests-{n}" + await memory_store.create_collection_async(collection) + time.sleep(1) + try: + assert await memory_store.does_collection_exist_async(collection) + rec = MemoryRecord( + is_reference=False, + external_source_name=None, + id=None, + description="some description", + text="some text", + additional_metadata=None, + embedding=np.array([0.2, 0.1, 0.2, 0.7]), + ) + await memory_store.upsert_async(collection, rec) + time.sleep(1) + result = await memory_store.get_async(collection, rec._id) + assert result._id == rec._id + assert result._text == 
rec._text + except: + await memory_store.delete_collection_async(collection) + raise + + await memory_store.delete_collection_async(collection) + + +@pytest.mark.asyncio +async def test_search(memory_store): + n = randint(1000, 9999) + collection = f"int-tests-{n}" + await memory_store.create_collection_async(collection) + time.sleep(1) + try: + assert await memory_store.does_collection_exist_async(collection) + rec = MemoryRecord( + is_reference=False, + external_source_name=None, + id=None, + description="some description", + text="some text", + additional_metadata=None, + embedding=np.array([0.1, 0.2, 0.3, 0.4]), + ) + await memory_store.upsert_async(collection, rec) + time.sleep(1) + result = await memory_store.get_nearest_match_async( + collection, np.array([0.1, 0.2, 0.3, 0.38]) + ) + assert result[0]._id == rec._id + except: + await memory_store.delete_collection_async(collection) + raise + + await memory_store.delete_collection_async(collection) diff --git a/python/tests/integration/connectors/memory/test_milvus.py b/python/tests/integration/connectors/memory/test_milvus.py new file mode 100644 index 000000000000..a55f6e6bee47 --- /dev/null +++ b/python/tests/integration/connectors/memory/test_milvus.py @@ -0,0 +1,240 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import platform + +import numpy as np +import pytest + +from semantic_kernel.connectors.memory.milvus import MilvusMemoryStore +from semantic_kernel.memory.memory_record import MemoryRecord + +try: + from milvus import default_server # noqa: F401 + + milvus_installed = True +except ImportError: + milvus_installed = False + +pytestmark = pytest.mark.skipif( + not milvus_installed, reason="local milvus is not installed" +) + +pytestmark = pytest.mark.skipif( + platform.system() == "Windows", + reason="local milvus is not officially supported on Windows", +) + + +@pytest.fixture(scope="module") +def setup_milvus(): + default_server.cleanup() + default_server.start() + host = "http://127.0.0.1:" + str(default_server.listen_port) + port = None + yield host, port + default_server.stop() + default_server.cleanup() + + +@pytest.fixture +def memory_record1(): + return MemoryRecord( + id="test_id1", + text="sample text1", + is_reference=False, + embedding=np.array([0.5, 0.5]), + description="description", + external_source_name="external source", + additional_metadata="additional metadata", + timestamp="timestamp", + ) + + +@pytest.fixture +def memory_record2(): + return MemoryRecord( + id="test_id2", + text="sample text2", + is_reference=False, + embedding=np.array([0.25, 0.75]), + description="description", + external_source_name="external source", + additional_metadata="additional metadata", + timestamp="timestamp", + ) + + +@pytest.mark.asyncio +async def test_create_and_get_collection_async(setup_milvus): + URI, TOKEN = setup_milvus + memory = MilvusMemoryStore(uri=URI, token=TOKEN) + await memory.delete_collection_async(all=True) + await memory.create_collection_async("test_collection", 2) + result = await memory.get_collections_async() + assert result == ["test_collection"] + + +@pytest.mark.asyncio +async def test_get_collections_async(setup_milvus): + URI, TOKEN = setup_milvus + memory = MilvusMemoryStore(uri=URI, token=TOKEN) + await 
memory.delete_collection_async(all=True) + await memory.create_collection_async("test_collection1", 2) + await memory.create_collection_async("test_collection2", 2) + await memory.create_collection_async("test_collection3", 2) + result = await memory.get_collections_async() + assert len(result) == 3 + + +@pytest.mark.asyncio +async def test_delete_collection_async(setup_milvus): + URI, TOKEN = setup_milvus + memory = MilvusMemoryStore(uri=URI, token=TOKEN) + await memory.delete_collection_async(all=True) + await memory.create_collection_async("test_collection", 2) + await memory.delete_collection_async("test_collection", 2) + result = await memory.get_collections_async() + assert len(result) == 0 + + await memory.create_collection_async("test_collection", 2) + await memory.delete_collection_async("TEST_COLLECTION", 2) + result = await memory.get_collections_async() + assert len(result) == 0 + + +@pytest.mark.asyncio +async def test_does_collection_exist_async(setup_milvus): + URI, TOKEN = setup_milvus + memory = MilvusMemoryStore(uri=URI, token=TOKEN) + await memory.delete_collection_async(all=True) + await memory.create_collection_async("test_collection", 2) + result = await memory.does_collection_exist_async("test_collection") + assert result is True + + result = await memory.does_collection_exist_async("TEST_COLLECTION") + assert result is False + + +@pytest.mark.asyncio +async def test_upsert_and_get_async(memory_record1, setup_milvus): + URI, TOKEN = setup_milvus + memory = MilvusMemoryStore(uri=URI, token=TOKEN) + await memory.delete_collection_async(all=True) + + await memory.create_collection_async("test_collection", 2) + await memory.upsert_async("test_collection", memory_record1) + + result = await memory.get_async("test_collection", "test_id1", True) + assert result._id == "test_id1" + assert result._text == "sample text1" + assert result._is_reference is False + assert np.array_equal(result.embedding, np.array([0.5, 0.5])) + assert result._description 
== "description" + assert result._external_source_name == "external source" + assert result._additional_metadata == "additional metadata" + assert result._timestamp == "timestamp" + + +@pytest.mark.asyncio +async def test_upsert_and_get_async_with_no_embedding(memory_record1, setup_milvus): + URI, TOKEN = setup_milvus + memory = MilvusMemoryStore(uri=URI, token=TOKEN) + await memory.delete_collection_async(all=True) + await memory.create_collection_async("test_collection", 2) + + await memory.upsert_async("test_collection", memory_record1) + + result = await memory.get_async("test_collection", "test_id1", False) + assert result._id == "test_id1" + assert result._text == "sample text1" + assert result._is_reference is False + assert result.embedding is None + assert result._description == "description" + assert result._external_source_name == "external source" + assert result._additional_metadata == "additional metadata" + assert result._timestamp == "timestamp" + + +@pytest.mark.asyncio +async def test_upsert_and_get_batch_async(memory_record1, memory_record2, setup_milvus): + URI, TOKEN = setup_milvus + memory = MilvusMemoryStore(uri=URI, token=TOKEN) + await memory.delete_collection_async(all=True) + await memory.create_collection_async("test_collection", 2) + + await memory.upsert_batch_async("test_collection", [memory_record1, memory_record2]) + + result = await memory.get_batch_async( + "test_collection", ["test_id1", "test_id2"], True + ) + assert len(result) == 2 + assert result[0]._id == "test_id1" + assert result[0]._text == "sample text1" + assert result[0]._is_reference is False + assert np.array_equal(result[0].embedding, np.array([0.5, 0.5])) + assert result[0]._description == "description" + assert result[0]._external_source_name == "external source" + assert result[0]._additional_metadata == "additional metadata" + assert result[0]._timestamp == "timestamp" + + +@pytest.mark.asyncio +async def test_remove_async(memory_record1, setup_milvus): + URI, 
TOKEN = setup_milvus + memory = MilvusMemoryStore(uri=URI, token=TOKEN) + await memory.delete_collection_async(all=True) + await memory.create_collection_async("test_collection", 2) + + await memory.upsert_async("test_collection", memory_record1) + await memory.remove_async("test_collection", "test_id1") + + # memory.get_async should raise Exception if record is not found + with pytest.raises(Exception): + await memory.get_async("test_collection", "test_id1", True) + + +@pytest.mark.asyncio +async def test_remove_batch_async(memory_record1, memory_record2, setup_milvus): + URI, TOKEN = setup_milvus + memory = MilvusMemoryStore(uri=URI, token=TOKEN) + await memory.delete_collection_async(all=True) + await memory.create_collection_async("test_collection", 2) + + await memory.upsert_batch_async("test_collection", [memory_record1, memory_record2]) + await memory.remove_batch_async("test_collection", ["test_id1", "test_id2"]) + + result = await memory.get_batch_async( + "test_collection", ["test_id1", "test_id2"], True + ) + assert result == [] + + +@pytest.mark.asyncio +async def test_get_nearest_matches_async(memory_record1, memory_record2, setup_milvus): + URI, TOKEN = setup_milvus + memory = MilvusMemoryStore(uri=URI, token=TOKEN) + await memory.delete_collection_async(all=True) + await memory.create_collection_async("test_collection", 2) + await memory.upsert_batch_async("test_collection", [memory_record1, memory_record2]) + results = await memory.get_nearest_matches_async( + "test_collection", np.array([0.5, 0.5]), limit=2 + ) + assert len(results) == 2 + assert isinstance(results[0][0], MemoryRecord) + assert results[0][1] == pytest.approx(0.5, abs=1e-5) + + +@pytest.mark.asyncio +async def test_get_nearest_match_async(memory_record1, memory_record2, setup_milvus): + URI, TOKEN = setup_milvus + memory = MilvusMemoryStore(uri=URI, token=TOKEN) + await memory.delete_collection_async(all=True) + await memory.create_collection_async("test_collection", 2) + await 
memory.upsert_batch_async("test_collection", [memory_record1, memory_record2]) + + result = await memory.get_nearest_match_async( + "test_collection", np.array([0.5, 0.5]) + ) + assert len(result) == 2 + assert isinstance(result[0], MemoryRecord) + assert result[1] == pytest.approx(0.5, abs=1e-5) diff --git a/python/tests/integration/connectors/memory/test_pinecone.py b/python/tests/integration/connectors/memory/test_pinecone.py index f73a91617aea..ee45aacac00a 100644 --- a/python/tests/integration/connectors/memory/test_pinecone.py +++ b/python/tests/integration/connectors/memory/test_pinecone.py @@ -1,6 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. import os +import time import numpy as np import pytest @@ -21,6 +22,24 @@ ) +async def retry(func, retries=5): + for i in range(retries): + try: + return await func() + except pinecone.core.client.exceptions.ForbiddenException as e: + print(e) + time.sleep(i * 2) + except pinecone.core.client.exceptions.ServiceException as e: + print(e) + time.sleep(i * 2) + + +@pytest.fixture(autouse=True, scope="module") +def slow_down_tests(): + yield + time.sleep(1) + + @pytest.fixture(scope="session") def get_pinecone_config(): if "Python_Integration_Tests" in os.environ: @@ -86,8 +105,8 @@ async def test_create_and_get_collection_async(get_pinecone_config): api_key, environment = get_pinecone_config memory = PineconeMemoryStore(api_key, environment, 2) - await memory.create_collection_async("test-collection") - result = await memory.describe_collection_async("test-collection") + await retry(lambda: memory.create_collection_async("test-collection")) + result = await retry(lambda: memory.describe_collection_async("test-collection")) assert result is not None assert result.name == "test-collection" @@ -97,8 +116,8 @@ async def test_get_collections_async(get_pinecone_config): api_key, environment = get_pinecone_config memory = PineconeMemoryStore(api_key, environment, 2) - await 
memory.create_collection_async("test-collection", 2) - result = await memory.get_collections_async() + await retry(lambda: memory.create_collection_async("test-collection", 2)) + result = await retry(lambda: memory.get_collections_async()) assert "test-collection" in result @@ -107,9 +126,9 @@ async def test_delete_collection_async(get_pinecone_config): api_key, environment = get_pinecone_config memory = PineconeMemoryStore(api_key, environment, 2) - await memory.create_collection_async("test-collection") - await memory.delete_collection_async("test-collection") - result = await memory.get_collections_async() + await retry(lambda: memory.create_collection_async("test-collection")) + await retry(lambda: memory.delete_collection_async("test-collection")) + result = await retry(lambda: memory.get_collections_async()) assert "test-collection" not in result @@ -118,8 +137,8 @@ async def test_does_collection_exist_async(get_pinecone_config): api_key, environment = get_pinecone_config memory = PineconeMemoryStore(api_key, environment, 2) - await memory.create_collection_async("test-collection") - result = await memory.does_collection_exist_async("test-collection") + await retry(lambda: memory.create_collection_async("test-collection")) + result = await retry(lambda: memory.does_collection_exist_async("test-collection")) assert result is True @@ -128,13 +147,15 @@ async def test_upsert_async_and_get_async(get_pinecone_config, memory_record1): api_key, environment = get_pinecone_config memory = PineconeMemoryStore(api_key, environment, 2) - await memory.create_collection_async("test-collection") - await memory.upsert_async("test-collection", memory_record1) + await retry(lambda: memory.create_collection_async("test-collection")) + await retry(lambda: memory.upsert_async("test-collection", memory_record1)) - result = await memory.get_async( - "test-collection", - memory_record1._id, - with_embedding=True, + result = await retry( + lambda: memory.get_async( + 
"test-collection", + memory_record1._id, + with_embedding=True, + ) ) assert result is not None @@ -151,13 +172,19 @@ async def test_upsert_batch_async_and_get_batch_async( api_key, environment = get_pinecone_config memory = PineconeMemoryStore(api_key, environment, 2) - await memory.create_collection_async("test-collection") - await memory.upsert_batch_async("test-collection", [memory_record1, memory_record2]) + await retry(lambda: memory.create_collection_async("test-collection")) + await retry( + lambda: memory.upsert_batch_async( + "test-collection", [memory_record1, memory_record2] + ) + ) - results = await memory.get_batch_async( - "test-collection", - [memory_record1._id, memory_record2._id], - with_embeddings=True, + results = await retry( + lambda: memory.get_batch_async( + "test-collection", + [memory_record1._id, memory_record2._id], + with_embeddings=True, + ) ) assert len(results) >= 2 @@ -170,9 +197,9 @@ async def test_remove_async(get_pinecone_config, memory_record1): api_key, environment = get_pinecone_config memory = PineconeMemoryStore(api_key, environment, 2) - await memory.create_collection_async("test-collection") - await memory.upsert_async("test-collection", memory_record1) - await memory.remove_async("test-collection", memory_record1._id) + await retry(lambda: memory.create_collection_async("test-collection")) + await retry(lambda: memory.upsert_async("test-collection", memory_record1)) + await retry(lambda: memory.remove_async("test-collection", memory_record1._id)) with pytest.raises(KeyError): _ = await memory.get_async( @@ -185,10 +212,16 @@ async def test_remove_batch_async(get_pinecone_config, memory_record1, memory_re api_key, environment = get_pinecone_config memory = PineconeMemoryStore(api_key, environment, 2) - await memory.create_collection_async("test-collection") - await memory.upsert_batch_async("test-collection", [memory_record1, memory_record2]) - await memory.remove_batch_async( - "test-collection", [memory_record1._id, 
memory_record2._id] + await retry(lambda: memory.create_collection_async("test-collection")) + await retry( + lambda: memory.upsert_batch_async( + "test-collection", [memory_record1, memory_record2] + ) + ) + await retry( + lambda: memory.remove_batch_async( + "test-collection", [memory_record1._id, memory_record2._id] + ) ) with pytest.raises(KeyError): @@ -209,14 +242,23 @@ async def test_get_nearest_match_async( api_key, environment = get_pinecone_config memory = PineconeMemoryStore(api_key, environment, 2) - await memory.create_collection_async("test-collection") - await memory.upsert_batch_async("test-collection", [memory_record1, memory_record2]) + await retry(lambda: memory.create_collection_async("test-collection")) + await retry( + lambda: memory.upsert_batch_async( + "test-collection", [memory_record1, memory_record2] + ) + ) test_embedding = memory_record1.embedding test_embedding[0] = test_embedding[0] + 0.01 - result = await memory.get_nearest_match_async( - "test-collection", test_embedding, min_relevance_score=0.0, with_embedding=True + result = await retry( + lambda: memory.get_nearest_match_async( + "test-collection", + test_embedding, + min_relevance_score=0.0, + with_embedding=True, + ) ) assert result is not None @@ -230,20 +272,24 @@ async def test_get_nearest_matches_async( api_key, environment = get_pinecone_config memory = PineconeMemoryStore(api_key, environment, 2) - await memory.create_collection_async("test-collection") - await memory.upsert_batch_async( - "test-collection", [memory_record1, memory_record2, memory_record3] + await retry(lambda: memory.create_collection_async("test-collection")) + await retry( + lambda: memory.upsert_batch_async( + "test-collection", [memory_record1, memory_record2, memory_record3] + ) ) test_embedding = memory_record2.embedding test_embedding[0] = test_embedding[0] + 0.025 - result = await memory.get_nearest_matches_async( - "test-collection", - test_embedding, - limit=2, - min_relevance_score=0.0, - 
with_embeddings=True, + result = await retry( + lambda: memory.get_nearest_matches_async( + "test-collection", + test_embedding, + limit=2, + min_relevance_score=0.0, + with_embeddings=True, + ) ) assert len(result) == 2 diff --git a/python/tests/unit/ai/open_ai/services/test_azure_chat_completion.py b/python/tests/unit/ai/open_ai/services/test_azure_chat_completion.py index c982e8c8aaa2..66161da9569d 100644 --- a/python/tests/unit/ai/open_ai/services/test_azure_chat_completion.py +++ b/python/tests/unit/ai/open_ai/services/test_azure_chat_completion.py @@ -154,7 +154,7 @@ async def test_azure_chat_completion_call_with_parameters() -> None: frequency_penalty=complete_request_settings.frequency_penalty, n=complete_request_settings.number_of_responses, stream=False, - logit_bias=None, + logit_bias={}, ) diff --git a/python/tests/unit/ai/open_ai/services/test_azure_text_completion.py b/python/tests/unit/ai/open_ai/services/test_azure_text_completion.py index 9457c438b8b0..1fed027969bf 100644 --- a/python/tests/unit/ai/open_ai/services/test_azure_text_completion.py +++ b/python/tests/unit/ai/open_ai/services/test_azure_text_completion.py @@ -153,7 +153,7 @@ async def test_azure_text_completion_call_with_parameters() -> None: stop=None, n=complete_request_settings.number_of_responses, stream=False, - logit_bias=None, + logit_bias={}, ) diff --git a/python/tests/unit/planning/test_plan_creation.py b/python/tests/unit/planning/test_plan_creation.py new file mode 100644 index 000000000000..1fdc740dcbbd --- /dev/null +++ b/python/tests/unit/planning/test_plan_creation.py @@ -0,0 +1,192 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import semantic_kernel as sk +from semantic_kernel.core_skills.math_skill import MathSkill +from semantic_kernel.orchestration.context_variables import ContextVariables +from semantic_kernel.planning import Plan + + +def test_create_empty_plan(): + plan = Plan() + assert plan is not None + assert plan.name == "" + assert type(plan.state) is ContextVariables + assert plan.skill_name == "" + assert plan.description == "" + assert plan.function is None + assert type(plan.parameters) is ContextVariables + assert plan.is_semantic is None + assert plan.is_native is None + assert plan.request_settings is None + assert plan.has_next_step is False + assert plan.next_step_index == 0 + assert plan._steps == [] + + +def test_create_plan_with_name(): + plan = Plan(name="test") + assert plan is not None + assert plan.name == "test" + assert type(plan.state) is ContextVariables + assert plan.skill_name == "" + assert plan.description == "" + assert plan.function is None + assert type(plan.parameters) is ContextVariables + assert plan.is_semantic is None + assert plan.is_native is None + assert plan.request_settings is None + assert plan.has_next_step is False + assert plan.next_step_index == 0 + assert plan._steps == [] + + +def test_create_plan_with_name_and_description(): + plan = Plan(name="test", description="test description") + assert plan is not None + assert plan.name == "test" + assert type(plan.state) is ContextVariables + assert plan.skill_name == "" + assert plan.description == "test description" + assert plan.function is None + assert type(plan.parameters) is ContextVariables + assert plan.is_semantic is None + assert plan.is_native is None + assert plan.request_settings is None + assert plan.has_next_step is False + assert plan.next_step_index == 0 + assert plan._steps == [] + + +def test_create_plan_with_state_and_parameters(): + plan = Plan( + name="test", + state=ContextVariables(), + parameters={"test_param": "test_param_val"}, + ) + assert plan is not None + 
assert plan.name == "test" + assert plan.state["input"] == "" + assert plan.skill_name == "" + assert plan.description == "" + assert plan.function is None + assert plan.parameters["test_param"] == "test_param_val" + assert plan.is_semantic is None + assert plan.is_native is None + assert plan.request_settings is None + assert plan.has_next_step is False + assert plan.next_step_index == 0 + assert plan._steps == [] + + +def test_create_plan_with_name_and_function(): + # create a kernel + kernel = sk.Kernel() + + # import test (math) skill + skill = MathSkill() + skill_config_dict = kernel.import_skill(skill, "math") + + test_function = skill_config_dict["Add"] + + plan = Plan(name="test", function=test_function) + assert plan is not None + assert plan.name == "Add" + assert type(plan.state) is ContextVariables + assert plan.skill_name == "math" + assert plan.description == test_function.description + assert plan.function is test_function + assert type(plan.parameters) is ContextVariables + assert plan.is_semantic is test_function.is_semantic + assert plan.is_native is test_function.is_native + assert plan.request_settings == test_function.request_settings + assert plan.has_next_step is False + assert plan.next_step_index == 0 + assert plan._steps == [] + + +def test_create_multistep_plan_with_functions(): + # create a kernel + kernel = sk.Kernel() + + # import test (math) skill + skill = MathSkill() + skill_config_dict = kernel.import_skill(skill, "math") + + test_function1 = skill_config_dict["Add"] + test_function2 = skill_config_dict["Subtract"] + + plan = Plan(name="multistep_test") + plan.add_steps([test_function1, test_function2]) + + assert plan is not None + assert plan.name == "multistep_test" + assert type(plan.state) is ContextVariables + assert plan.skill_name == "" + assert plan.description == "" + assert plan.function is None + assert type(plan.parameters) is ContextVariables + assert plan.is_semantic is None + assert plan.is_native is None + assert 
plan.request_settings is None + assert plan.has_next_step is True + assert plan.next_step_index == 0 + assert len(plan._steps) == 2 + + +def test_create_multistep_plan_with_plans(): + # create a kernel + kernel = sk.Kernel() + + # import test (math) skill + skill = MathSkill() + skill_config_dict = kernel.import_skill(skill, "math") + + test_function1 = skill_config_dict["Add"] + test_function2 = skill_config_dict["Subtract"] + + plan = Plan(name="multistep_test") + plan_step1 = Plan(name="step1", function=test_function1) + plan_step2 = Plan(name="step2", function=test_function2) + plan.add_steps([plan_step1, plan_step2]) + + assert plan is not None + assert plan.name == "multistep_test" + assert type(plan.state) is ContextVariables + assert plan.skill_name == "" + assert plan.description == "" + assert plan.function is None + assert type(plan.parameters) is ContextVariables + assert plan.is_semantic is None + assert plan.is_native is None + assert plan.request_settings is None + assert plan.has_next_step is True + assert plan.next_step_index == 0 + assert len(plan._steps) == 2 + + +def test_add_step_to_plan(): + # create a kernel + kernel = sk.Kernel() + + # import test (math) skill + skill = MathSkill() + skill_config_dict = kernel.import_skill(skill, "math") + + test_function1 = skill_config_dict["Add"] + test_function2 = skill_config_dict["Subtract"] + + plan = Plan(name="multistep_test", function=test_function1) + plan.add_steps([test_function2]) + assert plan is not None + assert plan.name == "Add" + assert type(plan.state) is ContextVariables + assert plan.skill_name == "math" + assert plan.description == test_function1.description + assert plan.function is test_function1 + assert type(plan.parameters) is ContextVariables + assert plan.is_semantic is test_function1.is_semantic + assert plan.is_native is test_function1.is_native + assert plan.request_settings == test_function1.request_settings + assert plan.has_next_step is True + assert plan.next_step_index 
== 0 + assert len(plan._steps) == 1 diff --git a/python/tests/unit/planning/test_plan_execution.py b/python/tests/unit/planning/test_plan_execution.py new file mode 100644 index 000000000000..3e993d656570 --- /dev/null +++ b/python/tests/unit/planning/test_plan_execution.py @@ -0,0 +1,182 @@ +# Copyright (c) Microsoft. All rights reserved. + +import pytest + +import semantic_kernel as sk +from semantic_kernel.core_skills.text_skill import TextSkill +from semantic_kernel.planning import Plan + + +def test_invoke_empty_plan(): + plan = Plan() + result = plan.invoke() + assert result.result == "" + + +@pytest.mark.asyncio +async def test_invoke_empty_plan_async(): + plan = Plan() + result = await plan.invoke_async() + assert result.result == "" + + +def test_invoke_plan_constructed_with_function(): + # create a kernel + kernel = sk.Kernel() + + # import test (text) skill + skill = TextSkill() + skill_config_dict = kernel.import_skill(skill, "text") + test_function = skill_config_dict["uppercase"] + + # setup context + context = kernel.create_new_context() + context["input"] = "hello world " + + plan = Plan(name="test", function=test_function) + result = plan.invoke(context=context) + assert result.result == "HELLO WORLD " + + +@pytest.mark.asyncio +async def test_invoke_plan_constructed_with_function_async(): + # create a kernel + kernel = sk.Kernel() + + # import test (text) skill + skill = TextSkill() + skill_config_dict = kernel.import_skill(skill, "text") + test_function = skill_config_dict["uppercase"] + + # setup context + context = kernel.create_new_context() + context["input"] = "hello world " + + plan = Plan(name="test", function=test_function) + result = await plan.invoke_async(context=context) + assert result.result == "HELLO WORLD " + + +def test_invoke_empty_plan_with_added_function_step(): + # create a kernel + kernel = sk.Kernel() + + # import test (text) skill + skill = TextSkill() + skill_config_dict = kernel.import_skill(skill, "text") + 
test_function = skill_config_dict["uppercase"] + + # setup context + context = kernel.create_new_context() + context["input"] = "hello world " + + plan = Plan(name="test") + plan.add_steps([test_function]) + result = plan.invoke(context=context) + assert result.result == "HELLO WORLD " + + +@pytest.mark.asyncio +async def test_invoke_empty_plan_with_added_function_step_async(): + # create a kernel + kernel = sk.Kernel() + + # import test (text) skill + skill = TextSkill() + skill_config_dict = kernel.import_skill(skill, "text") + test_function = skill_config_dict["uppercase"] + + # setup context + context = kernel.create_new_context() + context["input"] = "hello world " + + plan = Plan(name="test") + plan.add_steps([test_function]) + result = await plan.invoke_async(context=context) + assert result.result == "HELLO WORLD " + + +def test_invoke_empty_plan_with_added_plan_step(): + # create a kernel + kernel = sk.Kernel() + + # import test (text) skill + skill = TextSkill() + skill_config_dict = kernel.import_skill(skill, "text") + test_function = skill_config_dict["uppercase"] + + # setup context + context = kernel.create_new_context() + context["input"] = "hello world " + + plan = Plan(name="test") + new_step = Plan(name="test", function=test_function) + plan.add_steps([new_step]) + result = plan.invoke(context=context) + assert result.result == "HELLO WORLD " + + +@pytest.mark.asyncio +async def test_invoke_empty_plan_with_added_plan_step_async(): + # create a kernel + kernel = sk.Kernel() + + # import test (text) skill + skill = TextSkill() + skill_config_dict = kernel.import_skill(skill, "text") + test_function = skill_config_dict["uppercase"] + + # setup context + context = kernel.create_new_context() + context["input"] = "hello world " + + plan = Plan(name="test") + new_step = Plan(name="test", function=test_function) + plan.add_steps([new_step]) + result = await plan.invoke_async(context=context) + assert result.result == "HELLO WORLD " + + +def 
test_invoke_multi_step_plan(): + # create a kernel + kernel = sk.Kernel() + + # import test (text) skill + skill = TextSkill() + skill_config_dict = kernel.import_skill(skill, "text") + test_function = skill_config_dict["uppercase"] + test_function2 = skill_config_dict["trim_end"] + + # setup context + context = kernel.create_new_context() + context["input"] = "hello world " + + plan = Plan(name="test") + new_step = Plan(name="test", function=test_function) + new_step2 = Plan(name="test", function=test_function2) + plan.add_steps([new_step, new_step2]) + result = plan.invoke(context=context) + assert result.result == "HELLO WORLD" + + +@pytest.mark.asyncio +async def test_invoke_multi_step_plan_async(): + # create a kernel + kernel = sk.Kernel() + + # import test (text) skill + skill = TextSkill() + skill_config_dict = kernel.import_skill(skill, "text") + test_function = skill_config_dict["uppercase"] + test_function2 = skill_config_dict["trim_end"] + + # setup context + context = kernel.create_new_context() + context["input"] = "hello world " + + plan = Plan(name="test") + new_step = Plan(name="test", function=test_function) + new_step2 = Plan(name="test", function=test_function2) + plan.add_steps([new_step, new_step2]) + result = await plan.invoke_async(context=context) + assert result.result == "HELLO WORLD" diff --git a/samples/apps/copilot-chat-app/webapi/CopilotChat/Controllers/ChatController.cs b/samples/apps/copilot-chat-app/webapi/CopilotChat/Controllers/ChatController.cs index f13b4aaff4f8..ae4d795f2e21 100644 --- a/samples/apps/copilot-chat-app/webapi/CopilotChat/Controllers/ChatController.cs +++ b/samples/apps/copilot-chat-app/webapi/CopilotChat/Controllers/ChatController.cs @@ -16,7 +16,6 @@ using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.AI; using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.Reliability; using Microsoft.SemanticKernel.SkillDefinition; using Microsoft.SemanticKernel.Skills.MsGraph; using 
Microsoft.SemanticKernel.Skills.MsGraph.Connectors; @@ -155,18 +154,8 @@ private async Task RegisterPlannerSkillsAsync(CopilotChatPlanner planner, OpenAp // Klarna Shopping if (openApiSkillsAuthHeaders.KlarnaAuthentication != null) { - // Register the Klarna shopping ChatGPT plugin with the planner's kernel. - using DefaultHttpRetryHandler retryHandler = new(new HttpRetryConfig(), this._logger) - { - InnerHandler = new HttpClientHandler() { CheckCertificateRevocationList = true } - }; - using HttpClient importHttpClient = new(retryHandler, false); - importHttpClient.DefaultRequestHeaders.Add("User-Agent", "Microsoft.CopilotChat"); - await planner.Kernel.ImportChatGptPluginSkillFromUrlAsync("KlarnaShoppingSkill", new Uri("https://www.klarna.com/.well-known/ai-plugin.json"), - new OpenApiSkillExecutionParameters - { - HttpClient = importHttpClient, - }); + // Register the Klarna shopping ChatGPT plugin with the planner's kernel. There is no authentication required for this plugin. + await planner.Kernel.ImportChatGptPluginSkillFromUrlAsync("KlarnaShoppingSkill", new Uri("https://www.klarna.com/.well-known/ai-plugin.json"), new OpenApiSkillExecutionParameters()); } // GitHub diff --git a/samples/apps/copilot-chat-app/webapp/src/components/open-api-plugins/PluginGallery.tsx b/samples/apps/copilot-chat-app/webapp/src/components/open-api-plugins/PluginGallery.tsx index 46c7d21e929a..e2b34617fdb4 100644 --- a/samples/apps/copilot-chat-app/webapp/src/components/open-api-plugins/PluginGallery.tsx +++ b/samples/apps/copilot-chat-app/webapp/src/components/open-api-plugins/PluginGallery.tsx @@ -61,7 +61,7 @@ export const PluginGallery: React.FC = () => { return ( { setOpen(data.open); }}> -